/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2017 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "ipa-chkp.h"
#include "tree-cfg.h"
#include "fold-const-call.h"

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we have already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991, for already optimized out comdat virtual
     functions it may be tempting to not necessarily give up because the
     copy will be output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current
   function is in SSA form, an SSA name is created.  Otherwise a
   temporary register is made.  */

static tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (VAR_P (base))
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
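/* An illustrative sketch (editor's example, not part of the folding
   machinery): given

     static const int answer = 42;
     static int a[4];

   get_symbol_constant_value on ANSWER yields the INTEGER_CST 42, and
   canonicalize_constructor_val turns an initializer that fold has
   rewritten into the POINTER_PLUS_EXPR form "&a p+ 4" back into the
   address &MEM[(int *)&a + 4B], which is_gimple_min_invariant
   accepts.  */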
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
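/* For example (a sketch): REALPART_EXPR <__complex__ (1.0, 2.0)> folds
   to the constant 1.0, and with "static const int a[2] = { 3, 4 };" a
   non-lhs reference a[1] folds to 4 via fold_const_aggregate_ref.  */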
/* Attempt to fold an assignment statement pointed to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *> targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        location_t loc = gimple_location_safe (stmt);
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

            if (result)
              {
                /* Strip away useless type conversions.  Both the
                   NON_LVALUE_EXPR that may have been added by fold, and
                   "useless" type conversions that might now be apparent
                   due to propagation.  */
                STRIP_USELESS_TYPE_CONVERSION (result);

                if (result != rhs && valid_gimple_rhs_p (result))
                  return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has an lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First, iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second, iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
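/* A sketch of the virtual operand rewiring done above (illustrative
   GIMPLE, assuming a single store in the replacement sequence):
   replacing

     # .MEM_3 = VDEF <.MEM_2>
     memcpy (dst_4, src_5, 8);

   with a load/store pair yields

     # VUSE <.MEM_2>
     _6 = MEM[(T *)src_5];
     # .MEM_3 = VDEF <.MEM_2>
     MEM[(T *)dst_4] = _6;

   i.e. the first VUSE and the last VDEF of the new sequence reuse the
   virtual operands of the replaced statement.  */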
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  if (gimple_vuse (stmt))
    gimple_set_vuse (repl, gimple_vuse (stmt));
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
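/* E.g. (a sketch): with "struct S { int a[4]; } x;" the predicate is
   true for x, x.a and x.a[3], but false for *p or MEM[p_1] where the
   base is a dereferenced pointer rather than a declaration.  */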
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST; additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds to be lost (if we
         copy pointers as wide integers) and may also result in huge
         function size because of inlined bounds copies.  Thus don't
         inline for functions we want to instrument.  */
      if (flag_check_pointer_bounds
          && chkp_instrumentable_p (cfun->decl)
          /* Even if data may contain pointers we can inline if we copy
             less than a pointer size.  */
          && (!tree_fits_uhwi_p (len)
              || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
        return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores, inline it that way.  Currently, efficiently
         means that we can load all the memory into a single integer
         register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ??? Don't transform copies from strings with known length;
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (pow2p_hwi (ilen))
            {
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && TYPE_MODE (type) != BLKmode
                  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
                      == ilen * 8)
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
                      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
                      || (optab_handler (movmisalign_optab, TYPE_MODE (type))
                          != CODE_FOR_nothing)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                           && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                     src_align)
                           && (optab_handler (movmisalign_optab,
                                              TYPE_MODE (type))
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          srcmem
                            = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
                                                          new_stmt);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
              HOST_WIDE_INT maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (tree_fits_uhwi_p (len))
                maxsize = tree_to_uhwi (len);
              else
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_overlap_p (src_offset, maxsize,
                                           dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  offset_int off = mem_ref_offset (src_base) + src_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  src_offset = off.to_shwi ();

                  off = mem_ref_offset (dest_base) + dest_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  dest_offset = off.to_shwi ();
                  if (ranges_overlap_p (src_offset, maxsize,
                                        dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias, optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      /* FIXME:
         This logic loses for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
         Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         will no longer have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (src, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (src, 0))
            src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
        }
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (dest, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (dest, 0))
            dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
        }
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          srctype = TREE_TYPE (srctype);
          STRIP_NOPS (src);
          src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
        }
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        {
          desttype = TREE_TYPE (desttype);
          STRIP_NOPS (dest);
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
        }
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (destvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
        destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (srcvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  srcvar, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
            }
          else
            srcvar = NULL_TREE;
        }
      else
        srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          STRIP_NOPS (src);
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          STRIP_NOPS (dest);
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
                                                   new_stmt);
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
        }
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
                        ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
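/* A sketch of the effect of the folder above (editor's example, not
   part of GCC): with a small power-of-two constant length

     __builtin_memcpy (d, s, 4);

   becomes a single load/store pair through a 32-bit integer type

     _1 = MEM[(unsigned int *)s_2];
     MEM[(unsigned int *)d_3] = _1;

   and for __builtin_mempcpy (ENDP == 1) the lhs is additionally
   assigned "d_3 p+ 4".  */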
/* Fold a function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the single-byte value C.  Return whether a
   simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
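/* For example (a sketch): "memset (&i, 0x2a, 4)" with a suitably
   aligned 4-byte int I becomes the single store

     MEM[(int *)&i] = 0x2a2a2a2a;

   where 0x2a2a2a2a is the byte value replicated by the shifts above.  */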
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
                  bool fuzzy, bool *flexp)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_range_strlen (TREE_OPERAND (aop0, 0),
                                     length, visited, type, fuzzy, flexp);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);

      if (!val && fuzzy)
        {
          if (TREE_CODE (arg) == ADDR_EXPR)
            return get_range_strlen (TREE_OPERAND (arg, 0), length,
                                     visited, type, fuzzy, flexp);

          if (TREE_CODE (arg) == COMPONENT_REF
              && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
            {
              /* Use the type of the member array to determine the upper
                 bound on the length of the array.  This may be overly
                 optimistic if the array itself isn't NUL-terminated and
                 the caller relies on the subsequent member to contain
                 the NUL.
                 Set *FLEXP to true if the array whose bound is being
                 used is at the end of a struct.  */
              if (array_at_struct_end_p (arg))
                *flexp = true;

              arg = TREE_OPERAND (arg, 1);
              val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
              if (!val || integer_zerop (val))
                return false;
              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 integer_one_node);
              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              *minlen = ssize_int (0);
            }
        }

      if (!val)
        return false;

      if (minlen
          && (!*minlen
              || (type > 0
                  && TREE_CODE (*minlen) == INTEGER_CST
                  && TREE_CODE (val) == INTEGER_CST
                  && tree_int_cst_lt (val, *minlen))))
        *minlen = val;

      if (*maxlen)
        {
          if (type > 0)
            {
              if (TREE_CODE (*maxlen) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*maxlen, val))
                *maxlen = val;
              return true;
            }
          else if (simple_cst_equal (val, *maxlen) != 1)
            return false;
        }

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
          tree op2 = gimple_assign_rhs2 (def_stmt);
          tree op3 = gimple_assign_rhs3 (def_stmt);
          return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
                 && get_range_strlen (op3, length, visited, type, fuzzy,
                                      flexp);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
              {
                if (fuzzy)
                  *maxlen = build_all_ones_cst (size_type_node);
                else
                  return false;
              }
          }
      }
      return true;

    default:
      return false;
    }
}

/* Determine the minimum and maximum value or string length that ARG
   refers to and store each in the first two elements of MINMAXLEN.
   For expressions that point to strings of unknown lengths that are
   character arrays, use the upper bound of the array as the maximum
   length.  For example, given an expression like 'x ? array : "xyz"'
   and array declared as 'char array[8]', MINMAXLEN[0] will be set
   to 3 and MINMAXLEN[1] to 7, the longest string that could be
   stored in array.
   Return true if the range of the string lengths has been obtained
   from the upper bound of an array at the end of a struct.  Such
   an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

bool
get_range_strlen (tree arg, tree minmaxlen[2])
{
  bitmap visited = NULL;

  minmaxlen[0] = NULL_TREE;
  minmaxlen[1] = NULL_TREE;

  bool flexarray = false;
  get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);

  if (visited)
    BITMAP_FREE (visited);

  return flexarray;
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len[2] = { NULL_TREE, NULL_TREE };

  bool dummy;
  if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
    len[1] = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len[1];
}
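/* For example (a sketch mirroring the comment above): with

     char array[8];
     const char *s = x ? array : "xyz";

   get_range_strlen (s, minmaxlen) sets MINMAXLEN to { 3, 7 }, while
   get_maxval_strlen (s, 0) returns NULL_TREE because with FUZZY clear
   the unknown length of ARRAY makes the walk fail.  */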
/* Fold a function call to builtin strcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
                            tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a function call to builtin strncpy with arguments DEST, SRC,
   and LEN.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
                             tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK, transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
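/* A sketch of the strcpy transformation above: when the source length
   is known,

     strcpy (d, "abc");

   is folded into

     memcpy (d, "abc", 4);

   which copies the terminating NUL as well.  */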
/* Fold a function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
        {
          replace_call_with_value (gsi, integer_zero_node);
          return true;
        }

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                              POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
        {
          gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}

/* Fold a function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);
  const char *p, *q;

  if (!gimple_call_lhs (stmt))
    return false;

  q = c_getstr (needle);
  if (q == NULL)
    return false;

  if ((p = c_getstr (haystack)))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
        {
          replace_call_with_value (gsi, integer_zero_node);
          return true;
        }

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
        = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
                               haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
        {
          tree c = build_int_cst (integer_type_node, q[0]);
          gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
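/* Sketches of the transformations above: "strchr (s, 0)" becomes

     _1 = strlen (s);
     lhs = s p+ _1;

   and "strstr (x, "c")" becomes "strchr (x, 'c')".  */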
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise replace the
   call (either with DST when SRC is the empty string, or with a
   strlen/memcpy pair when the source length is known) and return
   true.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ??? We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
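/* A sketch of the strcat split above: when strlen (SRC) is known,

     strcat (d, "ab");

   becomes

     _1 = strlen (d);
     _2 = d p+ _1;
     memcpy (_2, "ab", 3);

   copying the NUL terminator along with the two characters.  */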
/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;


  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
         transformation.  */
      if (!fn)
        return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && tree_fits_uhwi_p (src_len)
          && tree_fits_uhwi_p (len)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return false;

          gimple *repl = gimple_build_call (fn, 3, dest, src, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
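/* For example (a sketch): "strncat (d, "ab", 5)" with 5 >= strlen ("ab")
   folds to "strcat (d, "ab")", and the _chk variants fall back to their
   unchecked counterparts once SIZE is known to be -1 (unknown object
   size, so the overflow check can never fire).  */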
*/ 1836 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK); 1837 if (!fn) 1838 return false; 1839 1840 gimple *repl = gimple_build_call (fn, 3, dest, src, size); 1841 replace_call_with_call_and_fold (gsi, repl); 1842 return true; 1843 } 1844 return false; 1845 } 1846 1847 /* If __builtin_strncat_chk is used, assume strncat is available. */ 1848 fn = builtin_decl_explicit (BUILT_IN_STRNCAT); 1849 if (!fn) 1850 return false; 1851 1852 gimple *repl = gimple_build_call (fn, 3, dest, src, len); 1853 replace_call_with_call_and_fold (gsi, repl); 1854 return true; 1855 } 1856 1857 /* Build and append gimple statements to STMTS that would load the first 1858 character of the memory location identified by STR. LOC is the location 1859 of the statement. */ 1860 1861 static tree 1862 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts) 1863 { 1864 tree var; 1865 1866 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0); 1867 tree cst_uchar_ptr_node 1868 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true); 1869 tree off0 = build_int_cst (cst_uchar_ptr_node, 0); 1870 1871 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0); 1872 gassign *stmt = gimple_build_assign (NULL_TREE, temp); 1873 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt); 1874 1875 gimple_assign_set_lhs (stmt, var); 1876 gimple_seq_add_stmt_without_update (stmts, stmt); 1877 1878 return var; 1879 } 1880 1881 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI 1882 iterator. */ 1883 1884 static bool 1885 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi) 1886 { 1887 gimple *stmt = gsi_stmt (*gsi); 1888 tree callee = gimple_call_fndecl (stmt); 1889 enum built_in_function fcode = DECL_FUNCTION_CODE (callee); 1890 1891 tree type = integer_type_node; 1892 tree str1 = gimple_call_arg (stmt, 0); 1893 tree str2 = gimple_call_arg (stmt, 1); 1894 tree lhs = gimple_call_lhs (stmt); 1895 HOST_WIDE_INT length = -1; 1896 1897 /* Handle strncmp and strncasecmp functions. */ 1898 if (gimple_call_num_args (stmt) == 3) 1899 { 1900 tree len = gimple_call_arg (stmt, 2); 1901 if (tree_fits_uhwi_p (len)) 1902 length = tree_to_uhwi (len); 1903 } 1904 1905 /* If the LEN parameter is zero, return zero. */ 1906 if (length == 0) 1907 { 1908 replace_call_with_value (gsi, integer_zero_node); 1909 return true; 1910 } 1911 1912 /* If STR1 and STR2 are the same (and not volatile), return zero. */ 1913 if (operand_equal_p (str1, str2, 0)) 1914 { 1915 replace_call_with_value (gsi, integer_zero_node); 1916 return true; 1917 } 1918 1919 const char *p1 = c_getstr (str1); 1920 const char *p2 = c_getstr (str2); 1921 1922 /* For known strings, return an immediate value. */ 1923 if (p1 && p2) 1924 { 1925 int r = 0; 1926 bool known_result = false; 1927 1928 switch (fcode) 1929 { 1930 case BUILT_IN_STRCMP: 1931 { 1932 r = strcmp (p1, p2); 1933 known_result = true; 1934 break; 1935 } 1936 case BUILT_IN_STRNCMP: 1937 { 1938 if (length == -1) 1939 break; 1940 r = strncmp (p1, p2, length); 1941 known_result = true; 1942 break; 1943 } 1944 /* The only handleable situation is where the strings are equal (result 1945 0), which is already handled by the operand_equal_p case.
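(Presumably because for distinct constant strings the result of a case-insensitive comparison can depend on the target's run-time locale and character set.)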
*/ 1946 case BUILT_IN_STRCASECMP: 1947 break; 1948 case BUILT_IN_STRNCASECMP: 1949 { 1950 if (length == -1) 1951 break; 1952 r = strncmp (p1, p2, length); 1953 if (r == 0) 1954 known_result = true; 1955 break; 1956 } 1957 default: 1958 gcc_unreachable (); 1959 } 1960 1961 if (known_result) 1962 { 1963 replace_call_with_value (gsi, build_cmp_result (type, r)); 1964 return true; 1965 } 1966 } 1967 1968 bool nonzero_length = length >= 1 1969 || fcode == BUILT_IN_STRCMP 1970 || fcode == BUILT_IN_STRCASECMP; 1971 1972 location_t loc = gimple_location (stmt); 1973 1974 /* If the second arg is "", return *(const unsigned char*)arg1. */ 1975 if (p2 && *p2 == '\0' && nonzero_length) 1976 { 1977 gimple_seq stmts = NULL; 1978 tree var = gimple_load_first_char (loc, str1, &stmts); 1979 if (lhs) 1980 { 1981 stmt = gimple_build_assign (lhs, NOP_EXPR, var); 1982 gimple_seq_add_stmt_without_update (&stmts, stmt); 1983 } 1984 1985 gsi_replace_with_seq_vops (gsi, stmts); 1986 return true; 1987 } 1988 1989 /* If the first arg is "", return -*(const unsigned char*)arg2. */ 1990 if (p1 && *p1 == '\0' && nonzero_length) 1991 { 1992 gimple_seq stmts = NULL; 1993 tree var = gimple_load_first_char (loc, str2, &stmts); 1994 1995 if (lhs) 1996 { 1997 tree c = create_tmp_reg_or_ssa_name (integer_type_node); 1998 stmt = gimple_build_assign (c, NOP_EXPR, var); 1999 gimple_seq_add_stmt_without_update (&stmts, stmt); 2000 2001 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c); 2002 gimple_seq_add_stmt_without_update (&stmts, stmt); 2003 } 2004 2005 gsi_replace_with_seq_vops (gsi, stmts); 2006 return true; 2007 } 2008 2009 /* If the len parameter is one, return an expression corresponding to 2010 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */ 2011 if (fcode == BUILT_IN_STRNCMP && length == 1) 2012 { 2013 gimple_seq stmts = NULL; 2014 tree temp1 = gimple_load_first_char (loc, str1, &stmts); 2015 tree temp2 = gimple_load_first_char (loc, str2, &stmts); 2016 2017 if (lhs) 2018 { 2019 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node); 2020 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1); 2021 gimple_seq_add_stmt_without_update (&stmts, convert1); 2022 2023 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node); 2024 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2); 2025 gimple_seq_add_stmt_without_update (&stmts, convert2); 2026 2027 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2); 2028 gimple_seq_add_stmt_without_update (&stmts, stmt); 2029 } 2030 2031 gsi_replace_with_seq_vops (gsi, stmts); 2032 return true; 2033 } 2034 2035 return false; 2036 } 2037 2038 /* Fold a call to the memchr builtin pointed to by the GSI iterator. */ 2039 2040 static bool 2041 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi) 2042 { 2043 gimple *stmt = gsi_stmt (*gsi); 2044 tree lhs = gimple_call_lhs (stmt); 2045 tree arg1 = gimple_call_arg (stmt, 0); 2046 tree arg2 = gimple_call_arg (stmt, 1); 2047 tree len = gimple_call_arg (stmt, 2); 2048 2049 /* If the LEN parameter is zero, return zero.
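That is, memchr (s, c, 0) examines no bytes and folds to a null pointer constant.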
*/ 2050 if (integer_zerop (len)) 2051 { 2052 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0)); 2053 return true; 2054 } 2055 2056 char c; 2057 if (TREE_CODE (arg2) != INTEGER_CST 2058 || !tree_fits_uhwi_p (len) 2059 || !target_char_cst_p (arg2, &c)) 2060 return false; 2061 2062 unsigned HOST_WIDE_INT length = tree_to_uhwi (len); 2063 unsigned HOST_WIDE_INT string_length; 2064 const char *p1 = c_getstr (arg1, &string_length); 2065 2066 if (p1) 2067 { 2068 const char *r = (const char *)memchr (p1, c, MIN (length, string_length)); 2069 if (r == NULL) 2070 { 2071 if (length <= string_length) 2072 { 2073 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0)); 2074 return true; 2075 } 2076 } 2077 else 2078 { 2079 unsigned HOST_WIDE_INT offset = r - p1; 2080 gimple_seq stmts = NULL; 2081 if (lhs != NULL_TREE) 2082 { 2083 tree offset_cst = build_int_cst (TREE_TYPE (len), offset); 2084 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR, 2085 arg1, offset_cst); 2086 gimple_seq_add_stmt_without_update (&stmts, stmt); 2087 } 2088 else 2089 gimple_seq_add_stmt_without_update (&stmts, 2090 gimple_build_nop ()); 2091 2092 gsi_replace_with_seq_vops (gsi, stmts); 2093 return true; 2094 } 2095 } 2096 2097 return false; 2098 } 2099 2100 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments 2101 to the call. UNLOCKED is true if this is actually a call to 2102 fputs_unlocked. The transformation is done only when the return value 2103 of the call is unused and the length of the string argument can be 2104 determined at compile time. Return false if no simplification was 2105 made, true otherwise. */ 2106 2107 static bool 2108 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi, 2109 tree arg0, tree arg1, 2110 bool unlocked) 2111 { 2112 gimple *stmt = gsi_stmt (*gsi); 2113 2114 /* If we're using an unlocked function, assume the other unlocked 2115 functions exist explicitly. */ 2116 tree const fn_fputc = (unlocked 2117 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED) 2118 : builtin_decl_implicit (BUILT_IN_FPUTC)); 2119 tree const fn_fwrite = (unlocked 2120 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED) 2121 : builtin_decl_implicit (BUILT_IN_FWRITE)); 2122 2123 /* If the return value is used, don't do the transformation. */ 2124 if (gimple_call_lhs (stmt)) 2125 return false; 2126 2127 /* Get the length of the string passed to fputs. If the length 2128 can't be determined, punt. */ 2129 tree len = get_maxval_strlen (arg0, 0); 2130 if (!len 2131 || TREE_CODE (len) != INTEGER_CST) 2132 return false; 2133 2134 switch (compare_tree_int (len, 1)) 2135 { 2136 case -1: /* length is 0, delete the call entirely. */ 2137 replace_call_with_value (gsi, integer_zero_node); 2138 return true; 2139 2140 case 0: /* length is 1, call fputc. */ 2141 { 2142 const char *p = c_getstr (arg0); 2143 if (p != NULL) 2144 { 2145 if (!fn_fputc) 2146 return false; 2147 2148 gimple *repl = gimple_build_call (fn_fputc, 2, 2149 build_int_cst 2150 (integer_type_node, p[0]), arg1); 2151 replace_call_with_call_and_fold (gsi, repl); 2152 return true; 2153 } 2154 } 2155 /* FALLTHROUGH */ 2156 case 1: /* length is greater than 1, call fwrite. */ 2157 { 2158 /* If optimizing for size, keep fputs. */ 2159 if (optimize_function_for_size_p (cfun)) 2160 return false; 2161 /* New argument list transforming fputs(string, stream) to 2162 fwrite(string, 1, len, stream).
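E.g. fputs ("hello", f), where the string has known length 5, becomes fwrite ("hello", 1, 5, f).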
*/ 2163 if (!fn_fwrite) 2164 return false; 2165 2166 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0, 2167 size_one_node, len, arg1); 2168 replace_call_with_call_and_fold (gsi, repl); 2169 return true; 2170 } 2171 default: 2172 gcc_unreachable (); 2173 } 2174 return false; 2175 } 2176 2177 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin. 2178 DEST, SRC, LEN, and SIZE are the arguments to the call. 2179 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_* 2180 code of the builtin. If MAXLEN is not NULL, it is maximum length 2181 passed as third argument. */ 2182 2183 static bool 2184 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi, 2185 tree dest, tree src, tree len, tree size, 2186 enum built_in_function fcode) 2187 { 2188 gimple *stmt = gsi_stmt (*gsi); 2189 location_t loc = gimple_location (stmt); 2190 bool ignore = gimple_call_lhs (stmt) == NULL_TREE; 2191 tree fn; 2192 2193 /* If SRC and DEST are the same (and not volatile), return DEST 2194 (resp. DEST+LEN for __mempcpy_chk). */ 2195 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0)) 2196 { 2197 if (fcode != BUILT_IN_MEMPCPY_CHK) 2198 { 2199 replace_call_with_value (gsi, dest); 2200 return true; 2201 } 2202 else 2203 { 2204 gimple_seq stmts = NULL; 2205 len = gimple_convert_to_ptrofftype (&stmts, loc, len); 2206 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR, 2207 TREE_TYPE (dest), dest, len); 2208 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); 2209 replace_call_with_value (gsi, temp); 2210 return true; 2211 } 2212 } 2213 2214 if (! tree_fits_uhwi_p (size)) 2215 return false; 2216 2217 tree maxlen = get_maxval_strlen (len, 2); 2218 if (! integer_all_onesp (size)) 2219 { 2220 if (! tree_fits_uhwi_p (len)) 2221 { 2222 /* If LEN is not constant, try MAXLEN too. 2223 For MAXLEN only allow optimizing into non-_ocs function 2224 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ 2225 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen)) 2226 { 2227 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore) 2228 { 2229 /* (void) __mempcpy_chk () can be optimized into 2230 (void) __memcpy_chk (). */ 2231 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK); 2232 if (!fn) 2233 return false; 2234 2235 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size); 2236 replace_call_with_call_and_fold (gsi, repl); 2237 return true; 2238 } 2239 return false; 2240 } 2241 } 2242 else 2243 maxlen = len; 2244 2245 if (tree_int_cst_lt (size, maxlen)) 2246 return false; 2247 } 2248 2249 fn = NULL_TREE; 2250 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume 2251 mem{cpy,pcpy,move,set} is available. */ 2252 switch (fcode) 2253 { 2254 case BUILT_IN_MEMCPY_CHK: 2255 fn = builtin_decl_explicit (BUILT_IN_MEMCPY); 2256 break; 2257 case BUILT_IN_MEMPCPY_CHK: 2258 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY); 2259 break; 2260 case BUILT_IN_MEMMOVE_CHK: 2261 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE); 2262 break; 2263 case BUILT_IN_MEMSET_CHK: 2264 fn = builtin_decl_explicit (BUILT_IN_MEMSET); 2265 break; 2266 default: 2267 break; 2268 } 2269 2270 if (!fn) 2271 return false; 2272 2273 gimple *repl = gimple_build_call (fn, 3, dest, src, len); 2274 replace_call_with_call_and_fold (gsi, repl); 2275 return true; 2276 } 2277 2278 /* Fold a call to the __st[rp]cpy_chk builtin. 2279 DEST, SRC, and SIZE are the arguments to the call. 2280 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_* 2281 code of the builtin. 
If MAXLEN is not NULL, it is maximum length of 2282 strings passed as second argument. */ 2283 2284 static bool 2285 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi, 2286 tree dest, 2287 tree src, tree size, 2288 enum built_in_function fcode) 2289 { 2290 gimple *stmt = gsi_stmt (*gsi); 2291 location_t loc = gimple_location (stmt); 2292 bool ignore = gimple_call_lhs (stmt) == NULL_TREE; 2293 tree len, fn; 2294 2295 /* If SRC and DEST are the same (and not volatile), return DEST. */ 2296 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0)) 2297 { 2298 replace_call_with_value (gsi, dest); 2299 return true; 2300 } 2301 2302 if (! tree_fits_uhwi_p (size)) 2303 return false; 2304 2305 tree maxlen = get_maxval_strlen (src, 1); 2306 if (! integer_all_onesp (size)) 2307 { 2308 len = c_strlen (src, 1); 2309 if (! len || ! tree_fits_uhwi_p (len)) 2310 { 2311 /* If LEN is not constant, try MAXLEN too. 2312 For MAXLEN only allow optimizing into non-_ocs function 2313 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ 2314 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen)) 2315 { 2316 if (fcode == BUILT_IN_STPCPY_CHK) 2317 { 2318 if (! ignore) 2319 return false; 2320 2321 /* If return value of __stpcpy_chk is ignored, 2322 optimize into __strcpy_chk. */ 2323 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK); 2324 if (!fn) 2325 return false; 2326 2327 gimple *repl = gimple_build_call (fn, 3, dest, src, size); 2328 replace_call_with_call_and_fold (gsi, repl); 2329 return true; 2330 } 2331 2332 if (! len || TREE_SIDE_EFFECTS (len)) 2333 return false; 2334 2335 /* If c_strlen returned something, but not a constant, 2336 transform __strcpy_chk into __memcpy_chk. */ 2337 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK); 2338 if (!fn) 2339 return false; 2340 2341 gimple_seq stmts = NULL; 2342 len = gimple_convert (&stmts, loc, size_type_node, len); 2343 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len, 2344 build_int_cst (size_type_node, 1)); 2345 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); 2346 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size); 2347 replace_call_with_call_and_fold (gsi, repl); 2348 return true; 2349 } 2350 } 2351 else 2352 maxlen = len; 2353 2354 if (! tree_int_cst_lt (maxlen, size)) 2355 return false; 2356 } 2357 2358 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */ 2359 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK 2360 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY); 2361 if (!fn) 2362 return false; 2363 2364 gimple *repl = gimple_build_call (fn, 2, dest, src); 2365 replace_call_with_call_and_fold (gsi, repl); 2366 return true; 2367 } 2368 2369 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE 2370 are the arguments to the call. If MAXLEN is not NULL, it is maximum 2371 length passed as third argument. IGNORE is true if return value can be 2372 ignored. FCODE is the BUILT_IN_* code of the builtin. */ 2373 2374 static bool 2375 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi, 2376 tree dest, tree src, 2377 tree len, tree size, 2378 enum built_in_function fcode) 2379 { 2380 gimple *stmt = gsi_stmt (*gsi); 2381 bool ignore = gimple_call_lhs (stmt) == NULL_TREE; 2382 tree fn; 2383 2384 if (fcode == BUILT_IN_STPNCPY_CHK && ignore) 2385 { 2386 /* If return value of __stpncpy_chk is ignored, 2387 optimize into __strncpy_chk. 
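Both functions copy the same bytes and differ only in the value returned, which is unused here.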
*/ 2388 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK); 2389 if (fn) 2390 { 2391 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size); 2392 replace_call_with_call_and_fold (gsi, repl); 2393 return true; 2394 } 2395 } 2396 2397 if (! tree_fits_uhwi_p (size)) 2398 return false; 2399 2400 tree maxlen = get_maxval_strlen (len, 2); 2401 if (! integer_all_onesp (size)) 2402 { 2403 if (! tree_fits_uhwi_p (len)) 2404 { 2405 /* If LEN is not constant, try MAXLEN too. 2406 For MAXLEN only allow optimizing into non-_ocs function 2407 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ 2408 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen)) 2409 return false; 2410 } 2411 else 2412 maxlen = len; 2413 2414 if (tree_int_cst_lt (size, maxlen)) 2415 return false; 2416 } 2417 2418 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */ 2419 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK 2420 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY); 2421 if (!fn) 2422 return false; 2423 2424 gimple *repl = gimple_build_call (fn, 3, dest, src, len); 2425 replace_call_with_call_and_fold (gsi, repl); 2426 return true; 2427 } 2428 2429 /* Fold function call to builtin stpcpy with arguments DEST and SRC. 2430 Return NULL_TREE if no simplification can be made. */ 2431 2432 static bool 2433 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi) 2434 { 2435 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 2436 location_t loc = gimple_location (stmt); 2437 tree dest = gimple_call_arg (stmt, 0); 2438 tree src = gimple_call_arg (stmt, 1); 2439 tree fn, len, lenp1; 2440 2441 /* If the result is unused, replace stpcpy with strcpy. */ 2442 if (gimple_call_lhs (stmt) == NULL_TREE) 2443 { 2444 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY); 2445 if (!fn) 2446 return false; 2447 gimple_call_set_fndecl (stmt, fn); 2448 fold_stmt (gsi); 2449 return true; 2450 } 2451 2452 len = c_strlen (src, 1); 2453 if (!len 2454 || TREE_CODE (len) != INTEGER_CST) 2455 return false; 2456 2457 if (optimize_function_for_size_p (cfun) 2458 /* If length is zero it's small enough. */ 2459 && !integer_zerop (len)) 2460 return false; 2461 2462 /* If the source has a known length replace stpcpy with memcpy. */ 2463 fn = builtin_decl_implicit (BUILT_IN_MEMCPY); 2464 if (!fn) 2465 return false; 2466 2467 gimple_seq stmts = NULL; 2468 tree tem = gimple_convert (&stmts, loc, size_type_node, len); 2469 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, 2470 tem, build_int_cst (size_type_node, 1)); 2471 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); 2472 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1); 2473 gimple_set_vuse (repl, gimple_vuse (stmt)); 2474 gimple_set_vdef (repl, gimple_vdef (stmt)); 2475 if (gimple_vdef (repl) 2476 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME) 2477 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl; 2478 gsi_insert_before (gsi, repl, GSI_SAME_STMT); 2479 /* Replace the result with dest + len. */ 2480 stmts = NULL; 2481 tem = gimple_convert (&stmts, loc, sizetype, len); 2482 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); 2483 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt), 2484 POINTER_PLUS_EXPR, dest, tem); 2485 gsi_replace (gsi, ret, false); 2486 /* Finally fold the memcpy call. */ 2487 gimple_stmt_iterator gsi2 = *gsi; 2488 gsi_prev (&gsi2); 2489 fold_stmt (&gsi2); 2490 return true; 2491 } 2492 2493 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. 
Return 2494 NULL_TREE if a normal call should be emitted rather than expanding 2495 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or 2496 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length 2497 passed as second argument. */ 2498 2499 static bool 2500 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi, 2501 enum built_in_function fcode) 2502 { 2503 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 2504 tree dest, size, len, fn, fmt, flag; 2505 const char *fmt_str; 2506 2507 /* Verify the required arguments in the original call. */ 2508 if (gimple_call_num_args (stmt) < 5) 2509 return false; 2510 2511 dest = gimple_call_arg (stmt, 0); 2512 len = gimple_call_arg (stmt, 1); 2513 flag = gimple_call_arg (stmt, 2); 2514 size = gimple_call_arg (stmt, 3); 2515 fmt = gimple_call_arg (stmt, 4); 2516 2517 if (! tree_fits_uhwi_p (size)) 2518 return false; 2519 2520 if (! integer_all_onesp (size)) 2521 { 2522 tree maxlen = get_maxval_strlen (len, 2); 2523 if (! tree_fits_uhwi_p (len)) 2524 { 2525 /* If LEN is not constant, try MAXLEN too. 2526 For MAXLEN only allow optimizing into non-_ocs function 2527 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ 2528 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen)) 2529 return false; 2530 } 2531 else 2532 maxlen = len; 2533 2534 if (tree_int_cst_lt (size, maxlen)) 2535 return false; 2536 } 2537 2538 if (!init_target_chars ()) 2539 return false; 2540 2541 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0 2542 or if format doesn't contain % chars or is "%s". */ 2543 if (! integer_zerop (flag)) 2544 { 2545 fmt_str = c_getstr (fmt); 2546 if (fmt_str == NULL) 2547 return false; 2548 if (strchr (fmt_str, target_percent) != NULL 2549 && strcmp (fmt_str, target_percent_s)) 2550 return false; 2551 } 2552 2553 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is 2554 available. */ 2555 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK 2556 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF); 2557 if (!fn) 2558 return false; 2559 2560 /* Replace the called function and the first 5 argument by 3 retaining 2561 trailing varargs. */ 2562 gimple_call_set_fndecl (stmt, fn); 2563 gimple_call_set_fntype (stmt, TREE_TYPE (fn)); 2564 gimple_call_set_arg (stmt, 0, dest); 2565 gimple_call_set_arg (stmt, 1, len); 2566 gimple_call_set_arg (stmt, 2, fmt); 2567 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i) 2568 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2)); 2569 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2); 2570 fold_stmt (gsi); 2571 return true; 2572 } 2573 2574 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS. 2575 Return NULL_TREE if a normal call should be emitted rather than 2576 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK 2577 or BUILT_IN_VSPRINTF_CHK. */ 2578 2579 static bool 2580 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi, 2581 enum built_in_function fcode) 2582 { 2583 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 2584 tree dest, size, len, fn, fmt, flag; 2585 const char *fmt_str; 2586 unsigned nargs = gimple_call_num_args (stmt); 2587 2588 /* Verify the required arguments in the original call. */ 2589 if (nargs < 4) 2590 return false; 2591 dest = gimple_call_arg (stmt, 0); 2592 flag = gimple_call_arg (stmt, 1); 2593 size = gimple_call_arg (stmt, 2); 2594 fmt = gimple_call_arg (stmt, 3); 2595 2596 if (! 
tree_fits_uhwi_p (size)) 2597 return false; 2598 2599 len = NULL_TREE; 2600 2601 if (!init_target_chars ()) 2602 return false; 2603 2604 /* Check whether the format is a literal string constant. */ 2605 fmt_str = c_getstr (fmt); 2606 if (fmt_str != NULL) 2607 { 2608 /* If the format doesn't contain % args or %%, we know the size. */ 2609 if (strchr (fmt_str, target_percent) == 0) 2610 { 2611 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4) 2612 len = build_int_cstu (size_type_node, strlen (fmt_str)); 2613 } 2614 /* If the format is "%s" and first ... argument is a string literal, 2615 we know the size too. */ 2616 else if (fcode == BUILT_IN_SPRINTF_CHK 2617 && strcmp (fmt_str, target_percent_s) == 0) 2618 { 2619 tree arg; 2620 2621 if (nargs == 5) 2622 { 2623 arg = gimple_call_arg (stmt, 4); 2624 if (POINTER_TYPE_P (TREE_TYPE (arg))) 2625 { 2626 len = c_strlen (arg, 1); 2627 if (! len || ! tree_fits_uhwi_p (len)) 2628 len = NULL_TREE; 2629 } 2630 } 2631 } 2632 } 2633 2634 if (! integer_all_onesp (size)) 2635 { 2636 if (! len || ! tree_int_cst_lt (len, size)) 2637 return false; 2638 } 2639 2640 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0 2641 or if format doesn't contain % chars or is "%s". */ 2642 if (! integer_zerop (flag)) 2643 { 2644 if (fmt_str == NULL) 2645 return false; 2646 if (strchr (fmt_str, target_percent) != NULL 2647 && strcmp (fmt_str, target_percent_s)) 2648 return false; 2649 } 2650 2651 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */ 2652 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK 2653 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF); 2654 if (!fn) 2655 return false; 2656 2657 /* Replace the called function and the first 4 argument by 2 retaining 2658 trailing varargs. */ 2659 gimple_call_set_fndecl (stmt, fn); 2660 gimple_call_set_fntype (stmt, TREE_TYPE (fn)); 2661 gimple_call_set_arg (stmt, 0, dest); 2662 gimple_call_set_arg (stmt, 1, fmt); 2663 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i) 2664 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2)); 2665 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2); 2666 fold_stmt (gsi); 2667 return true; 2668 } 2669 2670 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG. 2671 ORIG may be null if this is a 2-argument call. We don't attempt to 2672 simplify calls with more than 3 arguments. 2673 2674 Return NULL_TREE if no simplification was possible, otherwise return the 2675 simplified form of the call as a tree. If IGNORED is true, it means that 2676 the caller does not use the returned value of the function. */ 2677 2678 static bool 2679 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi) 2680 { 2681 gimple *stmt = gsi_stmt (*gsi); 2682 tree dest = gimple_call_arg (stmt, 0); 2683 tree fmt = gimple_call_arg (stmt, 1); 2684 tree orig = NULL_TREE; 2685 const char *fmt_str = NULL; 2686 2687 /* Verify the required arguments in the original call. We deal with two 2688 types of sprintf() calls: 'sprintf (str, fmt)' and 2689 'sprintf (dest, "%s", orig)'. */ 2690 if (gimple_call_num_args (stmt) > 3) 2691 return false; 2692 2693 if (gimple_call_num_args (stmt) == 3) 2694 orig = gimple_call_arg (stmt, 2); 2695 2696 /* Check whether the format is a literal string constant. */ 2697 fmt_str = c_getstr (fmt); 2698 if (fmt_str == NULL) 2699 return false; 2700 2701 if (!init_target_chars ()) 2702 return false; 2703 2704 /* If the format doesn't contain % args or %%, use strcpy. 
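E.g. sprintf (buf, "hello") becomes strcpy (buf, "hello"), and a used return value is replaced by the compile-time constant strlen ("hello") == 5.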
*/ 2705 if (strchr (fmt_str, target_percent) == NULL) 2706 { 2707 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY); 2708 2709 if (!fn) 2710 return false; 2711 2712 /* Don't optimize sprintf (buf, "abc", ptr++). */ 2713 if (orig) 2714 return false; 2715 2716 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when 2717 'format' is known to contain no % formats. */ 2718 gimple_seq stmts = NULL; 2719 gimple *repl = gimple_build_call (fn, 2, dest, fmt); 2720 gimple_seq_add_stmt_without_update (&stmts, repl); 2721 if (tree lhs = gimple_call_lhs (stmt)) 2722 { 2723 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs), 2724 strlen (fmt_str))); 2725 gimple_seq_add_stmt_without_update (&stmts, repl); 2726 gsi_replace_with_seq_vops (gsi, stmts); 2727 /* gsi now points at the assignment to the lhs, get a 2728 stmt iterator to the memcpy call. 2729 ??? We can't use gsi_for_stmt as that doesn't work when the 2730 CFG isn't built yet. */ 2731 gimple_stmt_iterator gsi2 = *gsi; 2732 gsi_prev (&gsi2); 2733 fold_stmt (&gsi2); 2734 } 2735 else 2736 { 2737 gsi_replace_with_seq_vops (gsi, stmts); 2738 fold_stmt (gsi); 2739 } 2740 return true; 2741 } 2742 2743 /* If the format is "%s", use strcpy if the result isn't used. */ 2744 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0) 2745 { 2746 tree fn; 2747 fn = builtin_decl_implicit (BUILT_IN_STRCPY); 2748 2749 if (!fn) 2750 return false; 2751 2752 /* Don't crash on sprintf (str1, "%s"). */ 2753 if (!orig) 2754 return false; 2755 2756 tree orig_len = NULL_TREE; 2757 if (gimple_call_lhs (stmt)) 2758 { 2759 orig_len = get_maxval_strlen (orig, 0); 2760 if (!orig_len) 2761 return false; 2762 } 2763 2764 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */ 2765 gimple_seq stmts = NULL; 2766 gimple *repl = gimple_build_call (fn, 2, dest, orig); 2767 gimple_seq_add_stmt_without_update (&stmts, repl); 2768 if (tree lhs = gimple_call_lhs (stmt)) 2769 { 2770 if (!useless_type_conversion_p (TREE_TYPE (lhs), 2771 TREE_TYPE (orig_len))) 2772 orig_len = fold_convert (TREE_TYPE (lhs), orig_len); 2773 repl = gimple_build_assign (lhs, orig_len); 2774 gimple_seq_add_stmt_without_update (&stmts, repl); 2775 gsi_replace_with_seq_vops (gsi, stmts); 2776 /* gsi now points at the assignment to the lhs, get a 2777 stmt iterator to the memcpy call. 2778 ??? We can't use gsi_for_stmt as that doesn't work when the 2779 CFG isn't built yet. */ 2780 gimple_stmt_iterator gsi2 = *gsi; 2781 gsi_prev (&gsi2); 2782 fold_stmt (&gsi2); 2783 } 2784 else 2785 { 2786 gsi_replace_with_seq_vops (gsi, stmts); 2787 fold_stmt (gsi); 2788 } 2789 return true; 2790 } 2791 return false; 2792 } 2793 2794 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE, 2795 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't 2796 attempt to simplify calls with more than 4 arguments. 2797 2798 Return NULL_TREE if no simplification was possible, otherwise return the 2799 simplified form of the call as a tree. If IGNORED is true, it means that 2800 the caller does not use the returned value of the function. 
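DESTSIZE must be a compile-time constant; the transformations below fire only when the formatted result provably fits, i.e. its length is strictly smaller than DESTSIZE.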
*/ 2801 2802 static bool 2803 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi) 2804 { 2805 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 2806 tree dest = gimple_call_arg (stmt, 0); 2807 tree destsize = gimple_call_arg (stmt, 1); 2808 tree fmt = gimple_call_arg (stmt, 2); 2809 tree orig = NULL_TREE; 2810 const char *fmt_str = NULL; 2811 2812 if (gimple_call_num_args (stmt) > 4) 2813 return false; 2814 2815 if (gimple_call_num_args (stmt) == 4) 2816 orig = gimple_call_arg (stmt, 3); 2817 2818 if (!tree_fits_uhwi_p (destsize)) 2819 return false; 2820 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize); 2821 2822 /* Check whether the format is a literal string constant. */ 2823 fmt_str = c_getstr (fmt); 2824 if (fmt_str == NULL) 2825 return false; 2826 2827 if (!init_target_chars ()) 2828 return false; 2829 2830 /* If the format doesn't contain % args or %%, use strcpy. */ 2831 if (strchr (fmt_str, target_percent) == NULL) 2832 { 2833 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY); 2834 if (!fn) 2835 return false; 2836 2837 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */ 2838 if (orig) 2839 return false; 2840 2841 /* We could expand this as 2842 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0'; 2843 or to 2844 memcpy (str, fmt_with_nul_at_cstm1, cst); 2845 but in the former case that might increase code size 2846 and in the latter case grow .rodata section too much. 2847 So punt for now. */ 2848 size_t len = strlen (fmt_str); 2849 if (len >= destlen) 2850 return false; 2851 2852 gimple_seq stmts = NULL; 2853 gimple *repl = gimple_build_call (fn, 2, dest, fmt); 2854 gimple_seq_add_stmt_without_update (&stmts, repl); 2855 if (tree lhs = gimple_call_lhs (stmt)) 2856 { 2857 repl = gimple_build_assign (lhs, 2858 build_int_cst (TREE_TYPE (lhs), len)); 2859 gimple_seq_add_stmt_without_update (&stmts, repl); 2860 gsi_replace_with_seq_vops (gsi, stmts); 2861 /* gsi now points at the assignment to the lhs, get a 2862 stmt iterator to the memcpy call. 2863 ??? We can't use gsi_for_stmt as that doesn't work when the 2864 CFG isn't built yet. */ 2865 gimple_stmt_iterator gsi2 = *gsi; 2866 gsi_prev (&gsi2); 2867 fold_stmt (&gsi2); 2868 } 2869 else 2870 { 2871 gsi_replace_with_seq_vops (gsi, stmts); 2872 fold_stmt (gsi); 2873 } 2874 return true; 2875 } 2876 2877 /* If the format is "%s", use strcpy if the result isn't used. */ 2878 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0) 2879 { 2880 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY); 2881 if (!fn) 2882 return false; 2883 2884 /* Don't crash on snprintf (str1, cst, "%s"). */ 2885 if (!orig) 2886 return false; 2887 2888 tree orig_len = get_maxval_strlen (orig, 0); 2889 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST) 2890 return false; 2891 2892 /* We could expand this as 2893 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0'; 2894 or to 2895 memcpy (str1, str2_with_nul_at_cstm1, cst); 2896 but in the former case that might increase code size 2897 and in the latter case grow .rodata section too much. 2898 So punt for now. */ 2899 if (compare_tree_int (orig_len, destlen) >= 0) 2900 return false; 2901 2902 /* Convert snprintf (str1, cst, "%s", str2) into 2903 strcpy (str1, str2) if strlen (str2) < cst. 
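E.g. snprintf (buf, 8, "%s", "abc") becomes strcpy (buf, "abc"), with a used result replaced by the constant 3.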
*/ 2904 gimple_seq stmts = NULL; 2905 gimple *repl = gimple_build_call (fn, 2, dest, orig); 2906 gimple_seq_add_stmt_without_update (&stmts, repl); 2907 if (tree lhs = gimple_call_lhs (stmt)) 2908 { 2909 if (!useless_type_conversion_p (TREE_TYPE (lhs), 2910 TREE_TYPE (orig_len))) 2911 orig_len = fold_convert (TREE_TYPE (lhs), orig_len); 2912 repl = gimple_build_assign (lhs, orig_len); 2913 gimple_seq_add_stmt_without_update (&stmts, repl); 2914 gsi_replace_with_seq_vops (gsi, stmts); 2915 /* gsi now points at the assignment to the lhs, get a 2916 stmt iterator to the memcpy call. 2917 ??? We can't use gsi_for_stmt as that doesn't work when the 2918 CFG isn't built yet. */ 2919 gimple_stmt_iterator gsi2 = *gsi; 2920 gsi_prev (&gsi2); 2921 fold_stmt (&gsi2); 2922 } 2923 else 2924 { 2925 gsi_replace_with_seq_vops (gsi, stmts); 2926 fold_stmt (gsi); 2927 } 2928 return true; 2929 } 2930 return false; 2931 } 2932 2933 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins. 2934 FP, FMT, and ARG are the arguments to the call. We don't fold calls with 2935 more than 3 arguments, and ARG may be null in the 2-argument case. 2936 2937 Return NULL_TREE if no simplification was possible, otherwise return the 2938 simplified form of the call as a tree. FCODE is the BUILT_IN_* 2939 code of the function to be simplified. */ 2940 2941 static bool 2942 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi, 2943 tree fp, tree fmt, tree arg, 2944 enum built_in_function fcode) 2945 { 2946 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 2947 tree fn_fputc, fn_fputs; 2948 const char *fmt_str = NULL; 2949 2950 /* If the return value is used, don't do the transformation. */ 2951 if (gimple_call_lhs (stmt) != NULL_TREE) 2952 return false; 2953 2954 /* Check whether the format is a literal string constant. */ 2955 fmt_str = c_getstr (fmt); 2956 if (fmt_str == NULL) 2957 return false; 2958 2959 if (fcode == BUILT_IN_FPRINTF_UNLOCKED) 2960 { 2961 /* If we're using an unlocked function, assume the other 2962 unlocked functions exist explicitly. */ 2963 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED); 2964 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED); 2965 } 2966 else 2967 { 2968 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC); 2969 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS); 2970 } 2971 2972 if (!init_target_chars ()) 2973 return false; 2974 2975 /* If the format doesn't contain % args or %%, use strcpy. */ 2976 if (strchr (fmt_str, target_percent) == NULL) 2977 { 2978 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK 2979 && arg) 2980 return false; 2981 2982 /* If the format specifier was "", fprintf does nothing. */ 2983 if (fmt_str[0] == '\0') 2984 { 2985 replace_call_with_value (gsi, NULL_TREE); 2986 return true; 2987 } 2988 2989 /* When "string" doesn't contain %, replace all cases of 2990 fprintf (fp, string) with fputs (string, fp). The fputs 2991 builtin will take care of special cases like length == 1. */ 2992 if (fn_fputs) 2993 { 2994 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp); 2995 replace_call_with_call_and_fold (gsi, repl); 2996 return true; 2997 } 2998 } 2999 3000 /* The other optimizations can be done only on the non-va_list variants. */ 3001 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK) 3002 return false; 3003 3004 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */ 3005 else if (strcmp (fmt_str, target_percent_s) == 0) 3006 { 3007 if (!arg || ! 
POINTER_TYPE_P (TREE_TYPE (arg))) 3008 return false; 3009 if (fn_fputs) 3010 { 3011 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp); 3012 replace_call_with_call_and_fold (gsi, repl); 3013 return true; 3014 } 3015 } 3016 3017 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */ 3018 else if (strcmp (fmt_str, target_percent_c) == 0) 3019 { 3020 if (!arg 3021 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg))) 3022 return false; 3023 if (fn_fputc) 3024 { 3025 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp); 3026 replace_call_with_call_and_fold (gsi, repl); 3027 return true; 3028 } 3029 } 3030 3031 return false; 3032 } 3033 3034 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins. 3035 FMT and ARG are the arguments to the call; we don't fold cases with 3036 more than 2 arguments, and ARG may be null if this is a 1-argument case. 3037 3038 Return NULL_TREE if no simplification was possible, otherwise return the 3039 simplified form of the call as a tree. FCODE is the BUILT_IN_* 3040 code of the function to be simplified. */ 3041 3042 static bool 3043 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt, 3044 tree arg, enum built_in_function fcode) 3045 { 3046 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 3047 tree fn_putchar, fn_puts, newarg; 3048 const char *fmt_str = NULL; 3049 3050 /* If the return value is used, don't do the transformation. */ 3051 if (gimple_call_lhs (stmt) != NULL_TREE) 3052 return false; 3053 3054 /* Check whether the format is a literal string constant. */ 3055 fmt_str = c_getstr (fmt); 3056 if (fmt_str == NULL) 3057 return false; 3058 3059 if (fcode == BUILT_IN_PRINTF_UNLOCKED) 3060 { 3061 /* If we're using an unlocked function, assume the other 3062 unlocked functions exist explicitly. */ 3063 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED); 3064 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED); 3065 } 3066 else 3067 { 3068 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR); 3069 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS); 3070 } 3071 3072 if (!init_target_chars ()) 3073 return false; 3074 3075 if (strcmp (fmt_str, target_percent_s) == 0 3076 || strchr (fmt_str, target_percent) == NULL) 3077 { 3078 const char *str; 3079 3080 if (strcmp (fmt_str, target_percent_s) == 0) 3081 { 3082 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK) 3083 return false; 3084 3085 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg))) 3086 return false; 3087 3088 str = c_getstr (arg); 3089 if (str == NULL) 3090 return false; 3091 } 3092 else 3093 { 3094 /* The format specifier doesn't contain any '%' characters. */ 3095 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK 3096 && arg) 3097 return false; 3098 str = fmt_str; 3099 } 3100 3101 /* If the string was "", printf does nothing. */ 3102 if (str[0] == '\0') 3103 { 3104 replace_call_with_value (gsi, NULL_TREE); 3105 return true; 3106 } 3107 3108 /* If the string has length of 1, call putchar. */ 3109 if (str[1] == '\0') 3110 { 3111 /* Given printf("c"), (where c is any one character,) 3112 convert "c"[0] to an int and pass that to the replacement 3113 function. */ 3114 newarg = build_int_cst (integer_type_node, str[0]); 3115 if (fn_putchar) 3116 { 3117 gcall *repl = gimple_build_call (fn_putchar, 1, newarg); 3118 replace_call_with_call_and_fold (gsi, repl); 3119 return true; 3120 } 3121 } 3122 else 3123 { 3124 /* If the string was "string\n", call puts("string"). 
*/ 3125 size_t len = strlen (str); 3126 if ((unsigned char)str[len - 1] == target_newline 3127 && (size_t) (int) len == len 3128 && (int) len > 0) 3129 { 3130 char *newstr; 3131 tree offset_node, string_cst; 3132 3133 /* Create a NUL-terminated string that's one char shorter 3134 than the original, stripping off the trailing '\n'. */ 3135 newarg = build_string_literal (len, str); 3136 string_cst = string_constant (newarg, &offset_node); 3137 gcc_checking_assert (string_cst 3138 && (TREE_STRING_LENGTH (string_cst) 3139 == (int) len) 3140 && integer_zerop (offset_node) 3141 && (unsigned char) 3142 TREE_STRING_POINTER (string_cst)[len - 1] 3143 == target_newline); 3144 /* build_string_literal creates a new STRING_CST, 3145 modify it in place to avoid double copying. */ 3146 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst)); 3147 newstr[len - 1] = '\0'; 3148 if (fn_puts) 3149 { 3150 gcall *repl = gimple_build_call (fn_puts, 1, newarg); 3151 replace_call_with_call_and_fold (gsi, repl); 3152 return true; 3153 } 3154 } 3155 else 3156 /* We'd like to arrange to call fputs(string,stdout) here, 3157 but we need stdout and don't have a way to get it yet. */ 3158 return false; 3159 } 3160 } 3161 3162 /* The other optimizations can be done only on the non-va_list variants. */ 3163 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK) 3164 return false; 3165 3166 /* If the format specifier was "%s\n", call __builtin_puts(arg). */ 3167 else if (strcmp (fmt_str, target_percent_s_newline) == 0) 3168 { 3169 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg))) 3170 return false; 3171 if (fn_puts) 3172 { 3173 gcall *repl = gimple_build_call (fn_puts, 1, arg); 3174 replace_call_with_call_and_fold (gsi, repl); 3175 return true; 3176 } 3177 } 3178 3179 /* If the format specifier was "%c", call __builtin_putchar(arg). */ 3180 else if (strcmp (fmt_str, target_percent_c) == 0) 3181 { 3182 if (!arg || ! useless_type_conversion_p (integer_type_node, 3183 TREE_TYPE (arg))) 3184 return false; 3185 if (fn_putchar) 3186 { 3187 gcall *repl = gimple_build_call (fn_putchar, 1, arg); 3188 replace_call_with_call_and_fold (gsi, repl); 3189 return true; 3190 } 3191 } 3192 3193 return false; 3194 } 3195 3196 3197 3198 /* Fold a call to __builtin_strlen with known length LEN. */ 3199 3200 static bool 3201 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi) 3202 { 3203 gimple *stmt = gsi_stmt (*gsi); 3204 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0); 3205 if (!len) 3206 return false; 3207 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT); 3208 replace_call_with_value (gsi, len); 3209 return true; 3210 } 3211 3212 /* Fold a call to __builtin_acc_on_device. */ 3213 3214 static bool 3215 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0) 3216 { 3217 /* Defer folding until we know which compiler we're in. 
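The answer differs between the host compiler and an offload (ACCEL_COMPILER) compiler, so it must not be decided while the IL can still be streamed to an accelerator.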
*/ 3218 if (symtab->state != EXPANSION) 3219 return false; 3220 3221 unsigned val_host = GOMP_DEVICE_HOST; 3222 unsigned val_dev = GOMP_DEVICE_NONE; 3223 3224 #ifdef ACCEL_COMPILER 3225 val_host = GOMP_DEVICE_NOT_HOST; 3226 val_dev = ACCEL_COMPILER_acc_device; 3227 #endif 3228 3229 location_t loc = gimple_location (gsi_stmt (*gsi)); 3230 3231 tree host_eq = make_ssa_name (boolean_type_node); 3232 gimple *host_ass = gimple_build_assign 3233 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host)); 3234 gimple_set_location (host_ass, loc); 3235 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT); 3236 3237 tree dev_eq = make_ssa_name (boolean_type_node); 3238 gimple *dev_ass = gimple_build_assign 3239 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev)); 3240 gimple_set_location (dev_ass, loc); 3241 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT); 3242 3243 tree result = make_ssa_name (boolean_type_node); 3244 gimple *result_ass = gimple_build_assign 3245 (result, BIT_IOR_EXPR, host_eq, dev_eq); 3246 gimple_set_location (result_ass, loc); 3247 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT); 3248 3249 replace_call_with_value (gsi, result); 3250 3251 return true; 3252 } 3253 3254 /* Fold the non-target builtin at *GSI and return whether any simplification 3255 was made. */ 3256 3257 static bool 3258 gimple_fold_builtin (gimple_stmt_iterator *gsi) 3259 { 3260 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi)); 3261 tree callee = gimple_call_fndecl (stmt); 3262 3263 /* Give up for always_inline inline builtins until they are 3264 inlined. */ 3265 if (avoid_folding_inline_builtin (callee)) 3266 return false; 3267 3268 unsigned n = gimple_call_num_args (stmt); 3269 enum built_in_function fcode = DECL_FUNCTION_CODE (callee); 3270 switch (fcode) 3271 { 3272 case BUILT_IN_BZERO: 3273 return gimple_fold_builtin_memset (gsi, integer_zero_node, 3274 gimple_call_arg (stmt, 1)); 3275 case BUILT_IN_MEMSET: 3276 return gimple_fold_builtin_memset (gsi, 3277 gimple_call_arg (stmt, 1), 3278 gimple_call_arg (stmt, 2)); 3279 case BUILT_IN_BCOPY: 3280 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1), 3281 gimple_call_arg (stmt, 0), 3); 3282 case BUILT_IN_MEMCPY: 3283 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0), 3284 gimple_call_arg (stmt, 1), 0); 3285 case BUILT_IN_MEMPCPY: 3286 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0), 3287 gimple_call_arg (stmt, 1), 1); 3288 case BUILT_IN_MEMMOVE: 3289 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0), 3290 gimple_call_arg (stmt, 1), 3); 3291 case BUILT_IN_SPRINTF_CHK: 3292 case BUILT_IN_VSPRINTF_CHK: 3293 return gimple_fold_builtin_sprintf_chk (gsi, fcode); 3294 case BUILT_IN_STRCAT_CHK: 3295 return gimple_fold_builtin_strcat_chk (gsi); 3296 case BUILT_IN_STRNCAT_CHK: 3297 return gimple_fold_builtin_strncat_chk (gsi); 3298 case BUILT_IN_STRLEN: 3299 return gimple_fold_builtin_strlen (gsi); 3300 case BUILT_IN_STRCPY: 3301 return gimple_fold_builtin_strcpy (gsi, 3302 gimple_call_arg (stmt, 0), 3303 gimple_call_arg (stmt, 1)); 3304 case BUILT_IN_STRNCPY: 3305 return gimple_fold_builtin_strncpy (gsi, 3306 gimple_call_arg (stmt, 0), 3307 gimple_call_arg (stmt, 1), 3308 gimple_call_arg (stmt, 2)); 3309 case BUILT_IN_STRCAT: 3310 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0), 3311 gimple_call_arg (stmt, 1)); 3312 case BUILT_IN_STRNCAT: 3313 return gimple_fold_builtin_strncat (gsi); 3314 case BUILT_IN_INDEX: 3315 case BUILT_IN_STRCHR: 3316 return 
gimple_fold_builtin_strchr (gsi, false); 3317 case BUILT_IN_RINDEX: 3318 case BUILT_IN_STRRCHR: 3319 return gimple_fold_builtin_strchr (gsi, true); 3320 case BUILT_IN_STRSTR: 3321 return gimple_fold_builtin_strstr (gsi); 3322 case BUILT_IN_STRCMP: 3323 case BUILT_IN_STRCASECMP: 3324 case BUILT_IN_STRNCMP: 3325 case BUILT_IN_STRNCASECMP: 3326 return gimple_fold_builtin_string_compare (gsi); 3327 case BUILT_IN_MEMCHR: 3328 return gimple_fold_builtin_memchr (gsi); 3329 case BUILT_IN_FPUTS: 3330 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0), 3331 gimple_call_arg (stmt, 1), false); 3332 case BUILT_IN_FPUTS_UNLOCKED: 3333 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0), 3334 gimple_call_arg (stmt, 1), true); 3335 case BUILT_IN_MEMCPY_CHK: 3336 case BUILT_IN_MEMPCPY_CHK: 3337 case BUILT_IN_MEMMOVE_CHK: 3338 case BUILT_IN_MEMSET_CHK: 3339 return gimple_fold_builtin_memory_chk (gsi, 3340 gimple_call_arg (stmt, 0), 3341 gimple_call_arg (stmt, 1), 3342 gimple_call_arg (stmt, 2), 3343 gimple_call_arg (stmt, 3), 3344 fcode); 3345 case BUILT_IN_STPCPY: 3346 return gimple_fold_builtin_stpcpy (gsi); 3347 case BUILT_IN_STRCPY_CHK: 3348 case BUILT_IN_STPCPY_CHK: 3349 return gimple_fold_builtin_stxcpy_chk (gsi, 3350 gimple_call_arg (stmt, 0), 3351 gimple_call_arg (stmt, 1), 3352 gimple_call_arg (stmt, 2), 3353 fcode); 3354 case BUILT_IN_STRNCPY_CHK: 3355 case BUILT_IN_STPNCPY_CHK: 3356 return gimple_fold_builtin_stxncpy_chk (gsi, 3357 gimple_call_arg (stmt, 0), 3358 gimple_call_arg (stmt, 1), 3359 gimple_call_arg (stmt, 2), 3360 gimple_call_arg (stmt, 3), 3361 fcode); 3362 case BUILT_IN_SNPRINTF_CHK: 3363 case BUILT_IN_VSNPRINTF_CHK: 3364 return gimple_fold_builtin_snprintf_chk (gsi, fcode); 3365 case BUILT_IN_SNPRINTF: 3366 return gimple_fold_builtin_snprintf (gsi); 3367 case BUILT_IN_SPRINTF: 3368 return gimple_fold_builtin_sprintf (gsi); 3369 case BUILT_IN_FPRINTF: 3370 case BUILT_IN_FPRINTF_UNLOCKED: 3371 case BUILT_IN_VFPRINTF: 3372 if (n == 2 || n == 3) 3373 return gimple_fold_builtin_fprintf (gsi, 3374 gimple_call_arg (stmt, 0), 3375 gimple_call_arg (stmt, 1), 3376 n == 3 3377 ? gimple_call_arg (stmt, 2) 3378 : NULL_TREE, 3379 fcode); 3380 break; 3381 case BUILT_IN_FPRINTF_CHK: 3382 case BUILT_IN_VFPRINTF_CHK: 3383 if (n == 3 || n == 4) 3384 return gimple_fold_builtin_fprintf (gsi, 3385 gimple_call_arg (stmt, 0), 3386 gimple_call_arg (stmt, 2), 3387 n == 4 3388 ? gimple_call_arg (stmt, 3) 3389 : NULL_TREE, 3390 fcode); 3391 break; 3392 case BUILT_IN_PRINTF: 3393 case BUILT_IN_PRINTF_UNLOCKED: 3394 case BUILT_IN_VPRINTF: 3395 if (n == 1 || n == 2) 3396 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0), 3397 n == 2 3398 ? gimple_call_arg (stmt, 1) 3399 : NULL_TREE, fcode); 3400 break; 3401 case BUILT_IN_PRINTF_CHK: 3402 case BUILT_IN_VPRINTF_CHK: 3403 if (n == 2 || n == 3) 3404 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1), 3405 n == 3 3406 ? gimple_call_arg (stmt, 2) 3407 : NULL_TREE, fcode); 3408 break; 3409 case BUILT_IN_ACC_ON_DEVICE: 3410 return gimple_fold_builtin_acc_on_device (gsi, 3411 gimple_call_arg (stmt, 0)); 3412 default:; 3413 } 3414 3415 /* Try the generic builtin folder. 
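fold_call_stmt dispatches to the tree-level folders; a returned tree is grafted back into the IL, via gimplify_and_update_call_from_tree when a plain update of the call is not possible.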
*/ 3416 bool ignore = (gimple_call_lhs (stmt) == NULL); 3417 tree result = fold_call_stmt (stmt, ignore); 3418 if (result) 3419 { 3420 if (ignore) 3421 STRIP_NOPS (result); 3422 else 3423 result = fold_convert (gimple_call_return_type (stmt), result); 3424 if (!update_call_from_tree (gsi, result)) 3425 gimplify_and_update_call_from_tree (gsi, result); 3426 return true; 3427 } 3428 3429 return false; 3430 } 3431 3432 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal 3433 function calls to constants, where possible. */ 3434 3435 static tree 3436 fold_internal_goacc_dim (const gimple *call) 3437 { 3438 int axis = oacc_get_ifn_dim_arg (call); 3439 int size = oacc_get_fn_dim_size (current_function_decl, axis); 3440 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS; 3441 tree result = NULL_TREE; 3442 3443 /* If the size is 1, or we only want the size and it is not dynamic, 3444 we know the answer. */ 3445 if (size == 1 || (!is_pos && size)) 3446 { 3447 tree type = TREE_TYPE (gimple_call_lhs (call)); 3448 result = build_int_cst (type, size - is_pos); 3449 } 3450 3451 return result; 3452 } 3453 3454 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable 3455 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is 3456 &var where var is only addressable because of such calls. */ 3457 3458 bool 3459 optimize_atomic_compare_exchange_p (gimple *stmt) 3460 { 3461 if (gimple_call_num_args (stmt) != 6 3462 || !flag_inline_atomics 3463 || !optimize 3464 || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0 3465 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL) 3466 || !gimple_vdef (stmt) 3467 || !gimple_vuse (stmt)) 3468 return false; 3469 3470 tree fndecl = gimple_call_fndecl (stmt); 3471 switch (DECL_FUNCTION_CODE (fndecl)) 3472 { 3473 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1: 3474 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2: 3475 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4: 3476 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8: 3477 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16: 3478 break; 3479 default: 3480 return false; 3481 } 3482 3483 tree expected = gimple_call_arg (stmt, 1); 3484 if (TREE_CODE (expected) != ADDR_EXPR 3485 || !SSA_VAR_P (TREE_OPERAND (expected, 0))) 3486 return false; 3487 3488 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0)); 3489 if (!is_gimple_reg_type (etype) 3490 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl) 3491 || TREE_THIS_VOLATILE (etype) 3492 || VECTOR_TYPE_P (etype) 3493 || TREE_CODE (etype) == COMPLEX_TYPE 3494 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs 3495 might not preserve all the bits. See PR71716. 
*/ 3496 || SCALAR_FLOAT_TYPE_P (etype) 3497 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype))) 3498 return false; 3499 3500 tree weak = gimple_call_arg (stmt, 3); 3501 if (!integer_zerop (weak) && !integer_onep (weak)) 3502 return false; 3503 3504 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); 3505 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt))); 3506 machine_mode mode = TYPE_MODE (itype); 3507 3508 if (direct_optab_handler (atomic_compare_and_swap_optab, mode) 3509 == CODE_FOR_nothing 3510 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing) 3511 return false; 3512 3513 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode)) 3514 return false; 3515 3516 return true; 3517 } 3518 3519 /* Fold 3520 r = __atomic_compare_exchange_N (p, &e, d, w, s, f); 3521 into 3522 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f); 3523 i = IMAGPART_EXPR <t>; 3524 r = (_Bool) i; 3525 e = REALPART_EXPR <t>; */ 3526 3527 void 3528 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi) 3529 { 3530 gimple *stmt = gsi_stmt (*gsi); 3531 tree fndecl = gimple_call_fndecl (stmt); 3532 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); 3533 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt))); 3534 tree ctype = build_complex_type (itype); 3535 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0); 3536 bool throws = false; 3537 edge e = NULL; 3538 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)), 3539 expected); 3540 gsi_insert_before (gsi, g, GSI_SAME_STMT); 3541 gimple_stmt_iterator gsiret = gsi_for_stmt (g); 3542 if (!useless_type_conversion_p (itype, TREE_TYPE (expected))) 3543 { 3544 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR, 3545 build1 (VIEW_CONVERT_EXPR, itype, 3546 gimple_assign_lhs (g))); 3547 gsi_insert_before (gsi, g, GSI_SAME_STMT); 3548 } 3549 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 
256 : 0) 3550 + int_size_in_bytes (itype); 3551 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6, 3552 gimple_call_arg (stmt, 0), 3553 gimple_assign_lhs (g), 3554 gimple_call_arg (stmt, 2), 3555 build_int_cst (integer_type_node, flag), 3556 gimple_call_arg (stmt, 4), 3557 gimple_call_arg (stmt, 5)); 3558 tree lhs = make_ssa_name (ctype); 3559 gimple_call_set_lhs (g, lhs); 3560 gimple_set_vdef (g, gimple_vdef (stmt)); 3561 gimple_set_vuse (g, gimple_vuse (stmt)); 3562 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g; 3563 tree oldlhs = gimple_call_lhs (stmt); 3564 if (stmt_can_throw_internal (stmt)) 3565 { 3566 throws = true; 3567 e = find_fallthru_edge (gsi_bb (*gsi)->succs); 3568 } 3569 gimple_call_set_nothrow (as_a <gcall *> (g), 3570 gimple_call_nothrow_p (as_a <gcall *> (stmt))); 3571 gimple_call_set_lhs (stmt, NULL_TREE); 3572 gsi_replace (gsi, g, true); 3573 if (oldlhs) 3574 { 3575 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR, 3576 build1 (IMAGPART_EXPR, itype, lhs)); 3577 if (throws) 3578 { 3579 gsi_insert_on_edge_immediate (e, g); 3580 *gsi = gsi_for_stmt (g); 3581 } 3582 else 3583 gsi_insert_after (gsi, g, GSI_NEW_STMT); 3584 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g)); 3585 gsi_insert_after (gsi, g, GSI_NEW_STMT); 3586 } 3587 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR, 3588 build1 (REALPART_EXPR, itype, lhs)); 3589 if (throws && oldlhs == NULL_TREE) 3590 { 3591 gsi_insert_on_edge_immediate (e, g); 3592 *gsi = gsi_for_stmt (g); 3593 } 3594 else 3595 gsi_insert_after (gsi, g, GSI_NEW_STMT); 3596 if (!useless_type_conversion_p (TREE_TYPE (expected), itype)) 3597 { 3598 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)), 3599 VIEW_CONVERT_EXPR, 3600 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected), 3601 gimple_assign_lhs (g))); 3602 gsi_insert_after (gsi, g, GSI_NEW_STMT); 3603 } 3604 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g)); 3605 gsi_insert_after (gsi, g, GSI_NEW_STMT); 3606 *gsi = gsiret; 3607 } 3608 3609 /* Return true if the result of ARG0 CODE ARG1, computed in infinite 3610 signed precision, doesn't fit into TYPE. The overflow test is 3611 performed regardless of -fwrapv, and even for unsigned types. */ 3612 3613 bool 3614 arith_overflowed_p (enum tree_code code, const_tree type, 3615 const_tree arg0, const_tree arg1) 3616 { 3617 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int; 3618 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> > 3619 widest2_int_cst; 3620 widest2_int warg0 = widest2_int_cst (arg0); 3621 widest2_int warg1 = widest2_int_cst (arg1); 3622 widest2_int wres; 3623 switch (code) 3624 { 3625 case PLUS_EXPR: wres = wi::add (warg0, warg1); break; 3626 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break; 3627 case MULT_EXPR: wres = wi::mul (warg0, warg1); break; 3628 default: gcc_unreachable (); 3629 } 3630 signop sign = TYPE_SIGN (type); 3631 if (sign == UNSIGNED && wi::neg_p (wres)) 3632 return true; 3633 return wi::min_precision (wres, sign) > TYPE_PRECISION (type); 3634 } 3635 3636 /* Attempt to fold a call statement referenced by the statement iterator GSI. 3637 The statement may be replaced by another statement, e.g., if the call 3638 simplifies to a constant value. Return true if any changes were made. 3639 It is assumed that the operands have been previously folded.
*/

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had an SSA name as lhs, morph that into
		     an uninitialized value.
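		     For instance (sketch): when lhs_3 = OBJ_TYPE_REF (...)
		     is replaced by __builtin_unreachable (), lhs_3 is
		     re-pointed at a GIMPLE_NOP and made the default
		     definition of a fresh temporary, i.e. an undefined
		     value, exactly as done below.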
*/ 3738 if (lhs && TREE_CODE (lhs) == SSA_NAME) 3739 { 3740 tree var = create_tmp_var (TREE_TYPE (lhs)); 3741 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var); 3742 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop (); 3743 set_ssa_default_def (cfun, var, lhs); 3744 } 3745 gimple_set_vuse (new_stmt, gimple_vuse (stmt)); 3746 gimple_set_vdef (new_stmt, gimple_vdef (stmt)); 3747 gsi_replace (gsi, new_stmt, false); 3748 return true; 3749 } 3750 } 3751 } 3752 } 3753 3754 /* Check for indirect calls that became direct calls, and then 3755 no longer require a static chain. */ 3756 if (gimple_call_chain (stmt)) 3757 { 3758 tree fn = gimple_call_fndecl (stmt); 3759 if (fn && !DECL_STATIC_CHAIN (fn)) 3760 { 3761 gimple_call_set_chain (stmt, NULL); 3762 changed = true; 3763 } 3764 else 3765 { 3766 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false); 3767 if (tmp) 3768 { 3769 gimple_call_set_chain (stmt, tmp); 3770 changed = true; 3771 } 3772 } 3773 } 3774 3775 if (inplace) 3776 return changed; 3777 3778 /* Check for builtins that CCP can handle using information not 3779 available in the generic fold routines. */ 3780 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)) 3781 { 3782 if (gimple_fold_builtin (gsi)) 3783 changed = true; 3784 } 3785 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD)) 3786 { 3787 changed |= targetm.gimple_fold_builtin (gsi); 3788 } 3789 else if (gimple_call_internal_p (stmt)) 3790 { 3791 enum tree_code subcode = ERROR_MARK; 3792 tree result = NULL_TREE; 3793 bool cplx_result = false; 3794 tree overflow = NULL_TREE; 3795 switch (gimple_call_internal_fn (stmt)) 3796 { 3797 case IFN_BUILTIN_EXPECT: 3798 result = fold_builtin_expect (gimple_location (stmt), 3799 gimple_call_arg (stmt, 0), 3800 gimple_call_arg (stmt, 1), 3801 gimple_call_arg (stmt, 2)); 3802 break; 3803 case IFN_UBSAN_OBJECT_SIZE: 3804 if (integer_all_onesp (gimple_call_arg (stmt, 2)) 3805 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST 3806 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST 3807 && tree_int_cst_le (gimple_call_arg (stmt, 1), 3808 gimple_call_arg (stmt, 2)))) 3809 { 3810 gsi_replace (gsi, gimple_build_nop (), false); 3811 unlink_stmt_vdef (stmt); 3812 release_defs (stmt); 3813 return true; 3814 } 3815 break; 3816 case IFN_GOACC_DIM_SIZE: 3817 case IFN_GOACC_DIM_POS: 3818 result = fold_internal_goacc_dim (stmt); 3819 break; 3820 case IFN_UBSAN_CHECK_ADD: 3821 subcode = PLUS_EXPR; 3822 break; 3823 case IFN_UBSAN_CHECK_SUB: 3824 subcode = MINUS_EXPR; 3825 break; 3826 case IFN_UBSAN_CHECK_MUL: 3827 subcode = MULT_EXPR; 3828 break; 3829 case IFN_ADD_OVERFLOW: 3830 subcode = PLUS_EXPR; 3831 cplx_result = true; 3832 break; 3833 case IFN_SUB_OVERFLOW: 3834 subcode = MINUS_EXPR; 3835 cplx_result = true; 3836 break; 3837 case IFN_MUL_OVERFLOW: 3838 subcode = MULT_EXPR; 3839 cplx_result = true; 3840 break; 3841 default: 3842 break; 3843 } 3844 if (subcode != ERROR_MARK) 3845 { 3846 tree arg0 = gimple_call_arg (stmt, 0); 3847 tree arg1 = gimple_call_arg (stmt, 1); 3848 tree type = TREE_TYPE (arg0); 3849 if (cplx_result) 3850 { 3851 tree lhs = gimple_call_lhs (stmt); 3852 if (lhs == NULL_TREE) 3853 type = NULL_TREE; 3854 else 3855 type = TREE_TYPE (TREE_TYPE (lhs)); 3856 } 3857 if (type == NULL_TREE) 3858 ; 3859 /* x = y + 0; x = y - 0; x = y * 0; */ 3860 else if (integer_zerop (arg1)) 3861 result = subcode == MULT_EXPR ? integer_zero_node : arg0; 3862 /* x = 0 + y; x = 0 * y; */ 3863 else if (subcode != MINUS_EXPR && integer_zerop (arg0)) 3864 result = subcode == MULT_EXPR ? 
integer_zero_node : arg1;
	  /* x = y - y;  */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y;  */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}


/* Return true if NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}

/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.
*/ 3971 if ((TREE_CODE (ops[0]) == SSA_NAME 3972 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]) 3973 && !has_use_on_stmt (ops[0], stmt)) 3974 || (ops[1] 3975 && TREE_CODE (ops[1]) == SSA_NAME 3976 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]) 3977 && !has_use_on_stmt (ops[1], stmt)) 3978 || (ops[2] 3979 && TREE_CODE (ops[2]) == SSA_NAME 3980 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2]) 3981 && !has_use_on_stmt (ops[2], stmt)) 3982 || (COMPARISON_CLASS_P (ops[0]) 3983 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME 3984 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0)) 3985 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt)) 3986 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME 3987 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1)) 3988 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt))))) 3989 return false; 3990 3991 /* Don't insert new statements when INPLACE is true, even if we could 3992 reuse STMT for the final statement. */ 3993 if (inplace && !gimple_seq_empty_p (*seq)) 3994 return false; 3995 3996 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt)) 3997 { 3998 gcc_assert (rcode.is_tree_code ()); 3999 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison 4000 /* GIMPLE_CONDs condition may not throw. */ 4001 && (!flag_exceptions 4002 || !cfun->can_throw_non_call_exceptions 4003 || !operation_could_trap_p (rcode, 4004 FLOAT_TYPE_P (TREE_TYPE (ops[0])), 4005 false, NULL_TREE))) 4006 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]); 4007 else if (rcode == SSA_NAME) 4008 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0], 4009 build_zero_cst (TREE_TYPE (ops[0]))); 4010 else if (rcode == INTEGER_CST) 4011 { 4012 if (integer_zerop (ops[0])) 4013 gimple_cond_make_false (cond_stmt); 4014 else 4015 gimple_cond_make_true (cond_stmt); 4016 } 4017 else if (!inplace) 4018 { 4019 tree res = maybe_push_res_to_seq (rcode, boolean_type_node, 4020 ops, seq); 4021 if (!res) 4022 return false; 4023 gimple_cond_set_condition (cond_stmt, NE_EXPR, res, 4024 build_zero_cst (TREE_TYPE (res))); 4025 } 4026 else 4027 return false; 4028 if (dump_file && (dump_flags & TDF_DETAILS)) 4029 { 4030 fprintf (dump_file, "gimple_simplified to "); 4031 if (!gimple_seq_empty_p (*seq)) 4032 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM); 4033 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 4034 0, TDF_SLIM); 4035 } 4036 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT); 4037 return true; 4038 } 4039 else if (is_gimple_assign (stmt) 4040 && rcode.is_tree_code ()) 4041 { 4042 if (!inplace 4043 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode)) 4044 { 4045 maybe_build_generic_op (rcode, 4046 TREE_TYPE (gimple_assign_lhs (stmt)), ops); 4047 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]); 4048 if (dump_file && (dump_flags & TDF_DETAILS)) 4049 { 4050 fprintf (dump_file, "gimple_simplified to "); 4051 if (!gimple_seq_empty_p (*seq)) 4052 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM); 4053 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 4054 0, TDF_SLIM); 4055 } 4056 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT); 4057 return true; 4058 } 4059 } 4060 else if (rcode.is_fn_code () 4061 && gimple_call_combined_fn (stmt) == rcode) 4062 { 4063 unsigned i; 4064 for (i = 0; i < gimple_call_num_args (stmt); ++i) 4065 { 4066 gcc_assert (ops[i] != NULL_TREE); 4067 gimple_call_set_arg (stmt, i, ops[i]); 4068 } 4069 if (i < 3) 4070 gcc_assert (ops[i] == NULL_TREE); 4071 if (dump_file && (dump_flags & TDF_DETAILS)) 4072 { 4073 fprintf (dump_file, 
"gimple_simplified to "); 4074 if (!gimple_seq_empty_p (*seq)) 4075 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM); 4076 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM); 4077 } 4078 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT); 4079 return true; 4080 } 4081 else if (!inplace) 4082 { 4083 if (gimple_has_lhs (stmt)) 4084 { 4085 tree lhs = gimple_get_lhs (stmt); 4086 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs), 4087 ops, seq, lhs)) 4088 return false; 4089 if (dump_file && (dump_flags & TDF_DETAILS)) 4090 { 4091 fprintf (dump_file, "gimple_simplified to "); 4092 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM); 4093 } 4094 gsi_replace_with_seq_vops (gsi, *seq); 4095 return true; 4096 } 4097 else 4098 gcc_unreachable (); 4099 } 4100 4101 return false; 4102 } 4103 4104 /* Canonicalize MEM_REFs invariant address operand after propagation. */ 4105 4106 static bool 4107 maybe_canonicalize_mem_ref_addr (tree *t) 4108 { 4109 bool res = false; 4110 4111 if (TREE_CODE (*t) == ADDR_EXPR) 4112 t = &TREE_OPERAND (*t, 0); 4113 4114 /* The C and C++ frontends use an ARRAY_REF for indexing with their 4115 generic vector extension. The actual vector referenced is 4116 view-converted to an array type for this purpose. If the index 4117 is constant the canonical representation in the middle-end is a 4118 BIT_FIELD_REF so re-write the former to the latter here. */ 4119 if (TREE_CODE (*t) == ARRAY_REF 4120 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR 4121 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST 4122 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))) 4123 { 4124 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)); 4125 if (VECTOR_TYPE_P (vtype)) 4126 { 4127 tree low = array_ref_low_bound (*t); 4128 if (TREE_CODE (low) == INTEGER_CST) 4129 { 4130 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1))) 4131 { 4132 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)), 4133 wi::to_widest (low)); 4134 idx = wi::mul (idx, wi::to_widest 4135 (TYPE_SIZE (TREE_TYPE (*t)))); 4136 widest_int ext 4137 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t)))); 4138 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype)))) 4139 { 4140 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF, 4141 TREE_TYPE (*t), 4142 TREE_OPERAND (TREE_OPERAND (*t, 0), 0), 4143 TYPE_SIZE (TREE_TYPE (*t)), 4144 wide_int_to_tree (sizetype, idx)); 4145 res = true; 4146 } 4147 } 4148 } 4149 } 4150 } 4151 4152 while (handled_component_p (*t)) 4153 t = &TREE_OPERAND (*t, 0); 4154 4155 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating 4156 of invariant addresses into a SSA name MEM_REF address. 
*/ 4157 if (TREE_CODE (*t) == MEM_REF 4158 || TREE_CODE (*t) == TARGET_MEM_REF) 4159 { 4160 tree addr = TREE_OPERAND (*t, 0); 4161 if (TREE_CODE (addr) == ADDR_EXPR 4162 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF 4163 || handled_component_p (TREE_OPERAND (addr, 0)))) 4164 { 4165 tree base; 4166 HOST_WIDE_INT coffset; 4167 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0), 4168 &coffset); 4169 if (!base) 4170 gcc_unreachable (); 4171 4172 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base); 4173 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR, 4174 TREE_OPERAND (*t, 1), 4175 size_int (coffset)); 4176 res = true; 4177 } 4178 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL 4179 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0))); 4180 } 4181 4182 /* Canonicalize back MEM_REFs to plain reference trees if the object 4183 accessed is a decl that has the same access semantics as the MEM_REF. */ 4184 if (TREE_CODE (*t) == MEM_REF 4185 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR 4186 && integer_zerop (TREE_OPERAND (*t, 1)) 4187 && MR_DEPENDENCE_CLIQUE (*t) == 0) 4188 { 4189 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0); 4190 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1)); 4191 if (/* Same volatile qualification. */ 4192 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl) 4193 /* Same TBAA behavior with -fstrict-aliasing. */ 4194 && !TYPE_REF_CAN_ALIAS_ALL (alias_type) 4195 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl)) 4196 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type))) 4197 /* Same alignment. */ 4198 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t)) 4199 /* We have to look out here to not drop a required conversion 4200 from the rhs to the lhs if *t appears on the lhs or vice-versa 4201 if it appears on the rhs. Thus require strict type 4202 compatibility. */ 4203 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl))) 4204 { 4205 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0); 4206 res = true; 4207 } 4208 } 4209 4210 /* Canonicalize TARGET_MEM_REF in particular with respect to 4211 the indexes becoming constant. */ 4212 else if (TREE_CODE (*t) == TARGET_MEM_REF) 4213 { 4214 tree tem = maybe_fold_tmr (*t); 4215 if (tem) 4216 { 4217 *t = tem; 4218 res = true; 4219 } 4220 } 4221 4222 return res; 4223 } 4224 4225 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument 4226 distinguishes both cases. */ 4227 4228 static bool 4229 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree)) 4230 { 4231 bool changed = false; 4232 gimple *stmt = gsi_stmt (*gsi); 4233 bool nowarning = gimple_no_warning_p (stmt); 4234 unsigned i; 4235 fold_defer_overflow_warnings (); 4236 4237 /* First do required canonicalization of [TARGET_]MEM_REF addresses 4238 after propagation. 4239 ??? This shouldn't be done in generic folding but in the 4240 propagation helpers which also know whether an address was 4241 propagated. 4242 Also canonicalize operand order. */ 4243 switch (gimple_code (stmt)) 4244 { 4245 case GIMPLE_ASSIGN: 4246 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS) 4247 { 4248 tree *rhs = gimple_assign_rhs1_ptr (stmt); 4249 if ((REFERENCE_CLASS_P (*rhs) 4250 || TREE_CODE (*rhs) == ADDR_EXPR) 4251 && maybe_canonicalize_mem_ref_addr (rhs)) 4252 changed = true; 4253 tree *lhs = gimple_assign_lhs_ptr (stmt); 4254 if (REFERENCE_CLASS_P (*lhs) 4255 && maybe_canonicalize_mem_ref_addr (lhs)) 4256 changed = true; 4257 } 4258 else 4259 { 4260 /* Canonicalize operand order. 
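	     E.g. (illustrative) the assignment _1 = 5 < _2 is rewritten
	     to _1 = _2 > 5, moving the constant to the second operand.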
*/ 4261 enum tree_code code = gimple_assign_rhs_code (stmt); 4262 if (TREE_CODE_CLASS (code) == tcc_comparison 4263 || commutative_tree_code (code) 4264 || commutative_ternary_tree_code (code)) 4265 { 4266 tree rhs1 = gimple_assign_rhs1 (stmt); 4267 tree rhs2 = gimple_assign_rhs2 (stmt); 4268 if (tree_swap_operands_p (rhs1, rhs2)) 4269 { 4270 gimple_assign_set_rhs1 (stmt, rhs2); 4271 gimple_assign_set_rhs2 (stmt, rhs1); 4272 if (TREE_CODE_CLASS (code) == tcc_comparison) 4273 gimple_assign_set_rhs_code (stmt, 4274 swap_tree_comparison (code)); 4275 changed = true; 4276 } 4277 } 4278 } 4279 break; 4280 case GIMPLE_CALL: 4281 { 4282 for (i = 0; i < gimple_call_num_args (stmt); ++i) 4283 { 4284 tree *arg = gimple_call_arg_ptr (stmt, i); 4285 if (REFERENCE_CLASS_P (*arg) 4286 && maybe_canonicalize_mem_ref_addr (arg)) 4287 changed = true; 4288 } 4289 tree *lhs = gimple_call_lhs_ptr (stmt); 4290 if (*lhs 4291 && REFERENCE_CLASS_P (*lhs) 4292 && maybe_canonicalize_mem_ref_addr (lhs)) 4293 changed = true; 4294 break; 4295 } 4296 case GIMPLE_ASM: 4297 { 4298 gasm *asm_stmt = as_a <gasm *> (stmt); 4299 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i) 4300 { 4301 tree link = gimple_asm_output_op (asm_stmt, i); 4302 tree op = TREE_VALUE (link); 4303 if (REFERENCE_CLASS_P (op) 4304 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link))) 4305 changed = true; 4306 } 4307 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i) 4308 { 4309 tree link = gimple_asm_input_op (asm_stmt, i); 4310 tree op = TREE_VALUE (link); 4311 if ((REFERENCE_CLASS_P (op) 4312 || TREE_CODE (op) == ADDR_EXPR) 4313 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link))) 4314 changed = true; 4315 } 4316 } 4317 break; 4318 case GIMPLE_DEBUG: 4319 if (gimple_debug_bind_p (stmt)) 4320 { 4321 tree *val = gimple_debug_bind_get_value_ptr (stmt); 4322 if (*val 4323 && (REFERENCE_CLASS_P (*val) 4324 || TREE_CODE (*val) == ADDR_EXPR) 4325 && maybe_canonicalize_mem_ref_addr (val)) 4326 changed = true; 4327 } 4328 break; 4329 case GIMPLE_COND: 4330 { 4331 /* Canonicalize operand order. */ 4332 tree lhs = gimple_cond_lhs (stmt); 4333 tree rhs = gimple_cond_rhs (stmt); 4334 if (tree_swap_operands_p (lhs, rhs)) 4335 { 4336 gcond *gc = as_a <gcond *> (stmt); 4337 gimple_cond_set_lhs (gc, rhs); 4338 gimple_cond_set_rhs (gc, lhs); 4339 gimple_cond_set_code (gc, 4340 swap_tree_comparison (gimple_cond_code (gc))); 4341 changed = true; 4342 } 4343 } 4344 default:; 4345 } 4346 4347 /* Dispatch to pattern-based folding. */ 4348 if (!inplace 4349 || is_gimple_assign (stmt) 4350 || gimple_code (stmt) == GIMPLE_COND) 4351 { 4352 gimple_seq seq = NULL; 4353 code_helper rcode; 4354 tree ops[3] = {}; 4355 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, 4356 valueize, valueize)) 4357 { 4358 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace)) 4359 changed = true; 4360 else 4361 gimple_seq_discard (seq); 4362 } 4363 } 4364 4365 stmt = gsi_stmt (*gsi); 4366 4367 /* Fold the main computation performed by the statement. */ 4368 switch (gimple_code (stmt)) 4369 { 4370 case GIMPLE_ASSIGN: 4371 { 4372 /* Try to canonicalize for boolean-typed X the comparisons 4373 X == 0, X == 1, X != 0, and X != 1. 
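	   The mapping implemented below is (sketch):
	     X == 1, X != 0  ->  X
	     X == 0, X != 1  ->  ~X     if X has 1-bit precision
	     X == 0, X != 1  ->  X ^ 1  otherwise.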
*/
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type.
	       Check that the second operand is an integer constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X;
		   X == 1 and X != 0 is X.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X is the transformation
		   !X -> ~X valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.
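	 E.g. (hypothetical) an output operand "=m" (*&x) is folded to
	 "=m" (x); input operands are folded this way only when the
	 constraint allows memory but not a register.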
*/
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a <greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}

/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
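   A typical (illustrative) use after propagating into a statement:

     gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
     if (fold_stmt (&gsi))
       update_stmt (gsi_stmt (gsi));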
4572 The statement pointed to by GSI should be in valid gimple form but may 4573 be in unfolded state as resulting from for example constant propagation 4574 which can produce *&x = 0. */ 4575 4576 bool 4577 fold_stmt (gimple_stmt_iterator *gsi) 4578 { 4579 return fold_stmt_1 (gsi, false, no_follow_ssa_edges); 4580 } 4581 4582 bool 4583 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree)) 4584 { 4585 return fold_stmt_1 (gsi, false, valueize); 4586 } 4587 4588 /* Perform the minimal folding on statement *GSI. Only operations like 4589 *&x created by constant propagation are handled. The statement cannot 4590 be replaced with a new one. Return true if the statement was 4591 changed, false otherwise. 4592 The statement *GSI should be in valid gimple form but may 4593 be in unfolded state as resulting from for example constant propagation 4594 which can produce *&x = 0. */ 4595 4596 bool 4597 fold_stmt_inplace (gimple_stmt_iterator *gsi) 4598 { 4599 gimple *stmt = gsi_stmt (*gsi); 4600 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges); 4601 gcc_assert (gsi_stmt (*gsi) == stmt); 4602 return changed; 4603 } 4604 4605 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE 4606 if EXPR is null or we don't know how. 4607 If non-null, the result always has boolean type. */ 4608 4609 static tree 4610 canonicalize_bool (tree expr, bool invert) 4611 { 4612 if (!expr) 4613 return NULL_TREE; 4614 else if (invert) 4615 { 4616 if (integer_nonzerop (expr)) 4617 return boolean_false_node; 4618 else if (integer_zerop (expr)) 4619 return boolean_true_node; 4620 else if (TREE_CODE (expr) == SSA_NAME) 4621 return fold_build2 (EQ_EXPR, boolean_type_node, expr, 4622 build_int_cst (TREE_TYPE (expr), 0)); 4623 else if (COMPARISON_CLASS_P (expr)) 4624 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false), 4625 boolean_type_node, 4626 TREE_OPERAND (expr, 0), 4627 TREE_OPERAND (expr, 1)); 4628 else 4629 return NULL_TREE; 4630 } 4631 else 4632 { 4633 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE) 4634 return expr; 4635 if (integer_nonzerop (expr)) 4636 return boolean_true_node; 4637 else if (integer_zerop (expr)) 4638 return boolean_false_node; 4639 else if (TREE_CODE (expr) == SSA_NAME) 4640 return fold_build2 (NE_EXPR, boolean_type_node, expr, 4641 build_int_cst (TREE_TYPE (expr), 0)); 4642 else if (COMPARISON_CLASS_P (expr)) 4643 return fold_build2 (TREE_CODE (expr), 4644 boolean_type_node, 4645 TREE_OPERAND (expr, 0), 4646 TREE_OPERAND (expr, 1)); 4647 else 4648 return NULL_TREE; 4649 } 4650 } 4651 4652 /* Check to see if a boolean expression EXPR is logically equivalent to the 4653 comparison (OP1 CODE OP2). Check for various identities involving 4654 SSA_NAMEs. */ 4655 4656 static bool 4657 same_bool_comparison_p (const_tree expr, enum tree_code code, 4658 const_tree op1, const_tree op2) 4659 { 4660 gimple *s; 4661 4662 /* The obvious case. */ 4663 if (TREE_CODE (expr) == code 4664 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0) 4665 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0)) 4666 return true; 4667 4668 /* Check for comparing (name, name != 0) and the case where expr 4669 is an SSA_NAME with a definition matching the comparison. 
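   E.g. (illustrative): for b_1 = x_2 < y_3, the call
   same_bool_comparison_p (b_1, LT_EXPR, x_2, y_3) returns true.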
*/
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.
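   E.g. (sketch): for v_1 = a_2 < b_3 and INVERT false, the AND of v_1
   with a_2 >= b_3 simplifies to false.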
*/

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
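	 E.g. (illustrative): for var = (a_1 < 3) AND b_2, ANDing var
	 with a_1 > 5 folds the first partial result
	 (a_1 < 3) AND (a_1 > 5) to false and hence the whole expression
	 to false.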
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}

/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.
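	 E.g. (illustrative): (x < y) AND (x <= y) combines to x < y,
	 while (x < y) AND (x > y) combines to constant false.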
*/
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.
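	 E.g. (illustrative): x < 2 AND x <= 5 yields the more
	 restrictive x < 2.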
*/
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (!
dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}

/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}

/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var == 0)) => true.
*/ 5264 if (TREE_CODE (op2a) == SSA_NAME 5265 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE) 5266 { 5267 if ((code2 == NE_EXPR && integer_zerop (op2b)) 5268 || (code2 == EQ_EXPR && integer_nonzerop (op2b))) 5269 { 5270 true_test_var = op2a; 5271 if (var == true_test_var) 5272 return var; 5273 } 5274 else if ((code2 == EQ_EXPR && integer_zerop (op2b)) 5275 || (code2 == NE_EXPR && integer_nonzerop (op2b))) 5276 { 5277 false_test_var = op2a; 5278 if (var == false_test_var) 5279 return boolean_true_node; 5280 } 5281 } 5282 5283 /* If the definition is a comparison, recurse on it. */ 5284 if (TREE_CODE_CLASS (innercode) == tcc_comparison) 5285 { 5286 tree t = or_comparisons_1 (innercode, 5287 gimple_assign_rhs1 (stmt), 5288 gimple_assign_rhs2 (stmt), 5289 code2, 5290 op2a, 5291 op2b); 5292 if (t) 5293 return t; 5294 } 5295 5296 /* If the definition is an AND or OR expression, we may be able to 5297 simplify by reassociating. */ 5298 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE 5299 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)) 5300 { 5301 tree inner1 = gimple_assign_rhs1 (stmt); 5302 tree inner2 = gimple_assign_rhs2 (stmt); 5303 gimple *s; 5304 tree t; 5305 tree partial = NULL_TREE; 5306 bool is_or = (innercode == BIT_IOR_EXPR); 5307 5308 /* Check for boolean identities that don't require recursive examination 5309 of inner1/inner2: 5310 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var 5311 inner1 OR (inner1 AND inner2) => inner1 5312 !inner1 OR (inner1 OR inner2) => true 5313 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 5314 */ 5315 if (inner1 == true_test_var) 5316 return (is_or ? var : inner1); 5317 else if (inner2 == true_test_var) 5318 return (is_or ? var : inner2); 5319 else if (inner1 == false_test_var) 5320 return (is_or 5321 ? boolean_true_node 5322 : or_var_with_comparison (inner2, false, code2, op2a, op2b)); 5323 else if (inner2 == false_test_var) 5324 return (is_or 5325 ? boolean_true_node 5326 : or_var_with_comparison (inner1, false, code2, op2a, op2b)); 5327 5328 /* Next, redistribute/reassociate the OR across the inner tests. 5329 Compute the first partial result, (inner1 OR (op2a code op2b)) */ 5330 if (TREE_CODE (inner1) == SSA_NAME 5331 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1)) 5332 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison 5333 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s), 5334 gimple_assign_rhs1 (s), 5335 gimple_assign_rhs2 (s), 5336 code2, op2a, op2b))) 5337 { 5338 /* Handle the OR case, where we are reassociating: 5339 (inner1 OR inner2) OR (op2a code2 op2b) 5340 => (t OR inner2) 5341 If the partial result t is a constant, we win. Otherwise 5342 continue on to try reassociating with the other inner test. */ 5343 if (is_or) 5344 { 5345 if (integer_onep (t)) 5346 return boolean_true_node; 5347 else if (integer_zerop (t)) 5348 return inner2; 5349 } 5350 5351 /* Handle the AND case, where we are redistributing: 5352 (inner1 AND inner2) OR (op2a code2 op2b) 5353 => (t AND (inner2 OR (op2a code op2b))) */ 5354 else if (integer_zerop (t)) 5355 return boolean_false_node; 5356 5357 /* Save partial result for later. 
*/ 5358 partial = t; 5359 } 5360 5361 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */ 5362 if (TREE_CODE (inner2) == SSA_NAME 5363 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2)) 5364 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison 5365 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s), 5366 gimple_assign_rhs1 (s), 5367 gimple_assign_rhs2 (s), 5368 code2, op2a, op2b))) 5369 { 5370 /* Handle the OR case, where we are reassociating: 5371 (inner1 OR inner2) OR (op2a code2 op2b) 5372 => (inner1 OR t) 5373 => (t OR partial) */ 5374 if (is_or) 5375 { 5376 if (integer_zerop (t)) 5377 return inner1; 5378 else if (integer_onep (t)) 5379 return boolean_true_node; 5380 /* If both are the same, we can apply the identity 5381 (x OR x) == x. */ 5382 else if (partial && same_bool_result_p (t, partial)) 5383 return t; 5384 } 5385 5386 /* Handle the AND case, where we are redistributing: 5387 (inner1 AND inner2) OR (op2a code2 op2b) 5388 => (t AND (inner1 OR (op2a code2 op2b))) 5389 => (t AND partial) */ 5390 else 5391 { 5392 if (integer_zerop (t)) 5393 return boolean_false_node; 5394 else if (partial) 5395 { 5396 /* We already got a simplification for the other 5397 operand to the redistributed AND expression. The 5398 interesting case is when at least one is true. 5399 Or, if both are the same, we can apply the identity 5400 (x AND x) == x. */ 5401 if (integer_onep (partial)) 5402 return t; 5403 else if (integer_onep (t)) 5404 return partial; 5405 else if (same_bool_result_p (t, partial)) 5406 return t; 5407 } 5408 } 5409 } 5410 } 5411 return NULL_TREE; 5412 } 5413 5414 /* Try to simplify the OR of two comparisons defined by 5415 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively. 5416 If this can be done without constructing an intermediate value, 5417 return the resulting tree; otherwise NULL_TREE is returned. 5418 This function is deliberately asymmetric as it recurses on SSA_DEFs 5419 in the first comparison but not the second. */ 5420 5421 static tree 5422 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, 5423 enum tree_code code2, tree op2a, tree op2b) 5424 { 5425 tree truth_type = truth_type_for (TREE_TYPE (op1a)); 5426 5427 /* First check for ((x CODE1 y) OR (x CODE2 y)). */ 5428 if (operand_equal_p (op1a, op2a, 0) 5429 && operand_equal_p (op1b, op2b, 0)) 5430 { 5431 /* Result will be either NULL_TREE, or a combined comparison. */ 5432 tree t = combine_comparisons (UNKNOWN_LOCATION, 5433 TRUTH_ORIF_EXPR, code1, code2, 5434 truth_type, op1a, op1b); 5435 if (t) 5436 return t; 5437 } 5438 5439 /* Likewise the swapped case of the above. */ 5440 if (operand_equal_p (op1a, op2b, 0) 5441 && operand_equal_p (op1b, op2a, 0)) 5442 { 5443 /* Result will be either NULL_TREE, or a combined comparison. */ 5444 tree t = combine_comparisons (UNKNOWN_LOCATION, 5445 TRUTH_ORIF_EXPR, code1, 5446 swap_tree_comparison (code2), 5447 truth_type, op1a, op1b); 5448 if (t) 5449 return t; 5450 } 5451 5452 /* If both comparisons are of the same value against constants, we might 5453 be able to merge them. */ 5454 if (operand_equal_p (op1a, op2a, 0) 5455 && TREE_CODE (op1b) == INTEGER_CST 5456 && TREE_CODE (op2b) == INTEGER_CST) 5457 { 5458 int cmp = tree_int_cst_compare (op1b, op2b); 5459 5460 /* If we have (op1a != op1b), we should either be able to 5461 return that or TRUE, depending on whether the constant op1b 5462 also satisfies the other comparison against op2b. 
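	 E.g. (illustrative): (x != 3) OR (x < 7) is always true since
	 3 < 7, whereas (x != 3) OR (x > 7) simplifies to x != 3.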
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
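
/* Illustrative sketch (hypothetical caller, not part of GCC): assuming an
   SSA name X of integer type, the constant-merging logic above folds e.g.

     tree c5 = build_int_cst (integer_type_node, 5);
     tree c7 = build_int_cst (integer_type_node, 7);
     tree t = or_comparisons_1 (LT_EXPR, x, c5, LT_EXPR, x, c7);
     // cmp = tree_int_cst_compare (c5, c7) < 0, so the less restrictive
     // bound wins and T is the tree for "x < 7".

   With a LT_EXPR/GE_EXPR pair and cmp >= 0 the two ranges cover
   everything, so boolean_true_node is returned instead.  */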
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}


/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Returns either NULL_TREE, a simplified but non-constant value, or
   a constant.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
                                tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
        res = ops[0];
      else if (mprts_hook)
        res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
        {
          if (dump_file && dump_flags & TDF_DETAILS)
            {
              fprintf (dump_file, "Match-and-simplified ");
              print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, res, 0);
              fprintf (dump_file, "\n");
            }
          return res;
        }
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant
                     value, if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            /* Translate &x + CST into an invariant form suitable for
               further propagation.  */
            if (subcode == POINTER_PLUS_EXPR)
              {
                tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
                tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                if (TREE_CODE (op0) == ADDR_EXPR
                    && TREE_CODE (op1) == INTEGER_CST)
                  {
                    tree off = fold_convert (ptr_type_node, op1);
                    return build_fold_addr_expr_loc
                        (loc,
                         fold_build2 (MEM_REF,
                                      TREE_TYPE (TREE_TYPE (op0)),
                                      unshare_expr (op0), off));
                  }
              }
            /* Canonicalize bool != 0 and bool == 0 appearing after
               valueization.  While gimple_simplify handles this
               it can get confused by the ~X == 1 -> X == 0 transform
               which we can't reduce to an SSA name or a constant
               (and we have no way to tell gimple_simplify to not
               consider those transforms in the first place).  */
            else if (subcode == EQ_EXPR
                     || subcode == NE_EXPR)
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree op0 = gimple_assign_rhs1 (stmt);
                if (useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (op0)))
                  {
                    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                    op0 = (*valueize) (op0);
                    if (TREE_CODE (op0) == INTEGER_CST)
                      std::swap (op0, op1);
                    if (TREE_CODE (op1) == INTEGER_CST
                        && ((subcode == NE_EXPR && integer_zerop (op1))
                            || (subcode == EQ_EXPR && integer_onep (op1))))
                      return op0;
                  }
              }
            return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;
        gcall *call_stmt = as_a <gcall *> (stmt);

        if (gimple_call_internal_p (stmt))
          {
            enum tree_code subcode = ERROR_MARK;
            switch (gimple_call_internal_fn (stmt))
              {
              case IFN_UBSAN_CHECK_ADD:
                subcode = PLUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_SUB:
                subcode = MINUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_MUL:
                subcode = MULT_EXPR;
                break;
              case IFN_BUILTIN_EXPECT:
                {
                  tree arg0 = gimple_call_arg (stmt, 0);
                  tree op0 = (*valueize) (arg0);
                  if (TREE_CODE (op0) == INTEGER_CST)
                    return op0;
                  return NULL_TREE;
                }
              default:
                return NULL_TREE;
              }
            tree arg0 = gimple_call_arg (stmt, 0);
            tree arg1 = gimple_call_arg (stmt, 1);
            tree op0 = (*valueize) (arg0);
            tree op1 = (*valueize) (arg1);

            if (TREE_CODE (op0) != INTEGER_CST
                || TREE_CODE (op1) != INTEGER_CST)
              {
                switch (subcode)
                  {
                  case MULT_EXPR:
                    /* x * 0 = 0 * x = 0 without overflow.  */
                    if (integer_zerop (op0) || integer_zerop (op1))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  case MINUS_EXPR:
                    /* y - y = 0 without overflow.  */
                    if (operand_equal_p (op0, op1, 0))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  default:
                    break;
                  }
              }
            tree res
              = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
            if (res
                && TREE_CODE (res) == INTEGER_CST
                && !TREE_OVERFLOW (res))
              return res;
            return NULL_TREE;
          }

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
            && gimple_builtin_call_types_compatible_p (stmt,
                                                       TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            retval = fold_builtin_call_array (loc,
                                              gimple_call_return_type
                                                (call_stmt),
                                              fn, gimple_call_num_args (stmt),
                                              args);
            if (retval)
              {
                /* fold_call_expr wraps the result inside a NOP_EXPR.  */
                STRIP_NOPS (retval);
                retval = fold_convert (gimple_call_return_type (call_stmt),
                                       retval);
              }
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}

/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}


/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with a constant offset (such as when
   BASE is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
                      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
        {
          if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
            return NULL_TREE;
          *bit_offset += (mem_ref_offset (base).to_short_addr ()
                          * BITS_PER_UNIT);
        }

      if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
           && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
        tree init = ctor_for_folding (base);

        /* Our semantics are the exact opposite of ctor_for_folding:
           NULL means unknown, while error_mark_node means 0.  */
        if (init == error_mark_node)
          return NULL_TREE;
        if (!init)
          return error_mark_node;
        return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
                                   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
                                      &reverse);
      if (max_size == -1 || size != max_size)
        return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
        return base;

      return NULL_TREE;
    }
}
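
/* Illustrative sketch (hypothetical variables, not part of GCC): for

     static const int a[2] = { 3, 4 };

   a read from A can be folded through its initializer:

     HOST_WIDE_INT bit_offset = 0;
     tree ctor = get_base_constructor (a_decl, &bit_offset, NULL);
     // CTOR is the CONSTRUCTOR { 3, 4 }.  For "static const int b;"
     // error_mark_node would be returned instead, signalling a
     // known-zero initializer, and NULL_TREE means the ctor is
     // unknown.  */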
/* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE
   and size SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
                           unsigned HOST_WIDE_INT offset,
                           unsigned HOST_WIDE_INT size,
                           tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
        return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than the size of an array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
                                 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span the whole access.  We do
     not care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
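
/* Worked example for the index computation above (assuming 32-bit int
   elements, i.e. elt_size == 4, and BITS_PER_UNIT == 8): an access at
   bit OFFSET 96 with SIZE 32 gives

     access_index = 96 / 8 / 4 = 3
     inner_offset = 96 % (4 * 8) = 0

   so element 3 (plus the domain's low bound) is looked up and the
   remaining 32 bits are folded recursively via fold_ctor_reference.  */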
/* CTOR is a CONSTRUCTOR of an aggregate or vector.  Fold a reference of
   type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
                              unsigned HOST_WIDE_INT offset,
                              unsigned HOST_WIDE_INT size,
                              tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
                            cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variably sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
                   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
        bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
          && (field_size == NULL_TREE
              || wi::lts_p (offset, bitoffset_end)))
        {
          offset_int inner_offset = offset_int (offset) - bitoffset;
          /* We do have overlap.  Now see if the field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (wi::cmps (access_end, bitoffset_end) > 0)
            return NULL_TREE;
          if (offset < bitoffset)
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      inner_offset.to_uhwi (), size,
                                      from_decl);
        }
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}

/* CTOR is a value initializing memory.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with an exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
        {
          ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
          if (ret)
            STRIP_USELESS_TYPE_CONVERSION (ret);
        }
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
                                    offset / BITS_PER_UNIT);
      if (len > 0)
        return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}
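
/* Hedged sketch of the native_encode path above (hypothetical caller,
   not part of GCC): a byte-aligned read of a REAL_CST reinterpreted as
   a 32-bit integer, assuming 32-bit int and BITS_PER_UNIT == 8:

     tree fval = build_real (float_type_node, dconst1);
     tree ival = fold_ctor_reference (integer_type_node, fval, 0, 32,
                                      NULL_TREE);
     // native_encode_expr serializes the bytes of 1.0f into BUF and
     // native_interpret_expr reads them back as an INTEGER_CST.  */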
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  tree_fits_uhwi_p (unit_size)))
            {
              offset_int woffset
                = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
                            TYPE_PRECISION (TREE_TYPE (idx)));

              woffset *= tree_to_uhwi (unit_size);
              woffset *= BITS_PER_UNIT;
              if (wi::fits_shwi_p (woffset))
                {
                  offset = woffset.to_shwi ();
                  base = TREE_OPERAND (t, 0);
                  ctor = get_base_constructor (base, &offset, valueize);
                  /* Empty constructor.  Always fold to 0.  */
                  if (ctor == error_mark_node)
                    return build_zero_cst (TREE_TYPE (t));
                  /* Out-of-bounds array access.  Value is undefined,
                     but don't fold.  */
                  if (offset < 0)
                    return NULL_TREE;
                  /* We cannot determine the ctor.  */
                  if (!ctor)
                    return NULL_TREE;
                  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                              tree_to_uhwi (unit_size)
                                              * BITS_PER_UNIT,
                                              base);
                }
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know the precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We cannot determine the ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out-of-bounds array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
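
/* Hedged usage sketch (hypothetical tree T, not part of GCC): given
   "static const int a[2] = { 3, 4 };" and T an ARRAY_REF a[1] with a
   constant index,

     tree val = fold_const_aggregate_ref (t);
     // VAL is the INTEGER_CST 4; NULL_TREE results if the base
     // constructor or a constant offset cannot be determined.  */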
/* Lookup a virtual method with index TOKEN in a virtual table V
   at OFFSET.  If CAN_REFER is non-NULL, set it to false when the
   method is not referable or when the virtual table is ill-formed
   (such as one rewritten by a non-C++-produced symbol); in that case
   NULL is returned.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
                                   tree v,
                                   unsigned HOST_WIDE_INT offset,
                                   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double-check that we have a virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
                               offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by the C++ FE, so we can directly index
     the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual
     method in a virtual table that does not contain TOKEN entries.  We may
     overrun the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public,
         we cannot devirtualize.  This can happen in WHOPR when the actual
         method ends up in another partition, because we found the
         devirtualization possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
        {
          if (can_refer)
            {
              *can_refer = false;
              return fn;
            }
          return NULL_TREE;
        }
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}

/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   If CAN_REFER is non-NULL, set it to false when the method is not
   referable or when the virtual table is ill-formed (such as one
   rewritten by a non-C++-produced symbol); in that case NULL is
   returned.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
                                  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual method table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}

/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}

/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
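
/* For example (a sketch of the transform performed below): for a
   statement "c_1 = a_2 + b_3" with signed int operands, the rewritten
   sequence is roughly

     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;                // unsigned arithmetic wraps
     c_1 = (int) _6;              // convert back to the original type  */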
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operands, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
               "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}


/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}

/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
          || code == IMAGPART_EXPR
          || code == VIEW_CONVERT_EXPR)
        stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
        stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq,
                              gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
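
/* Hedged usage sketch (hypothetical trees A, B, TYPE and location LOC,
   not part of GCC): building "(a + b) * a" with on-the-fly folding:

     gimple_seq seq = NULL;
     tree t = gimple_build (&seq, loc, PLUS_EXPR, type, a, b);
     t = gimple_build (&seq, loc, MULT_EXPR, type, t, a);
     // gimple_simplify may fold each step; any statements actually
     // needed accumulate in SEQ and can later be inserted, e.g. with
     // gsi_insert_seq_before.  */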
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
                              seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
        stmt = gimple_build_assign (res, code,
                                    build3 (code, type, op0, op1, op2));
      else
        stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq,
                              gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type,
              tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
                              seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
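
/* Hedged usage sketch (hypothetical ARG and LOC, not part of GCC): the
   built-in overloads work the same way, e.g.

     gimple_seq seq = NULL;
     tree r = gimple_build (&seq, loc, BUILT_IN_SQRT,
                            double_type_node, arg);
     // If ARG is a constant, gimple_simplify may fold the call away;
     // otherwise a call to the implicit sqrt decl is emitted to SEQ.  */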
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}

/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}

/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
                                             gimple_expr_type (stmt),
                                             gimple_assign_rhs1 (stmt),
                                             strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
                                              gimple_expr_type (stmt),
                                              gimple_assign_rhs1 (stmt),
                                              gimple_assign_rhs2 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}

/* Return true if the return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;

  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
                                        gimple_call_combined_fn (stmt),
                                        arg0,
                                        arg1,
                                        strict_overflow_p, depth);
}
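
/* Hedged usage sketch for the dispatcher below (hypothetical STMT, not
   part of GCC):

     bool strict_overflow = false;
     if (gimple_stmt_nonnegative_warnv_p (stmt, &strict_overflow, 0))
       {
         // The value is known non-negative; if STRICT_OVERFLOW was set,
         // that conclusion relies on signed overflow being undefined.
       }  */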
/* Return true if the result of PHI STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
        return false;
    }
  return true;
}

/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
                                                depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
                                              depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
                                             depth);
    default:
      return false;
    }
}

/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
                                          gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
                                           gimple_assign_rhs1 (stmt),
                                           gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}

/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
               ? gimple_call_arg (stmt, 0)
               : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
               ? gimple_call_arg (stmt, 1)
               : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
                                     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
        return false;
    }
  return true;
}

/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}
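
/* Hedged usage sketch (hypothetical STMT, not part of GCC): a caller
   wanting to drop a redundant trunc () of a value could ask

     if (gimple_stmt_integer_valued_real_p (stmt, 0))
       ;  // the FP value is a whole number (or +-Inf / quiet NaN),
          // so applying trunc to it would be an identity.  */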