/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2017 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "ipa-chkp.h"
#include "tree-cfg.h"
#include "fold-const-call.h"

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the variable whose constructor
   DECL was taken from.
   We can get declarations that cannot be referenced for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this point we have already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they have not been
     optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable
     may refer to a symbol keyed to another compilation unit.  The
     other compilation unit may be in a separate DSO and the symbol
     may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When the function is public, we can always introduce a new
     reference.  The exception is COMDAT functions, where introducing
     a direct reference implies the need to include the function body
     in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a
     definition or if the definition is going to be output in another
     partition.  Bypass this when gimplifying; all needed functions
     will be produced.

     As observed in PR20991, for already optimized out COMDAT virtual
     functions it may be tempting to not give up, because the copy will
     be output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in the units where they are used, and when the other unit
     was compiled with LTO it is possible that the vtable was kept
     public while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current
   function is in SSA form, an SSA name is created.  Otherwise a
   temporary register is made.  */

static tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable to is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */
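
/* For example, an initializer entry of the form &var + 4, i.e. a
   POINTER_PLUS_EXPR of an invariant address and a constant offset,
   is rewritten below to &MEM[&var + 4], a form that satisfies
   is_gimple_min_invariant.  */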

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (VAR_P (base))
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}



/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
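
/* For instance, REALPART_EXPR of a COMPLEX_CST folds to the constant
   real component, and a load such as cst_array[1] from a constant
   initializer is folded through fold_const_aggregate_ref.  */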

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *> targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        location_t loc = gimple_location_safe (stmt);
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

            if (result)
              {
                /* Strip away useless type conversions.  Both the
                   NON_LVALUE_EXPR that may have been added by fold, and
                   "useless" type conversions that might now be apparent
                   due to propagation.  */
                STRIP_USELESS_TYPE_CONVERSION (result);

                if (result != rhs && valid_gimple_rhs_p (result))
                  return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has an lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the
         exact SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the
   replaced statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  if (gimple_vuse (stmt))
    gimple_set_vuse (repl, gimple_vuse (stmt));
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}

/* Fold a function call to builtin mem{{,p}cpy,move}.
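   DEST and SRC are the call's destination and source arguments and
   LEN (the third call argument) is the number of bytes to copy.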
   Return false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST; additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds to be lost (if we
         copy pointers as a wide integer) and may also result in huge
         function size because of the inlined bounds copy.  Thus don't
         inline for functions we want to instrument.  */
      if (flag_check_pointer_bounds
          && chkp_instrumentable_p (cfun->decl)
          /* Even if data may contain pointers we can inline if we copy
             less than a pointer size.  */
          && (!tree_fits_uhwi_p (len)
              || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
        return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores, inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ??? Don't transform copies from strings with known length;
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (pow2p_hwi (ilen))
            {
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && TYPE_MODE (type) != BLKmode
                  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
                      == ilen * 8)
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
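                  /* That is the case when unaligned accesses are not slow
                     for the mode or when movmisalign_optab has a handler
                     for it.  */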
                  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
                      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
                      || (optab_handler (movmisalign_optab, TYPE_MODE (type))
                          != CODE_FOR_nothing)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                           && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                     src_align)
                           && (optab_handler (movmisalign_optab,
                                              TYPE_MODE (type))
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          srcmem
                            = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
                                                          new_stmt);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
              HOST_WIDE_INT maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (tree_fits_uhwi_p (len))
                maxsize = tree_to_uhwi (len);
              else
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_overlap_p (src_offset, maxsize,
                                           dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
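                  /* If the two MEM_REF base pointers differ we cannot
                     reason about overlap, so punt.  */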
                  if (!operand_equal_p (TREE_OPERAND (src_base, 0),
                                        TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  offset_int off = mem_ref_offset (src_base) + src_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  src_offset = off.to_shwi ();

                  off = mem_ref_offset (dest_base) + dest_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  dest_offset = off.to_shwi ();
                  if (ranges_overlap_p (src_offset, maxsize,
                                        dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias, optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      /* FIXME:
         This logic loses for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
         Perhaps we ought to inherit the type from the non-VOID argument
         here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         will no longer have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
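      /* Stripping the conversion recovers P's original pointed-to type,
         which is more useful for the type selection below than 'void'.  */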
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (src, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (src, 0))
            src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
        }
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (dest, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (dest, 0))
            dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
        }
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          srctype = TREE_TYPE (srctype);
          STRIP_NOPS (src);
          src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
        }
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        {
          desttype = TREE_TYPE (desttype);
          STRIP_NOPS (dest);
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
        }
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (destvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
        destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (srcvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  srcvar, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
            }
          else
            srcvar = NULL_TREE;
        }
      else
        srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          STRIP_NOPS (src);
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          STRIP_NOPS (dest);
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
                                                   new_stmt);
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
        }
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
                        ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}

/* Fold a function call to builtin memset or bzero at *GSI, setting the
   memory of size LEN to C.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }
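
  /* We need a constant length and a constant byte value to turn the
     call into a single store below.  */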
  if (!tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}


/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
                  bool fuzzy, bool *flexp)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
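      /* Look through the zero-index ARRAY_REF and the dereference down
         to the pointer SSA name and recurse on that.  */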
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_range_strlen (TREE_OPERAND (aop0, 0),
                                     length, visited, type, fuzzy, flexp);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);

      if (!val && fuzzy)
        {
          if (TREE_CODE (arg) == ADDR_EXPR)
            return get_range_strlen (TREE_OPERAND (arg, 0), length,
                                     visited, type, fuzzy, flexp);

          if (TREE_CODE (arg) == COMPONENT_REF
              && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
            {
              /* Use the type of the member array to determine the upper
                 bound on the length of the array.  This may be overly
                 optimistic if the array itself isn't NUL-terminated and
                 the caller relies on the subsequent member to contain
                 the NUL.
                 Set *FLEXP to true if the array whose bound is being
                 used is at the end of a struct.  */
              if (array_at_struct_end_p (arg))
                *flexp = true;

              arg = TREE_OPERAND (arg, 1);
              val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
              if (!val || integer_zerop (val))
                return false;
              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 integer_one_node);
              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              *minlen = ssize_int (0);
            }
        }

      if (!val)
        return false;

      if (minlen
          && (!*minlen
              || (type > 0
                  && TREE_CODE (*minlen) == INTEGER_CST
                  && TREE_CODE (val) == INTEGER_CST
                  && tree_int_cst_lt (val, *minlen))))
        *minlen = val;

      if (*maxlen)
        {
          if (type > 0)
            {
              if (TREE_CODE (*maxlen) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*maxlen, val))
                *maxlen = val;
              return true;
            }
          else if (simple_cst_equal (val, *maxlen) != 1)
            return false;
        }

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
          tree op2 = gimple_assign_rhs2 (def_stmt);
          tree op3 = gimple_assign_rhs3 (def_stmt);
          return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
                 && get_range_strlen (op3, length, visited, type, fuzzy,
                                      flexp);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
              {
                if (fuzzy)
                  *maxlen = build_all_ones_cst (size_type_node);
                else
                  return false;
              }
          }
      }
      return true;

    default:
      return false;
    }
}

/* Determine the minimum and maximum value or string length that ARG
   refers to and store each in the first two elements of MINMAXLEN.
   For expressions that point to strings of unknown lengths that are
   character arrays, use the upper bound of the array as the maximum
   length.  For example, given an expression like 'x ? array : "xyz"'
   and array declared as 'char array[8]', MINMAXLEN[0] will be set
   to 3 and MINMAXLEN[1] to 7, the longest string that could be
   stored in array.
   Return true if the range of the string lengths has been obtained
   from the upper bound of an array at the end of a struct.  Such
   an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

bool
get_range_strlen (tree arg, tree minmaxlen[2])
{
  bitmap visited = NULL;

  minmaxlen[0] = NULL_TREE;
  minmaxlen[1] = NULL_TREE;

  bool flexarray = false;
  get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);

  if (visited)
    BITMAP_FREE (visited);

  return flexarray;
}

/* Return the maximum string length or maximum value of ARG according
   to TYPE (which has the same meaning as in get_range_strlen above),
   or NULL_TREE if it cannot be determined.  */

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len[2] = { NULL_TREE, NULL_TREE };

  bool dummy;
  if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
    len[1] = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len[1];
}


/* Fold a function call to builtin strcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
                            tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a function call to builtin strncpy with arguments DEST, SRC,
   and LEN.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
                             tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, the src string must have a constant length.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK, transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */

static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
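      /* Both arguments are constant: do the search at compile time with
         the host strchr/strrchr and fold the result to STR plus the
         offset of the match, or to null if there is no match.  */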
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
        {
          replace_call_with_value (gsi, integer_zero_node);
          return true;
        }

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                              POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
        {
          gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create len = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}

/* Fold a function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */

static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);
  const char *p, *q;

  if (!gimple_call_lhs (stmt))
    return false;

  q = c_getstr (needle);
  if (q == NULL)
    return false;

  if ((p = c_getstr (haystack)))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
        {
          replace_call_with_value (gsi, integer_zero_node);
          return true;
        }

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
        = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
                               haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
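  /* A single-character needle (q[1] == 0) makes the simpler strchr
     search equivalent.  */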
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
        {
          tree c = build_int_cst (integer_type_node, q[0]);
          gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}

/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible; otherwise replace the
   call with a more efficient equivalent that computes the same value
   (possibly calls to other builtin functions, such as strlen and
   memcpy) and return true.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the source string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ??? We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}

/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE are
   the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;


  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
         transformation.  */
      if (!fn)
        return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && tree_fits_uhwi_p (src_len)
          && tree_fits_uhwi_p (len)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
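          /* With LEN >= strlen (SRC) the bound can never truncate the
             copy, so the unbounded __strcat_chk is equivalent.  */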
*/ 1836 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK); 1837 if (!fn) 1838 return false; 1839 1840 gimple *repl = gimple_build_call (fn, 3, dest, src, size); 1841 replace_call_with_call_and_fold (gsi, repl); 1842 return true; 1843 } 1844 return false; 1845 } 1846 1847 /* If __builtin_strncat_chk is used, assume strncat is available. */ 1848 fn = builtin_decl_explicit (BUILT_IN_STRNCAT); 1849 if (!fn) 1850 return false; 1851 1852 gimple *repl = gimple_build_call (fn, 3, dest, src, len); 1853 replace_call_with_call_and_fold (gsi, repl); 1854 return true; 1855 } 1856 1857 /* Build and append gimple statements to STMTS that would load the first 1858 character of the memory location identified by STR. LOC is the 1859 location of the statement. */ 1860 1861 static tree 1862 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts) 1863 { 1864 tree var; 1865 1866 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0); 1867 tree cst_uchar_ptr_node 1868 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true); 1869 tree off0 = build_int_cst (cst_uchar_ptr_node, 0); 1870 1871 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0); 1872 gassign *stmt = gimple_build_assign (NULL_TREE, temp); 1873 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt); 1874 1875 gimple_assign_set_lhs (stmt, var); 1876 gimple_seq_add_stmt_without_update (stmts, stmt); 1877 1878 return var; 1879 } 1880 1881 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI 1882 iterator; the builtin's function code is taken from its decl. */ 1883 1884 static bool 1885 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi) 1886 { 1887 gimple *stmt = gsi_stmt (*gsi); 1888 tree callee = gimple_call_fndecl (stmt); 1889 enum built_in_function fcode = DECL_FUNCTION_CODE (callee); 1890 1891 tree type = integer_type_node; 1892 tree str1 = gimple_call_arg (stmt, 0); 1893 tree str2 = gimple_call_arg (stmt, 1); 1894 tree lhs = gimple_call_lhs (stmt); 1895 HOST_WIDE_INT length = -1; 1896 1897 /* Handle strncmp and strncasecmp functions. */ 1898 if (gimple_call_num_args (stmt) == 3) 1899 { 1900 tree len = gimple_call_arg (stmt, 2); 1901 if (tree_fits_uhwi_p (len)) 1902 length = tree_to_uhwi (len); 1903 } 1904 1905 /* If the LEN parameter is zero, return zero. */ 1906 if (length == 0) 1907 { 1908 replace_call_with_value (gsi, integer_zero_node); 1909 return true; 1910 } 1911 1912 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */ 1913 if (operand_equal_p (str1, str2, 0)) 1914 { 1915 replace_call_with_value (gsi, integer_zero_node); 1916 return true; 1917 } 1918 1919 const char *p1 = c_getstr (str1); 1920 const char *p2 = c_getstr (str2); 1921 1922 /* For known strings, return an immediate value. */ 1923 if (p1 && p2) 1924 { 1925 int r = 0; 1926 bool known_result = false; 1927 1928 switch (fcode) 1929 { 1930 case BUILT_IN_STRCMP: 1931 { 1932 r = strcmp (p1, p2); 1933 known_result = true; 1934 break; 1935 } 1936 case BUILT_IN_STRNCMP: 1937 { 1938 if (length == -1) 1939 break; 1940 r = strncmp (p1, p2, length); 1941 known_result = true; 1942 break; 1943 } 1944 /* The only handleable situation is when the strings are equal 1945 (result 0), which is already handled by the operand_equal_p case. 
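A caseless comparison depends on the case mapping of the runtime locale, which is unknown at compile time, so no other outcome can be predicted here. 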
*/ 1946 case BUILT_IN_STRCASECMP: 1947 break; 1948 case BUILT_IN_STRNCASECMP: 1949 { 1950 if (length == -1) 1951 break; 1952 r = strncmp (p1, p2, length); 1953 if (r == 0) 1954 known_result = true; 1955 break; 1956 } 1957 default: 1958 gcc_unreachable (); 1959 } 1960 1961 if (known_result) 1962 { 1963 replace_call_with_value (gsi, build_cmp_result (type, r)); 1964 return true; 1965 } 1966 } 1967 1968 bool nonzero_length = length >= 1 1969 || fcode == BUILT_IN_STRCMP 1970 || fcode == BUILT_IN_STRCASECMP; 1971 1972 location_t loc = gimple_location (stmt); 1973 1974 /* If the second arg is "", return *(const unsigned char*)arg1. */ 1975 if (p2 && *p2 == '\0' && nonzero_length) 1976 { 1977 gimple_seq stmts = NULL; 1978 tree var = gimple_load_first_char (loc, str1, &stmts); 1979 if (lhs) 1980 { 1981 stmt = gimple_build_assign (lhs, NOP_EXPR, var); 1982 gimple_seq_add_stmt_without_update (&stmts, stmt); 1983 } 1984 1985 gsi_replace_with_seq_vops (gsi, stmts); 1986 return true; 1987 } 1988 1989 /* If the first arg is "", return -*(const unsigned char*)arg2. */ 1990 if (p1 && *p1 == '\0' && nonzero_length) 1991 { 1992 gimple_seq stmts = NULL; 1993 tree var = gimple_load_first_char (loc, str2, &stmts); 1994 1995 if (lhs) 1996 { 1997 tree c = create_tmp_reg_or_ssa_name (integer_type_node); 1998 stmt = gimple_build_assign (c, NOP_EXPR, var); 1999 gimple_seq_add_stmt_without_update (&stmts, stmt); 2000 2001 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c); 2002 gimple_seq_add_stmt_without_update (&stmts, stmt); 2003 } 2004 2005 gsi_replace_with_seq_vops (gsi, stmts); 2006 return true; 2007 } 2008 2009 /* If the LEN parameter is one, return an expression corresponding to 2010 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */ 2011 if (fcode == BUILT_IN_STRNCMP && length == 1) 2012 { 2013 gimple_seq stmts = NULL; 2014 tree temp1 = gimple_load_first_char (loc, str1, &stmts); 2015 tree temp2 = gimple_load_first_char (loc, str2, &stmts); 2016 2017 if (lhs) 2018 { 2019 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node); 2020 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1); 2021 gimple_seq_add_stmt_without_update (&stmts, convert1); 2022 2023 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node); 2024 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2); 2025 gimple_seq_add_stmt_without_update (&stmts, convert2); 2026 2027 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2); 2028 gimple_seq_add_stmt_without_update (&stmts, stmt); 2029 } 2030 2031 gsi_replace_with_seq_vops (gsi, stmts); 2032 return true; 2033 } 2034 2035 return false; 2036 } 2037 2038 /* Fold a call to memchr pointed to by the GSI iterator. */ 2039 2040 static bool 2041 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi) 2042 { 2043 gimple *stmt = gsi_stmt (*gsi); 2044 tree lhs = gimple_call_lhs (stmt); 2045 tree arg1 = gimple_call_arg (stmt, 0); 2046 tree arg2 = gimple_call_arg (stmt, 1); 2047 tree len = gimple_call_arg (stmt, 2); 2048 2049 /* If the LEN parameter is zero, return zero. 
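For instance, memchr (s, c, 0) inspects no characters at all and therefore yields a null pointer whatever S and C are. 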
*/ 2050 if (integer_zerop (len)) 2051 { 2052 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0)); 2053 return true; 2054 } 2055 2056 char c; 2057 if (TREE_CODE (arg2) != INTEGER_CST 2058 || !tree_fits_uhwi_p (len) 2059 || !target_char_cst_p (arg2, &c)) 2060 return false; 2061 2062 unsigned HOST_WIDE_INT length = tree_to_uhwi (len); 2063 unsigned HOST_WIDE_INT string_length; 2064 const char *p1 = c_getstr (arg1, &string_length); 2065 2066 if (p1) 2067 { 2068 const char *r = (const char *)memchr (p1, c, MIN (length, string_length)); 2069 if (r == NULL) 2070 { 2071 if (length <= string_length) 2072 { 2073 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0)); 2074 return true; 2075 } 2076 } 2077 else 2078 { 2079 unsigned HOST_WIDE_INT offset = r - p1; 2080 gimple_seq stmts = NULL; 2081 if (lhs != NULL_TREE) 2082 { 2083 tree offset_cst = build_int_cst (TREE_TYPE (len), offset); 2084 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR, 2085 arg1, offset_cst); 2086 gimple_seq_add_stmt_without_update (&stmts, stmt); 2087 } 2088 else 2089 gimple_seq_add_stmt_without_update (&stmts, 2090 gimple_build_nop ()); 2091 2092 gsi_replace_with_seq_vops (gsi, stmts); 2093 return true; 2094 } 2095 } 2096 2097 return false; 2098 } 2099 2100 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments 2101 to the call. UNLOCKED is true if this is actually a call to 2102 fputs_unlocked. Return true if the call was simplified, false if 2103 no simplification was possible. */ 2104 2105 2106 2107 static bool 2108 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi, 2109 tree arg0, tree arg1, 2110 bool unlocked) 2111 { 2112 gimple *stmt = gsi_stmt (*gsi); 2113 2114 /* If we're using an unlocked function, assume the other unlocked 2115 functions exist explicitly. */ 2116 tree const fn_fputc = (unlocked 2117 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED) 2118 : builtin_decl_implicit (BUILT_IN_FPUTC)); 2119 tree const fn_fwrite = (unlocked 2120 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED) 2121 : builtin_decl_implicit (BUILT_IN_FWRITE)); 2122 2123 /* If the return value is used, don't do the transformation. */ 2124 if (gimple_call_lhs (stmt)) 2125 return false; 2126 2127 /* Get the length of the string passed to fputs. If the length 2128 can't be determined, punt. */ 2129 tree len = get_maxval_strlen (arg0, 0); 2130 if (!len 2131 || TREE_CODE (len) != INTEGER_CST) 2132 return false; 2133 2134 switch (compare_tree_int (len, 1)) 2135 { 2136 case -1: /* length is 0, delete the call entirely. */ 2137 replace_call_with_value (gsi, integer_zero_node); 2138 return true; 2139 2140 case 0: /* length is 1, call fputc. */ 2141 { 2142 const char *p = c_getstr (arg0); 2143 if (p != NULL) 2144 { 2145 if (!fn_fputc) 2146 return false; 2147 2148 gimple *repl = gimple_build_call (fn_fputc, 2, 2149 build_int_cst 2150 (integer_type_node, p[0]), arg1); 2151 replace_call_with_call_and_fold (gsi, repl); 2152 return true; 2153 } 2154 } 2155 /* FALLTHROUGH */ 2156 case 1: /* length is greater than 1, call fwrite. */ 2157 { 2158 /* If optimizing for size, keep fputs. */ 2159 if (optimize_function_for_size_p (cfun)) 2160 return false; 2161 /* New argument list transforming fputs(string, stream) to 2162 fwrite(string, 1, len, stream). 
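For instance, fputs ("hello", f) becomes fwrite ("hello", 1, 5, f). 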
*/ 2163 if (!fn_fwrite) 2164 return false; 2165 2166 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0, 2167 size_one_node, len, arg1); 2168 replace_call_with_call_and_fold (gsi, repl); 2169 return true; 2170 } 2171 default: 2172 gcc_unreachable (); 2173 } 2174 return false; 2175 } 2176 2177 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin. 2178 DEST, SRC, LEN, and SIZE are the arguments to the call. 2179 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_* 2180 code of the builtin. If MAXLEN is not NULL, it is maximum length 2181 passed as third argument. */ 2182 2183 static bool 2184 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi, 2185 tree dest, tree src, tree len, tree size, 2186 enum built_in_function fcode) 2187 { 2188 gimple *stmt = gsi_stmt (*gsi); 2189 location_t loc = gimple_location (stmt); 2190 bool ignore = gimple_call_lhs (stmt) == NULL_TREE; 2191 tree fn; 2192 2193 /* If SRC and DEST are the same (and not volatile), return DEST 2194 (resp. DEST+LEN for __mempcpy_chk). */ 2195 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0)) 2196 { 2197 if (fcode != BUILT_IN_MEMPCPY_CHK) 2198 { 2199 replace_call_with_value (gsi, dest); 2200 return true; 2201 } 2202 else 2203 { 2204 gimple_seq stmts = NULL; 2205 len = gimple_convert_to_ptrofftype (&stmts, loc, len); 2206 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR, 2207 TREE_TYPE (dest), dest, len); 2208 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); 2209 replace_call_with_value (gsi, temp); 2210 return true; 2211 } 2212 } 2213 2214 if (! tree_fits_uhwi_p (size)) 2215 return false; 2216 2217 tree maxlen = get_maxval_strlen (len, 2); 2218 if (! integer_all_onesp (size)) 2219 { 2220 if (! tree_fits_uhwi_p (len)) 2221 { 2222 /* If LEN is not constant, try MAXLEN too. 2223 For MAXLEN only allow optimizing into non-_ocs function 2224 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ 2225 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen)) 2226 { 2227 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore) 2228 { 2229 /* (void) __mempcpy_chk () can be optimized into 2230 (void) __memcpy_chk (). */ 2231 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK); 2232 if (!fn) 2233 return false; 2234 2235 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size); 2236 replace_call_with_call_and_fold (gsi, repl); 2237 return true; 2238 } 2239 return false; 2240 } 2241 } 2242 else 2243 maxlen = len; 2244 2245 if (tree_int_cst_lt (size, maxlen)) 2246 return false; 2247 } 2248 2249 fn = NULL_TREE; 2250 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume 2251 mem{cpy,pcpy,move,set} is available. */ 2252 switch (fcode) 2253 { 2254 case BUILT_IN_MEMCPY_CHK: 2255 fn = builtin_decl_explicit (BUILT_IN_MEMCPY); 2256 break; 2257 case BUILT_IN_MEMPCPY_CHK: 2258 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY); 2259 break; 2260 case BUILT_IN_MEMMOVE_CHK: 2261 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE); 2262 break; 2263 case BUILT_IN_MEMSET_CHK: 2264 fn = builtin_decl_explicit (BUILT_IN_MEMSET); 2265 break; 2266 default: 2267 break; 2268 } 2269 2270 if (!fn) 2271 return false; 2272 2273 gimple *repl = gimple_build_call (fn, 3, dest, src, len); 2274 replace_call_with_call_and_fold (gsi, repl); 2275 return true; 2276 } 2277 2278 /* Fold a call to the __st[rp]cpy_chk builtin. 2279 DEST, SRC, and SIZE are the arguments to the call. 2280 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_* 2281 code of the builtin. 
If MAXLEN is not NULL, it is maximum length of 2282 strings passed as second argument. */ 2283 2284 static bool 2285 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi, 2286 tree dest, 2287 tree src, tree size, 2288 enum built_in_function fcode) 2289 { 2290 gimple *stmt = gsi_stmt (*gsi); 2291 location_t loc = gimple_location (stmt); 2292 bool ignore = gimple_call_lhs (stmt) == NULL_TREE; 2293 tree len, fn; 2294 2295 /* If SRC and DEST are the same (and not volatile), return DEST. */ 2296 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0)) 2297 { 2298 replace_call_with_value (gsi, dest); 2299 return true; 2300 } 2301 2302 if (! tree_fits_uhwi_p (size)) 2303 return false; 2304 2305 tree maxlen = get_maxval_strlen (src, 1); 2306 if (! integer_all_onesp (size)) 2307 { 2308 len = c_strlen (src, 1); 2309 if (! len || ! tree_fits_uhwi_p (len)) 2310 { 2311 /* If LEN is not constant, try MAXLEN too. 2312 For MAXLEN only allow optimizing into non-_ocs function 2313 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ 2314 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen)) 2315 { 2316 if (fcode == BUILT_IN_STPCPY_CHK) 2317 { 2318 if (! ignore) 2319 return false; 2320 2321 /* If return value of __stpcpy_chk is ignored, 2322 optimize into __strcpy_chk. */ 2323 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK); 2324 if (!fn) 2325 return false; 2326 2327 gimple *repl = gimple_build_call (fn, 3, dest, src, size); 2328 replace_call_with_call_and_fold (gsi, repl); 2329 return true; 2330 } 2331 2332 if (! len || TREE_SIDE_EFFECTS (len)) 2333 return false; 2334 2335 /* If c_strlen returned something, but not a constant, 2336 transform __strcpy_chk into __memcpy_chk. */ 2337 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK); 2338 if (!fn) 2339 return false; 2340 2341 gimple_seq stmts = NULL; 2342 len = gimple_convert (&stmts, loc, size_type_node, len); 2343 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len, 2344 build_int_cst (size_type_node, 1)); 2345 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); 2346 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size); 2347 replace_call_with_call_and_fold (gsi, repl); 2348 return true; 2349 } 2350 } 2351 else 2352 maxlen = len; 2353 2354 if (! tree_int_cst_lt (maxlen, size)) 2355 return false; 2356 } 2357 2358 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */ 2359 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK 2360 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY); 2361 if (!fn) 2362 return false; 2363 2364 gimple *repl = gimple_build_call (fn, 2, dest, src); 2365 replace_call_with_call_and_fold (gsi, repl); 2366 return true; 2367 } 2368 2369 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE 2370 are the arguments to the call. If MAXLEN is not NULL, it is maximum 2371 length passed as third argument. IGNORE is true if return value can be 2372 ignored. FCODE is the BUILT_IN_* code of the builtin. */ 2373 2374 static bool 2375 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi, 2376 tree dest, tree src, 2377 tree len, tree size, 2378 enum built_in_function fcode) 2379 { 2380 gimple *stmt = gsi_stmt (*gsi); 2381 bool ignore = gimple_call_lhs (stmt) == NULL_TREE; 2382 tree fn; 2383 2384 if (fcode == BUILT_IN_STPNCPY_CHK && ignore) 2385 { 2386 /* If return value of __stpncpy_chk is ignored, 2387 optimize into __strncpy_chk. 
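The two builtins differ only in their return value (stpncpy returns a pointer into the destination, strncpy returns DEST itself), so the substitution is safe whenever that value is unused. 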
*/ 2388 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK); 2389 if (fn) 2390 { 2391 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size); 2392 replace_call_with_call_and_fold (gsi, repl); 2393 return true; 2394 } 2395 } 2396 2397 if (! tree_fits_uhwi_p (size)) 2398 return false; 2399 2400 tree maxlen = get_maxval_strlen (len, 2); 2401 if (! integer_all_onesp (size)) 2402 { 2403 if (! tree_fits_uhwi_p (len)) 2404 { 2405 /* If LEN is not constant, try MAXLEN too. 2406 For MAXLEN only allow optimizing into non-_ocs function 2407 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ 2408 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen)) 2409 return false; 2410 } 2411 else 2412 maxlen = len; 2413 2414 if (tree_int_cst_lt (size, maxlen)) 2415 return false; 2416 } 2417 2418 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */ 2419 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK 2420 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY); 2421 if (!fn) 2422 return false; 2423 2424 gimple *repl = gimple_build_call (fn, 3, dest, src, len); 2425 replace_call_with_call_and_fold (gsi, repl); 2426 return true; 2427 } 2428 2429 /* Fold a call to the stpcpy builtin with arguments DEST and SRC. 2430 Return true if a simplification was made, false otherwise. */ 2431 2432 static bool 2433 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi) 2434 { 2435 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 2436 location_t loc = gimple_location (stmt); 2437 tree dest = gimple_call_arg (stmt, 0); 2438 tree src = gimple_call_arg (stmt, 1); 2439 tree fn, len, lenp1; 2440 2441 /* If the result is unused, replace stpcpy with strcpy. */ 2442 if (gimple_call_lhs (stmt) == NULL_TREE) 2443 { 2444 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY); 2445 if (!fn) 2446 return false; 2447 gimple_call_set_fndecl (stmt, fn); 2448 fold_stmt (gsi); 2449 return true; 2450 } 2451 2452 len = c_strlen (src, 1); 2453 if (!len 2454 || TREE_CODE (len) != INTEGER_CST) 2455 return false; 2456 2457 if (optimize_function_for_size_p (cfun) 2458 /* If length is zero it's small enough. */ 2459 && !integer_zerop (len)) 2460 return false; 2461 2462 /* If the source has a known length, replace stpcpy with memcpy. */ 2463 fn = builtin_decl_implicit (BUILT_IN_MEMCPY); 2464 if (!fn) 2465 return false; 2466 2467 gimple_seq stmts = NULL; 2468 tree tem = gimple_convert (&stmts, loc, size_type_node, len); 2469 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, 2470 tem, build_int_cst (size_type_node, 1)); 2471 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); 2472 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1); 2473 gimple_set_vuse (repl, gimple_vuse (stmt)); 2474 gimple_set_vdef (repl, gimple_vdef (stmt)); 2475 if (gimple_vdef (repl) 2476 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME) 2477 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl; 2478 gsi_insert_before (gsi, repl, GSI_SAME_STMT); 2479 /* Replace the result with dest + len. */ 2480 stmts = NULL; 2481 tem = gimple_convert (&stmts, loc, sizetype, len); 2482 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); 2483 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt), 2484 POINTER_PLUS_EXPR, dest, tem); 2485 gsi_replace (gsi, ret, false); 2486 /* Finally fold the memcpy call. */ 2487 gimple_stmt_iterator gsi2 = *gsi; 2488 gsi_prev (&gsi2); 2489 fold_stmt (&gsi2); 2490 return true; 2491 } 2492 2493 /* Fold a call to the __{,v}snprintf_chk builtin pointed to by GSI. 
Return 2494 true if the call was simplified, or false if a normal call 2495 should be emitted instead. FCODE is either BUILT_IN_SNPRINTF_CHK 2496 or BUILT_IN_VSNPRINTF_CHK. */ 2497 2498 2499 static bool 2500 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi, 2501 enum built_in_function fcode) 2502 { 2503 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 2504 tree dest, size, len, fn, fmt, flag; 2505 const char *fmt_str; 2506 2507 /* Verify the required arguments in the original call. */ 2508 if (gimple_call_num_args (stmt) < 5) 2509 return false; 2510 2511 dest = gimple_call_arg (stmt, 0); 2512 len = gimple_call_arg (stmt, 1); 2513 flag = gimple_call_arg (stmt, 2); 2514 size = gimple_call_arg (stmt, 3); 2515 fmt = gimple_call_arg (stmt, 4); 2516 2517 if (! tree_fits_uhwi_p (size)) 2518 return false; 2519 2520 if (! integer_all_onesp (size)) 2521 { 2522 tree maxlen = get_maxval_strlen (len, 2); 2523 if (! tree_fits_uhwi_p (len)) 2524 { 2525 /* If LEN is not constant, try MAXLEN too. 2526 For MAXLEN only allow optimizing into non-_ocs function 2527 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ 2528 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen)) 2529 return false; 2530 } 2531 else 2532 maxlen = len; 2533 2534 if (tree_int_cst_lt (size, maxlen)) 2535 return false; 2536 } 2537 2538 if (!init_target_chars ()) 2539 return false; 2540 2541 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0 2542 or if the format doesn't contain % chars or is "%s". */ 2543 if (! integer_zerop (flag)) 2544 { 2545 fmt_str = c_getstr (fmt); 2546 if (fmt_str == NULL) 2547 return false; 2548 if (strchr (fmt_str, target_percent) != NULL 2549 && strcmp (fmt_str, target_percent_s)) 2550 return false; 2551 } 2552 2553 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is 2554 available. */ 2555 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK 2556 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF); 2557 if (!fn) 2558 return false; 2559 2560 /* Replace the called function and the first five arguments with three, 2561 retaining the trailing varargs. */ 2562 gimple_call_set_fndecl (stmt, fn); 2563 gimple_call_set_fntype (stmt, TREE_TYPE (fn)); 2564 gimple_call_set_arg (stmt, 0, dest); 2565 gimple_call_set_arg (stmt, 1, len); 2566 gimple_call_set_arg (stmt, 2, fmt); 2567 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i) 2568 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2)); 2569 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2); 2570 fold_stmt (gsi); 2571 return true; 2572 } 2573 2574 /* Fold a call to the __{,v}sprintf_chk builtin pointed to by GSI. 2575 Return true if the call was simplified, or false if a normal call 2576 should be emitted instead. FCODE is either BUILT_IN_SPRINTF_CHK 2577 or BUILT_IN_VSPRINTF_CHK. */ 2578 2579 static bool 2580 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi, 2581 enum built_in_function fcode) 2582 { 2583 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 2584 tree dest, size, len, fn, fmt, flag; 2585 const char *fmt_str; 2586 unsigned nargs = gimple_call_num_args (stmt); 2587 2588 /* Verify the required arguments in the original call. */ 2589 if (nargs < 4) 2590 return false; 2591 dest = gimple_call_arg (stmt, 0); 2592 flag = gimple_call_arg (stmt, 1); 2593 size = gimple_call_arg (stmt, 2); 2594 fmt = gimple_call_arg (stmt, 3); 2595 2596 if (! 
tree_fits_uhwi_p (size)) 2597 return false; 2598 2599 len = NULL_TREE; 2600 2601 if (!init_target_chars ()) 2602 return false; 2603 2604 /* Check whether the format is a literal string constant. */ 2605 fmt_str = c_getstr (fmt); 2606 if (fmt_str != NULL) 2607 { 2608 /* If the format doesn't contain % args or %%, we know the size. */ 2609 if (strchr (fmt_str, target_percent) == 0) 2610 { 2611 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4) 2612 len = build_int_cstu (size_type_node, strlen (fmt_str)); 2613 } 2614 /* If the format is "%s" and the first ... argument is a string 2615 literal, we know the size too. */ 2616 else if (fcode == BUILT_IN_SPRINTF_CHK 2617 && strcmp (fmt_str, target_percent_s) == 0) 2618 { 2619 tree arg; 2620 2621 if (nargs == 5) 2622 { 2623 arg = gimple_call_arg (stmt, 4); 2624 if (POINTER_TYPE_P (TREE_TYPE (arg))) 2625 { 2626 len = c_strlen (arg, 1); 2627 if (! len || ! tree_fits_uhwi_p (len)) 2628 len = NULL_TREE; 2629 } 2630 } 2631 } 2632 } 2633 2634 if (! integer_all_onesp (size)) 2635 { 2636 if (! len || ! tree_int_cst_lt (len, size)) 2637 return false; 2638 } 2639 2640 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0 2641 or if the format doesn't contain % chars or is "%s". */ 2642 if (! integer_zerop (flag)) 2643 { 2644 if (fmt_str == NULL) 2645 return false; 2646 if (strchr (fmt_str, target_percent) != NULL 2647 && strcmp (fmt_str, target_percent_s)) 2648 return false; 2649 } 2650 2651 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */ 2652 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK 2653 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF); 2654 if (!fn) 2655 return false; 2656 2657 /* Replace the called function and the first four arguments with two, 2658 retaining the trailing varargs. */ 2659 gimple_call_set_fndecl (stmt, fn); 2660 gimple_call_set_fntype (stmt, TREE_TYPE (fn)); 2661 gimple_call_set_arg (stmt, 0, dest); 2662 gimple_call_set_arg (stmt, 1, fmt); 2663 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i) 2664 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2)); 2665 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2); 2666 fold_stmt (gsi); 2667 return true; 2668 } 2669 2670 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG. 2671 ORIG may be null if this is a 2-argument call. We don't attempt to 2672 simplify calls with more than 3 arguments. 2673 2674 Return true if a simplification was made, false otherwise. When the 2675 return value of sprintf is used, the replacement sequence also assigns 2676 the known result length to the original LHS. */ 2677 2678 static bool 2679 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi) 2680 { 2681 gimple *stmt = gsi_stmt (*gsi); 2682 tree dest = gimple_call_arg (stmt, 0); 2683 tree fmt = gimple_call_arg (stmt, 1); 2684 tree orig = NULL_TREE; 2685 const char *fmt_str = NULL; 2686 2687 /* Verify the required arguments in the original call. We deal with two 2688 types of sprintf() calls: 'sprintf (str, fmt)' and 2689 'sprintf (dest, "%s", orig)'. */ 2690 if (gimple_call_num_args (stmt) > 3) 2691 return false; 2692 2693 if (gimple_call_num_args (stmt) == 3) 2694 orig = gimple_call_arg (stmt, 2); 2695 2696 /* Check whether the format is a literal string constant. */ 2697 fmt_str = c_getstr (fmt); 2698 if (fmt_str == NULL) 2699 return false; 2700 2701 if (!init_target_chars ()) 2702 return false; 2703 2704 /* If the format doesn't contain % args or %%, use strcpy. 
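For example, sprintf (buf, "hi") becomes strcpy (buf, "hi"), with the constant length 2 assigned to the original LHS if the sprintf result was used. 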
*/ 2705 if (strchr (fmt_str, target_percent) == NULL) 2706 { 2707 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY); 2708 2709 if (!fn) 2710 return false; 2711 2712 /* Don't optimize sprintf (buf, "abc", ptr++). */ 2713 if (orig) 2714 return false; 2715 2716 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when 2717 'format' is known to contain no % formats. */ 2718 gimple_seq stmts = NULL; 2719 gimple *repl = gimple_build_call (fn, 2, dest, fmt); 2720 gimple_seq_add_stmt_without_update (&stmts, repl); 2721 if (gimple_call_lhs (stmt)) 2722 { 2723 repl = gimple_build_assign (gimple_call_lhs (stmt), 2724 build_int_cst (integer_type_node, 2725 strlen (fmt_str))); 2726 gimple_seq_add_stmt_without_update (&stmts, repl); 2727 gsi_replace_with_seq_vops (gsi, stmts); 2728 /* gsi now points at the assignment to the lhs, get a 2729 stmt iterator to the memcpy call. 2730 ??? We can't use gsi_for_stmt as that doesn't work when the 2731 CFG isn't built yet. */ 2732 gimple_stmt_iterator gsi2 = *gsi; 2733 gsi_prev (&gsi2); 2734 fold_stmt (&gsi2); 2735 } 2736 else 2737 { 2738 gsi_replace_with_seq_vops (gsi, stmts); 2739 fold_stmt (gsi); 2740 } 2741 return true; 2742 } 2743 2744 /* If the format is "%s", use strcpy if the result isn't used. */ 2745 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0) 2746 { 2747 tree fn; 2748 fn = builtin_decl_implicit (BUILT_IN_STRCPY); 2749 2750 if (!fn) 2751 return false; 2752 2753 /* Don't crash on sprintf (str1, "%s"). */ 2754 if (!orig) 2755 return false; 2756 2757 tree orig_len = NULL_TREE; 2758 if (gimple_call_lhs (stmt)) 2759 { 2760 orig_len = get_maxval_strlen (orig, 0); 2761 if (!orig_len) 2762 return false; 2763 } 2764 2765 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */ 2766 gimple_seq stmts = NULL; 2767 gimple *repl = gimple_build_call (fn, 2, dest, orig); 2768 gimple_seq_add_stmt_without_update (&stmts, repl); 2769 if (gimple_call_lhs (stmt)) 2770 { 2771 if (!useless_type_conversion_p (integer_type_node, 2772 TREE_TYPE (orig_len))) 2773 orig_len = fold_convert (integer_type_node, orig_len); 2774 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len); 2775 gimple_seq_add_stmt_without_update (&stmts, repl); 2776 gsi_replace_with_seq_vops (gsi, stmts); 2777 /* gsi now points at the assignment to the lhs, get a 2778 stmt iterator to the memcpy call. 2779 ??? We can't use gsi_for_stmt as that doesn't work when the 2780 CFG isn't built yet. */ 2781 gimple_stmt_iterator gsi2 = *gsi; 2782 gsi_prev (&gsi2); 2783 fold_stmt (&gsi2); 2784 } 2785 else 2786 { 2787 gsi_replace_with_seq_vops (gsi, stmts); 2788 fold_stmt (gsi); 2789 } 2790 return true; 2791 } 2792 return false; 2793 } 2794 2795 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE, 2796 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't 2797 attempt to simplify calls with more than 4 arguments. 2798 2799 Return NULL_TREE if no simplification was possible, otherwise return the 2800 simplified form of the call as a tree. If IGNORED is true, it means that 2801 the caller does not use the returned value of the function. 
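For instance, snprintf (buf, 8, "abc") becomes strcpy (buf, "abc") when the destination size 8 is known to exceed strlen ("abc") == 3; if the result was used, the constant 3 is assigned to the original LHS. 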
*/ 2802 2803 static bool 2804 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi) 2805 { 2806 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 2807 tree dest = gimple_call_arg (stmt, 0); 2808 tree destsize = gimple_call_arg (stmt, 1); 2809 tree fmt = gimple_call_arg (stmt, 2); 2810 tree orig = NULL_TREE; 2811 const char *fmt_str = NULL; 2812 2813 if (gimple_call_num_args (stmt) > 4) 2814 return false; 2815 2816 if (gimple_call_num_args (stmt) == 4) 2817 orig = gimple_call_arg (stmt, 3); 2818 2819 if (!tree_fits_uhwi_p (destsize)) 2820 return false; 2821 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize); 2822 2823 /* Check whether the format is a literal string constant. */ 2824 fmt_str = c_getstr (fmt); 2825 if (fmt_str == NULL) 2826 return false; 2827 2828 if (!init_target_chars ()) 2829 return false; 2830 2831 /* If the format doesn't contain % args or %%, use strcpy. */ 2832 if (strchr (fmt_str, target_percent) == NULL) 2833 { 2834 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY); 2835 if (!fn) 2836 return false; 2837 2838 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */ 2839 if (orig) 2840 return false; 2841 2842 /* We could expand this as 2843 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0'; 2844 or to 2845 memcpy (str, fmt_with_nul_at_cstm1, cst); 2846 but in the former case that might increase code size 2847 and in the latter case grow .rodata section too much. 2848 So punt for now. */ 2849 size_t len = strlen (fmt_str); 2850 if (len >= destlen) 2851 return false; 2852 2853 gimple_seq stmts = NULL; 2854 gimple *repl = gimple_build_call (fn, 2, dest, fmt); 2855 gimple_seq_add_stmt_without_update (&stmts, repl); 2856 if (gimple_call_lhs (stmt)) 2857 { 2858 repl = gimple_build_assign (gimple_call_lhs (stmt), 2859 build_int_cst (integer_type_node, len)); 2860 gimple_seq_add_stmt_without_update (&stmts, repl); 2861 gsi_replace_with_seq_vops (gsi, stmts); 2862 /* gsi now points at the assignment to the lhs, get a 2863 stmt iterator to the memcpy call. 2864 ??? We can't use gsi_for_stmt as that doesn't work when the 2865 CFG isn't built yet. */ 2866 gimple_stmt_iterator gsi2 = *gsi; 2867 gsi_prev (&gsi2); 2868 fold_stmt (&gsi2); 2869 } 2870 else 2871 { 2872 gsi_replace_with_seq_vops (gsi, stmts); 2873 fold_stmt (gsi); 2874 } 2875 return true; 2876 } 2877 2878 /* If the format is "%s", use strcpy if the result isn't used. */ 2879 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0) 2880 { 2881 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY); 2882 if (!fn) 2883 return false; 2884 2885 /* Don't crash on snprintf (str1, cst, "%s"). */ 2886 if (!orig) 2887 return false; 2888 2889 tree orig_len = get_maxval_strlen (orig, 0); 2890 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST) 2891 return false; 2892 2893 /* We could expand this as 2894 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0'; 2895 or to 2896 memcpy (str1, str2_with_nul_at_cstm1, cst); 2897 but in the former case that might increase code size 2898 and in the latter case grow .rodata section too much. 2899 So punt for now. */ 2900 if (compare_tree_int (orig_len, destlen) >= 0) 2901 return false; 2902 2903 /* Convert snprintf (str1, cst, "%s", str2) into 2904 strcpy (str1, str2) if strlen (str2) < cst. 
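For instance, snprintf (buf, 16, "%s", "pi") becomes strcpy (buf, "pi"), which is valid because strlen ("pi") == 2 is smaller than 16. 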
*/ 2905 gimple_seq stmts = NULL; 2906 gimple *repl = gimple_build_call (fn, 2, dest, orig); 2907 gimple_seq_add_stmt_without_update (&stmts, repl); 2908 if (gimple_call_lhs (stmt)) 2909 { 2910 if (!useless_type_conversion_p (integer_type_node, 2911 TREE_TYPE (orig_len))) 2912 orig_len = fold_convert (integer_type_node, orig_len); 2913 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len); 2914 gimple_seq_add_stmt_without_update (&stmts, repl); 2915 gsi_replace_with_seq_vops (gsi, stmts); 2916 /* gsi now points at the assignment to the lhs, get a 2917 stmt iterator to the memcpy call. 2918 ??? We can't use gsi_for_stmt as that doesn't work when the 2919 CFG isn't built yet. */ 2920 gimple_stmt_iterator gsi2 = *gsi; 2921 gsi_prev (&gsi2); 2922 fold_stmt (&gsi2); 2923 } 2924 else 2925 { 2926 gsi_replace_with_seq_vops (gsi, stmts); 2927 fold_stmt (gsi); 2928 } 2929 return true; 2930 } 2931 return false; 2932 } 2933 2934 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins. 2935 FP, FMT, and ARG are the arguments to the call. We don't fold calls with 2936 more than 3 arguments, and ARG may be null in the 2-argument case. 2937 2938 Return NULL_TREE if no simplification was possible, otherwise return the 2939 simplified form of the call as a tree. FCODE is the BUILT_IN_* 2940 code of the function to be simplified. */ 2941 2942 static bool 2943 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi, 2944 tree fp, tree fmt, tree arg, 2945 enum built_in_function fcode) 2946 { 2947 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 2948 tree fn_fputc, fn_fputs; 2949 const char *fmt_str = NULL; 2950 2951 /* If the return value is used, don't do the transformation. */ 2952 if (gimple_call_lhs (stmt) != NULL_TREE) 2953 return false; 2954 2955 /* Check whether the format is a literal string constant. */ 2956 fmt_str = c_getstr (fmt); 2957 if (fmt_str == NULL) 2958 return false; 2959 2960 if (fcode == BUILT_IN_FPRINTF_UNLOCKED) 2961 { 2962 /* If we're using an unlocked function, assume the other 2963 unlocked functions exist explicitly. */ 2964 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED); 2965 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED); 2966 } 2967 else 2968 { 2969 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC); 2970 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS); 2971 } 2972 2973 if (!init_target_chars ()) 2974 return false; 2975 2976 /* If the format doesn't contain % args or %%, use strcpy. */ 2977 if (strchr (fmt_str, target_percent) == NULL) 2978 { 2979 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK 2980 && arg) 2981 return false; 2982 2983 /* If the format specifier was "", fprintf does nothing. */ 2984 if (fmt_str[0] == '\0') 2985 { 2986 replace_call_with_value (gsi, NULL_TREE); 2987 return true; 2988 } 2989 2990 /* When "string" doesn't contain %, replace all cases of 2991 fprintf (fp, string) with fputs (string, fp). The fputs 2992 builtin will take care of special cases like length == 1. */ 2993 if (fn_fputs) 2994 { 2995 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp); 2996 replace_call_with_call_and_fold (gsi, repl); 2997 return true; 2998 } 2999 } 3000 3001 /* The other optimizations can be done only on the non-va_list variants. */ 3002 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK) 3003 return false; 3004 3005 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */ 3006 else if (strcmp (fmt_str, target_percent_s) == 0) 3007 { 3008 if (!arg || ! 
POINTER_TYPE_P (TREE_TYPE (arg))) 3009 return false; 3010 if (fn_fputs) 3011 { 3012 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp); 3013 replace_call_with_call_and_fold (gsi, repl); 3014 return true; 3015 } 3016 } 3017 3018 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */ 3019 else if (strcmp (fmt_str, target_percent_c) == 0) 3020 { 3021 if (!arg 3022 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg))) 3023 return false; 3024 if (fn_fputc) 3025 { 3026 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp); 3027 replace_call_with_call_and_fold (gsi, repl); 3028 return true; 3029 } 3030 } 3031 3032 return false; 3033 } 3034 3035 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins. 3036 FMT and ARG are the arguments to the call; we don't fold cases with 3037 more than 2 arguments, and ARG may be null if this is a 1-argument case. 3038 3039 Return NULL_TREE if no simplification was possible, otherwise return the 3040 simplified form of the call as a tree. FCODE is the BUILT_IN_* 3041 code of the function to be simplified. */ 3042 3043 static bool 3044 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt, 3045 tree arg, enum built_in_function fcode) 3046 { 3047 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 3048 tree fn_putchar, fn_puts, newarg; 3049 const char *fmt_str = NULL; 3050 3051 /* If the return value is used, don't do the transformation. */ 3052 if (gimple_call_lhs (stmt) != NULL_TREE) 3053 return false; 3054 3055 /* Check whether the format is a literal string constant. */ 3056 fmt_str = c_getstr (fmt); 3057 if (fmt_str == NULL) 3058 return false; 3059 3060 if (fcode == BUILT_IN_PRINTF_UNLOCKED) 3061 { 3062 /* If we're using an unlocked function, assume the other 3063 unlocked functions exist explicitly. */ 3064 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED); 3065 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED); 3066 } 3067 else 3068 { 3069 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR); 3070 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS); 3071 } 3072 3073 if (!init_target_chars ()) 3074 return false; 3075 3076 if (strcmp (fmt_str, target_percent_s) == 0 3077 || strchr (fmt_str, target_percent) == NULL) 3078 { 3079 const char *str; 3080 3081 if (strcmp (fmt_str, target_percent_s) == 0) 3082 { 3083 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK) 3084 return false; 3085 3086 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg))) 3087 return false; 3088 3089 str = c_getstr (arg); 3090 if (str == NULL) 3091 return false; 3092 } 3093 else 3094 { 3095 /* The format specifier doesn't contain any '%' characters. */ 3096 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK 3097 && arg) 3098 return false; 3099 str = fmt_str; 3100 } 3101 3102 /* If the string was "", printf does nothing. */ 3103 if (str[0] == '\0') 3104 { 3105 replace_call_with_value (gsi, NULL_TREE); 3106 return true; 3107 } 3108 3109 /* If the string has length of 1, call putchar. */ 3110 if (str[1] == '\0') 3111 { 3112 /* Given printf("c"), (where c is any one character,) 3113 convert "c"[0] to an int and pass that to the replacement 3114 function. */ 3115 newarg = build_int_cst (integer_type_node, str[0]); 3116 if (fn_putchar) 3117 { 3118 gcall *repl = gimple_build_call (fn_putchar, 1, newarg); 3119 replace_call_with_call_and_fold (gsi, repl); 3120 return true; 3121 } 3122 } 3123 else 3124 { 3125 /* If the string was "string\n", call puts("string"). 
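For instance, printf ("done\n") becomes puts ("done"); puts appends the trailing newline itself. 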
*/ 3126 size_t len = strlen (str); 3127 if ((unsigned char)str[len - 1] == target_newline 3128 && (size_t) (int) len == len 3129 && (int) len > 0) 3130 { 3131 char *newstr; 3132 tree offset_node, string_cst; 3133 3134 /* Create a NUL-terminated string that's one char shorter 3135 than the original, stripping off the trailing '\n'. */ 3136 newarg = build_string_literal (len, str); 3137 string_cst = string_constant (newarg, &offset_node); 3138 gcc_checking_assert (string_cst 3139 && (TREE_STRING_LENGTH (string_cst) 3140 == (int) len) 3141 && integer_zerop (offset_node) 3142 && (unsigned char) 3143 TREE_STRING_POINTER (string_cst)[len - 1] 3144 == target_newline); 3145 /* build_string_literal creates a new STRING_CST, 3146 modify it in place to avoid double copying. */ 3147 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst)); 3148 newstr[len - 1] = '\0'; 3149 if (fn_puts) 3150 { 3151 gcall *repl = gimple_build_call (fn_puts, 1, newarg); 3152 replace_call_with_call_and_fold (gsi, repl); 3153 return true; 3154 } 3155 } 3156 else 3157 /* We'd like to arrange to call fputs(string,stdout) here, 3158 but we need stdout and don't have a way to get it yet. */ 3159 return false; 3160 } 3161 } 3162 3163 /* The other optimizations can be done only on the non-va_list variants. */ 3164 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK) 3165 return false; 3166 3167 /* If the format specifier was "%s\n", call __builtin_puts(arg). */ 3168 else if (strcmp (fmt_str, target_percent_s_newline) == 0) 3169 { 3170 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg))) 3171 return false; 3172 if (fn_puts) 3173 { 3174 gcall *repl = gimple_build_call (fn_puts, 1, arg); 3175 replace_call_with_call_and_fold (gsi, repl); 3176 return true; 3177 } 3178 } 3179 3180 /* If the format specifier was "%c", call __builtin_putchar(arg). */ 3181 else if (strcmp (fmt_str, target_percent_c) == 0) 3182 { 3183 if (!arg || ! useless_type_conversion_p (integer_type_node, 3184 TREE_TYPE (arg))) 3185 return false; 3186 if (fn_putchar) 3187 { 3188 gcall *repl = gimple_build_call (fn_putchar, 1, arg); 3189 replace_call_with_call_and_fold (gsi, repl); 3190 return true; 3191 } 3192 } 3193 3194 return false; 3195 } 3196 3197 3198 3199 /* Fold a call to __builtin_strlen with known length LEN. */ 3200 3201 static bool 3202 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi) 3203 { 3204 gimple *stmt = gsi_stmt (*gsi); 3205 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0); 3206 if (!len) 3207 return false; 3208 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT); 3209 replace_call_with_value (gsi, len); 3210 return true; 3211 } 3212 3213 /* Fold a call to __builtin_acc_on_device. */ 3214 3215 static bool 3216 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0) 3217 { 3218 /* Defer folding until we know which compiler we're in. 
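That is, whether this is the host compiler or, with offloading, an accelerator compiler; the ACCEL_COMPILER block below supplies the values for each case. 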
*/ 3219 if (symtab->state != EXPANSION) 3220 return false; 3221 3222 unsigned val_host = GOMP_DEVICE_HOST; 3223 unsigned val_dev = GOMP_DEVICE_NONE; 3224 3225 #ifdef ACCEL_COMPILER 3226 val_host = GOMP_DEVICE_NOT_HOST; 3227 val_dev = ACCEL_COMPILER_acc_device; 3228 #endif 3229 3230 location_t loc = gimple_location (gsi_stmt (*gsi)); 3231 3232 tree host_eq = make_ssa_name (boolean_type_node); 3233 gimple *host_ass = gimple_build_assign 3234 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host)); 3235 gimple_set_location (host_ass, loc); 3236 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT); 3237 3238 tree dev_eq = make_ssa_name (boolean_type_node); 3239 gimple *dev_ass = gimple_build_assign 3240 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev)); 3241 gimple_set_location (dev_ass, loc); 3242 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT); 3243 3244 tree result = make_ssa_name (boolean_type_node); 3245 gimple *result_ass = gimple_build_assign 3246 (result, BIT_IOR_EXPR, host_eq, dev_eq); 3247 gimple_set_location (result_ass, loc); 3248 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT); 3249 3250 replace_call_with_value (gsi, result); 3251 3252 return true; 3253 } 3254 3255 /* Fold the non-target builtin at *GSI and return whether any simplification 3256 was made. */ 3257 3258 static bool 3259 gimple_fold_builtin (gimple_stmt_iterator *gsi) 3260 { 3261 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi)); 3262 tree callee = gimple_call_fndecl (stmt); 3263 3264 /* Give up for always_inline inline builtins until they are 3265 inlined. */ 3266 if (avoid_folding_inline_builtin (callee)) 3267 return false; 3268 3269 unsigned n = gimple_call_num_args (stmt); 3270 enum built_in_function fcode = DECL_FUNCTION_CODE (callee); 3271 switch (fcode) 3272 { 3273 case BUILT_IN_BZERO: 3274 return gimple_fold_builtin_memset (gsi, integer_zero_node, 3275 gimple_call_arg (stmt, 1)); 3276 case BUILT_IN_MEMSET: 3277 return gimple_fold_builtin_memset (gsi, 3278 gimple_call_arg (stmt, 1), 3279 gimple_call_arg (stmt, 2)); 3280 case BUILT_IN_BCOPY: 3281 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1), 3282 gimple_call_arg (stmt, 0), 3); 3283 case BUILT_IN_MEMCPY: 3284 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0), 3285 gimple_call_arg (stmt, 1), 0); 3286 case BUILT_IN_MEMPCPY: 3287 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0), 3288 gimple_call_arg (stmt, 1), 1); 3289 case BUILT_IN_MEMMOVE: 3290 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0), 3291 gimple_call_arg (stmt, 1), 3); 3292 case BUILT_IN_SPRINTF_CHK: 3293 case BUILT_IN_VSPRINTF_CHK: 3294 return gimple_fold_builtin_sprintf_chk (gsi, fcode); 3295 case BUILT_IN_STRCAT_CHK: 3296 return gimple_fold_builtin_strcat_chk (gsi); 3297 case BUILT_IN_STRNCAT_CHK: 3298 return gimple_fold_builtin_strncat_chk (gsi); 3299 case BUILT_IN_STRLEN: 3300 return gimple_fold_builtin_strlen (gsi); 3301 case BUILT_IN_STRCPY: 3302 return gimple_fold_builtin_strcpy (gsi, 3303 gimple_call_arg (stmt, 0), 3304 gimple_call_arg (stmt, 1)); 3305 case BUILT_IN_STRNCPY: 3306 return gimple_fold_builtin_strncpy (gsi, 3307 gimple_call_arg (stmt, 0), 3308 gimple_call_arg (stmt, 1), 3309 gimple_call_arg (stmt, 2)); 3310 case BUILT_IN_STRCAT: 3311 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0), 3312 gimple_call_arg (stmt, 1)); 3313 case BUILT_IN_STRNCAT: 3314 return gimple_fold_builtin_strncat (gsi); 3315 case BUILT_IN_INDEX: 3316 case BUILT_IN_STRCHR: 3317 return 
gimple_fold_builtin_strchr (gsi, false); 3318 case BUILT_IN_RINDEX: 3319 case BUILT_IN_STRRCHR: 3320 return gimple_fold_builtin_strchr (gsi, true); 3321 case BUILT_IN_STRSTR: 3322 return gimple_fold_builtin_strstr (gsi); 3323 case BUILT_IN_STRCMP: 3324 case BUILT_IN_STRCASECMP: 3325 case BUILT_IN_STRNCMP: 3326 case BUILT_IN_STRNCASECMP: 3327 return gimple_fold_builtin_string_compare (gsi); 3328 case BUILT_IN_MEMCHR: 3329 return gimple_fold_builtin_memchr (gsi); 3330 case BUILT_IN_FPUTS: 3331 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0), 3332 gimple_call_arg (stmt, 1), false); 3333 case BUILT_IN_FPUTS_UNLOCKED: 3334 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0), 3335 gimple_call_arg (stmt, 1), true); 3336 case BUILT_IN_MEMCPY_CHK: 3337 case BUILT_IN_MEMPCPY_CHK: 3338 case BUILT_IN_MEMMOVE_CHK: 3339 case BUILT_IN_MEMSET_CHK: 3340 return gimple_fold_builtin_memory_chk (gsi, 3341 gimple_call_arg (stmt, 0), 3342 gimple_call_arg (stmt, 1), 3343 gimple_call_arg (stmt, 2), 3344 gimple_call_arg (stmt, 3), 3345 fcode); 3346 case BUILT_IN_STPCPY: 3347 return gimple_fold_builtin_stpcpy (gsi); 3348 case BUILT_IN_STRCPY_CHK: 3349 case BUILT_IN_STPCPY_CHK: 3350 return gimple_fold_builtin_stxcpy_chk (gsi, 3351 gimple_call_arg (stmt, 0), 3352 gimple_call_arg (stmt, 1), 3353 gimple_call_arg (stmt, 2), 3354 fcode); 3355 case BUILT_IN_STRNCPY_CHK: 3356 case BUILT_IN_STPNCPY_CHK: 3357 return gimple_fold_builtin_stxncpy_chk (gsi, 3358 gimple_call_arg (stmt, 0), 3359 gimple_call_arg (stmt, 1), 3360 gimple_call_arg (stmt, 2), 3361 gimple_call_arg (stmt, 3), 3362 fcode); 3363 case BUILT_IN_SNPRINTF_CHK: 3364 case BUILT_IN_VSNPRINTF_CHK: 3365 return gimple_fold_builtin_snprintf_chk (gsi, fcode); 3366 case BUILT_IN_SNPRINTF: 3367 return gimple_fold_builtin_snprintf (gsi); 3368 case BUILT_IN_SPRINTF: 3369 return gimple_fold_builtin_sprintf (gsi); 3370 case BUILT_IN_FPRINTF: 3371 case BUILT_IN_FPRINTF_UNLOCKED: 3372 case BUILT_IN_VFPRINTF: 3373 if (n == 2 || n == 3) 3374 return gimple_fold_builtin_fprintf (gsi, 3375 gimple_call_arg (stmt, 0), 3376 gimple_call_arg (stmt, 1), 3377 n == 3 3378 ? gimple_call_arg (stmt, 2) 3379 : NULL_TREE, 3380 fcode); 3381 break; 3382 case BUILT_IN_FPRINTF_CHK: 3383 case BUILT_IN_VFPRINTF_CHK: 3384 if (n == 3 || n == 4) 3385 return gimple_fold_builtin_fprintf (gsi, 3386 gimple_call_arg (stmt, 0), 3387 gimple_call_arg (stmt, 2), 3388 n == 4 3389 ? gimple_call_arg (stmt, 3) 3390 : NULL_TREE, 3391 fcode); 3392 break; 3393 case BUILT_IN_PRINTF: 3394 case BUILT_IN_PRINTF_UNLOCKED: 3395 case BUILT_IN_VPRINTF: 3396 if (n == 1 || n == 2) 3397 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0), 3398 n == 2 3399 ? gimple_call_arg (stmt, 1) 3400 : NULL_TREE, fcode); 3401 break; 3402 case BUILT_IN_PRINTF_CHK: 3403 case BUILT_IN_VPRINTF_CHK: 3404 if (n == 2 || n == 3) 3405 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1), 3406 n == 3 3407 ? gimple_call_arg (stmt, 2) 3408 : NULL_TREE, fcode); 3409 break; 3410 case BUILT_IN_ACC_ON_DEVICE: 3411 return gimple_fold_builtin_acc_on_device (gsi, 3412 gimple_call_arg (stmt, 0)); 3413 default:; 3414 } 3415 3416 /* Try the generic builtin folder. 
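(fold_call_stmt dispatches to the tree-level folders from builtins.c, which cover builtins that need no GIMPLE-specific handling above, e.g. constant folding of the math builtins.) 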
*/ 3417 bool ignore = (gimple_call_lhs (stmt) == NULL); 3418 tree result = fold_call_stmt (stmt, ignore); 3419 if (result) 3420 { 3421 if (ignore) 3422 STRIP_NOPS (result); 3423 else 3424 result = fold_convert (gimple_call_return_type (stmt), result); 3425 if (!update_call_from_tree (gsi, result)) 3426 gimplify_and_update_call_from_tree (gsi, result); 3427 return true; 3428 } 3429 3430 return false; 3431 } 3432 3433 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal 3434 function calls to constants, where possible. */ 3435 3436 static tree 3437 fold_internal_goacc_dim (const gimple *call) 3438 { 3439 int axis = oacc_get_ifn_dim_arg (call); 3440 int size = oacc_get_fn_dim_size (current_function_decl, axis); 3441 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS; 3442 tree result = NULL_TREE; 3443 3444 /* If the size is 1, or we only want the size and it is not dynamic, 3445 we know the answer. */ 3446 if (size == 1 || (!is_pos && size)) 3447 { 3448 tree type = TREE_TYPE (gimple_call_lhs (call)); 3449 result = build_int_cst (type, size - is_pos); 3450 } 3451 3452 return result; 3453 } 3454 3455 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable 3456 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is 3457 &var where var is only addressable because of such calls. */ 3458 3459 bool 3460 optimize_atomic_compare_exchange_p (gimple *stmt) 3461 { 3462 if (gimple_call_num_args (stmt) != 6 3463 || !flag_inline_atomics 3464 || !optimize 3465 || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0 3466 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL) 3467 || !gimple_vdef (stmt) 3468 || !gimple_vuse (stmt)) 3469 return false; 3470 3471 tree fndecl = gimple_call_fndecl (stmt); 3472 switch (DECL_FUNCTION_CODE (fndecl)) 3473 { 3474 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1: 3475 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2: 3476 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4: 3477 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8: 3478 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16: 3479 break; 3480 default: 3481 return false; 3482 } 3483 3484 tree expected = gimple_call_arg (stmt, 1); 3485 if (TREE_CODE (expected) != ADDR_EXPR 3486 || !SSA_VAR_P (TREE_OPERAND (expected, 0))) 3487 return false; 3488 3489 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0)); 3490 if (!is_gimple_reg_type (etype) 3491 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl) 3492 || TREE_THIS_VOLATILE (etype) 3493 || VECTOR_TYPE_P (etype) 3494 || TREE_CODE (etype) == COMPLEX_TYPE 3495 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs 3496 might not preserve all the bits. See PR71716. 
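(For instance, a long double may contain padding bits whose contents the floating point operations leave undefined, so the integer view of two equal values need not be bitwise identical.) 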
*/ 3497 || SCALAR_FLOAT_TYPE_P (etype) 3498 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype))) 3499 return false; 3500 3501 tree weak = gimple_call_arg (stmt, 3); 3502 if (!integer_zerop (weak) && !integer_onep (weak)) 3503 return false; 3504 3505 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); 3506 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt))); 3507 machine_mode mode = TYPE_MODE (itype); 3508 3509 if (direct_optab_handler (atomic_compare_and_swap_optab, mode) 3510 == CODE_FOR_nothing 3511 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing) 3512 return false; 3513 3514 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode)) 3515 return false; 3516 3517 return true; 3518 } 3519 3520 /* Fold 3521 r = __atomic_compare_exchange_N (p, &e, d, w, s, f); 3522 into 3523 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f); 3524 i = IMAGPART_EXPR <t>; 3525 r = (_Bool) i; 3526 e = REALPART_EXPR <t>; */ 3527 3528 void 3529 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi) 3530 { 3531 gimple *stmt = gsi_stmt (*gsi); 3532 tree fndecl = gimple_call_fndecl (stmt); 3533 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); 3534 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt))); 3535 tree ctype = build_complex_type (itype); 3536 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0); 3537 bool throws = false; 3538 edge e = NULL; 3539 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)), 3540 expected); 3541 gsi_insert_before (gsi, g, GSI_SAME_STMT); 3542 gimple_stmt_iterator gsiret = gsi_for_stmt (g); 3543 if (!useless_type_conversion_p (itype, TREE_TYPE (expected))) 3544 { 3545 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR, 3546 build1 (VIEW_CONVERT_EXPR, itype, 3547 gimple_assign_lhs (g))); 3548 gsi_insert_before (gsi, g, GSI_SAME_STMT); 3549 } 3550 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 
256 : 0) 3551 + int_size_in_bytes (itype); 3552 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6, 3553 gimple_call_arg (stmt, 0), 3554 gimple_assign_lhs (g), 3555 gimple_call_arg (stmt, 2), 3556 build_int_cst (integer_type_node, flag), 3557 gimple_call_arg (stmt, 4), 3558 gimple_call_arg (stmt, 5)); 3559 tree lhs = make_ssa_name (ctype); 3560 gimple_call_set_lhs (g, lhs); 3561 gimple_set_vdef (g, gimple_vdef (stmt)); 3562 gimple_set_vuse (g, gimple_vuse (stmt)); 3563 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g; 3564 tree oldlhs = gimple_call_lhs (stmt); 3565 if (stmt_can_throw_internal (stmt)) 3566 { 3567 throws = true; 3568 e = find_fallthru_edge (gsi_bb (*gsi)->succs); 3569 } 3570 gimple_call_set_nothrow (as_a <gcall *> (g), 3571 gimple_call_nothrow_p (as_a <gcall *> (stmt))); 3572 gimple_call_set_lhs (stmt, NULL_TREE); 3573 gsi_replace (gsi, g, true); 3574 if (oldlhs) 3575 { 3576 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR, 3577 build1 (IMAGPART_EXPR, itype, lhs)); 3578 if (throws) 3579 { 3580 gsi_insert_on_edge_immediate (e, g); 3581 *gsi = gsi_for_stmt (g); 3582 } 3583 else 3584 gsi_insert_after (gsi, g, GSI_NEW_STMT); 3585 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g)); 3586 gsi_insert_after (gsi, g, GSI_NEW_STMT); 3587 } 3588 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR, 3589 build1 (REALPART_EXPR, itype, lhs)); 3590 if (throws && oldlhs == NULL_TREE) 3591 { 3592 gsi_insert_on_edge_immediate (e, g); 3593 *gsi = gsi_for_stmt (g); 3594 } 3595 else 3596 gsi_insert_after (gsi, g, GSI_NEW_STMT); 3597 if (!useless_type_conversion_p (TREE_TYPE (expected), itype)) 3598 { 3599 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)), 3600 VIEW_CONVERT_EXPR, 3601 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected), 3602 gimple_assign_lhs (g))); 3603 gsi_insert_after (gsi, g, GSI_NEW_STMT); 3604 } 3605 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g)); 3606 gsi_insert_after (gsi, g, GSI_NEW_STMT); 3607 *gsi = gsiret; 3608 } 3609 3610 /* Return true if the result of ARG0 CODE ARG1, computed in infinite 3611 signed precision, doesn't fit into TYPE. Overflow is tested this 3612 way regardless of -fwrapv, and even for unsigned types. */ 3613 3614 bool 3615 arith_overflowed_p (enum tree_code code, const_tree type, 3616 const_tree arg0, const_tree arg1) 3617 { 3618 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int; 3619 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> > 3620 widest2_int_cst; 3621 widest2_int warg0 = widest2_int_cst (arg0); 3622 widest2_int warg1 = widest2_int_cst (arg1); 3623 widest2_int wres; 3624 switch (code) 3625 { 3626 case PLUS_EXPR: wres = wi::add (warg0, warg1); break; 3627 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break; 3628 case MULT_EXPR: wres = wi::mul (warg0, warg1); break; 3629 default: gcc_unreachable (); 3630 } 3631 signop sign = TYPE_SIGN (type); 3632 if (sign == UNSIGNED && wi::neg_p (wres)) 3633 return true; 3634 return wi::min_precision (wres, sign) > TYPE_PRECISION (type); 3635 } 3636 3637 /* Attempt to fold a call statement referenced by the statement iterator GSI. 3638 The statement may be replaced by another statement, e.g., if the call 3639 simplifies to a constant value. Return true if any changes were made. 3640 It is assumed that the operands have been previously folded. 
*/ 3641 3642 static bool 3643 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace) 3644 { 3645 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 3646 tree callee; 3647 bool changed = false; 3648 unsigned i; 3649 3650 /* Fold *& in call arguments. */ 3651 for (i = 0; i < gimple_call_num_args (stmt); ++i) 3652 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i))) 3653 { 3654 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false); 3655 if (tmp) 3656 { 3657 gimple_call_set_arg (stmt, i, tmp); 3658 changed = true; 3659 } 3660 } 3661 3662 /* Check for virtual calls that became direct calls. */ 3663 callee = gimple_call_fn (stmt); 3664 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF) 3665 { 3666 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE) 3667 { 3668 if (dump_file && virtual_method_call_p (callee) 3669 && !possible_polymorphic_call_target_p 3670 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl 3671 (OBJ_TYPE_REF_EXPR (callee))))) 3672 { 3673 fprintf (dump_file, 3674 "Type inheritance inconsistent devirtualization of "); 3675 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); 3676 fprintf (dump_file, " to "); 3677 print_generic_expr (dump_file, callee, TDF_SLIM); 3678 fprintf (dump_file, "\n"); 3679 } 3680 3681 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee)); 3682 changed = true; 3683 } 3684 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee)) 3685 { 3686 bool final; 3687 vec <cgraph_node *>targets 3688 = possible_polymorphic_call_targets (callee, stmt, &final); 3689 if (final && targets.length () <= 1 && dbg_cnt (devirt)) 3690 { 3691 tree lhs = gimple_call_lhs (stmt); 3692 if (dump_enabled_p ()) 3693 { 3694 location_t loc = gimple_location_safe (stmt); 3695 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc, 3696 "folding virtual function call to %s\n", 3697 targets.length () == 1 3698 ? targets[0]->name () 3699 : "__builtin_unreachable"); 3700 } 3701 if (targets.length () == 1) 3702 { 3703 tree fndecl = targets[0]->decl; 3704 gimple_call_set_fndecl (stmt, fndecl); 3705 changed = true; 3706 /* If changing the call to __cxa_pure_virtual 3707 or similar noreturn function, adjust gimple_call_fntype 3708 too. */ 3709 if (gimple_call_noreturn_p (stmt) 3710 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl))) 3711 && TYPE_ARG_TYPES (TREE_TYPE (fndecl)) 3712 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl))) 3713 == void_type_node)) 3714 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl)); 3715 /* If the call becomes noreturn, remove the lhs. */ 3716 if (lhs 3717 && gimple_call_noreturn_p (stmt) 3718 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt))) 3719 || should_remove_lhs_p (lhs))) 3720 { 3721 if (TREE_CODE (lhs) == SSA_NAME) 3722 { 3723 tree var = create_tmp_var (TREE_TYPE (lhs)); 3724 tree def = get_or_create_ssa_default_def (cfun, var); 3725 gimple *new_stmt = gimple_build_assign (lhs, def); 3726 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT); 3727 } 3728 gimple_call_set_lhs (stmt, NULL_TREE); 3729 } 3730 maybe_remove_unused_call_args (cfun, stmt); 3731 } 3732 else 3733 { 3734 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE); 3735 gimple *new_stmt = gimple_build_call (fndecl, 0); 3736 gimple_set_location (new_stmt, gimple_location (stmt)); 3737 /* If the call had a SSA name as lhs morph that into 3738 an uninitialized value. 
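		 E.g. (sketch): a call
		   lhs_1 = OBJ_TYPE_REF (...) (p_2);
		 with no remaining possible targets becomes
		   __builtin_unreachable ();
		 and lhs_1 is turned into the default definition of a
		 fresh anonymous temporary.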
*/ 3739 if (lhs && TREE_CODE (lhs) == SSA_NAME) 3740 { 3741 tree var = create_tmp_var (TREE_TYPE (lhs)); 3742 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var); 3743 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop (); 3744 set_ssa_default_def (cfun, var, lhs); 3745 } 3746 gimple_set_vuse (new_stmt, gimple_vuse (stmt)); 3747 gimple_set_vdef (new_stmt, gimple_vdef (stmt)); 3748 gsi_replace (gsi, new_stmt, false); 3749 return true; 3750 } 3751 } 3752 } 3753 } 3754 3755 /* Check for indirect calls that became direct calls, and then 3756 no longer require a static chain. */ 3757 if (gimple_call_chain (stmt)) 3758 { 3759 tree fn = gimple_call_fndecl (stmt); 3760 if (fn && !DECL_STATIC_CHAIN (fn)) 3761 { 3762 gimple_call_set_chain (stmt, NULL); 3763 changed = true; 3764 } 3765 else 3766 { 3767 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false); 3768 if (tmp) 3769 { 3770 gimple_call_set_chain (stmt, tmp); 3771 changed = true; 3772 } 3773 } 3774 } 3775 3776 if (inplace) 3777 return changed; 3778 3779 /* Check for builtins that CCP can handle using information not 3780 available in the generic fold routines. */ 3781 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)) 3782 { 3783 if (gimple_fold_builtin (gsi)) 3784 changed = true; 3785 } 3786 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD)) 3787 { 3788 changed |= targetm.gimple_fold_builtin (gsi); 3789 } 3790 else if (gimple_call_internal_p (stmt)) 3791 { 3792 enum tree_code subcode = ERROR_MARK; 3793 tree result = NULL_TREE; 3794 bool cplx_result = false; 3795 tree overflow = NULL_TREE; 3796 switch (gimple_call_internal_fn (stmt)) 3797 { 3798 case IFN_BUILTIN_EXPECT: 3799 result = fold_builtin_expect (gimple_location (stmt), 3800 gimple_call_arg (stmt, 0), 3801 gimple_call_arg (stmt, 1), 3802 gimple_call_arg (stmt, 2)); 3803 break; 3804 case IFN_UBSAN_OBJECT_SIZE: 3805 if (integer_all_onesp (gimple_call_arg (stmt, 2)) 3806 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST 3807 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST 3808 && tree_int_cst_le (gimple_call_arg (stmt, 1), 3809 gimple_call_arg (stmt, 2)))) 3810 { 3811 gsi_replace (gsi, gimple_build_nop (), false); 3812 unlink_stmt_vdef (stmt); 3813 release_defs (stmt); 3814 return true; 3815 } 3816 break; 3817 case IFN_GOACC_DIM_SIZE: 3818 case IFN_GOACC_DIM_POS: 3819 result = fold_internal_goacc_dim (stmt); 3820 break; 3821 case IFN_UBSAN_CHECK_ADD: 3822 subcode = PLUS_EXPR; 3823 break; 3824 case IFN_UBSAN_CHECK_SUB: 3825 subcode = MINUS_EXPR; 3826 break; 3827 case IFN_UBSAN_CHECK_MUL: 3828 subcode = MULT_EXPR; 3829 break; 3830 case IFN_ADD_OVERFLOW: 3831 subcode = PLUS_EXPR; 3832 cplx_result = true; 3833 break; 3834 case IFN_SUB_OVERFLOW: 3835 subcode = MINUS_EXPR; 3836 cplx_result = true; 3837 break; 3838 case IFN_MUL_OVERFLOW: 3839 subcode = MULT_EXPR; 3840 cplx_result = true; 3841 break; 3842 default: 3843 break; 3844 } 3845 if (subcode != ERROR_MARK) 3846 { 3847 tree arg0 = gimple_call_arg (stmt, 0); 3848 tree arg1 = gimple_call_arg (stmt, 1); 3849 tree type = TREE_TYPE (arg0); 3850 if (cplx_result) 3851 { 3852 tree lhs = gimple_call_lhs (stmt); 3853 if (lhs == NULL_TREE) 3854 type = NULL_TREE; 3855 else 3856 type = TREE_TYPE (TREE_TYPE (lhs)); 3857 } 3858 if (type == NULL_TREE) 3859 ; 3860 /* x = y + 0; x = y - 0; x = y * 0; */ 3861 else if (integer_zerop (arg1)) 3862 result = subcode == MULT_EXPR ? integer_zero_node : arg0; 3863 /* x = 0 + y; x = 0 * y; */ 3864 else if (subcode != MINUS_EXPR && integer_zerop (arg0)) 3865 result = subcode == MULT_EXPR ? 
integer_zero_node : arg1;
	  /* x = y - y;  */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y;  */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}


/* Return true if NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}

/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.
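     For instance (sketch), if the old statement already used ab_2(ab),
     a simplification that still refers to ab_2(ab) is acceptable, while
     introducing ab_2(ab) into a statement that never mentioned it
     is not.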
*/ 3972 if ((TREE_CODE (ops[0]) == SSA_NAME 3973 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]) 3974 && !has_use_on_stmt (ops[0], stmt)) 3975 || (ops[1] 3976 && TREE_CODE (ops[1]) == SSA_NAME 3977 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]) 3978 && !has_use_on_stmt (ops[1], stmt)) 3979 || (ops[2] 3980 && TREE_CODE (ops[2]) == SSA_NAME 3981 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2]) 3982 && !has_use_on_stmt (ops[2], stmt)) 3983 || (COMPARISON_CLASS_P (ops[0]) 3984 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME 3985 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0)) 3986 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt)) 3987 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME 3988 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1)) 3989 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt))))) 3990 return false; 3991 3992 /* Don't insert new statements when INPLACE is true, even if we could 3993 reuse STMT for the final statement. */ 3994 if (inplace && !gimple_seq_empty_p (*seq)) 3995 return false; 3996 3997 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt)) 3998 { 3999 gcc_assert (rcode.is_tree_code ()); 4000 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison 4001 /* GIMPLE_CONDs condition may not throw. */ 4002 && (!flag_exceptions 4003 || !cfun->can_throw_non_call_exceptions 4004 || !operation_could_trap_p (rcode, 4005 FLOAT_TYPE_P (TREE_TYPE (ops[0])), 4006 false, NULL_TREE))) 4007 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]); 4008 else if (rcode == SSA_NAME) 4009 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0], 4010 build_zero_cst (TREE_TYPE (ops[0]))); 4011 else if (rcode == INTEGER_CST) 4012 { 4013 if (integer_zerop (ops[0])) 4014 gimple_cond_make_false (cond_stmt); 4015 else 4016 gimple_cond_make_true (cond_stmt); 4017 } 4018 else if (!inplace) 4019 { 4020 tree res = maybe_push_res_to_seq (rcode, boolean_type_node, 4021 ops, seq); 4022 if (!res) 4023 return false; 4024 gimple_cond_set_condition (cond_stmt, NE_EXPR, res, 4025 build_zero_cst (TREE_TYPE (res))); 4026 } 4027 else 4028 return false; 4029 if (dump_file && (dump_flags & TDF_DETAILS)) 4030 { 4031 fprintf (dump_file, "gimple_simplified to "); 4032 if (!gimple_seq_empty_p (*seq)) 4033 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM); 4034 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 4035 0, TDF_SLIM); 4036 } 4037 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT); 4038 return true; 4039 } 4040 else if (is_gimple_assign (stmt) 4041 && rcode.is_tree_code ()) 4042 { 4043 if (!inplace 4044 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode)) 4045 { 4046 maybe_build_generic_op (rcode, 4047 TREE_TYPE (gimple_assign_lhs (stmt)), ops); 4048 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]); 4049 if (dump_file && (dump_flags & TDF_DETAILS)) 4050 { 4051 fprintf (dump_file, "gimple_simplified to "); 4052 if (!gimple_seq_empty_p (*seq)) 4053 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM); 4054 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 4055 0, TDF_SLIM); 4056 } 4057 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT); 4058 return true; 4059 } 4060 } 4061 else if (rcode.is_fn_code () 4062 && gimple_call_combined_fn (stmt) == rcode) 4063 { 4064 unsigned i; 4065 for (i = 0; i < gimple_call_num_args (stmt); ++i) 4066 { 4067 gcc_assert (ops[i] != NULL_TREE); 4068 gimple_call_set_arg (stmt, i, ops[i]); 4069 } 4070 if (i < 3) 4071 gcc_assert (ops[i] == NULL_TREE); 4072 if (dump_file && (dump_flags & TDF_DETAILS)) 4073 { 4074 fprintf (dump_file, 
"gimple_simplified to "); 4075 if (!gimple_seq_empty_p (*seq)) 4076 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM); 4077 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM); 4078 } 4079 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT); 4080 return true; 4081 } 4082 else if (!inplace) 4083 { 4084 if (gimple_has_lhs (stmt)) 4085 { 4086 tree lhs = gimple_get_lhs (stmt); 4087 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs), 4088 ops, seq, lhs)) 4089 return false; 4090 if (dump_file && (dump_flags & TDF_DETAILS)) 4091 { 4092 fprintf (dump_file, "gimple_simplified to "); 4093 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM); 4094 } 4095 gsi_replace_with_seq_vops (gsi, *seq); 4096 return true; 4097 } 4098 else 4099 gcc_unreachable (); 4100 } 4101 4102 return false; 4103 } 4104 4105 /* Canonicalize MEM_REFs invariant address operand after propagation. */ 4106 4107 static bool 4108 maybe_canonicalize_mem_ref_addr (tree *t) 4109 { 4110 bool res = false; 4111 4112 if (TREE_CODE (*t) == ADDR_EXPR) 4113 t = &TREE_OPERAND (*t, 0); 4114 4115 /* The C and C++ frontends use an ARRAY_REF for indexing with their 4116 generic vector extension. The actual vector referenced is 4117 view-converted to an array type for this purpose. If the index 4118 is constant the canonical representation in the middle-end is a 4119 BIT_FIELD_REF so re-write the former to the latter here. */ 4120 if (TREE_CODE (*t) == ARRAY_REF 4121 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR 4122 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST 4123 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))) 4124 { 4125 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)); 4126 if (VECTOR_TYPE_P (vtype)) 4127 { 4128 tree low = array_ref_low_bound (*t); 4129 if (TREE_CODE (low) == INTEGER_CST) 4130 { 4131 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1))) 4132 { 4133 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)), 4134 wi::to_widest (low)); 4135 idx = wi::mul (idx, wi::to_widest 4136 (TYPE_SIZE (TREE_TYPE (*t)))); 4137 widest_int ext 4138 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t)))); 4139 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype)))) 4140 { 4141 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF, 4142 TREE_TYPE (*t), 4143 TREE_OPERAND (TREE_OPERAND (*t, 0), 0), 4144 TYPE_SIZE (TREE_TYPE (*t)), 4145 wide_int_to_tree (sizetype, idx)); 4146 res = true; 4147 } 4148 } 4149 } 4150 } 4151 } 4152 4153 while (handled_component_p (*t)) 4154 t = &TREE_OPERAND (*t, 0); 4155 4156 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating 4157 of invariant addresses into a SSA name MEM_REF address. 
*/ 4158 if (TREE_CODE (*t) == MEM_REF 4159 || TREE_CODE (*t) == TARGET_MEM_REF) 4160 { 4161 tree addr = TREE_OPERAND (*t, 0); 4162 if (TREE_CODE (addr) == ADDR_EXPR 4163 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF 4164 || handled_component_p (TREE_OPERAND (addr, 0)))) 4165 { 4166 tree base; 4167 HOST_WIDE_INT coffset; 4168 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0), 4169 &coffset); 4170 if (!base) 4171 gcc_unreachable (); 4172 4173 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base); 4174 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR, 4175 TREE_OPERAND (*t, 1), 4176 size_int (coffset)); 4177 res = true; 4178 } 4179 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL 4180 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0))); 4181 } 4182 4183 /* Canonicalize back MEM_REFs to plain reference trees if the object 4184 accessed is a decl that has the same access semantics as the MEM_REF. */ 4185 if (TREE_CODE (*t) == MEM_REF 4186 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR 4187 && integer_zerop (TREE_OPERAND (*t, 1)) 4188 && MR_DEPENDENCE_CLIQUE (*t) == 0) 4189 { 4190 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0); 4191 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1)); 4192 if (/* Same volatile qualification. */ 4193 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl) 4194 /* Same TBAA behavior with -fstrict-aliasing. */ 4195 && !TYPE_REF_CAN_ALIAS_ALL (alias_type) 4196 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl)) 4197 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type))) 4198 /* Same alignment. */ 4199 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t)) 4200 /* We have to look out here to not drop a required conversion 4201 from the rhs to the lhs if *t appears on the lhs or vice-versa 4202 if it appears on the rhs. Thus require strict type 4203 compatibility. */ 4204 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl))) 4205 { 4206 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0); 4207 res = true; 4208 } 4209 } 4210 4211 /* Canonicalize TARGET_MEM_REF in particular with respect to 4212 the indexes becoming constant. */ 4213 else if (TREE_CODE (*t) == TARGET_MEM_REF) 4214 { 4215 tree tem = maybe_fold_tmr (*t); 4216 if (tem) 4217 { 4218 *t = tem; 4219 res = true; 4220 } 4221 } 4222 4223 return res; 4224 } 4225 4226 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument 4227 distinguishes both cases. */ 4228 4229 static bool 4230 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree)) 4231 { 4232 bool changed = false; 4233 gimple *stmt = gsi_stmt (*gsi); 4234 bool nowarning = gimple_no_warning_p (stmt); 4235 unsigned i; 4236 fold_defer_overflow_warnings (); 4237 4238 /* First do required canonicalization of [TARGET_]MEM_REF addresses 4239 after propagation. 4240 ??? This shouldn't be done in generic folding but in the 4241 propagation helpers which also know whether an address was 4242 propagated. 4243 Also canonicalize operand order. */ 4244 switch (gimple_code (stmt)) 4245 { 4246 case GIMPLE_ASSIGN: 4247 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS) 4248 { 4249 tree *rhs = gimple_assign_rhs1_ptr (stmt); 4250 if ((REFERENCE_CLASS_P (*rhs) 4251 || TREE_CODE (*rhs) == ADDR_EXPR) 4252 && maybe_canonicalize_mem_ref_addr (rhs)) 4253 changed = true; 4254 tree *lhs = gimple_assign_lhs_ptr (stmt); 4255 if (REFERENCE_CLASS_P (*lhs) 4256 && maybe_canonicalize_mem_ref_addr (lhs)) 4257 changed = true; 4258 } 4259 else 4260 { 4261 /* Canonicalize operand order. 
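	     E.g. (sketch): 1 + x_1 becomes x_1 + 1, and 5 > x_2
	     becomes x_2 < 5.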
*/ 4262 enum tree_code code = gimple_assign_rhs_code (stmt); 4263 if (TREE_CODE_CLASS (code) == tcc_comparison 4264 || commutative_tree_code (code) 4265 || commutative_ternary_tree_code (code)) 4266 { 4267 tree rhs1 = gimple_assign_rhs1 (stmt); 4268 tree rhs2 = gimple_assign_rhs2 (stmt); 4269 if (tree_swap_operands_p (rhs1, rhs2)) 4270 { 4271 gimple_assign_set_rhs1 (stmt, rhs2); 4272 gimple_assign_set_rhs2 (stmt, rhs1); 4273 if (TREE_CODE_CLASS (code) == tcc_comparison) 4274 gimple_assign_set_rhs_code (stmt, 4275 swap_tree_comparison (code)); 4276 changed = true; 4277 } 4278 } 4279 } 4280 break; 4281 case GIMPLE_CALL: 4282 { 4283 for (i = 0; i < gimple_call_num_args (stmt); ++i) 4284 { 4285 tree *arg = gimple_call_arg_ptr (stmt, i); 4286 if (REFERENCE_CLASS_P (*arg) 4287 && maybe_canonicalize_mem_ref_addr (arg)) 4288 changed = true; 4289 } 4290 tree *lhs = gimple_call_lhs_ptr (stmt); 4291 if (*lhs 4292 && REFERENCE_CLASS_P (*lhs) 4293 && maybe_canonicalize_mem_ref_addr (lhs)) 4294 changed = true; 4295 break; 4296 } 4297 case GIMPLE_ASM: 4298 { 4299 gasm *asm_stmt = as_a <gasm *> (stmt); 4300 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i) 4301 { 4302 tree link = gimple_asm_output_op (asm_stmt, i); 4303 tree op = TREE_VALUE (link); 4304 if (REFERENCE_CLASS_P (op) 4305 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link))) 4306 changed = true; 4307 } 4308 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i) 4309 { 4310 tree link = gimple_asm_input_op (asm_stmt, i); 4311 tree op = TREE_VALUE (link); 4312 if ((REFERENCE_CLASS_P (op) 4313 || TREE_CODE (op) == ADDR_EXPR) 4314 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link))) 4315 changed = true; 4316 } 4317 } 4318 break; 4319 case GIMPLE_DEBUG: 4320 if (gimple_debug_bind_p (stmt)) 4321 { 4322 tree *val = gimple_debug_bind_get_value_ptr (stmt); 4323 if (*val 4324 && (REFERENCE_CLASS_P (*val) 4325 || TREE_CODE (*val) == ADDR_EXPR) 4326 && maybe_canonicalize_mem_ref_addr (val)) 4327 changed = true; 4328 } 4329 break; 4330 case GIMPLE_COND: 4331 { 4332 /* Canonicalize operand order. */ 4333 tree lhs = gimple_cond_lhs (stmt); 4334 tree rhs = gimple_cond_rhs (stmt); 4335 if (tree_swap_operands_p (lhs, rhs)) 4336 { 4337 gcond *gc = as_a <gcond *> (stmt); 4338 gimple_cond_set_lhs (gc, rhs); 4339 gimple_cond_set_rhs (gc, lhs); 4340 gimple_cond_set_code (gc, 4341 swap_tree_comparison (gimple_cond_code (gc))); 4342 changed = true; 4343 } 4344 } 4345 default:; 4346 } 4347 4348 /* Dispatch to pattern-based folding. */ 4349 if (!inplace 4350 || is_gimple_assign (stmt) 4351 || gimple_code (stmt) == GIMPLE_COND) 4352 { 4353 gimple_seq seq = NULL; 4354 code_helper rcode; 4355 tree ops[3] = {}; 4356 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, 4357 valueize, valueize)) 4358 { 4359 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace)) 4360 changed = true; 4361 else 4362 gimple_seq_discard (seq); 4363 } 4364 } 4365 4366 stmt = gsi_stmt (*gsi); 4367 4368 /* Fold the main computation performed by the statement. */ 4369 switch (gimple_code (stmt)) 4370 { 4371 case GIMPLE_ASSIGN: 4372 { 4373 /* Try to canonicalize for boolean-typed X the comparisons 4374 X == 0, X == 1, X != 0, and X != 1. 
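	   E.g. (sketch): for one-bit boolean X, tmp_1 = (X == 0) becomes
	   tmp_1 = ~X; for a wider boolean type it becomes tmp_1 = X ^ 1;
	   and tmp_1 = (X != 0) becomes a plain copy tmp_1 = X.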
*/
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type.
	       Check that the second operand is an integer constant with
	       value one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X;
		   X == 1 and X != 0 is X.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X is the transformation
		   !X -> ~X valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.
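	 E.g. (sketch): an input or output operand *&v can be folded back
	 to the plain reference v (for inputs only when its constraint
	 allows memory).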
*/
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a <greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}

/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state resulting from, for example, constant propagation,
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state resulting from, for example, constant propagation,
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.
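     E.g. (sketch): given b_1 = x_2 < y_3, the expression b_1 is
     equivalent to the comparison x_2 < y_3, and so is b_1 != 0.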
*/
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.
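   E.g. (sketch): for var_1 defined by var_1 = x_2 < y_3, ANDing var_1
   with the comparison x_2 >= y_3 can fold to false.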
*/

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
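	 (For instance, as a sketch: when inner1 is a_1 < b_2 and the outer
	 comparison is a_1 >= b_2, the first partial result below is false,
	 so in the AND case the whole expression folds to false.)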
Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}

/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.
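	 E.g. (sketch): x < y AND x <= y combines to x < y.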
*/
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.
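	 E.g. (sketch): x < 3 AND x <= 5 folds to x < 3.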
*/
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (!
dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}

/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}

/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var == 0)) => true.
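     Likewise (var OR (var != 0)) simplifies to just var (sketch).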
*/ 5265 if (TREE_CODE (op2a) == SSA_NAME 5266 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE) 5267 { 5268 if ((code2 == NE_EXPR && integer_zerop (op2b)) 5269 || (code2 == EQ_EXPR && integer_nonzerop (op2b))) 5270 { 5271 true_test_var = op2a; 5272 if (var == true_test_var) 5273 return var; 5274 } 5275 else if ((code2 == EQ_EXPR && integer_zerop (op2b)) 5276 || (code2 == NE_EXPR && integer_nonzerop (op2b))) 5277 { 5278 false_test_var = op2a; 5279 if (var == false_test_var) 5280 return boolean_true_node; 5281 } 5282 } 5283 5284 /* If the definition is a comparison, recurse on it. */ 5285 if (TREE_CODE_CLASS (innercode) == tcc_comparison) 5286 { 5287 tree t = or_comparisons_1 (innercode, 5288 gimple_assign_rhs1 (stmt), 5289 gimple_assign_rhs2 (stmt), 5290 code2, 5291 op2a, 5292 op2b); 5293 if (t) 5294 return t; 5295 } 5296 5297 /* If the definition is an AND or OR expression, we may be able to 5298 simplify by reassociating. */ 5299 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE 5300 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)) 5301 { 5302 tree inner1 = gimple_assign_rhs1 (stmt); 5303 tree inner2 = gimple_assign_rhs2 (stmt); 5304 gimple *s; 5305 tree t; 5306 tree partial = NULL_TREE; 5307 bool is_or = (innercode == BIT_IOR_EXPR); 5308 5309 /* Check for boolean identities that don't require recursive examination 5310 of inner1/inner2: 5311 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var 5312 inner1 OR (inner1 AND inner2) => inner1 5313 !inner1 OR (inner1 OR inner2) => true 5314 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 5315 */ 5316 if (inner1 == true_test_var) 5317 return (is_or ? var : inner1); 5318 else if (inner2 == true_test_var) 5319 return (is_or ? var : inner2); 5320 else if (inner1 == false_test_var) 5321 return (is_or 5322 ? boolean_true_node 5323 : or_var_with_comparison (inner2, false, code2, op2a, op2b)); 5324 else if (inner2 == false_test_var) 5325 return (is_or 5326 ? boolean_true_node 5327 : or_var_with_comparison (inner1, false, code2, op2a, op2b)); 5328 5329 /* Next, redistribute/reassociate the OR across the inner tests. 5330 Compute the first partial result, (inner1 OR (op2a code op2b)) */ 5331 if (TREE_CODE (inner1) == SSA_NAME 5332 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1)) 5333 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison 5334 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s), 5335 gimple_assign_rhs1 (s), 5336 gimple_assign_rhs2 (s), 5337 code2, op2a, op2b))) 5338 { 5339 /* Handle the OR case, where we are reassociating: 5340 (inner1 OR inner2) OR (op2a code2 op2b) 5341 => (t OR inner2) 5342 If the partial result t is a constant, we win. Otherwise 5343 continue on to try reassociating with the other inner test. */ 5344 if (is_or) 5345 { 5346 if (integer_onep (t)) 5347 return boolean_true_node; 5348 else if (integer_zerop (t)) 5349 return inner2; 5350 } 5351 5352 /* Handle the AND case, where we are redistributing: 5353 (inner1 AND inner2) OR (op2a code2 op2b) 5354 => (t AND (inner2 OR (op2a code op2b))) */ 5355 else if (integer_zerop (t)) 5356 return boolean_false_node; 5357 5358 /* Save partial result for later. 
*/ 5359 partial = t; 5360 } 5361 5362 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */ 5363 if (TREE_CODE (inner2) == SSA_NAME 5364 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2)) 5365 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison 5366 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s), 5367 gimple_assign_rhs1 (s), 5368 gimple_assign_rhs2 (s), 5369 code2, op2a, op2b))) 5370 { 5371 /* Handle the OR case, where we are reassociating: 5372 (inner1 OR inner2) OR (op2a code2 op2b) 5373 => (inner1 OR t) 5374 => (t OR partial) */ 5375 if (is_or) 5376 { 5377 if (integer_zerop (t)) 5378 return inner1; 5379 else if (integer_onep (t)) 5380 return boolean_true_node; 5381 /* If both are the same, we can apply the identity 5382 (x OR x) == x. */ 5383 else if (partial && same_bool_result_p (t, partial)) 5384 return t; 5385 } 5386 5387 /* Handle the AND case, where we are redistributing: 5388 (inner1 AND inner2) OR (op2a code2 op2b) 5389 => (t AND (inner1 OR (op2a code2 op2b))) 5390 => (t AND partial) */ 5391 else 5392 { 5393 if (integer_zerop (t)) 5394 return boolean_false_node; 5395 else if (partial) 5396 { 5397 /* We already got a simplification for the other 5398 operand to the redistributed AND expression. The 5399 interesting case is when at least one is true. 5400 Or, if both are the same, we can apply the identity 5401 (x AND x) == x. */ 5402 if (integer_onep (partial)) 5403 return t; 5404 else if (integer_onep (t)) 5405 return partial; 5406 else if (same_bool_result_p (t, partial)) 5407 return t; 5408 } 5409 } 5410 } 5411 } 5412 return NULL_TREE; 5413 } 5414 5415 /* Try to simplify the OR of two comparisons defined by 5416 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively. 5417 If this can be done without constructing an intermediate value, 5418 return the resulting tree; otherwise NULL_TREE is returned. 5419 This function is deliberately asymmetric as it recurses on SSA_DEFs 5420 in the first comparison but not the second. */ 5421 5422 static tree 5423 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, 5424 enum tree_code code2, tree op2a, tree op2b) 5425 { 5426 tree truth_type = truth_type_for (TREE_TYPE (op1a)); 5427 5428 /* First check for ((x CODE1 y) OR (x CODE2 y)). */ 5429 if (operand_equal_p (op1a, op2a, 0) 5430 && operand_equal_p (op1b, op2b, 0)) 5431 { 5432 /* Result will be either NULL_TREE, or a combined comparison. */ 5433 tree t = combine_comparisons (UNKNOWN_LOCATION, 5434 TRUTH_ORIF_EXPR, code1, code2, 5435 truth_type, op1a, op1b); 5436 if (t) 5437 return t; 5438 } 5439 5440 /* Likewise the swapped case of the above. */ 5441 if (operand_equal_p (op1a, op2b, 0) 5442 && operand_equal_p (op1b, op2a, 0)) 5443 { 5444 /* Result will be either NULL_TREE, or a combined comparison. */ 5445 tree t = combine_comparisons (UNKNOWN_LOCATION, 5446 TRUTH_ORIF_EXPR, code1, 5447 swap_tree_comparison (code2), 5448 truth_type, op1a, op1b); 5449 if (t) 5450 return t; 5451 } 5452 5453 /* If both comparisons are of the same value against constants, we might 5454 be able to merge them. */ 5455 if (operand_equal_p (op1a, op2a, 0) 5456 && TREE_CODE (op1b) == INTEGER_CST 5457 && TREE_CODE (op2b) == INTEGER_CST) 5458 { 5459 int cmp = tree_int_cst_compare (op1b, op2b); 5460 5461 /* If we have (op1a != op1b), we should either be able to 5462 return that or TRUE, depending on whether the constant op1b 5463 also satisfies the other comparison against op2b. 
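	 E.g. (sketch): x != 3 OR x < 5 is always true, because the only
	 excluded value, 3, still satisfies x < 5.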
*/
5464 if (code1 == NE_EXPR)
5465 {
5466 bool done = true;
5467 bool val;
5468 switch (code2)
5469 {
5470 case EQ_EXPR: val = (cmp == 0); break;
5471 case NE_EXPR: val = (cmp != 0); break;
5472 case LT_EXPR: val = (cmp < 0); break;
5473 case GT_EXPR: val = (cmp > 0); break;
5474 case LE_EXPR: val = (cmp <= 0); break;
5475 case GE_EXPR: val = (cmp >= 0); break;
5476 default: done = false;
5477 }
5478 if (done)
5479 {
5480 if (val)
5481 return boolean_true_node;
5482 else
5483 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5484 }
5485 }
5486 /* Likewise if the second comparison is a != comparison. */
5487 else if (code2 == NE_EXPR)
5488 {
5489 bool done = true;
5490 bool val;
5491 switch (code1)
5492 {
5493 case EQ_EXPR: val = (cmp == 0); break;
5494 case NE_EXPR: val = (cmp != 0); break;
5495 case LT_EXPR: val = (cmp > 0); break;
5496 case GT_EXPR: val = (cmp < 0); break;
5497 case LE_EXPR: val = (cmp >= 0); break;
5498 case GE_EXPR: val = (cmp <= 0); break;
5499 default: done = false;
5500 }
5501 if (done)
5502 {
5503 if (val)
5504 return boolean_true_node;
5505 else
5506 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5507 }
5508 }
5509
5510 /* See if an equality test is redundant with the other comparison. */
5511 else if (code1 == EQ_EXPR)
5512 {
5513 bool val;
5514 switch (code2)
5515 {
5516 case EQ_EXPR: val = (cmp == 0); break;
5517 case NE_EXPR: val = (cmp != 0); break;
5518 case LT_EXPR: val = (cmp < 0); break;
5519 case GT_EXPR: val = (cmp > 0); break;
5520 case LE_EXPR: val = (cmp <= 0); break;
5521 case GE_EXPR: val = (cmp >= 0); break;
5522 default:
5523 val = false;
5524 }
5525 if (val)
5526 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5527 }
5528 else if (code2 == EQ_EXPR)
5529 {
5530 bool val;
5531 switch (code1)
5532 {
5533 case EQ_EXPR: val = (cmp == 0); break;
5534 case NE_EXPR: val = (cmp != 0); break;
5535 case LT_EXPR: val = (cmp > 0); break;
5536 case GT_EXPR: val = (cmp < 0); break;
5537 case LE_EXPR: val = (cmp >= 0); break;
5538 case GE_EXPR: val = (cmp <= 0); break;
5539 default:
5540 val = false;
5541 }
5542 if (val)
5543 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5544 }
5545
5546 /* Choose the less restrictive of two < or <= comparisons. */
5547 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5548 && (code2 == LT_EXPR || code2 == LE_EXPR))
5549 {
5550 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5551 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5552 else
5553 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5554 }
5555
5556 /* Likewise choose the less restrictive of two > or >= comparisons. */
5557 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5558 && (code2 == GT_EXPR || code2 == GE_EXPR))
5559 {
5560 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5561 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5562 else
5563 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5564 }
5565
5566 /* Check for singleton ranges. */
5567 else if (cmp == 0
5568 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5569 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5570 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5571
5572 /* Check for less/greater pairs that don't restrict the range at all.
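   For instance, (x < 5) OR (x > 3) holds for every x. A minimal
   standalone sketch of that identity, independent of any GCC
   internals (the bounds 5 and 3 are hypothetical):

     #include <assert.h>

     int
     main (void)
     {
       for (int x = -1000; x <= 1000; x++)
	 // Never fires; the disjunction excludes no value, matching
	 // the boolean_true_node result returned below.
	 assert (x < 5 || x > 3);
       return 0;
     }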
*/
5573 else if (cmp >= 0
5574 && (code1 == LT_EXPR || code1 == LE_EXPR)
5575 && (code2 == GT_EXPR || code2 == GE_EXPR))
5576 return boolean_true_node;
5577 else if (cmp <= 0
5578 && (code1 == GT_EXPR || code1 == GE_EXPR)
5579 && (code2 == LT_EXPR || code2 == LE_EXPR))
5580 return boolean_true_node;
5581 }
5582
5583 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5584 NAME's definition is a truth value. See if there are any simplifications
5585 that can be done against the NAME's definition. */
5586 if (TREE_CODE (op1a) == SSA_NAME
5587 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5588 && (integer_zerop (op1b) || integer_onep (op1b)))
5589 {
5590 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5591 || (code1 == NE_EXPR && integer_onep (op1b)));
5592 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5593 switch (gimple_code (stmt))
5594 {
5595 case GIMPLE_ASSIGN:
5596 /* Try to simplify by copy-propagating the definition. */
5597 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5598
5599 case GIMPLE_PHI:
5600 /* If every argument to the PHI produces the same result when
5601 ORed with the second comparison, we win.
5602 Do not do this unless the type is bool since we need a bool
5603 result here anyway. */
5604 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5605 {
5606 tree result = NULL_TREE;
5607 unsigned i;
5608 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5609 {
5610 tree arg = gimple_phi_arg_def (stmt, i);
5611
5612 /* If this PHI has itself as an argument, ignore it.
5613 If all the other args produce the same result,
5614 we're still OK. */
5615 if (arg == gimple_phi_result (stmt))
5616 continue;
5617 else if (TREE_CODE (arg) == INTEGER_CST)
5618 {
5619 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5620 {
5621 if (!result)
5622 result = boolean_true_node;
5623 else if (!integer_onep (result))
5624 return NULL_TREE;
5625 }
5626 else if (!result)
5627 result = fold_build2 (code2, boolean_type_node,
5628 op2a, op2b);
5629 else if (!same_bool_comparison_p (result,
5630 code2, op2a, op2b))
5631 return NULL_TREE;
5632 }
5633 else if (TREE_CODE (arg) == SSA_NAME
5634 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5635 {
5636 tree temp;
5637 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5638 /* In simple cases we can look through PHI nodes,
5639 but we have to be careful with loops.
5640 See PR49073. */
5641 if (! dom_info_available_p (CDI_DOMINATORS)
5642 || gimple_bb (def_stmt) == gimple_bb (stmt)
5643 || dominated_by_p (CDI_DOMINATORS,
5644 gimple_bb (def_stmt),
5645 gimple_bb (stmt)))
5646 return NULL_TREE;
5647 temp = or_var_with_comparison (arg, invert, code2,
5648 op2a, op2b);
5649 if (!temp)
5650 return NULL_TREE;
5651 else if (!result)
5652 result = temp;
5653 else if (!same_bool_result_p (result, temp))
5654 return NULL_TREE;
5655 }
5656 else
5657 return NULL_TREE;
5658 }
5659 return result;
5660 }
5661
5662 default:
5663 break;
5664 }
5665 }
5666 return NULL_TREE;
5667 }
5668
5669 /* Try to simplify the OR of two comparisons, specified by
5670 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5671 If this can be simplified to a single expression (without requiring
5672 introducing more SSA variables to hold intermediate values),
5673 return the resulting tree. Otherwise return NULL_TREE.
5674 If the result expression is non-null, it has boolean type.
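   As a usage sketch (a hypothetical caller; a is an SSA name of
   integral type), folding (a < 10) OR (a < 20):

     tree t = maybe_fold_or_comparisons (LT_EXPR, a,
					 build_int_cst (TREE_TYPE (a), 10),
					 LT_EXPR, a,
					 build_int_cst (TREE_TYPE (a), 20));
     // If non-NULL, t is the less restrictive test, here a < 20.

   Real callers pass operands taken from existing comparison
   statements.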
*/ 5675 5676 tree 5677 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b, 5678 enum tree_code code2, tree op2a, tree op2b) 5679 { 5680 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b); 5681 if (t) 5682 return t; 5683 else 5684 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b); 5685 } 5686 5687 5688 /* Fold STMT to a constant using VALUEIZE to valueize SSA names. 5689 5690 Either NULL_TREE, a simplified but non-constant or a constant 5691 is returned. 5692 5693 ??? This should go into a gimple-fold-inline.h file to be eventually 5694 privatized with the single valueize function used in the various TUs 5695 to avoid the indirect function call overhead. */ 5696 5697 tree 5698 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree), 5699 tree (*gvalueize) (tree)) 5700 { 5701 code_helper rcode; 5702 tree ops[3] = {}; 5703 /* ??? The SSA propagators do not correctly deal with following SSA use-def 5704 edges if there are intermediate VARYING defs. For this reason 5705 do not follow SSA edges here even though SCCVN can technically 5706 just deal fine with that. */ 5707 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize)) 5708 { 5709 tree res = NULL_TREE; 5710 if (gimple_simplified_result_is_gimple_val (rcode, ops)) 5711 res = ops[0]; 5712 else if (mprts_hook) 5713 res = mprts_hook (rcode, gimple_expr_type (stmt), ops); 5714 if (res) 5715 { 5716 if (dump_file && dump_flags & TDF_DETAILS) 5717 { 5718 fprintf (dump_file, "Match-and-simplified "); 5719 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM); 5720 fprintf (dump_file, " to "); 5721 print_generic_expr (dump_file, res, 0); 5722 fprintf (dump_file, "\n"); 5723 } 5724 return res; 5725 } 5726 } 5727 5728 location_t loc = gimple_location (stmt); 5729 switch (gimple_code (stmt)) 5730 { 5731 case GIMPLE_ASSIGN: 5732 { 5733 enum tree_code subcode = gimple_assign_rhs_code (stmt); 5734 5735 switch (get_gimple_rhs_class (subcode)) 5736 { 5737 case GIMPLE_SINGLE_RHS: 5738 { 5739 tree rhs = gimple_assign_rhs1 (stmt); 5740 enum tree_code_class kind = TREE_CODE_CLASS (subcode); 5741 5742 if (TREE_CODE (rhs) == SSA_NAME) 5743 { 5744 /* If the RHS is an SSA_NAME, return its known constant value, 5745 if any. */ 5746 return (*valueize) (rhs); 5747 } 5748 /* Handle propagating invariant addresses into address 5749 operations. 
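   For example, if the statement is tmp_1 = &p_2->f and p_2
   valueizes to &global, the base becomes the DECL global and we can
   return the invariant address &global.f via
   build_invariant_address below. The names p_2 and global are
   illustrative only.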
*/
5750 else if (TREE_CODE (rhs) == ADDR_EXPR
5751 && !is_gimple_min_invariant (rhs))
5752 {
5753 HOST_WIDE_INT offset = 0;
5754 tree base;
5755 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5756 &offset,
5757 valueize);
5758 if (base
5759 && (CONSTANT_CLASS_P (base)
5760 || decl_address_invariant_p (base)))
5761 return build_invariant_address (TREE_TYPE (rhs),
5762 base, offset);
5763 }
5764 else if (TREE_CODE (rhs) == CONSTRUCTOR
5765 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5766 && (CONSTRUCTOR_NELTS (rhs)
5767 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5768 {
5769 unsigned i;
5770 tree val, *vec;
5771
5772 vec = XALLOCAVEC (tree,
5773 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5774 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5775 {
5776 val = (*valueize) (val);
5777 if (TREE_CODE (val) == INTEGER_CST
5778 || TREE_CODE (val) == REAL_CST
5779 || TREE_CODE (val) == FIXED_CST)
5780 vec[i] = val;
5781 else
5782 return NULL_TREE;
5783 }
5784
5785 return build_vector (TREE_TYPE (rhs), vec);
5786 }
5787 if (subcode == OBJ_TYPE_REF)
5788 {
5789 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5790 /* If callee is constant, we can fold away the wrapper. */
5791 if (is_gimple_min_invariant (val))
5792 return val;
5793 }
5794
5795 if (kind == tcc_reference)
5796 {
5797 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5798 || TREE_CODE (rhs) == REALPART_EXPR
5799 || TREE_CODE (rhs) == IMAGPART_EXPR)
5800 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5801 {
5802 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5803 return fold_unary_loc (EXPR_LOCATION (rhs),
5804 TREE_CODE (rhs),
5805 TREE_TYPE (rhs), val);
5806 }
5807 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5808 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5809 {
5810 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5811 return fold_ternary_loc (EXPR_LOCATION (rhs),
5812 TREE_CODE (rhs),
5813 TREE_TYPE (rhs), val,
5814 TREE_OPERAND (rhs, 1),
5815 TREE_OPERAND (rhs, 2));
5816 }
5817 else if (TREE_CODE (rhs) == MEM_REF
5818 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5819 {
5820 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5821 if (TREE_CODE (val) == ADDR_EXPR
5822 && is_gimple_min_invariant (val))
5823 {
5824 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5825 unshare_expr (val),
5826 TREE_OPERAND (rhs, 1));
5827 if (tem)
5828 rhs = tem;
5829 }
5830 }
5831 return fold_const_aggregate_ref_1 (rhs, valueize);
5832 }
5833 else if (kind == tcc_declaration)
5834 return get_symbol_constant_value (rhs);
5835 return rhs;
5836 }
5837
5838 case GIMPLE_UNARY_RHS:
5839 return NULL_TREE;
5840
5841 case GIMPLE_BINARY_RHS:
5842 /* Translate &x + CST into an invariant form suitable for
5843 further propagation. */
5844 if (subcode == POINTER_PLUS_EXPR)
5845 {
5846 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5847 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5848 if (TREE_CODE (op0) == ADDR_EXPR
5849 && TREE_CODE (op1) == INTEGER_CST)
5850 {
5851 tree off = fold_convert (ptr_type_node, op1);
5852 return build_fold_addr_expr_loc
5853 (loc,
5854 fold_build2 (MEM_REF,
5855 TREE_TYPE (TREE_TYPE (op0)),
5856 unshare_expr (op0), off));
5857 }
5858 }
5859 /* Canonicalize bool != 0 and bool == 0 appearing after
5860 valueization. While gimple_simplify handles this
5861 it can get confused by the ~X == 1 -> X == 0 transform
5862 which we can't reduce to an SSA name or a constant
5863 (and we have no way to tell gimple_simplify to not
5864 consider those transforms in the first place).
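   A standalone illustration of the identity relied upon here,
   assuming a C99 _Bool operand (not GCC internals):

     #include <assert.h>
     #include <stdbool.h>

     int
     main (void)
     {
       for (int i = 0; i <= 1; i++)
	 {
	   bool b = i;
	   // bool != 0 and bool == 1 are both just the bool itself.
	   assert ((b != 0) == b);
	   assert ((b == 1) == b);
	 }
       return 0;
     }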
*/ 5865 else if (subcode == EQ_EXPR 5866 || subcode == NE_EXPR) 5867 { 5868 tree lhs = gimple_assign_lhs (stmt); 5869 tree op0 = gimple_assign_rhs1 (stmt); 5870 if (useless_type_conversion_p (TREE_TYPE (lhs), 5871 TREE_TYPE (op0))) 5872 { 5873 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt)); 5874 op0 = (*valueize) (op0); 5875 if (TREE_CODE (op0) == INTEGER_CST) 5876 std::swap (op0, op1); 5877 if (TREE_CODE (op1) == INTEGER_CST 5878 && ((subcode == NE_EXPR && integer_zerop (op1)) 5879 || (subcode == EQ_EXPR && integer_onep (op1)))) 5880 return op0; 5881 } 5882 } 5883 return NULL_TREE; 5884 5885 case GIMPLE_TERNARY_RHS: 5886 { 5887 /* Handle ternary operators that can appear in GIMPLE form. */ 5888 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt)); 5889 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt)); 5890 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt)); 5891 return fold_ternary_loc (loc, subcode, 5892 gimple_expr_type (stmt), op0, op1, op2); 5893 } 5894 5895 default: 5896 gcc_unreachable (); 5897 } 5898 } 5899 5900 case GIMPLE_CALL: 5901 { 5902 tree fn; 5903 gcall *call_stmt = as_a <gcall *> (stmt); 5904 5905 if (gimple_call_internal_p (stmt)) 5906 { 5907 enum tree_code subcode = ERROR_MARK; 5908 switch (gimple_call_internal_fn (stmt)) 5909 { 5910 case IFN_UBSAN_CHECK_ADD: 5911 subcode = PLUS_EXPR; 5912 break; 5913 case IFN_UBSAN_CHECK_SUB: 5914 subcode = MINUS_EXPR; 5915 break; 5916 case IFN_UBSAN_CHECK_MUL: 5917 subcode = MULT_EXPR; 5918 break; 5919 case IFN_BUILTIN_EXPECT: 5920 { 5921 tree arg0 = gimple_call_arg (stmt, 0); 5922 tree op0 = (*valueize) (arg0); 5923 if (TREE_CODE (op0) == INTEGER_CST) 5924 return op0; 5925 return NULL_TREE; 5926 } 5927 default: 5928 return NULL_TREE; 5929 } 5930 tree arg0 = gimple_call_arg (stmt, 0); 5931 tree arg1 = gimple_call_arg (stmt, 1); 5932 tree op0 = (*valueize) (arg0); 5933 tree op1 = (*valueize) (arg1); 5934 5935 if (TREE_CODE (op0) != INTEGER_CST 5936 || TREE_CODE (op1) != INTEGER_CST) 5937 { 5938 switch (subcode) 5939 { 5940 case MULT_EXPR: 5941 /* x * 0 = 0 * x = 0 without overflow. */ 5942 if (integer_zerop (op0) || integer_zerop (op1)) 5943 return build_zero_cst (TREE_TYPE (arg0)); 5944 break; 5945 case MINUS_EXPR: 5946 /* y - y = 0 without overflow. */ 5947 if (operand_equal_p (op0, op1, 0)) 5948 return build_zero_cst (TREE_TYPE (arg0)); 5949 break; 5950 default: 5951 break; 5952 } 5953 } 5954 tree res 5955 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1); 5956 if (res 5957 && TREE_CODE (res) == INTEGER_CST 5958 && !TREE_OVERFLOW (res)) 5959 return res; 5960 return NULL_TREE; 5961 } 5962 5963 fn = (*valueize) (gimple_call_fn (stmt)); 5964 if (TREE_CODE (fn) == ADDR_EXPR 5965 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL 5966 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)) 5967 && gimple_builtin_call_types_compatible_p (stmt, 5968 TREE_OPERAND (fn, 0))) 5969 { 5970 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt)); 5971 tree retval; 5972 unsigned i; 5973 for (i = 0; i < gimple_call_num_args (stmt); ++i) 5974 args[i] = (*valueize) (gimple_call_arg (stmt, i)); 5975 retval = fold_builtin_call_array (loc, 5976 gimple_call_return_type (call_stmt), 5977 fn, gimple_call_num_args (stmt), args); 5978 if (retval) 5979 { 5980 /* fold_call_expr wraps the result inside a NOP_EXPR. 
*/
5981 STRIP_NOPS (retval);
5982 retval = fold_convert (gimple_call_return_type (call_stmt),
5983 retval);
5984 }
5985 return retval;
5986 }
5987 return NULL_TREE;
5988 }
5989
5990 default:
5991 return NULL_TREE;
5992 }
5993 }
5994
5995 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5996 Returns NULL_TREE if folding to a constant is not possible, otherwise
5997 returns a constant according to is_gimple_min_invariant. */
5998
5999 tree
6000 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6001 {
6002 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6003 if (res && is_gimple_min_invariant (res))
6004 return res;
6005 return NULL_TREE;
6006 }
6007
6008
6009 /* The following set of functions is supposed to fold references using
6010 their constant initializers. */
6011
6012 /* See if we can find the constructor defining the value of BASE.
6013 When we know the constructor with constant offset (such as
6014 base is array[40] and we do know the constructor of array), then
6015 BIT_OFFSET is adjusted accordingly.
6016
6017 As a special case, return error_mark_node when the constructor
6018 is not explicitly available, but it is known to be zero
6019 such as 'static const int a;'. */
6020 static tree
6021 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
6022 tree (*valueize)(tree))
6023 {
6024 HOST_WIDE_INT bit_offset2, size, max_size;
6025 bool reverse;
6026
6027 if (TREE_CODE (base) == MEM_REF)
6028 {
6029 if (!integer_zerop (TREE_OPERAND (base, 1)))
6030 {
6031 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
6032 return NULL_TREE;
6033 *bit_offset += (mem_ref_offset (base).to_short_addr ()
6034 * BITS_PER_UNIT);
6035 }
6036
6037 if (valueize
6038 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6039 base = valueize (TREE_OPERAND (base, 0));
6040 if (!base || TREE_CODE (base) != ADDR_EXPR)
6041 return NULL_TREE;
6042 base = TREE_OPERAND (base, 0);
6043 }
6044 else if (valueize
6045 && TREE_CODE (base) == SSA_NAME)
6046 base = valueize (base);
6047
6048 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6049 DECL_INITIAL. If BASE is a nested reference into another
6050 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6051 the inner reference. */
6052 switch (TREE_CODE (base))
6053 {
6054 case VAR_DECL:
6055 case CONST_DECL:
6056 {
6057 tree init = ctor_for_folding (base);
6058
6059 /* Our semantics are the exact opposite of ctor_for_folding;
6060 NULL means unknown, while error_mark_node is 0. */
6061 if (init == error_mark_node)
6062 return NULL_TREE;
6063 if (!init)
6064 return error_mark_node;
6065 return init;
6066 }
6067
6068 case VIEW_CONVERT_EXPR:
6069 return get_base_constructor (TREE_OPERAND (base, 0),
6070 bit_offset, valueize);
6071
6072 case ARRAY_REF:
6073 case COMPONENT_REF:
6074 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6075 &reverse);
6076 if (max_size == -1 || size != max_size)
6077 return NULL_TREE;
6078 *bit_offset += bit_offset2;
6079 return get_base_constructor (base, bit_offset, valueize);
6080
6081 case CONSTRUCTOR:
6082 return base;
6083
6084 default:
6085 if (CONSTANT_CLASS_P (base))
6086 return base;
6087
6088 return NULL_TREE;
6089 }
6090 }
6091
6092 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6093 SIZE to the memory at bit OFFSET.
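   For example, given the hypothetical initializer

     static const int a[4] = { 10, 20, 30, 40 };

   a read of type int (SIZE == 32) at bit OFFSET 64 computes
   access_index 2 and folds to 30, assuming 32-bit int and 8-bit
   units.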
*/
6094
6095 static tree
6096 fold_array_ctor_reference (tree type, tree ctor,
6097 unsigned HOST_WIDE_INT offset,
6098 unsigned HOST_WIDE_INT size,
6099 tree from_decl)
6100 {
6101 offset_int low_bound;
6102 offset_int elt_size;
6103 offset_int access_index;
6104 tree domain_type = NULL_TREE;
6105 HOST_WIDE_INT inner_offset;
6106
6107 /* Compute low bound and elt size. */
6108 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6109 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6110 if (domain_type && TYPE_MIN_VALUE (domain_type))
6111 {
6112 /* Static constructors for variably sized objects make no sense. */
6113 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6114 return NULL_TREE;
6115 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6116 }
6117 else
6118 low_bound = 0;
6119 /* Static constructors for variably sized objects make no sense. */
6120 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6121 return NULL_TREE;
6122 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6123
6124 /* We can handle only constantly sized accesses that are known to not
6125 be larger than size of array element. */
6126 if (!TYPE_SIZE_UNIT (type)
6127 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6128 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6129 || elt_size == 0)
6130 return NULL_TREE;
6131
6132 /* Compute the array index we look for. */
6133 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6134 elt_size);
6135 access_index += low_bound;
6136
6137 /* And offset within the access. */
6138 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6139
6140 /* See if the array field is large enough to span the whole access. We do
6141 not care to fold accesses spanning multiple array indexes. */
6142 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6143 return NULL_TREE;
6144 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6145 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6146
6147 /* When memory is not explicitly mentioned in the constructor,
6148 it is 0 (or out of range). */
6149 return build_zero_cst (type);
6150 }
6151
6152 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6153 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6154
6155 static tree
6156 fold_nonarray_ctor_reference (tree type, tree ctor,
6157 unsigned HOST_WIDE_INT offset,
6158 unsigned HOST_WIDE_INT size,
6159 tree from_decl)
6160 {
6161 unsigned HOST_WIDE_INT cnt;
6162 tree cfield, cval;
6163
6164 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6165 cval)
6166 {
6167 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6168 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6169 tree field_size = DECL_SIZE (cfield);
6170 offset_int bitoffset;
6171 offset_int bitoffset_end, access_end;
6172
6173 /* Variable sized objects in static constructors make no sense,
6174 but field_size can be NULL for flexible array members. */
6175 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6176 && TREE_CODE (byte_offset) == INTEGER_CST
6177 && (field_size != NULL_TREE
6178 ? TREE_CODE (field_size) == INTEGER_CST
6179 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6180
6181 /* Compute bit offset of the field. */
6182 bitoffset = (wi::to_offset (field_offset)
6183 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6184 /* Compute bit offset where the field ends.
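   As a worked example with a hypothetical layout: a field with
   DECL_FIELD_OFFSET 4 and DECL_FIELD_BIT_OFFSET 3 starts at bit
   3 + (4 << LOG2_BITS_PER_UNIT) == 35 on 8-bit-unit targets, and
   with DECL_SIZE 5 it ends at bit 40.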
*/
6185 if (field_size != NULL_TREE)
6186 bitoffset_end = bitoffset + wi::to_offset (field_size);
6187 else
6188 bitoffset_end = 0;
6189
6190 access_end = offset_int (offset) + size;
6191
6192 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6193 [BITOFFSET, BITOFFSET_END)? */
6194 if (wi::cmps (access_end, bitoffset) > 0
6195 && (field_size == NULL_TREE
6196 || wi::lts_p (offset, bitoffset_end)))
6197 {
6198 offset_int inner_offset = offset_int (offset) - bitoffset;
6199 /* We do have overlap. Now see if the field is large enough to
6200 cover the access. Give up for accesses spanning multiple
6201 fields. */
6202 if (wi::cmps (access_end, bitoffset_end) > 0)
6203 return NULL_TREE;
6204 if (offset < bitoffset)
6205 return NULL_TREE;
6206 return fold_ctor_reference (type, cval,
6207 inner_offset.to_uhwi (), size,
6208 from_decl);
6209 }
6210 }
6211 /* When memory is not explicitly mentioned in the constructor, it is 0. */
6212 return build_zero_cst (type);
6213 }
6214
6215 /* CTOR is the value initializing memory. Fold a reference of type TYPE and
6216 size SIZE to the memory at bit OFFSET. */
6217
6218 tree
6219 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
6220 unsigned HOST_WIDE_INT size, tree from_decl)
6221 {
6222 tree ret;
6223
6224 /* We found the field with exact match. */
6225 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6226 && !offset)
6227 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6228
6229 /* We are at the end of walk, see if we can view convert the
6230 result. */
6231 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6232 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6233 && !compare_tree_int (TYPE_SIZE (type), size)
6234 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6235 {
6236 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6237 if (ret)
6238 {
6239 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6240 if (ret)
6241 STRIP_USELESS_TYPE_CONVERSION (ret);
6242 }
6243 return ret;
6244 }
6245 /* For constants and byte-aligned/sized reads try to go through
6246 native_encode/interpret. */
6247 if (CONSTANT_CLASS_P (ctor)
6248 && BITS_PER_UNIT == 8
6249 && offset % BITS_PER_UNIT == 0
6250 && size % BITS_PER_UNIT == 0
6251 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6252 {
6253 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6254 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6255 offset / BITS_PER_UNIT);
6256 if (len > 0)
6257 return native_interpret_expr (type, buf, len);
6258 }
6259 if (TREE_CODE (ctor) == CONSTRUCTOR)
6260 {
6261
6262 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6263 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6264 return fold_array_ctor_reference (type, ctor, offset, size,
6265 from_decl);
6266 else
6267 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6268 from_decl);
6269 }
6270
6271 return NULL_TREE;
6272 }
6273
6274 /* Return the tree representing the element referenced by T if T is an
6275 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
6276 names using VALUEIZE. Return NULL_TREE otherwise.
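   A minimal sketch of a call without valueization, which is exactly
   what the fold_const_aggregate_ref wrapper below does:

     tree val = fold_const_aggregate_ref_1 (ref, NULL);
     // val is a constant, or NULL_TREE if REF cannot be resolved.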
*/
6277
6278 tree
6279 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6280 {
6281 tree ctor, idx, base;
6282 HOST_WIDE_INT offset, size, max_size;
6283 tree tem;
6284 bool reverse;
6285
6286 if (TREE_THIS_VOLATILE (t))
6287 return NULL_TREE;
6288
6289 if (DECL_P (t))
6290 return get_symbol_constant_value (t);
6291
6292 tem = fold_read_from_constant_string (t);
6293 if (tem)
6294 return tem;
6295
6296 switch (TREE_CODE (t))
6297 {
6298 case ARRAY_REF:
6299 case ARRAY_RANGE_REF:
6300 /* Constant indexes are handled well by get_base_constructor.
6301 Only special case variable offsets.
6302 FIXME: This code can't handle nested references with variable indexes
6303 (they will be handled only by iteration of ccp). Perhaps we can bring
6304 get_ref_base_and_extent here and make it use a valueize callback. */
6305 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6306 && valueize
6307 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6308 && TREE_CODE (idx) == INTEGER_CST)
6309 {
6310 tree low_bound, unit_size;
6311
6312 /* If the resulting bit-offset is constant, track it. */
6313 if ((low_bound = array_ref_low_bound (t),
6314 TREE_CODE (low_bound) == INTEGER_CST)
6315 && (unit_size = array_ref_element_size (t),
6316 tree_fits_uhwi_p (unit_size)))
6317 {
6318 offset_int woffset
6319 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6320 TYPE_PRECISION (TREE_TYPE (idx)));
6321
6322 if (wi::fits_shwi_p (woffset))
6323 {
6324 offset = woffset.to_shwi ();
6325 /* TODO: This code seems wrong, multiply then check
6326 to see if it fits. */
6327 offset *= tree_to_uhwi (unit_size);
6328 offset *= BITS_PER_UNIT;
6329
6330 base = TREE_OPERAND (t, 0);
6331 ctor = get_base_constructor (base, &offset, valueize);
6332 /* Empty constructor. Always fold to 0. */
6333 if (ctor == error_mark_node)
6334 return build_zero_cst (TREE_TYPE (t));
6335 /* Out-of-bounds array access. Value is undefined,
6336 but don't fold. */
6337 if (offset < 0)
6338 return NULL_TREE;
6339 /* We cannot determine the ctor. */
6340 if (!ctor)
6341 return NULL_TREE;
6342 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6343 tree_to_uhwi (unit_size)
6344 * BITS_PER_UNIT,
6345 base);
6346 }
6347 }
6348 }
6349 /* Fallthru. */
6350
6351 case COMPONENT_REF:
6352 case BIT_FIELD_REF:
6353 case TARGET_MEM_REF:
6354 case MEM_REF:
6355 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6356 ctor = get_base_constructor (base, &offset, valueize);
6357
6358 /* Empty constructor. Always fold to 0. */
6359 if (ctor == error_mark_node)
6360 return build_zero_cst (TREE_TYPE (t));
6361 /* We do not know the precise address. */
6362 if (max_size == -1 || max_size != size)
6363 return NULL_TREE;
6364 /* We cannot determine the ctor. */
6365 if (!ctor)
6366 return NULL_TREE;
6367
6368 /* Out-of-bounds array access. Value is undefined, but don't fold.
*/
6369 if (offset < 0)
6370 return NULL_TREE;
6371
6372 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6373 base);
6374
6375 case REALPART_EXPR:
6376 case IMAGPART_EXPR:
6377 {
6378 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6379 if (c && TREE_CODE (c) == COMPLEX_CST)
6380 return fold_build1_loc (EXPR_LOCATION (t),
6381 TREE_CODE (t), TREE_TYPE (t), c);
6382 break;
6383 }
6384
6385 default:
6386 break;
6387 }
6388
6389 return NULL_TREE;
6390 }
6391
6392 tree
6393 fold_const_aggregate_ref (tree t)
6394 {
6395 return fold_const_aggregate_ref_1 (t, NULL);
6396 }
6397
6398 /* Lookup virtual method with index TOKEN in a virtual table V
6399 at OFFSET.
6400 Set CAN_REFER, if non-NULL, to false if the method
6401 is not referable or if the virtual table is ill-formed (such as rewritten
6402 by a non-C++ produced symbol). Otherwise just return NULL in that case. */
6403
6404 tree
6405 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6406 tree v,
6407 unsigned HOST_WIDE_INT offset,
6408 bool *can_refer)
6409 {
6410 tree vtable = v, init, fn;
6411 unsigned HOST_WIDE_INT size;
6412 unsigned HOST_WIDE_INT elt_size, access_index;
6413 tree domain_type;
6414
6415 if (can_refer)
6416 *can_refer = true;
6417
6418 /* First of all, double-check that we have a virtual table. */
6419 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6420 {
6421 /* Pass down that we lost track of the target. */
6422 if (can_refer)
6423 *can_refer = false;
6424 return NULL_TREE;
6425 }
6426
6427 init = ctor_for_folding (v);
6428
6429 /* The virtual tables should always be born with constructors
6430 and we always should assume that they are available for
6431 folding. At the moment we do not stream them in all cases,
6432 but it should never happen that the ctor seems unreachable. */
6433 gcc_assert (init);
6434 if (init == error_mark_node)
6435 {
6436 /* Pass down that we lost track of the target. */
6437 if (can_refer)
6438 *can_refer = false;
6439 return NULL_TREE;
6440 }
6441 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6442 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6443 offset *= BITS_PER_UNIT;
6444 offset += token * size;
6445
6446 /* Lookup the value in the constructor that is assumed to be an array.
6447 This is equivalent to
6448 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6449 offset, size, NULL);
6450 but in constant time. We expect that the frontend produced a simple
6451 array without indexed initializers. */
6452
6453 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6454 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6455 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6456 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6457
6458 access_index = offset / BITS_PER_UNIT / elt_size;
6459 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6460
6461 /* This code makes an assumption that there are no
6462 indexed fields produced by the C++ FE, so we can directly index the array. */
6463 if (access_index < CONSTRUCTOR_NELTS (init))
6464 {
6465 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6466 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6467 STRIP_NOPS (fn);
6468 }
6469 else
6470 fn = NULL;
6471
6472 /* For a type-inconsistent program we may end up looking up a virtual method
6473 in a virtual table that does not contain TOKEN entries. We may overrun
6474 the virtual table and pick up a constant or RTTI info pointer.
6475 In any case the call is undefined.
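   As a worked example of the indexing above (hypothetical 64-bit
   vtable slots): with elt_size == 8, size == 64, OFFSET 0 and
   TOKEN 2 we get access_index == (2 * 64) / BITS_PER_UNIT / 8 == 2,
   i.e. the third slot of the vtable array.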
*/
6476 if (!fn
6477 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6478 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6479 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6480 else
6481 {
6482 fn = TREE_OPERAND (fn, 0);
6483
6484 /* When the cgraph node is missing and the function is not public,
6485 we cannot devirtualize. This can happen in WHOPR when the actual
6486 method ends up in another partition, because we found the
6487 devirtualization possibility too late. */
6488 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6489 {
6490 if (can_refer)
6491 {
6492 *can_refer = false;
6493 return fn;
6494 }
6495 return NULL_TREE;
6496 }
6497 }
6498
6499 /* Make sure we create a cgraph node for functions we'll reference.
6500 They can be non-existent if the reference comes from an entry
6501 of an external vtable for example. */
6502 cgraph_node::get_create (fn);
6503
6504 return fn;
6505 }
6506
6507 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6508 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6509 KNOWN_BINFO carries the binfo describing the true type of
6510 OBJ_TYPE_REF_OBJECT(REF).
6511 Set CAN_REFER, if non-NULL, to false if the method
6512 is not referable or if the virtual table is ill-formed (such as rewritten
6513 by a non-C++ produced symbol). Otherwise just return NULL in that case. */
6514
6515 tree
6516 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6517 bool *can_refer)
6518 {
6519 unsigned HOST_WIDE_INT offset;
6520 tree v;
6521
6522 v = BINFO_VTABLE (known_binfo);
6523 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
6524 if (!v)
6525 return NULL_TREE;
6526
6527 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6528 {
6529 if (can_refer)
6530 *can_refer = false;
6531 return NULL_TREE;
6532 }
6533 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6534 }
6535
6536 /* Given a pointer value T, return a simplified version of an
6537 indirection through T, or NULL_TREE if no simplification is
6538 possible. Note that the resulting type may be different from
6539 the type pointed to in the sense that it is still compatible
6540 from the langhooks point of view.
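   A hypothetical use, folding the dereference of a just-built
   address:

     tree addr = build_fold_addr_expr (decl);
     tree val = gimple_fold_indirect_ref (addr);
     // val is DECL itself when the types are compatible,
     // NULL_TREE otherwise.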
*/ 6541 6542 tree 6543 gimple_fold_indirect_ref (tree t) 6544 { 6545 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype); 6546 tree sub = t; 6547 tree subtype; 6548 6549 STRIP_NOPS (sub); 6550 subtype = TREE_TYPE (sub); 6551 if (!POINTER_TYPE_P (subtype) 6552 || TYPE_REF_CAN_ALIAS_ALL (ptype)) 6553 return NULL_TREE; 6554 6555 if (TREE_CODE (sub) == ADDR_EXPR) 6556 { 6557 tree op = TREE_OPERAND (sub, 0); 6558 tree optype = TREE_TYPE (op); 6559 /* *&p => p */ 6560 if (useless_type_conversion_p (type, optype)) 6561 return op; 6562 6563 /* *(foo *)&fooarray => fooarray[0] */ 6564 if (TREE_CODE (optype) == ARRAY_TYPE 6565 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST 6566 && useless_type_conversion_p (type, TREE_TYPE (optype))) 6567 { 6568 tree type_domain = TYPE_DOMAIN (optype); 6569 tree min_val = size_zero_node; 6570 if (type_domain && TYPE_MIN_VALUE (type_domain)) 6571 min_val = TYPE_MIN_VALUE (type_domain); 6572 if (TREE_CODE (min_val) == INTEGER_CST) 6573 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); 6574 } 6575 /* *(foo *)&complexfoo => __real__ complexfoo */ 6576 else if (TREE_CODE (optype) == COMPLEX_TYPE 6577 && useless_type_conversion_p (type, TREE_TYPE (optype))) 6578 return fold_build1 (REALPART_EXPR, type, op); 6579 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */ 6580 else if (TREE_CODE (optype) == VECTOR_TYPE 6581 && useless_type_conversion_p (type, TREE_TYPE (optype))) 6582 { 6583 tree part_width = TYPE_SIZE (type); 6584 tree index = bitsize_int (0); 6585 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index); 6586 } 6587 } 6588 6589 /* *(p + CST) -> ... */ 6590 if (TREE_CODE (sub) == POINTER_PLUS_EXPR 6591 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST) 6592 { 6593 tree addr = TREE_OPERAND (sub, 0); 6594 tree off = TREE_OPERAND (sub, 1); 6595 tree addrtype; 6596 6597 STRIP_NOPS (addr); 6598 addrtype = TREE_TYPE (addr); 6599 6600 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */ 6601 if (TREE_CODE (addr) == ADDR_EXPR 6602 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE 6603 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))) 6604 && tree_fits_uhwi_p (off)) 6605 { 6606 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off); 6607 tree part_width = TYPE_SIZE (type); 6608 unsigned HOST_WIDE_INT part_widthi 6609 = tree_to_shwi (part_width) / BITS_PER_UNIT; 6610 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT; 6611 tree index = bitsize_int (indexi); 6612 if (offset / part_widthi 6613 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))) 6614 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0), 6615 part_width, index); 6616 } 6617 6618 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */ 6619 if (TREE_CODE (addr) == ADDR_EXPR 6620 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE 6621 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))) 6622 { 6623 tree size = TYPE_SIZE_UNIT (type); 6624 if (tree_int_cst_equal (size, off)) 6625 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0)); 6626 } 6627 6628 /* *(p + CST) -> MEM_REF <p, CST>. 
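   E.g. *(p_1 + 4) becomes MEM_REF <p_1, 4>, with the offset
   converted to the pointer type by wide_int_to_tree below. This is
   done when the base is not an ADDR_EXPR at all, or when it is the
   address of a whole DECL; the more specific cases above handle the
   rest. (p_1 is an illustrative name.)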
*/ 6629 if (TREE_CODE (addr) != ADDR_EXPR 6630 || DECL_P (TREE_OPERAND (addr, 0))) 6631 return fold_build2 (MEM_REF, type, 6632 addr, 6633 wide_int_to_tree (ptype, off)); 6634 } 6635 6636 /* *(foo *)fooarrptr => (*fooarrptr)[0] */ 6637 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE 6638 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST 6639 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype)))) 6640 { 6641 tree type_domain; 6642 tree min_val = size_zero_node; 6643 tree osub = sub; 6644 sub = gimple_fold_indirect_ref (sub); 6645 if (! sub) 6646 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub); 6647 type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); 6648 if (type_domain && TYPE_MIN_VALUE (type_domain)) 6649 min_val = TYPE_MIN_VALUE (type_domain); 6650 if (TREE_CODE (min_val) == INTEGER_CST) 6651 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); 6652 } 6653 6654 return NULL_TREE; 6655 } 6656 6657 /* Return true if CODE is an operation that when operating on signed 6658 integer types involves undefined behavior on overflow and the 6659 operation can be expressed with unsigned arithmetic. */ 6660 6661 bool 6662 arith_code_with_undefined_signed_overflow (tree_code code) 6663 { 6664 switch (code) 6665 { 6666 case PLUS_EXPR: 6667 case MINUS_EXPR: 6668 case MULT_EXPR: 6669 case NEGATE_EXPR: 6670 case POINTER_PLUS_EXPR: 6671 return true; 6672 default: 6673 return false; 6674 } 6675 } 6676 6677 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic 6678 operation that can be transformed to unsigned arithmetic by converting 6679 its operand, carrying out the operation in the corresponding unsigned 6680 type and converting the result back to the original type. 6681 6682 Returns a sequence of statements that replace STMT and also contain 6683 a modified form of STMT itself. */ 6684 6685 gimple_seq 6686 rewrite_to_defined_overflow (gimple *stmt) 6687 { 6688 if (dump_file && (dump_flags & TDF_DETAILS)) 6689 { 6690 fprintf (dump_file, "rewriting stmt with undefined signed " 6691 "overflow "); 6692 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); 6693 } 6694 6695 tree lhs = gimple_assign_lhs (stmt); 6696 tree type = unsigned_type_for (TREE_TYPE (lhs)); 6697 gimple_seq stmts = NULL; 6698 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i) 6699 { 6700 tree op = gimple_op (stmt, i); 6701 op = gimple_convert (&stmts, type, op); 6702 gimple_set_op (stmt, i, op); 6703 } 6704 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt)); 6705 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR) 6706 gimple_assign_set_rhs_code (stmt, PLUS_EXPR); 6707 gimple_seq_add_stmt (&stmts, stmt); 6708 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt)); 6709 gimple_seq_add_stmt (&stmts, cvt); 6710 6711 return stmts; 6712 } 6713 6714 6715 /* The valueization hook we use for the gimple_build API simplification. 6716 This makes us match fold_buildN behavior by only combining with 6717 statements in the sequence(s) we are currently building. */ 6718 6719 static tree 6720 gimple_build_valueize (tree op) 6721 { 6722 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL) 6723 return op; 6724 return NULL_TREE; 6725 } 6726 6727 /* Build the expression CODE OP0 of type TYPE with location LOC, 6728 simplifying it first if possible. Returns the built 6729 expression value and appends statements possibly defining it 6730 to SEQ. 
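   A hypothetical caller negating OP0 and emitting the resulting
   sequence (gsi is assumed to be a valid gimple_stmt_iterator):

     gimple_seq seq = NULL;
     tree res = gimple_build (&seq, loc, NEGATE_EXPR, type, op0);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);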
*/ 6731 6732 tree 6733 gimple_build (gimple_seq *seq, location_t loc, 6734 enum tree_code code, tree type, tree op0) 6735 { 6736 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize); 6737 if (!res) 6738 { 6739 res = create_tmp_reg_or_ssa_name (type); 6740 gimple *stmt; 6741 if (code == REALPART_EXPR 6742 || code == IMAGPART_EXPR 6743 || code == VIEW_CONVERT_EXPR) 6744 stmt = gimple_build_assign (res, code, build1 (code, type, op0)); 6745 else 6746 stmt = gimple_build_assign (res, code, op0); 6747 gimple_set_location (stmt, loc); 6748 gimple_seq_add_stmt_without_update (seq, stmt); 6749 } 6750 return res; 6751 } 6752 6753 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC, 6754 simplifying it first if possible. Returns the built 6755 expression value and appends statements possibly defining it 6756 to SEQ. */ 6757 6758 tree 6759 gimple_build (gimple_seq *seq, location_t loc, 6760 enum tree_code code, tree type, tree op0, tree op1) 6761 { 6762 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize); 6763 if (!res) 6764 { 6765 res = create_tmp_reg_or_ssa_name (type); 6766 gimple *stmt = gimple_build_assign (res, code, op0, op1); 6767 gimple_set_location (stmt, loc); 6768 gimple_seq_add_stmt_without_update (seq, stmt); 6769 } 6770 return res; 6771 } 6772 6773 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC, 6774 simplifying it first if possible. Returns the built 6775 expression value and appends statements possibly defining it 6776 to SEQ. */ 6777 6778 tree 6779 gimple_build (gimple_seq *seq, location_t loc, 6780 enum tree_code code, tree type, tree op0, tree op1, tree op2) 6781 { 6782 tree res = gimple_simplify (code, type, op0, op1, op2, 6783 seq, gimple_build_valueize); 6784 if (!res) 6785 { 6786 res = create_tmp_reg_or_ssa_name (type); 6787 gimple *stmt; 6788 if (code == BIT_FIELD_REF) 6789 stmt = gimple_build_assign (res, code, 6790 build3 (code, type, op0, op1, op2)); 6791 else 6792 stmt = gimple_build_assign (res, code, op0, op1, op2); 6793 gimple_set_location (stmt, loc); 6794 gimple_seq_add_stmt_without_update (seq, stmt); 6795 } 6796 return res; 6797 } 6798 6799 /* Build the call FN (ARG0) with a result of type TYPE 6800 (or no result if TYPE is void) with location LOC, 6801 simplifying it first if possible. Returns the built 6802 expression value (or NULL_TREE if TYPE is void) and appends 6803 statements possibly defining it to SEQ. */ 6804 6805 tree 6806 gimple_build (gimple_seq *seq, location_t loc, 6807 enum built_in_function fn, tree type, tree arg0) 6808 { 6809 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize); 6810 if (!res) 6811 { 6812 tree decl = builtin_decl_implicit (fn); 6813 gimple *stmt = gimple_build_call (decl, 1, arg0); 6814 if (!VOID_TYPE_P (type)) 6815 { 6816 res = create_tmp_reg_or_ssa_name (type); 6817 gimple_call_set_lhs (stmt, res); 6818 } 6819 gimple_set_location (stmt, loc); 6820 gimple_seq_add_stmt_without_update (seq, stmt); 6821 } 6822 return res; 6823 } 6824 6825 /* Build the call FN (ARG0, ARG1) with a result of type TYPE 6826 (or no result if TYPE is void) with location LOC, 6827 simplifying it first if possible. Returns the built 6828 expression value (or NULL_TREE if TYPE is void) and appends 6829 statements possibly defining it to SEQ. 
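   For instance, a hypothetical caller could emit a copysign call
   (mag and sgn are illustrative operands):

     tree r = gimple_build (&seq, loc, BUILT_IN_COPYSIGN,
			    double_type_node, mag, sgn);
     // r is a constant if the call folds; otherwise a new call
     // statement defining r is appended to seq.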
*/
6831 tree
6832 gimple_build (gimple_seq *seq, location_t loc,
6833 enum built_in_function fn, tree type, tree arg0, tree arg1)
6834 {
6835 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6836 if (!res)
6837 {
6838 tree decl = builtin_decl_implicit (fn);
6839 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6840 if (!VOID_TYPE_P (type))
6841 {
6842 res = create_tmp_reg_or_ssa_name (type);
6843 gimple_call_set_lhs (stmt, res);
6844 }
6845 gimple_set_location (stmt, loc);
6846 gimple_seq_add_stmt_without_update (seq, stmt);
6847 }
6848 return res;
6849 }
6850
6851 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6852 (or no result if TYPE is void) with location LOC,
6853 simplifying it first if possible. Returns the built
6854 expression value (or NULL_TREE if TYPE is void) and appends
6855 statements possibly defining it to SEQ. */
6856
6857 tree
6858 gimple_build (gimple_seq *seq, location_t loc,
6859 enum built_in_function fn, tree type,
6860 tree arg0, tree arg1, tree arg2)
6861 {
6862 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6863 seq, gimple_build_valueize);
6864 if (!res)
6865 {
6866 tree decl = builtin_decl_implicit (fn);
6867 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6868 if (!VOID_TYPE_P (type))
6869 {
6870 res = create_tmp_reg_or_ssa_name (type);
6871 gimple_call_set_lhs (stmt, res);
6872 }
6873 gimple_set_location (stmt, loc);
6874 gimple_seq_add_stmt_without_update (seq, stmt);
6875 }
6876 return res;
6877 }
6878
6879 /* Build the conversion (TYPE) OP with a result of type TYPE
6880 with location LOC if such conversion is necessary in GIMPLE,
6881 simplifying it first.
6882 Returns the built expression value and appends
6883 statements possibly defining it to SEQ. */
6884
6885 tree
6886 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6887 {
6888 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6889 return op;
6890 return gimple_build (seq, loc, NOP_EXPR, type, op);
6891 }
6892
6893 /* Build the conversion (ptrofftype) OP with a result of a type
6894 compatible with ptrofftype with location LOC if such conversion
6895 is necessary in GIMPLE, simplifying it first.
6896 Returns the built expression value and appends
6897 statements possibly defining it to SEQ. */
6898
6899 tree
6900 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6901 {
6902 if (ptrofftype_p (TREE_TYPE (op)))
6903 return op;
6904 return gimple_convert (seq, loc, sizetype, op);
6905 }
6906
6907 /* Return true if the result of assignment STMT is known to be non-negative.
6908 If the return value is based on the assumption that signed overflow is
6909 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6910 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query.
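   For example, tmp_2 = ABS_EXPR <x_1> is non-negative with no
   overflow assumption, while deducing that tmp_2 = x_1 * x_1 is
   non-negative for signed x_1 relies on overflow being undefined
   and so sets *STRICT_OVERFLOW_P. (Illustrative statements only.)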
*/
6911
6912 static bool
6913 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6914 int depth)
6915 {
6916 enum tree_code code = gimple_assign_rhs_code (stmt);
6917 switch (get_gimple_rhs_class (code))
6918 {
6919 case GIMPLE_UNARY_RHS:
6920 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6921 gimple_expr_type (stmt),
6922 gimple_assign_rhs1 (stmt),
6923 strict_overflow_p, depth);
6924 case GIMPLE_BINARY_RHS:
6925 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6926 gimple_expr_type (stmt),
6927 gimple_assign_rhs1 (stmt),
6928 gimple_assign_rhs2 (stmt),
6929 strict_overflow_p, depth);
6930 case GIMPLE_TERNARY_RHS:
6931 return false;
6932 case GIMPLE_SINGLE_RHS:
6933 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6934 strict_overflow_p, depth);
6935 case GIMPLE_INVALID_RHS:
6936 break;
6937 }
6938 gcc_unreachable ();
6939 }
6940
6941 /* Return true if the return value of call STMT is known to be non-negative.
6942 If the return value is based on the assumption that signed overflow is
6943 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6944 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6945
6946 static bool
6947 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6948 int depth)
6949 {
6950 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6951 gimple_call_arg (stmt, 0) : NULL_TREE;
6952 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6953 gimple_call_arg (stmt, 1) : NULL_TREE;
6954
6955 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6956 gimple_call_combined_fn (stmt),
6957 arg0,
6958 arg1,
6959 strict_overflow_p, depth);
6960 }
6961
6962 /* Return true if the result of PHI STMT is known to be non-negative.
6963 If the return value is based on the assumption that signed overflow is
6964 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6965 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6966
6967 static bool
6968 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6969 int depth)
6970 {
6971 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6972 {
6973 tree arg = gimple_phi_arg_def (stmt, i);
6974 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6975 return false;
6976 }
6977 return true;
6978 }
6979
6980 /* Return true if STMT is known to compute a non-negative value.
6981 If the return value is based on the assumption that signed overflow is
6982 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6983 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6984
6985 bool
6986 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6987 int depth)
6988 {
6989 switch (gimple_code (stmt))
6990 {
6991 case GIMPLE_ASSIGN:
6992 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6993 depth);
6994 case GIMPLE_CALL:
6995 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6996 depth);
6997 case GIMPLE_PHI:
6998 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6999 depth);
7000 default:
7001 return false;
7002 }
7003 }
7004
7005 /* Return true if the floating-point value computed by assignment STMT
7006 is known to have an integer value. We also allow +Inf, -Inf and NaN
7007 to be considered integer values. Return false for signaling NaN.
7008
7009 DEPTH is the current nesting depth of the query.
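   E.g. y_2 = (double) i_1 with integral i_1 is integer valued, as
   is y_2 = x_1 * 2.0 when x_1 already is; a signaling NaN operand
   makes the predicate return false. (Hypothetical statements.)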
*/ 7010 7011 static bool 7012 gimple_assign_integer_valued_real_p (gimple *stmt, int depth) 7013 { 7014 enum tree_code code = gimple_assign_rhs_code (stmt); 7015 switch (get_gimple_rhs_class (code)) 7016 { 7017 case GIMPLE_UNARY_RHS: 7018 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt), 7019 gimple_assign_rhs1 (stmt), depth); 7020 case GIMPLE_BINARY_RHS: 7021 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt), 7022 gimple_assign_rhs1 (stmt), 7023 gimple_assign_rhs2 (stmt), depth); 7024 case GIMPLE_TERNARY_RHS: 7025 return false; 7026 case GIMPLE_SINGLE_RHS: 7027 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth); 7028 case GIMPLE_INVALID_RHS: 7029 break; 7030 } 7031 gcc_unreachable (); 7032 } 7033 7034 /* Return true if the floating-point value computed by call STMT is known 7035 to have an integer value. We also allow +Inf, -Inf and NaN to be 7036 considered integer values. Return false for signaling NaN. 7037 7038 DEPTH is the current nesting depth of the query. */ 7039 7040 static bool 7041 gimple_call_integer_valued_real_p (gimple *stmt, int depth) 7042 { 7043 tree arg0 = (gimple_call_num_args (stmt) > 0 7044 ? gimple_call_arg (stmt, 0) 7045 : NULL_TREE); 7046 tree arg1 = (gimple_call_num_args (stmt) > 1 7047 ? gimple_call_arg (stmt, 1) 7048 : NULL_TREE); 7049 return integer_valued_real_call_p (gimple_call_combined_fn (stmt), 7050 arg0, arg1, depth); 7051 } 7052 7053 /* Return true if the floating-point result of phi STMT is known to have 7054 an integer value. We also allow +Inf, -Inf and NaN to be considered 7055 integer values. Return false for signaling NaN. 7056 7057 DEPTH is the current nesting depth of the query. */ 7058 7059 static bool 7060 gimple_phi_integer_valued_real_p (gimple *stmt, int depth) 7061 { 7062 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i) 7063 { 7064 tree arg = gimple_phi_arg_def (stmt, i); 7065 if (!integer_valued_real_single_p (arg, depth + 1)) 7066 return false; 7067 } 7068 return true; 7069 } 7070 7071 /* Return true if the floating-point value computed by STMT is known 7072 to have an integer value. We also allow +Inf, -Inf and NaN to be 7073 considered integer values. Return false for signaling NaN. 7074 7075 DEPTH is the current nesting depth of the query. */ 7076 7077 bool 7078 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth) 7079 { 7080 switch (gimple_code (stmt)) 7081 { 7082 case GIMPLE_ASSIGN: 7083 return gimple_assign_integer_valued_real_p (stmt, depth); 7084 case GIMPLE_CALL: 7085 return gimple_call_integer_valued_real_p (stmt, depth); 7086 case GIMPLE_PHI: 7087 return gimple_phi_integer_valued_real_p (stmt, depth); 7088 default: 7089 return false; 7090 } 7091 } 7092