/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2013 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "dumpfile.h"
#include "tree-flow.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the variable whose constructor
   DECL was taken from.
   We can get declarations that cannot be referenced for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node snode;

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
	  && symtab_get_node (from_decl)->symbol.in_other_partition))
    return true;
  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;
  /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
     are always safe.  */
  if (DECL_EXTERNAL (decl)
      && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
    return false;
  /* When the function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also while we are still gimplifying, all referred comdat functions
     will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in the units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public while
     the function itself was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK, we are seeing either a COMDAT or a static symbol.  In this case we
     must check that the definition is still around so we can refer to it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we have not
	 yet decided to eliminate the offline copy of the function.  The
	 second check is important when devirtualization happens during
	 the final compilation stage, when making a new reference no
	 longer causes the callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->analyzed)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  return true;
}
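/* Illustrative scenario (editor's sketch, not from the original sources):
   suppose unit A defines a class with a COMDAT virtual method while unit
   B only sees an external vtable for it.  Folding a vtable load in B can
   name the method even though B has decided not to output the COMDAT
   body; the predicate above then returns false so the folder keeps the
   indirect reference instead of creating one it cannot emit.  */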
/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_real_symbol_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
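/* Illustrative example (editor's sketch): a constructor entry such as

     &a + 4		(POINTER_PLUS_EXPR)

   would be rewritten by canonicalize_constructor_val into the form

     &MEM_REF[&a, 4]	(ADDR_EXPR of a MEM_REF)

   which is acceptable to is_gimple_min_invariant, whereas the original
   POINTER_PLUS_EXPR is not a gimple minimal invariant.  */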
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}



/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize a MEM_REF's invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
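/* Illustrative example (editor's sketch): after constant propagation we
   may be left with a load such as

     tmp = MEM[(int *) &a, 0];

   which the function above folds back to the plain reference tree

     tmp = a;

   provided the volatility and type checks on the MEM_REF are met.  */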
/* Attempt to fold an assignment statement pointed to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;
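    /* Illustrative example (editor's sketch): in the GIMPLE_SINGLE_RHS
       case above, a fully constant vector constructor such as
       v = {1, 2, 3, 4} for a four-element integer vector V folds to a
       single VECTOR_CST, while any non-constant element makes the
       folder return NULL_TREE.  */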
    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type.
	     Check that the second operand is an integer constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X;
		 X == 1 and X != 0 is X.  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for X of one-bit precision is the transformation
		 !X -> ~X valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;
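    /* Illustrative table (editor's sketch) for the canonicalization in
       the GIMPLE_BINARY_RHS case above, for boolean-typed X:
	 X == 1, X != 0  ->  X
	 X == 0, X != 1  ->  ~X	(one-bit precision)
	 X == 0, X != 1  ->  X ^ 1	(wider boolean-like types)  */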
    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement; however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
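/* Illustrative example (editor's sketch): a condition left unfolded by
   propagation, such as

     if (2 > 1) ...

   is folded by fold_gimple_cond above to a constant-true condition; the
   resulting CFG simplification is left to later cleanup passes.  */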
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  struct gimplify_ctx gctx;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up folding a memcpy of an empty class assignment,
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second, iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
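/* Illustrative sketch (editor's note): when the function above replaces

     # .MEM_3 = VDEF <.MEM_2>
     memcpy (dst, src, 8);

   by a sequence containing two stores, the first store keeps .MEM_2 as
   its VUSE and the last store defines .MEM_3, with a fresh virtual SSA
   name threaded between them, as the backward and forward walks above
   arrange.  */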
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine, or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
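/* Illustrative example (editor's sketch): for

     s_3 = PHI <"foo", "ab">

   a TYPE == 1 query above yields the maximum string length 3, while a
   TYPE == 0 query fails because the two branch lengths disagree.  */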
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRCAT:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	    fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;
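    /* Illustrative example (editor's sketch): for

	 n_2 = __builtin_strlen (s_1);

       where the walk above determined that s_1 always points to a string
       of length 3, VAL[0] is 3 and the call folds to n_2 = 3.  */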
    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_STRCAT:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcat (loc, gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
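/* Illustrative example (editor's sketch): in the BUILT_IN_STRCPY case
   above, when VAL[1] shows strlen (src) == 3, the call

     __builtin_strcpy (dst, src);

   can be rewritten by fold_builtin_strcpy as

     __builtin_memcpy (dst, src, 4);

   copying the terminating NUL as part of a fixed-size block.  */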
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such an object or the function will return NULL.  Currently it
   is safe to use such a binfo only if it has no base binfo (i.e. no
   ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it
     is an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
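/* Illustrative example (editor's sketch): for C++ code along the lines of

     struct A { virtual int f (); };
     A a;
     ... a.f () ...	called through an OBJ_TYPE_REF

   the constant &a lets the function above extract A's binfo; because A
   has no base binfos, the call can later be resolved directly to A::f.  */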
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else
	{
	  tree obj = OBJ_TYPE_REF_OBJECT (callee);
	  tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
	  if (binfo)
	    {
	      HOST_WIDE_INT token
		= TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
	      tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
	      if (fndecl)
		{
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes the two cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in an unfolded state resulting from, for example, constant propagation,
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in an unfolded state resulting from, for example, constant propagation,
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
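/* Illustrative usage (editor's sketch): a pass folding every statement
   of a basic block BB might do

     gimple_stmt_iterator gsi;
     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       if (fold_stmt (&gsi))
	 update_stmt (gsi_stmt (gsi));

   using fold_stmt_inplace instead when the statement may not be
   replaced wholesale.  */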
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
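/* Illustrative example (editor's sketch): given the definition

     t_1 = a_2 < b_3;

   same_bool_comparison_p above treats t_1 as equivalent to the
   comparison (a_2 < b_3), and recurses through forms such as
   (t_1 != 0) by following SSA definitions.  */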
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	   inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	   inner1 AND (inner1 OR inner2) => inner1
	   !inner1 AND (inner1 AND inner2) => false
	   !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}
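      /* Illustrative example (editor's sketch) of the redistribution
	 above: for var = inner1 | inner2, if (inner1 AND (op2a code2
	 op2b)) simplifies to constant true, the whole AND is true; if it
	 simplifies to constant false, only the inner2 arm matters, which
	 is what saving the partial result for the second walk below
	 achieves.  */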
      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}

/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
*/
1841 else if (cmp <= 0
1842 && (code1 == LT_EXPR || code1 == LE_EXPR)
1843 && (code2 == GT_EXPR || code2 == GE_EXPR))
1844 return boolean_false_node;
1845 else if (cmp >= 0
1846 && (code1 == GT_EXPR || code1 == GE_EXPR)
1847 && (code2 == LT_EXPR || code2 == LE_EXPR))
1848 return boolean_false_node;
1849 }
1850
1851 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1852 NAME's definition is a truth value. See if there are any simplifications
1853 that can be done against NAME's definition. */
1854 if (TREE_CODE (op1a) == SSA_NAME
1855 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1856 && (integer_zerop (op1b) || integer_onep (op1b)))
1857 {
1858 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1859 || (code1 == NE_EXPR && integer_onep (op1b)));
1860 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1861 switch (gimple_code (stmt))
1862 {
1863 case GIMPLE_ASSIGN:
1864 /* Try to simplify by copy-propagating the definition. */
1865 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1866
1867 case GIMPLE_PHI:
1868 /* If every argument to the PHI produces the same result when
1869 ANDed with the second comparison, we win.
1870 Do not do this unless the type is bool since we need a bool
1871 result here anyway. */
1872 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1873 {
1874 tree result = NULL_TREE;
1875 unsigned i;
1876 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1877 {
1878 tree arg = gimple_phi_arg_def (stmt, i);
1879
1880 /* If this PHI has itself as an argument, ignore it.
1881 If all the other args produce the same result,
1882 we're still OK. */
1883 if (arg == gimple_phi_result (stmt))
1884 continue;
1885 else if (TREE_CODE (arg) == INTEGER_CST)
1886 {
1887 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1888 {
1889 if (!result)
1890 result = boolean_false_node;
1891 else if (!integer_zerop (result))
1892 return NULL_TREE;
1893 }
1894 else if (!result)
1895 result = fold_build2 (code2, boolean_type_node,
1896 op2a, op2b);
1897 else if (!same_bool_comparison_p (result,
1898 code2, op2a, op2b))
1899 return NULL_TREE;
1900 }
1901 else if (TREE_CODE (arg) == SSA_NAME
1902 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1903 {
1904 tree temp;
1905 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1906 /* In simple cases we can look through PHI nodes,
1907 but we have to be careful with loops.
1908 See PR49073. */
1909 if (! dom_info_available_p (CDI_DOMINATORS)
1910 || gimple_bb (def_stmt) == gimple_bb (stmt)
1911 || dominated_by_p (CDI_DOMINATORS,
1912 gimple_bb (def_stmt),
1913 gimple_bb (stmt)))
1914 return NULL_TREE;
1915 temp = and_var_with_comparison (arg, invert, code2,
1916 op2a, op2b);
1917 if (!temp)
1918 return NULL_TREE;
1919 else if (!result)
1920 result = temp;
1921 else if (!same_bool_result_p (result, temp))
1922 return NULL_TREE;
1923 }
1924 else
1925 return NULL_TREE;
1926 }
1927 return result;
1928 }
1929
1930 default:
1931 break;
1932 }
1933 }
1934 return NULL_TREE;
1935 }
1936
1937 /* Try to simplify the AND of two comparisons, specified by
1938 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1939 If this can be simplified to a single expression (without requiring
1940 the introduction of more SSA variables to hold intermediate values),
1941 return the resulting tree. Otherwise return NULL_TREE.
1942 If the result expression is non-null, it has boolean type.
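For example, the AND of a_1 > 0 and a_1 > 2 (with a_1 an SSA name)
simplifies to the single comparison a_1 > 2, the more restrictive of
the two tests.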
*/
1943
1944 tree
1945 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1946 enum tree_code code2, tree op2a, tree op2b)
1947 {
1948 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1949 if (t)
1950 return t;
1951 else
1952 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1953 }
1954
1955 /* Helper function for or_comparisons_1: try to simplify the OR of the
1956 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1957 If INVERT is true, invert the value of VAR before doing the OR.
1958 Return NULL_TREE if we can't simplify this to a single expression. */
1959
1960 static tree
1961 or_var_with_comparison (tree var, bool invert,
1962 enum tree_code code2, tree op2a, tree op2b)
1963 {
1964 tree t;
1965 gimple stmt = SSA_NAME_DEF_STMT (var);
1966
1967 /* We can only deal with variables whose definitions are assignments. */
1968 if (!is_gimple_assign (stmt))
1969 return NULL_TREE;
1970
1971 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1972 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1973 Then we only have to consider the simpler non-inverted cases. */
1974 if (invert)
1975 t = and_var_with_comparison_1 (stmt,
1976 invert_tree_comparison (code2, false),
1977 op2a, op2b);
1978 else
1979 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1980 return canonicalize_bool (t, invert);
1981 }
1982
1983 /* Try to simplify the OR of the ssa variable defined by the assignment
1984 STMT with the comparison specified by (OP2A CODE2 OP2B).
1985 Return NULL_TREE if we can't simplify this to a single expression. */
1986
1987 static tree
1988 or_var_with_comparison_1 (gimple stmt,
1989 enum tree_code code2, tree op2a, tree op2b)
1990 {
1991 tree var = gimple_assign_lhs (stmt);
1992 tree true_test_var = NULL_TREE;
1993 tree false_test_var = NULL_TREE;
1994 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1995
1996 /* Check for identities like (var OR (var != 0)) => true. */
1997 if (TREE_CODE (op2a) == SSA_NAME
1998 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1999 {
2000 if ((code2 == NE_EXPR && integer_zerop (op2b))
2001 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2002 {
2003 true_test_var = op2a;
2004 if (var == true_test_var)
2005 return var;
2006 }
2007 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2008 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2009 {
2010 false_test_var = op2a;
2011 if (var == false_test_var)
2012 return boolean_true_node;
2013 }
2014 }
2015
2016 /* If the definition is a comparison, recurse on it. */
2017 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2018 {
2019 tree t = or_comparisons_1 (innercode,
2020 gimple_assign_rhs1 (stmt),
2021 gimple_assign_rhs2 (stmt),
2022 code2,
2023 op2a,
2024 op2b);
2025 if (t)
2026 return t;
2027 }
2028
2029 /* If the definition is an AND or OR expression, we may be able to
2030 simplify by reassociating.
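For example, when VAR is defined as tem_1 | tem_2 (all booleans) and
the second test is tem_1 != 0, the expression (tem_1 | tem_2) OR
(tem_1 != 0) simplifies to VAR itself via the identities checked
first below.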
*/ 2031 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE 2032 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)) 2033 { 2034 tree inner1 = gimple_assign_rhs1 (stmt); 2035 tree inner2 = gimple_assign_rhs2 (stmt); 2036 gimple s; 2037 tree t; 2038 tree partial = NULL_TREE; 2039 bool is_or = (innercode == BIT_IOR_EXPR); 2040 2041 /* Check for boolean identities that don't require recursive examination 2042 of inner1/inner2: 2043 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var 2044 inner1 OR (inner1 AND inner2) => inner1 2045 !inner1 OR (inner1 OR inner2) => true 2046 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 2047 */ 2048 if (inner1 == true_test_var) 2049 return (is_or ? var : inner1); 2050 else if (inner2 == true_test_var) 2051 return (is_or ? var : inner2); 2052 else if (inner1 == false_test_var) 2053 return (is_or 2054 ? boolean_true_node 2055 : or_var_with_comparison (inner2, false, code2, op2a, op2b)); 2056 else if (inner2 == false_test_var) 2057 return (is_or 2058 ? boolean_true_node 2059 : or_var_with_comparison (inner1, false, code2, op2a, op2b)); 2060 2061 /* Next, redistribute/reassociate the OR across the inner tests. 2062 Compute the first partial result, (inner1 OR (op2a code op2b)) */ 2063 if (TREE_CODE (inner1) == SSA_NAME 2064 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1)) 2065 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison 2066 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s), 2067 gimple_assign_rhs1 (s), 2068 gimple_assign_rhs2 (s), 2069 code2, op2a, op2b))) 2070 { 2071 /* Handle the OR case, where we are reassociating: 2072 (inner1 OR inner2) OR (op2a code2 op2b) 2073 => (t OR inner2) 2074 If the partial result t is a constant, we win. Otherwise 2075 continue on to try reassociating with the other inner test. */ 2076 if (is_or) 2077 { 2078 if (integer_onep (t)) 2079 return boolean_true_node; 2080 else if (integer_zerop (t)) 2081 return inner2; 2082 } 2083 2084 /* Handle the AND case, where we are redistributing: 2085 (inner1 AND inner2) OR (op2a code2 op2b) 2086 => (t AND (inner2 OR (op2a code op2b))) */ 2087 else if (integer_zerop (t)) 2088 return boolean_false_node; 2089 2090 /* Save partial result for later. */ 2091 partial = t; 2092 } 2093 2094 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */ 2095 if (TREE_CODE (inner2) == SSA_NAME 2096 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2)) 2097 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison 2098 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s), 2099 gimple_assign_rhs1 (s), 2100 gimple_assign_rhs2 (s), 2101 code2, op2a, op2b))) 2102 { 2103 /* Handle the OR case, where we are reassociating: 2104 (inner1 OR inner2) OR (op2a code2 op2b) 2105 => (inner1 OR t) 2106 => (t OR partial) */ 2107 if (is_or) 2108 { 2109 if (integer_zerop (t)) 2110 return inner1; 2111 else if (integer_onep (t)) 2112 return boolean_true_node; 2113 /* If both are the same, we can apply the identity 2114 (x OR x) == x. */ 2115 else if (partial && same_bool_result_p (t, partial)) 2116 return t; 2117 } 2118 2119 /* Handle the AND case, where we are redistributing: 2120 (inner1 AND inner2) OR (op2a code2 op2b) 2121 => (t AND (inner1 OR (op2a code2 op2b))) 2122 => (t AND partial) */ 2123 else 2124 { 2125 if (integer_zerop (t)) 2126 return boolean_false_node; 2127 else if (partial) 2128 { 2129 /* We already got a simplification for the other 2130 operand to the redistributed AND expression. The 2131 interesting case is when at least one is true. 
2132 Or, if both are the same, we can apply the identity 2133 (x AND x) == x. */ 2134 if (integer_onep (partial)) 2135 return t; 2136 else if (integer_onep (t)) 2137 return partial; 2138 else if (same_bool_result_p (t, partial)) 2139 return t; 2140 } 2141 } 2142 } 2143 } 2144 return NULL_TREE; 2145 } 2146 2147 /* Try to simplify the OR of two comparisons defined by 2148 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively. 2149 If this can be done without constructing an intermediate value, 2150 return the resulting tree; otherwise NULL_TREE is returned. 2151 This function is deliberately asymmetric as it recurses on SSA_DEFs 2152 in the first comparison but not the second. */ 2153 2154 static tree 2155 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, 2156 enum tree_code code2, tree op2a, tree op2b) 2157 { 2158 tree truth_type = truth_type_for (TREE_TYPE (op1a)); 2159 2160 /* First check for ((x CODE1 y) OR (x CODE2 y)). */ 2161 if (operand_equal_p (op1a, op2a, 0) 2162 && operand_equal_p (op1b, op2b, 0)) 2163 { 2164 /* Result will be either NULL_TREE, or a combined comparison. */ 2165 tree t = combine_comparisons (UNKNOWN_LOCATION, 2166 TRUTH_ORIF_EXPR, code1, code2, 2167 truth_type, op1a, op1b); 2168 if (t) 2169 return t; 2170 } 2171 2172 /* Likewise the swapped case of the above. */ 2173 if (operand_equal_p (op1a, op2b, 0) 2174 && operand_equal_p (op1b, op2a, 0)) 2175 { 2176 /* Result will be either NULL_TREE, or a combined comparison. */ 2177 tree t = combine_comparisons (UNKNOWN_LOCATION, 2178 TRUTH_ORIF_EXPR, code1, 2179 swap_tree_comparison (code2), 2180 truth_type, op1a, op1b); 2181 if (t) 2182 return t; 2183 } 2184 2185 /* If both comparisons are of the same value against constants, we might 2186 be able to merge them. */ 2187 if (operand_equal_p (op1a, op2a, 0) 2188 && TREE_CODE (op1b) == INTEGER_CST 2189 && TREE_CODE (op2b) == INTEGER_CST) 2190 { 2191 int cmp = tree_int_cst_compare (op1b, op2b); 2192 2193 /* If we have (op1a != op1b), we should either be able to 2194 return that or TRUE, depending on whether the constant op1b 2195 also satisfies the other comparison against op2b. */ 2196 if (code1 == NE_EXPR) 2197 { 2198 bool done = true; 2199 bool val; 2200 switch (code2) 2201 { 2202 case EQ_EXPR: val = (cmp == 0); break; 2203 case NE_EXPR: val = (cmp != 0); break; 2204 case LT_EXPR: val = (cmp < 0); break; 2205 case GT_EXPR: val = (cmp > 0); break; 2206 case LE_EXPR: val = (cmp <= 0); break; 2207 case GE_EXPR: val = (cmp >= 0); break; 2208 default: done = false; 2209 } 2210 if (done) 2211 { 2212 if (val) 2213 return boolean_true_node; 2214 else 2215 return fold_build2 (code1, boolean_type_node, op1a, op1b); 2216 } 2217 } 2218 /* Likewise if the second comparison is a != comparison. */ 2219 else if (code2 == NE_EXPR) 2220 { 2221 bool done = true; 2222 bool val; 2223 switch (code1) 2224 { 2225 case EQ_EXPR: val = (cmp == 0); break; 2226 case NE_EXPR: val = (cmp != 0); break; 2227 case LT_EXPR: val = (cmp > 0); break; 2228 case GT_EXPR: val = (cmp < 0); break; 2229 case LE_EXPR: val = (cmp >= 0); break; 2230 case GE_EXPR: val = (cmp <= 0); break; 2231 default: done = false; 2232 } 2233 if (done) 2234 { 2235 if (val) 2236 return boolean_true_node; 2237 else 2238 return fold_build2 (code2, boolean_type_node, op2a, op2b); 2239 } 2240 } 2241 2242 /* See if an equality test is redundant with the other comparison. 
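For example, (x == 3) OR (x < 5) is equivalent to x < 5 alone, since
any value equal to 3 already satisfies x < 5; the switches below
encode exactly these constant comparisons.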
*/
2243 else if (code1 == EQ_EXPR)
2244 {
2245 bool val;
2246 switch (code2)
2247 {
2248 case EQ_EXPR: val = (cmp == 0); break;
2249 case NE_EXPR: val = (cmp != 0); break;
2250 case LT_EXPR: val = (cmp < 0); break;
2251 case GT_EXPR: val = (cmp > 0); break;
2252 case LE_EXPR: val = (cmp <= 0); break;
2253 case GE_EXPR: val = (cmp >= 0); break;
2254 default:
2255 val = false;
2256 }
2257 if (val)
2258 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2259 }
2260 else if (code2 == EQ_EXPR)
2261 {
2262 bool val;
2263 switch (code1)
2264 {
2265 case EQ_EXPR: val = (cmp == 0); break;
2266 case NE_EXPR: val = (cmp != 0); break;
2267 case LT_EXPR: val = (cmp > 0); break;
2268 case GT_EXPR: val = (cmp < 0); break;
2269 case LE_EXPR: val = (cmp >= 0); break;
2270 case GE_EXPR: val = (cmp <= 0); break;
2271 default:
2272 val = false;
2273 }
2274 if (val)
2275 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2276 }
2277
2278 /* Choose the less restrictive of two < or <= comparisons. */
2279 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2280 && (code2 == LT_EXPR || code2 == LE_EXPR))
2281 {
2282 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2283 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2284 else
2285 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2286 }
2287
2288 /* Likewise choose the less restrictive of two > or >= comparisons. */
2289 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2290 && (code2 == GT_EXPR || code2 == GE_EXPR))
2291 {
2292 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2293 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2294 else
2295 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2296 }
2297
2298 /* Check for singleton ranges. */
2299 else if (cmp == 0
2300 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2301 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2302 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2303
2304 /* Check for less/greater pairs that don't restrict the range at all. */
2305 else if (cmp >= 0
2306 && (code1 == LT_EXPR || code1 == LE_EXPR)
2307 && (code2 == GT_EXPR || code2 == GE_EXPR))
2308 return boolean_true_node;
2309 else if (cmp <= 0
2310 && (code1 == GT_EXPR || code1 == GE_EXPR)
2311 && (code2 == LT_EXPR || code2 == LE_EXPR))
2312 return boolean_true_node;
2313 }
2314
2315 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2316 NAME's definition is a truth value. See if there are any simplifications
2317 that can be done against NAME's definition. */
2318 if (TREE_CODE (op1a) == SSA_NAME
2319 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2320 && (integer_zerop (op1b) || integer_onep (op1b)))
2321 {
2322 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2323 || (code1 == NE_EXPR && integer_onep (op1b)));
2324 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2325 switch (gimple_code (stmt))
2326 {
2327 case GIMPLE_ASSIGN:
2328 /* Try to simplify by copy-propagating the definition. */
2329 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2330
2331 case GIMPLE_PHI:
2332 /* If every argument to the PHI produces the same result when
2333 ORed with the second comparison, we win.
2334 Do not do this unless the type is bool since we need a bool
2335 result here anyway. */
2336 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2337 {
2338 tree result = NULL_TREE;
2339 unsigned i;
2340 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2341 {
2342 tree arg = gimple_phi_arg_def (stmt, i);
2343
2344 /* If this PHI has itself as an argument, ignore it.
2345 If all the other args produce the same result,
2346 we're still OK. */
2347 if (arg == gimple_phi_result (stmt))
2348 continue;
2349 else if (TREE_CODE (arg) == INTEGER_CST)
2350 {
2351 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2352 {
2353 if (!result)
2354 result = boolean_true_node;
2355 else if (!integer_onep (result))
2356 return NULL_TREE;
2357 }
2358 else if (!result)
2359 result = fold_build2 (code2, boolean_type_node,
2360 op2a, op2b);
2361 else if (!same_bool_comparison_p (result,
2362 code2, op2a, op2b))
2363 return NULL_TREE;
2364 }
2365 else if (TREE_CODE (arg) == SSA_NAME
2366 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2367 {
2368 tree temp;
2369 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2370 /* In simple cases we can look through PHI nodes,
2371 but we have to be careful with loops.
2372 See PR49073. */
2373 if (! dom_info_available_p (CDI_DOMINATORS)
2374 || gimple_bb (def_stmt) == gimple_bb (stmt)
2375 || dominated_by_p (CDI_DOMINATORS,
2376 gimple_bb (def_stmt),
2377 gimple_bb (stmt)))
2378 return NULL_TREE;
2379 temp = or_var_with_comparison (arg, invert, code2,
2380 op2a, op2b);
2381 if (!temp)
2382 return NULL_TREE;
2383 else if (!result)
2384 result = temp;
2385 else if (!same_bool_result_p (result, temp))
2386 return NULL_TREE;
2387 }
2388 else
2389 return NULL_TREE;
2390 }
2391 return result;
2392 }
2393
2394 default:
2395 break;
2396 }
2397 }
2398 return NULL_TREE;
2399 }
2400
2401 /* Try to simplify the OR of two comparisons, specified by
2402 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2403 If this can be simplified to a single expression (without requiring
2404 the introduction of more SSA variables to hold intermediate values),
2405 return the resulting tree. Otherwise return NULL_TREE.
2406 If the result expression is non-null, it has boolean type. */
2407
2408 tree
2409 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2410 enum tree_code code2, tree op2a, tree op2b)
2411 {
2412 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2413 if (t)
2414 return t;
2415 else
2416 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2417 }
2418
2419
2420 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2421
2422 Either NULL_TREE, a simplified but non-constant value, or a constant
2423 is returned.
2424
2425 ??? This should go into a gimple-fold-inline.h file to be eventually
2426 privatized with the single valueize function used in the various TUs
2427 to avoid the indirect function call overhead. */
2428
2429 tree
2430 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2431 {
2432 location_t loc = gimple_location (stmt);
2433 switch (gimple_code (stmt))
2434 {
2435 case GIMPLE_ASSIGN:
2436 {
2437 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2438
2439 switch (get_gimple_rhs_class (subcode))
2440 {
2441 case GIMPLE_SINGLE_RHS:
2442 {
2443 tree rhs = gimple_assign_rhs1 (stmt);
2444 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2445
2446 if (TREE_CODE (rhs) == SSA_NAME)
2447 {
2448 /* If the RHS is an SSA_NAME, return its known constant value,
2449 if any. */
2450 return (*valueize) (rhs);
2451 }
2452 /* Handle propagating invariant addresses into address
2453 operations.
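For example, if the RHS is &MEM[ptr_1].f and ptr_1 valueizes to the
invariant &var, the base becomes the declaration var and the result
is rewritten as an invariant address of var at the constant byte
offset of field f.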
*/ 2454 else if (TREE_CODE (rhs) == ADDR_EXPR 2455 && !is_gimple_min_invariant (rhs)) 2456 { 2457 HOST_WIDE_INT offset = 0; 2458 tree base; 2459 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0), 2460 &offset, 2461 valueize); 2462 if (base 2463 && (CONSTANT_CLASS_P (base) 2464 || decl_address_invariant_p (base))) 2465 return build_invariant_address (TREE_TYPE (rhs), 2466 base, offset); 2467 } 2468 else if (TREE_CODE (rhs) == CONSTRUCTOR 2469 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE 2470 && (CONSTRUCTOR_NELTS (rhs) 2471 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)))) 2472 { 2473 unsigned i; 2474 tree val, *vec; 2475 2476 vec = XALLOCAVEC (tree, 2477 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))); 2478 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val) 2479 { 2480 val = (*valueize) (val); 2481 if (TREE_CODE (val) == INTEGER_CST 2482 || TREE_CODE (val) == REAL_CST 2483 || TREE_CODE (val) == FIXED_CST) 2484 vec[i] = val; 2485 else 2486 return NULL_TREE; 2487 } 2488 2489 return build_vector (TREE_TYPE (rhs), vec); 2490 } 2491 2492 if (kind == tcc_reference) 2493 { 2494 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR 2495 || TREE_CODE (rhs) == REALPART_EXPR 2496 || TREE_CODE (rhs) == IMAGPART_EXPR) 2497 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) 2498 { 2499 tree val = (*valueize) (TREE_OPERAND (rhs, 0)); 2500 return fold_unary_loc (EXPR_LOCATION (rhs), 2501 TREE_CODE (rhs), 2502 TREE_TYPE (rhs), val); 2503 } 2504 else if (TREE_CODE (rhs) == BIT_FIELD_REF 2505 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) 2506 { 2507 tree val = (*valueize) (TREE_OPERAND (rhs, 0)); 2508 return fold_ternary_loc (EXPR_LOCATION (rhs), 2509 TREE_CODE (rhs), 2510 TREE_TYPE (rhs), val, 2511 TREE_OPERAND (rhs, 1), 2512 TREE_OPERAND (rhs, 2)); 2513 } 2514 else if (TREE_CODE (rhs) == MEM_REF 2515 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) 2516 { 2517 tree val = (*valueize) (TREE_OPERAND (rhs, 0)); 2518 if (TREE_CODE (val) == ADDR_EXPR 2519 && is_gimple_min_invariant (val)) 2520 { 2521 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs), 2522 unshare_expr (val), 2523 TREE_OPERAND (rhs, 1)); 2524 if (tem) 2525 rhs = tem; 2526 } 2527 } 2528 return fold_const_aggregate_ref_1 (rhs, valueize); 2529 } 2530 else if (kind == tcc_declaration) 2531 return get_symbol_constant_value (rhs); 2532 return rhs; 2533 } 2534 2535 case GIMPLE_UNARY_RHS: 2536 { 2537 /* Handle unary operators that can appear in GIMPLE form. 2538 Note that we know the single operand must be a constant, 2539 so this should almost always return a simplified RHS. */ 2540 tree lhs = gimple_assign_lhs (stmt); 2541 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt)); 2542 2543 /* Conversions are useless for CCP purposes if they are 2544 value-preserving. Thus the restrictions that 2545 useless_type_conversion_p places for restrict qualification 2546 of pointer types should not apply here. 2547 Substitution later will only substitute to allowed places. */ 2548 if (CONVERT_EXPR_CODE_P (subcode) 2549 && POINTER_TYPE_P (TREE_TYPE (lhs)) 2550 && POINTER_TYPE_P (TREE_TYPE (op0)) 2551 && TYPE_ADDR_SPACE (TREE_TYPE (lhs)) 2552 == TYPE_ADDR_SPACE (TREE_TYPE (op0)) 2553 && TYPE_MODE (TREE_TYPE (lhs)) 2554 == TYPE_MODE (TREE_TYPE (op0))) 2555 return op0; 2556 2557 return 2558 fold_unary_ignore_overflow_loc (loc, subcode, 2559 gimple_expr_type (stmt), op0); 2560 } 2561 2562 case GIMPLE_BINARY_RHS: 2563 { 2564 /* Handle binary operators that can appear in GIMPLE form. 
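For example, if both operands valueize to the INTEGER_CSTs 3 and 4,
a PLUS_EXPR folds to the constant 7 via fold_binary_loc; the
POINTER_PLUS_EXPR special case below instead keeps &x + CST in an
invariant form.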
*/
2565 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2566 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2567
2568 /* Translate &x + CST into an invariant form suitable for
2569 further propagation. */
2570 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2571 && TREE_CODE (op0) == ADDR_EXPR
2572 && TREE_CODE (op1) == INTEGER_CST)
2573 {
2574 tree off = fold_convert (ptr_type_node, op1);
2575 return build_fold_addr_expr_loc
2576 (loc,
2577 fold_build2 (MEM_REF,
2578 TREE_TYPE (TREE_TYPE (op0)),
2579 unshare_expr (op0), off));
2580 }
2581
2582 return fold_binary_loc (loc, subcode,
2583 gimple_expr_type (stmt), op0, op1);
2584 }
2585
2586 case GIMPLE_TERNARY_RHS:
2587 {
2588 /* Handle ternary operators that can appear in GIMPLE form. */
2589 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2590 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2591 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2592
2593 /* Fold embedded expressions in ternary codes. */
2594 if ((subcode == COND_EXPR
2595 || subcode == VEC_COND_EXPR)
2596 && COMPARISON_CLASS_P (op0))
2597 {
2598 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2599 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2600 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2601 TREE_TYPE (op0), op00, op01);
2602 if (tem)
2603 op0 = tem;
2604 }
2605
2606 return fold_ternary_loc (loc, subcode,
2607 gimple_expr_type (stmt), op0, op1, op2);
2608 }
2609
2610 default:
2611 gcc_unreachable ();
2612 }
2613 }
2614
2615 case GIMPLE_CALL:
2616 {
2617 tree fn;
2618
2619 if (gimple_call_internal_p (stmt))
2620 /* No folding yet for these functions. */
2621 return NULL_TREE;
2622
2623 fn = (*valueize) (gimple_call_fn (stmt));
2624 if (TREE_CODE (fn) == ADDR_EXPR
2625 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2626 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2627 {
2628 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2629 tree call, retval;
2630 unsigned i;
2631 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2632 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2633 call = build_call_array_loc (loc,
2634 gimple_call_return_type (stmt),
2635 fn, gimple_call_num_args (stmt), args);
2636 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2637 if (retval)
2638 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2639 STRIP_NOPS (retval);
2640 return retval;
2641 }
2642 return NULL_TREE;
2643 }
2644
2645 default:
2646 return NULL_TREE;
2647 }
2648 }
2649
2650 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2651 Returns NULL_TREE if folding to a constant is not possible, otherwise
2652 returns a constant according to is_gimple_min_invariant. */
2653
2654 tree
2655 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2656 {
2657 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2658 if (res && is_gimple_min_invariant (res))
2659 return res;
2660 return NULL_TREE;
2661 }
2662
2663
2664 /* The following set of functions are supposed to fold references using
2665 their constant initializers. */
2666
2667 static tree fold_ctor_reference (tree type, tree ctor,
2668 unsigned HOST_WIDE_INT offset,
2669 unsigned HOST_WIDE_INT size, tree);
2670
2671 /* See if we can find a constructor defining the value of BASE.
2672 When we know the constructor with a constant offset (such as when
2673 BASE is array[40] and we do know the constructor of the array), then
2674 BIT_OFFSET is adjusted accordingly.
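For example, for MEM_REF[&a, 8] where a is a global array with a
known initializer, DECL_INITIAL (a) is returned and BIT_OFFSET is
increased by 8 * BITS_PER_UNIT.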
2675
2676 As a special case, return error_mark_node when constructor
2677 is not explicitly available, but it is known to be zero
2678 such as 'static const int a;'. */
2679 static tree
2680 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2681 tree (*valueize)(tree))
2682 {
2683 HOST_WIDE_INT bit_offset2, size, max_size;
2684 if (TREE_CODE (base) == MEM_REF)
2685 {
2686 if (!integer_zerop (TREE_OPERAND (base, 1)))
2687 {
2688 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2689 return NULL_TREE;
2690 *bit_offset += (mem_ref_offset (base).low
2691 * BITS_PER_UNIT);
2692 }
2693
2694 if (valueize
2695 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2696 base = valueize (TREE_OPERAND (base, 0));
2697 if (!base || TREE_CODE (base) != ADDR_EXPR)
2698 return NULL_TREE;
2699 base = TREE_OPERAND (base, 0);
2700 }
2701
2702 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2703 DECL_INITIAL. If BASE is a nested reference into another
2704 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2705 the inner reference. */
2706 switch (TREE_CODE (base))
2707 {
2708 case VAR_DECL:
2709 if (!const_value_known_p (base))
2710 return NULL_TREE;
2711
2712 /* Fallthru. */
2713 case CONST_DECL:
2714 if (!DECL_INITIAL (base)
2715 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2716 return error_mark_node;
2717 /* Do not return an error_mark_node DECL_INITIAL. LTO uses this
2718 as a special marker (_not_ zero ...) for its own purposes. */
2719 if (DECL_INITIAL (base) == error_mark_node)
2720 return NULL_TREE;
2721 return DECL_INITIAL (base);
2722
2723 case ARRAY_REF:
2724 case COMPONENT_REF:
2725 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2726 if (max_size == -1 || size != max_size)
2727 return NULL_TREE;
2728 *bit_offset += bit_offset2;
2729 return get_base_constructor (base, bit_offset, valueize);
2730
2731 case STRING_CST:
2732 case CONSTRUCTOR:
2733 return base;
2734
2735 default:
2736 return NULL_TREE;
2737 }
2738 }
2739
2740 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2741 to the memory at bit OFFSET.
2742
2743 We only do the simple job of folding byte accesses. */
2744
2745 static tree
2746 fold_string_cst_ctor_reference (tree type, tree ctor,
2747 unsigned HOST_WIDE_INT offset,
2748 unsigned HOST_WIDE_INT size)
2749 {
2750 if (INTEGRAL_TYPE_P (type)
2751 && (TYPE_MODE (type)
2752 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2753 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2754 == MODE_INT)
2755 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2756 && size == BITS_PER_UNIT
2757 && !(offset % BITS_PER_UNIT))
2758 {
2759 offset /= BITS_PER_UNIT;
2760 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2761 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2762 [offset]));
2763 /* Folding
2764 const char a[20]="hello";
2765 return a[10];
2766
2767 might lead to an offset greater than the string length. In this case
2768 we know the value is either initialized to 0 or out of bounds. Return
2769 0 in both cases. */
2770 return build_zero_cst (type);
2771 }
2772 return NULL_TREE;
2773 }
2774
2775 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE and
2776 size SIZE to the memory at bit OFFSET.
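For example, with static const int a[4] = {1, 2, 3, 4} and a 32-bit
int type, a 32-bit read at bit offset 64 has access index 2 and folds
to the constant 3.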
*/
2777
2778 static tree
2779 fold_array_ctor_reference (tree type, tree ctor,
2780 unsigned HOST_WIDE_INT offset,
2781 unsigned HOST_WIDE_INT size,
2782 tree from_decl)
2783 {
2784 unsigned HOST_WIDE_INT cnt;
2785 tree cfield, cval;
2786 double_int low_bound, elt_size;
2787 double_int index, max_index;
2788 double_int access_index;
2789 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2790 HOST_WIDE_INT inner_offset;
2791
2792 /* Compute low bound and elt size. */
2793 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2794 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2795 if (domain_type && TYPE_MIN_VALUE (domain_type))
2796 {
2797 /* Static constructors for variably sized objects make no sense. */
2798 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2799 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2800 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2801 }
2802 else
2803 low_bound = double_int_zero;
2804 /* Static constructors for variably sized objects make no sense. */
2805 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2806 == INTEGER_CST);
2807 elt_size =
2808 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2809
2810
2811 /* We can handle only constantly sized accesses that are known to not
2812 be larger than the size of an array element. */
2813 if (!TYPE_SIZE_UNIT (type)
2814 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2815 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2816 return NULL_TREE;
2817
2818 /* Compute the array index we look for. */
2819 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2820 .udiv (elt_size, TRUNC_DIV_EXPR);
2821 access_index += low_bound;
2822 if (index_type)
2823 access_index = access_index.ext (TYPE_PRECISION (index_type),
2824 TYPE_UNSIGNED (index_type));
2825
2826 /* And offset within the access. */
2827 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2828
2829 /* See if the array field is large enough to span the whole access. We do
2830 not care to fold accesses spanning multiple array indexes. */
2831 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2832 return NULL_TREE;
2833
2834 index = low_bound - double_int_one;
2835 if (index_type)
2836 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2837
2838 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2839 {
2840 /* An array constructor might explicitly set the index, specify a range,
2841 or leave the index NULL, meaning that it is the next index after the
2842 previous one. */
2843 if (cfield)
2844 {
2845 if (TREE_CODE (cfield) == INTEGER_CST)
2846 max_index = index = tree_to_double_int (cfield);
2847 else
2848 {
2849 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2850 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2851 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2852 }
2853 }
2854 else
2855 {
2856 index += double_int_one;
2857 if (index_type)
2858 index = index.ext (TYPE_PRECISION (index_type),
2859 TYPE_UNSIGNED (index_type));
2860 max_index = index;
2861 }
2862
2863 /* Do we have a match? */
2864 if (access_index.cmp (index, 1) >= 0
2865 && access_index.cmp (max_index, 1) <= 0)
2866 return fold_ctor_reference (type, cval, inner_offset, size,
2867 from_decl);
2868 }
2869 /* When memory is not explicitly mentioned in the constructor,
2870 it is 0 (or out of range). */
2871 return build_zero_cst (type);
2872 }
2873
2874 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
2875 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2876
2877 static tree
2878 fold_nonarray_ctor_reference (tree type, tree ctor,
2879 unsigned HOST_WIDE_INT offset,
2880 unsigned HOST_WIDE_INT size,
2881 tree from_decl)
2882 {
2883 unsigned HOST_WIDE_INT cnt;
2884 tree cfield, cval;
2885
2886 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2887 cval)
2888 {
2889 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2890 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2891 tree field_size = DECL_SIZE (cfield);
2892 double_int bitoffset;
2893 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2894 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2895 double_int bitoffset_end, access_end;
2896
2897 /* Variable sized objects in static constructors make no sense,
2898 but field_size can be NULL for flexible array members. */
2899 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2900 && TREE_CODE (byte_offset) == INTEGER_CST
2901 && (field_size != NULL_TREE
2902 ? TREE_CODE (field_size) == INTEGER_CST
2903 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2904
2905 /* Compute bit offset of the field. */
2906 bitoffset = tree_to_double_int (field_offset)
2907 + byte_offset_cst * bits_per_unit_cst;
2908 /* Compute bit offset where the field ends. */
2909 if (field_size != NULL_TREE)
2910 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2911 else
2912 bitoffset_end = double_int_zero;
2913
2914 access_end = double_int::from_uhwi (offset)
2915 + double_int::from_uhwi (size);
2916
2917 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2918 [BITOFFSET, BITOFFSET_END)? */
2919 if (access_end.cmp (bitoffset, 0) > 0
2920 && (field_size == NULL_TREE
2921 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2922 {
2923 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2924 /* We do have overlap. Now see if the field is large enough to
2925 cover the access. Give up for accesses spanning multiple
2926 fields. */
2927 if (access_end.cmp (bitoffset_end, 0) > 0)
2928 return NULL_TREE;
2929 if (double_int::from_uhwi (offset).slt (bitoffset))
2930 return NULL_TREE;
2931 return fold_ctor_reference (type, cval,
2932 inner_offset.to_uhwi (), size,
2933 from_decl);
2934 }
2935 }
2936 /* When memory is not explicitly mentioned in the constructor, it is 0. */
2937 return build_zero_cst (type);
2938 }
2939
2940 /* CTOR is the value initializing memory. Fold a reference of type TYPE and
2941 size SIZE to the memory at bit OFFSET. */
2942
2943 static tree
2944 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2945 unsigned HOST_WIDE_INT size, tree from_decl)
2946 {
2947 tree ret;
2948
2949 /* We found the field with an exact match. */
2950 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2951 && !offset)
2952 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2953
2954 /* We are at the end of the walk, see if we can view convert the
2955 result. */
2956 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2957 /* VIEW_CONVERT_EXPR is defined only for matching sizes.
*/
2958 && !compare_tree_int (TYPE_SIZE (type), size)
2959 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
2960 {
2961 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2962 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2963 if (ret)
2964 STRIP_NOPS (ret);
2965 return ret;
2966 }
2967 if (TREE_CODE (ctor) == STRING_CST)
2968 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2969 if (TREE_CODE (ctor) == CONSTRUCTOR)
2970 {
2971
2972 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2973 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2974 return fold_array_ctor_reference (type, ctor, offset, size,
2975 from_decl);
2976 else
2977 return fold_nonarray_ctor_reference (type, ctor, offset, size,
2978 from_decl);
2979 }
2980
2981 return NULL_TREE;
2982 }
2983
2984 /* Return the tree representing the element referenced by T if T is an
2985 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
2986 names using VALUEIZE. Return NULL_TREE otherwise. */
2987
2988 tree
2989 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2990 {
2991 tree ctor, idx, base;
2992 HOST_WIDE_INT offset, size, max_size;
2993 tree tem;
2994
2995 if (TREE_THIS_VOLATILE (t))
2996 return NULL_TREE;
2997
2998 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2999 return get_symbol_constant_value (t);
3000
3001 tem = fold_read_from_constant_string (t);
3002 if (tem)
3003 return tem;
3004
3005 switch (TREE_CODE (t))
3006 {
3007 case ARRAY_REF:
3008 case ARRAY_RANGE_REF:
3009 /* Constant indexes are handled well by get_base_constructor.
3010 Only special case variable offsets.
3011 FIXME: This code can't handle nested references with variable indexes
3012 (they will be handled only by iteration of ccp). Perhaps we can bring
3013 get_ref_base_and_extent here and make it use a valueize callback. */
3014 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3015 && valueize
3016 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3017 && TREE_CODE (idx) == INTEGER_CST)
3018 {
3019 tree low_bound, unit_size;
3020 double_int doffset;
3021
3022 /* If the resulting bit-offset is constant, track it. */
3023 if ((low_bound = array_ref_low_bound (t),
3024 TREE_CODE (low_bound) == INTEGER_CST)
3025 && (unit_size = array_ref_element_size (t),
3026 host_integerp (unit_size, 1))
3027 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3028 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3029 doffset.fits_shwi ()))
3030 {
3031 offset = doffset.to_shwi ();
3032 offset *= TREE_INT_CST_LOW (unit_size);
3033 offset *= BITS_PER_UNIT;
3034
3035 base = TREE_OPERAND (t, 0);
3036 ctor = get_base_constructor (base, &offset, valueize);
3037 /* Empty constructor. Always fold to 0. */
3038 if (ctor == error_mark_node)
3039 return build_zero_cst (TREE_TYPE (t));
3040 /* Out-of-bounds array access. Value is undefined,
3041 but don't fold. */
3042 if (offset < 0)
3043 return NULL_TREE;
3044 /* We cannot determine the ctor. */
3045 if (!ctor)
3046 return NULL_TREE;
3047 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3048 TREE_INT_CST_LOW (unit_size)
3049 * BITS_PER_UNIT,
3050 base);
3051 }
3052 }
3053 /* Fallthru. */
3054
3055 case COMPONENT_REF:
3056 case BIT_FIELD_REF:
3057 case TARGET_MEM_REF:
3058 case MEM_REF:
3059 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3060 ctor = get_base_constructor (base, &offset, valueize);
3061
3062 /* Empty constructor. Always fold to 0.
*/
3063 if (ctor == error_mark_node)
3064 return build_zero_cst (TREE_TYPE (t));
3065 /* We do not know the precise address. */
3066 if (max_size == -1 || max_size != size)
3067 return NULL_TREE;
3068 /* We cannot determine the ctor. */
3069 if (!ctor)
3070 return NULL_TREE;
3071
3072 /* Out-of-bounds array access. Value is undefined, but don't fold. */
3073 if (offset < 0)
3074 return NULL_TREE;
3075
3076 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3077 base);
3078
3079 case REALPART_EXPR:
3080 case IMAGPART_EXPR:
3081 {
3082 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3083 if (c && TREE_CODE (c) == COMPLEX_CST)
3084 return fold_build1_loc (EXPR_LOCATION (t),
3085 TREE_CODE (t), TREE_TYPE (t), c);
3086 break;
3087 }
3088
3089 default:
3090 break;
3091 }
3092
3093 return NULL_TREE;
3094 }
3095
3096 tree
3097 fold_const_aggregate_ref (tree t)
3098 {
3099 return fold_const_aggregate_ref_1 (t, NULL);
3100 }
3101
3102 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3103 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3104 KNOWN_BINFO carries the binfo describing the true type of
3105 OBJ_TYPE_REF_OBJECT(REF). */
3106
3107 tree
3108 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3109 {
3110 unsigned HOST_WIDE_INT offset, size;
3111 tree v, fn, vtable;
3112
3113 vtable = v = BINFO_VTABLE (known_binfo);
3114 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
3115 if (!v)
3116 return NULL_TREE;
3117
3118 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3119 {
3120 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3121 v = TREE_OPERAND (v, 0);
3122 }
3123 else
3124 offset = 0;
3125
3126 if (TREE_CODE (v) != ADDR_EXPR)
3127 return NULL_TREE;
3128 v = TREE_OPERAND (v, 0);
3129
3130 if (TREE_CODE (v) != VAR_DECL
3131 || !DECL_VIRTUAL_P (v)
3132 || !DECL_INITIAL (v)
3133 || DECL_INITIAL (v) == error_mark_node)
3134 return NULL_TREE;
3135 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3136 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3137 offset += token * size;
3138 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3139 offset, size, vtable);
3140 if (!fn || integer_zerop (fn))
3141 return NULL_TREE;
3142 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3143 || TREE_CODE (fn) == FDESC_EXPR);
3144 fn = TREE_OPERAND (fn, 0);
3145 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3146
3147 /* When the cgraph node is missing and the function is not public, we cannot
3148 devirtualize. This can happen in WHOPR when the actual method
3149 ends up in another partition, because we found the devirtualization
3150 possibility too late. */
3151 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3152 return NULL_TREE;
3153
3154 /* Make sure we create a cgraph node for functions we'll reference.
3155 They can be non-existent if the reference comes from an entry
3156 of an external vtable for example. */
3157 cgraph_get_create_node (fn);
3158
3159 return fn;
3160 }
3161
3162 /* Return true iff VAL is a gimple expression that is known to be
3163 non-negative. Restricted to floating-point inputs. */
3164
3165 bool
3166 gimple_val_nonnegative_real_p (tree val)
3167 {
3168 gimple def_stmt;
3169
3170 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3171
3172 /* Use existing logic for non-gimple trees.
*/
3173 if (tree_expr_nonnegative_p (val))
3174 return true;
3175
3176 if (TREE_CODE (val) != SSA_NAME)
3177 return false;
3178
3179 /* Currently we look only at the immediately defining statement
3180 to make this determination, since recursion on defining
3181 statements of operands can lead to quadratic behavior in the
3182 worst case. This is expected to catch almost all occurrences
3183 in practice. It would be possible to implement limited-depth
3184 recursion if important cases are lost. Alternatively, passes
3185 that need this information (such as the pow/powi lowering code
3186 in the cse_sincos pass) could be revised to provide it through
3187 dataflow propagation. */
3188
3189 def_stmt = SSA_NAME_DEF_STMT (val);
3190
3191 if (is_gimple_assign (def_stmt))
3192 {
3193 tree op0, op1;
3194
3195 /* See fold-const.c:tree_expr_nonnegative_p for additional
3196 cases that could be handled with recursion. */
3197
3198 switch (gimple_assign_rhs_code (def_stmt))
3199 {
3200 case ABS_EXPR:
3201 /* Always true for floating-point operands. */
3202 return true;
3203
3204 case MULT_EXPR:
3205 /* True if the two operands are identical (since we are
3206 restricted to floating-point inputs). */
3207 op0 = gimple_assign_rhs1 (def_stmt);
3208 op1 = gimple_assign_rhs2 (def_stmt);
3209
3210 if (op0 == op1
3211 || operand_equal_p (op0, op1, 0))
3212 return true;
3213
3214 default:
3215 return false;
3216 }
3217 }
3218 else if (is_gimple_call (def_stmt))
3219 {
3220 tree fndecl = gimple_call_fndecl (def_stmt);
3221 if (fndecl
3222 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3223 {
3224 tree arg1;
3225
3226 switch (DECL_FUNCTION_CODE (fndecl))
3227 {
3228 CASE_FLT_FN (BUILT_IN_ACOS):
3229 CASE_FLT_FN (BUILT_IN_ACOSH):
3230 CASE_FLT_FN (BUILT_IN_CABS):
3231 CASE_FLT_FN (BUILT_IN_COSH):
3232 CASE_FLT_FN (BUILT_IN_ERFC):
3233 CASE_FLT_FN (BUILT_IN_EXP):
3234 CASE_FLT_FN (BUILT_IN_EXP10):
3235 CASE_FLT_FN (BUILT_IN_EXP2):
3236 CASE_FLT_FN (BUILT_IN_FABS):
3237 CASE_FLT_FN (BUILT_IN_FDIM):
3238 CASE_FLT_FN (BUILT_IN_HYPOT):
3239 CASE_FLT_FN (BUILT_IN_POW10):
3240 return true;
3241
3242 CASE_FLT_FN (BUILT_IN_SQRT):
3243 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3244 negative inputs. */
3245 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3246 return true;
3247
3248 break;
3249
3250 CASE_FLT_FN (BUILT_IN_POWI):
3251 /* True if the second argument is an even integer. */
3252 arg1 = gimple_call_arg (def_stmt, 1);
3253
3254 if (TREE_CODE (arg1) == INTEGER_CST
3255 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3256 return true;
3257
3258 break;
3259
3260 CASE_FLT_FN (BUILT_IN_POW):
3261 /* True if the second argument is an even integer-valued
3262 real. */
3263 arg1 = gimple_call_arg (def_stmt, 1);
3264
3265 if (TREE_CODE (arg1) == REAL_CST)
3266 {
3267 REAL_VALUE_TYPE c;
3268 HOST_WIDE_INT n;
3269
3270 c = TREE_REAL_CST (arg1);
3271 n = real_to_integer (&c);
3272
3273 if ((n & 1) == 0)
3274 {
3275 REAL_VALUE_TYPE cint;
3276 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3277 if (real_identical (&c, &cint))
3278 return true;
3279 }
3280 }
3281
3282 break;
3283
3284 default:
3285 return false;
3286 }
3287 }
3288 }
3289
3290 return false;
3291 }
3292
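/* As an illustration of gimple_val_nonnegative_real_p: for the GIMPLE
   statement t_1 = y_2 * y_2 the MULT_EXPR case reports t_1 as
   nonnegative because both operands are the same SSA name, and a call
   t_3 = __builtin_pow (x_4, 2.0) is nonnegative because 2.0 is an even
   integer-valued real constant. */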