/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "flags.h"
#include "gimple.h"
#include "expr.h"
#include "cfgloop.h"
#include "optabs.h"
#include "tree-ssa-propagate.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither is volatile-qualified):

     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions,
   which allows the cast to be removed, and {NOT_EXPR,NEG_EXPR} into
   a subsequent {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */

static bool forward_propagate_addr_expr (tree name, tree rhs);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple stmt);

/* Get the next statement we can propagate NAME's value into skipping
   trivial copies.  Returns the statement that is suitable as a
   propagation destination or NULL if there is no such one.
   This only returns destinations in a single-use chain.  If
   FINAL_NAME_P is non-NULL, the SSA name that represents the final
   use is written to it.  */

static gimple
get_prop_dest_stmt (tree name, tree *final_name_p)
{
  use_operand_p use;
  gimple use_stmt;

  do {
    /* If name has multiple uses, bail out.  */
    if (!single_imm_use (name, &use, &use_stmt))
      return NULL;

    /* If this is not a trivial copy, we found it.  */
    if (!gimple_assign_ssa_name_copy_p (use_stmt)
        || gimple_assign_rhs1 (use_stmt) != name)
      break;

    /* Continue searching uses of the copy destination.  */
    name = gimple_assign_lhs (use_stmt);
  } while (1);

  if (final_name_p)
    *final_name_p = name;

  return use_stmt;
}

/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such one.
   If SINGLE_USE_ONLY is set, consider only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */
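
/* For instance (illustrative GIMPLE), propagation is refused from

     tmp_1 = *ptr_2;

   because the RHS is a load, and from

     fn_1 = (void (*) (void)) fn_2;

   because conversions of function pointers may require target-specific
   canonicalization.  */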

static bool
can_propagate_from (gimple def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining
   statements.  If NAME was replaced in its only use then this function
   can be used to clean up dead stmts.  The function handles already
   released SSA names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      cfg_changed |= gimple_purge_dead_eh_edges (bb);
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of a gimple_assign STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt),
                            gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                            gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons try the first operand first; that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, !single_use0_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of the comparison
   operands of GIMPLE_COND statement STMT into the conditional if that
   simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping
     edges.  */
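  /* For instance (illustrative GIMPLE), for a one-bit b_1

       if (b_1 == 0) goto L1; else goto L2;

     becomes

       if (b_1 != 0) goto L2; else goto L1;

     i.e. the condition is inverted and the true and false edge flags
     are exchanged, which preserves semantics.  */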
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}


/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool swap = false;

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
      else if (code == COND_EXPR
               && ((def_code == BIT_NOT_EXPR
                    && TYPE_PRECISION (TREE_TYPE (cond)) == 1)
                   || (def_code == BIT_XOR_EXPR
                       && integer_onep (gimple_assign_rhs2 (def_stmt)))))
        {
          tmp = gimple_assign_rhs1 (def_stmt);
          swap = true;
        }
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, 0);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        {
          gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
          if (swap)
            {
              tree t = gimple_assign_rhs2 (stmt);
              gimple_assign_set_rhs2 (stmt, gimple_assign_rhs3 (stmt));
              gimple_assign_set_rhs3 (stmt, t);
            }
        }
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   values in the rhs of statement STMT into the conditional arms
   if that simplifies it.
   Returns true if the stmt was changed.  */
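
/* For example (illustrative GIMPLE; SSA names made up), when both
   COND_EXPRs test the same condition

     x_1 = c_2 ? a_3 : b_4;
     y_5 = c_2 ? x_1 : d_6;

   the first arm of y_5 can only ever see the first arm of x_1, so
   this folds to

     y_5 = c_2 ? a_3 : d_6;  */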

static bool
combine_cond_exprs (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree cond, val1, val2;
  bool changed = false;

  cond = gimple_assign_rhs1 (stmt);
  val1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (val1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val1);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val1 = unshare_expr (gimple_assign_rhs2 (def_stmt));
          gimple_assign_set_rhs2 (stmt, val1);
          changed = true;
        }
    }
  val2 = gimple_assign_rhs3 (stmt);
  if (TREE_CODE (val2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (val2);
      if (is_gimple_assign (def_stmt)
          && gimple_assign_rhs_code (def_stmt) == gimple_assign_rhs_code (stmt)
          && operand_equal_p (gimple_assign_rhs1 (def_stmt), cond, 0))
        {
          val2 = unshare_expr (gimple_assign_rhs3 (def_stmt));
          gimple_assign_set_rhs3 (stmt, val2);
          changed = true;
        }
    }
  if (operand_equal_p (val1, val2, 0))
    {
      gimple_assign_set_rhs_from_tree (gsi_p, val1);
      stmt = gsi_stmt (*gsi_p);
      changed = true;
    }

  if (changed)
    update_stmt (stmt);

  return changed;
}

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
    cfg_changed = true;

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is an SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   not be totally successful, yet things may still have changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
          || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
         do the propagation to the current statement.  In particular
         we can end up with a conversion needed for a non-invariant
         address which we cannot do in a single statement.  */
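      /* For instance (illustrative GIMPLE), given

           ptr_1 = &a;
           ptr_2 = ptr_1;

         we recurse to try propagating &a into all uses of ptr_2
         as well.  */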
      if (!single_use_p
          || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
              && (!is_gimple_min_invariant (def_rhs)
                  || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                      && POINTER_TYPE_P (TREE_TYPE (def_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
        return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
        gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs, NULL_TREE);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
                                        new_def_rhs, NULL_TREE);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhs = gimple_assign_lhs (use_stmt);
  while (handled_component_p (lhs))
    lhs = TREE_OPERAND (lhs, 0);

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (gimple_assign_lhs (use_stmt) == lhs
               && integer_zerop (TREE_OPERAND (lhs, 1))
               && useless_type_conversion_p
                    (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                     TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          gimple_assign_set_lhs (use_stmt, new_lhs);
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhs = gimple_assign_rhs1 (use_stmt);
  if (TREE_CODE (rhs) == ADDR_EXPR)
    rhs = TREE_OPERAND (rhs, 0);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (gimple_assign_rhs1 (use_stmt) == rhs
               && integer_zerop (TREE_OPERAND (rhs, 1))
               && useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                     TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          gimple_assign_set_rhs1 (use_stmt, new_rhs);
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true if all uses have been propagated into.  */
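
/* For instance (illustrative GIMPLE), with

     ptr_1 = &x[0];
     res_2 = *ptr_1;

   propagating the address turns the use into

     res_2 = x[0];

   and, if that was the only use, ptr_1's definition becomes dead.  */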

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  int stmt_loop_depth = bb_loop_depth (gimple_bb (SSA_NAME_DEF_STMT (name)));
  imm_use_iterator iter;
  gimple use_stmt;
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      /* If the use is in a deeper loop nest, then we do not want
         to propagate non-invariant ADDR_EXPRs into the loop as that
         is likely adding expression evaluations into the loop.  */
      if (bb_loop_depth (gimple_bb (use_stmt)) > stmt_loop_depth
          && !is_gimple_min_invariant (rhs))
        {
          all = false;
          continue;
        }

      {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
        /* If the use has moved to a different statement adjust
           the update machinery for the old statement too.  */
        if (use_stmt != gsi_stmt (gsi))
          {
            update_stmt (use_stmt);
            use_stmt = gsi_stmt (gsi);
          }

        update_stmt (use_stmt);
      }
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}


/* Forward propagate the comparison defined in *DEFGSI like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  Advances DEFGSI to the next
   statement.  */

static bool
forward_propagate_comparison (gimple_stmt_iterator *defgsi)
{
  gimple stmt = gsi_stmt (*defgsi);
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  tree tmp = NULL_TREE;
  gimple_stmt_iterator gsi;
  enum tree_code code;
  tree lhs;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    goto bailout;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt
      || !is_gimple_assign (use_stmt))
    goto bailout;

  code = gimple_assign_rhs_code (use_stmt);
  lhs = gimple_assign_lhs (use_stmt);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    goto bailout;

  /* We can propagate the condition into a statement that
     computes the logical negation of the comparison result.  */
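  /* For instance (illustrative GIMPLE; assuming NaNs need not be
     honored for the operand type), the pair

       cond_1 = a_2 < b_3;
       x_4 = ~cond_1;

     can be rewritten so that the use computes the inverted comparison
     directly:

       x_4 = a_2 >= b_3;  */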
  if ((code == BIT_NOT_EXPR
       && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
      || (code == BIT_XOR_EXPR
          && integer_onep (gimple_assign_rhs2 (use_stmt))))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
      bool nans = HONOR_NANS (TYPE_MODE (type));
      enum tree_code inv_code;
      inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
      if (inv_code == ERROR_MARK)
        goto bailout;

      tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
                    gimple_assign_rhs2 (stmt));
    }
  else
    goto bailout;

  gsi = gsi_for_stmt (use_stmt);
  gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
  use_stmt = gsi_stmt (gsi);
  update_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Replaced '");
      print_gimple_expr (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "' with '");
      print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
      fprintf (dump_file, "'\n");
    }

  /* When we remove stmt now the iterator defgsi goes off its current
     sequence, hence advance it now.  */
  gsi_next (defgsi);

  /* Remove defining statements.  */
  return remove_prop_source_from_use (name);

bailout:
  gsi_next (defgsi);
  return false;
}


/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
   If so, we can change STMT into lhs = y which can later be copy
   propagated.  Similarly for negation.

   This could trivially be formulated as a forward propagation
   to immediate uses.  However, we already had an implementation
   from DOM which used backward propagation via the use-def links.

   It turns out that backward propagation is actually faster as
   there's less work to do for each NOT/NEG expression we find.
   Backwards propagation needs to look at the statement in a single
   backlink.  Forward propagation needs to look at potentially more
   than one forward link.

   Returns true when the statement was changed.  */

static bool
simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
{
  gimple stmt = gsi_stmt (*gsi_p);
  tree rhs = gimple_assign_rhs1 (stmt);
  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);

  /* See if the RHS_DEF_STMT has the same form as our statement.  */
  if (is_gimple_assign (rhs_def_stmt)
      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
    {
      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);

      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
      if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
          stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
          return true;
        }
    }

  return false;
}

/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  vec<tree> labels;
  labels.create (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }

  labels.release ();
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gimple stmt)
{
  tree cond = gimple_switch_index (stmt);
  tree def, to, ti;
  gimple def_stmt;

  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      def_stmt = SSA_NAME_DEF_STMT (cond);
      if (is_gimple_assign (def_stmt))
        {
          if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;

              def = gimple_assign_rhs1 (def_stmt);

              to = TREE_TYPE (cond);
              ti = TREE_TYPE (def);

              /* If we have an extension that preserves value, then we
                 can copy the source value into the switch.  */

              need_precision = TYPE_PRECISION (ti);
              fail = false;
              if (! INTEGRAL_TYPE_P (ti))
                fail = true;
              else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
                fail = true;
              else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
                need_precision += 1;
              if (TYPE_PRECISION (to) < need_precision)
                fail = true;

              if (!fail)
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}

/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              HOST_WIDE_INT offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (offset)
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    double_int_to_tree (sizetype,
                                                        mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || code == NOP_EXPR)
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}

/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize

     memcpy (p, "abcd", 4);
     memset (p + 4, ' ', 3);

   into

     memcpy (p, "abcd   ", 7);

   if the latter call can be stored by pieces during expansion.  */
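
/* The same applies (illustrative sketch) when the leading memcpy has
   already been folded into a single character store:

     *p = 'a';
     memset (p + 1, 'b', 3);

   can become

     memcpy (p, "abbb", 4);  */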

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!host_integerp (val2, 0)
              || !host_integerp (len2, 1)
              || compare_tree_int (len2, 1024) == 1)
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!host_integerp (len1, 1))
                break;
              str1 = string_constant (src1, &off1);
              if (str1 == NULL_TREE)
                break;
              if (!host_integerp (off1, 1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_low_cst (off1, 1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !host_integerp (src1, 0))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !host_integerp (diff, 1)
              || tree_int_cst_lt (len1, diff)
              || compare_tree_int (diff, 1024) == 1)
            break;

          /* Use the maximum of difference plus memset length and memcpy
             length as the new memcpy length; if it is too big, bail out.  */
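          /* E.g. for the memcpy/memset pair in the function comment,
             the difference is 4 and the memset length is 3, so the
             combined length is 4 + 3 = 7, matching
             memcpy (p, "abcd   ", 7).  */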
          src_len = tree_low_cst (diff, 1);
          src_len += tree_low_cst (len2, 1);
          if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
            src_len = tree_low_cst (len1, 1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
                    tree_low_cst (len1, 1));
          else
            src_buf[0] = tree_low_cst (src1, 0);
          memset (src_buf + tree_low_cst (diff, 1),
                  tree_low_cst (val2, 0), tree_low_cst (len2, 1));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_remove (gsi_p, true);
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                release_ssa_name (lhs1);
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              gimple_call_set_fndecl (stmt2,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              release_defs (stmt1);
              update_stmt (stmt2);
              return false;
            }
        }
      break;
    default:
      break;
    }
  return false;
}

/* Checks if expression has type of one-bit precision, or is a known
   truth-valued expression.  */
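
/* For instance (illustrative), an SSA name whose type has precision
   one (such as _Bool on most targets), or one defined by a comparison
   like

     t_1 = a_2 < b_3;

   is truth-valued, while a plain 8-bit char SSA name is not, since
   there ~X need not equal !X.  */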
static bool
truth_valued_ssa_name (tree name)
{
  gimple def;
  tree type = TREE_TYPE (name);

  if (!INTEGRAL_TYPE_P (type))
    return false;
  /* Don't check here for BOOLEAN_TYPE as the precision isn't
     necessarily one and so ~X is not equal to !X.  */
  if (TYPE_PRECISION (type) == 1)
    return true;
  def = SSA_NAME_DEF_STMT (name);
  if (is_gimple_assign (def))
    return truth_value_p (gimple_assign_rhs_code (def));
  return false;
}

/* Helper routine for the simplify_bitwise_binary_1 function.
   Return for the SSA name NAME the expression X if it meets the
   condition NAME = !X.  Otherwise return NULL_TREE.
   Detected patterns for NAME = !X are:
     !X and X == 0 for X with integral type.
     X ^ 1, X != 1, or ~X for X with integral type with precision of one.  */
static tree
lookup_logical_inverted_value (tree name)
{
  tree op1, op2;
  enum tree_code code;
  gimple def;

  /* If NAME has a non-integral type or isn't an SSA_NAME, then
     return.  */
  if (TREE_CODE (name) != SSA_NAME
      || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
    return NULL_TREE;
  def = SSA_NAME_DEF_STMT (name);
  if (!is_gimple_assign (def))
    return NULL_TREE;

  code = gimple_assign_rhs_code (def);
  op1 = gimple_assign_rhs1 (def);
  op2 = NULL_TREE;

  /* For an EQ_EXPR, NE_EXPR, or BIT_XOR_EXPR operation fetch the
     second operand; for other codes it stays NULL_TREE.  */
  if (code == EQ_EXPR || code == NE_EXPR
      || code == BIT_XOR_EXPR)
    op2 = gimple_assign_rhs2 (def);

  switch (code)
    {
    case BIT_NOT_EXPR:
      if (truth_valued_ssa_name (name))
        return op1;
      break;
    case EQ_EXPR:
      /* Check if we have X == 0 and X has an integral type.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_zerop (op2))
        return op1;
      break;
    case NE_EXPR:
      /* Check if we have X != 1 and X is truth-valued.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        break;
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    case BIT_XOR_EXPR:
      /* Check if we have X ^ 1 and X is truth valued.  */
      if (integer_onep (op2) && truth_valued_ssa_name (op1))
        return op1;
      break;
    default:
      break;
    }

  return NULL_TREE;
}

/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
   operations CODE, if one operand has the logically inverted
   value of the other.  */
static tree
simplify_bitwise_binary_1 (enum tree_code code, tree type,
                           tree arg1, tree arg2)
{
  tree anot;

  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
      && code != BIT_XOR_EXPR)
    return NULL_TREE;

  /* First check if operands ARG1 and ARG2 are equal.  If so
     return NULL_TREE as this optimization is handled by fold_stmt.  */
  if (arg1 == arg2)
    return NULL_TREE;
  /* See if we have in arguments logical-not patterns.  */
  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
       || anot != arg2)
      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
          || anot != arg1))
    return NULL_TREE;

  /* X & !X -> 0.  */
  if (code == BIT_AND_EXPR)
    return fold_convert (type, integer_zero_node);
  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
  if (truth_valued_ssa_name (anot))
    return fold_convert (type, integer_one_node);

  /* ??? Otherwise the result is (X != 0 ? X : 1); not handled yet.  */
  return NULL_TREE;
}

/* Given an SSA name NAME, see if it was defined by an assignment;
   if so, set *CODE to the code of that assignment's RHS, *ARG1 to
   its first operand and *ARG2 to its second operand.  */

static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
          && can_propagate_from (def))
        {
          code1 = gimple_assign_rhs_code (def);
          arg11 = gimple_assign_rhs1 (def);
          arg21 = gimple_assign_rhs2 (def);
          arg31 = gimple_assign_rhs3 (def);
        }
    }
  else if (grhs_class == GIMPLE_TERNARY_RHS
           || grhs_class == GIMPLE_BINARY_RHS
           || grhs_class == GIMPLE_UNARY_RHS
           || grhs_class == GIMPLE_SINGLE_RHS)
    extract_ops_from_tree_1 (name, &code1, &arg11, &arg21, &arg31);

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  /* Ignore arg3 currently.  */
}

/* Return true if a conversion of an operand from type FROM to type TO
   should be applied after performing the operation instead.  */

static bool
hoist_conversion_for_bitop_p (tree to, tree from)
{
  /* That's a good idea if the conversion widens the operand, thus
     after hoisting the conversion the operation will be narrower.  */
  if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
    return true;

  /* It's also a good idea if the conversion is to a non-integer mode.  */
  if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
    return true;

  /* Or if the precision of TO is not the same as the precision
     of its mode.  */
  if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
    return true;

  return false;
}

/* Simplify bitwise binary operations.
   Return true if a transformation applied, otherwise return false.  */

static bool
simplify_bitwise_binary (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg1 = gimple_assign_rhs1 (stmt);
  tree arg2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree res;
  tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
  enum tree_code def1_code, def2_code;

  defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
  defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);

  /* Try to fold (type) X op CST -> (type) (X op ((type-of-X) CST))
     when profitable.  */
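  /* For example (illustrative; c_2 of type unsigned char):

       x_1 = (int) c_2;
       y_3 = x_1 & 3;

     becomes

       tem_4 = c_2 & 3;
       y_3 = (int) tem_4;

     so the bitwise operation happens in the narrower type.  */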

/* Simplify bitwise binary operations.
   Return true if a transformation applied, otherwise return false.  */

static bool
simplify_bitwise_binary (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree arg1 = gimple_assign_rhs1 (stmt);
  tree arg2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree res;
  tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
  enum tree_code def1_code, def2_code;

  defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
  defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);

  /* Try to fold (type) X op CST -> (type) (X op ((type-of-X) CST))
     when profitable.  */
  if (TREE_CODE (arg2) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (def1_code)
      && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
      && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
      && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
    {
      gimple newop;
      tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
      newop =
	gimple_build_assign_with_ops (code, tem, def1_arg1,
				      fold_convert_loc (gimple_location (stmt),
							TREE_TYPE (def1_arg1),
							arg2));
      gimple_set_location (newop, gimple_location (stmt));
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
					tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  /* For bitwise binary operations apply operand conversions to the
     binary operation result instead of to the operands.  This allows
     us to combine successive conversions and bitwise binary operations.  */
  if (CONVERT_EXPR_CODE_P (def1_code)
      && CONVERT_EXPR_CODE_P (def2_code)
      && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
      && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
    {
      gimple newop;
      tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
      newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
      gimple_set_location (newop, gimple_location (stmt));
      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
					tem, NULL_TREE, NULL_TREE);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B.  */
  if (def1_code == def2_code
      && def1_code == BIT_AND_EXPR
      && operand_equal_for_phi_arg_p (def1_arg2,
				      def2_arg2))
    {
      tree b = def1_arg2;
      tree a = def1_arg1;
      tree c = def2_arg1;
      tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
      /* If A OP0 C (this usually means C is the same as A) is 0
	 then fold it down correctly.  */
      if (integer_zerop (inner))
	{
	  gimple_assign_set_rhs_from_tree (gsi, inner);
	  update_stmt (stmt);
	  return true;
	}
      /* If A OP0 C (this usually means C is the same as A) is an ssa_name
	 then fold it down correctly.  */
      else if (TREE_CODE (inner) == SSA_NAME)
	{
	  tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
				    inner, b);
	  gimple_assign_set_rhs_from_tree (gsi, outer);
	  update_stmt (stmt);
	  return true;
	}
      else
	{
	  gimple newop;
	  tree tem;
	  tem = make_ssa_name (TREE_TYPE (arg2), NULL);
	  newop = gimple_build_assign_with_ops (code, tem, a, c);
	  gimple_set_location (newop, gimple_location (stmt));
	  /* Make sure to re-process the new stmt as it's walking upwards.  */
	  gsi_insert_before (gsi, newop, GSI_NEW_STMT);
	  gimple_assign_set_rhs1 (stmt, tem);
	  gimple_assign_set_rhs2 (stmt, b);
	  gimple_assign_set_rhs_code (stmt, def1_code);
	  update_stmt (stmt);
	  return true;
	}
    }
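
  /* E.g. the (A & B) OP0 (C & B) transform above rewrites (made-up
     SSA names):

       t_1 = a_2 & m_3;
       t_4 = c_5 & m_3;
       r_6 = t_1 | t_4;

     into

       t_7 = a_2 | c_5;
       r_6 = t_7 & m_3;  */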

  /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2).  */
  if (code == BIT_AND_EXPR
      && def1_code == BIT_IOR_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (def1_arg2) == INTEGER_CST)
    {
      tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
			      arg2, def1_arg2);
      tree tem;
      gimple newop;
      if (integer_zerop (cst))
	{
	  gimple_assign_set_rhs1 (stmt, def1_arg1);
	  update_stmt (stmt);
	  return true;
	}
      tem = make_ssa_name (TREE_TYPE (arg2), NULL);
      newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
					    tem, def1_arg1, arg2);
      gimple_set_location (newop, gimple_location (stmt));
      /* Make sure to re-process the new stmt as it's walking upwards.  */
      gsi_insert_before (gsi, newop, GSI_NEW_STMT);
      gimple_assign_set_rhs1 (stmt, tem);
      gimple_assign_set_rhs2 (stmt, cst);
      gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
      update_stmt (stmt);
      return true;
    }

  /* Combine successive equal operations with constants.  */
  if ((code == BIT_AND_EXPR
       || code == BIT_IOR_EXPR
       || code == BIT_XOR_EXPR)
      && def1_code == code
      && TREE_CODE (arg2) == INTEGER_CST
      && TREE_CODE (def1_arg2) == INTEGER_CST)
    {
      tree cst = fold_build2 (code, TREE_TYPE (arg2),
			      arg2, def1_arg2);
      gimple_assign_set_rhs1 (stmt, def1_arg1);
      gimple_assign_set_rhs2 (stmt, cst);
      update_stmt (stmt);
      return true;
    }

  /* Canonicalize X ^ ~0 to ~X.  */
  if (code == BIT_XOR_EXPR
      && TREE_CODE (arg2) == INTEGER_CST
      && integer_all_onesp (arg2))
    {
      gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
      gcc_assert (gsi_stmt (*gsi) == stmt);
      update_stmt (stmt);
      return true;
    }

  /* Try simple folding for X op !X, and X op X.  */
  res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
  if (res != NULL_TREE)
    {
      gimple_assign_set_rhs_from_tree (gsi, res);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }
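
  /* For example, successive masking is combined above (made-up SSA
     names):

       t_1 = x_2 & 255;
       r_3 = t_1 & 15;

     becomes r_3 = x_2 & 15, since 255 & 15 folds to 15 at compile
     time; and x_2 ^ -1 is canonicalized to ~x_2.  */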

  if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
    {
      enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR
						  : BIT_AND_EXPR;
      if (def1_code == ocode)
	{
	  tree x = arg2;
	  enum tree_code coden;
	  tree a1, a2;
	  /* ( X | Y) & X -> X */
	  /* ( X & Y) | X -> X */
	  if (x == def1_arg1
	      || x == def1_arg2)
	    {
	      gimple_assign_set_rhs_from_tree (gsi, x);
	      update_stmt (gsi_stmt (*gsi));
	      return true;
	    }

	  defcodefor_name (def1_arg1, &coden, &a1, &a2);
	  /* (~X | Y) & X -> X & Y */
	  /* (~X & Y) | X -> X | Y */
	  if (coden == BIT_NOT_EXPR && a1 == x)
	    {
	      gimple_assign_set_rhs_with_ops (gsi, code,
					      x, def1_arg2);
	      gcc_assert (gsi_stmt (*gsi) == stmt);
	      update_stmt (stmt);
	      return true;
	    }
	  defcodefor_name (def1_arg2, &coden, &a1, &a2);
	  /* (Y | ~X) & X -> X & Y */
	  /* (Y & ~X) | X -> X | Y */
	  if (coden == BIT_NOT_EXPR && a1 == x)
	    {
	      gimple_assign_set_rhs_with_ops (gsi, code,
					      x, def1_arg1);
	      gcc_assert (gsi_stmt (*gsi) == stmt);
	      update_stmt (stmt);
	      return true;
	    }
	}
      if (def2_code == ocode)
	{
	  enum tree_code coden;
	  tree a1;
	  tree x = arg1;
	  /* X & ( X | Y) -> X */
	  /* X | ( X & Y) -> X */
	  if (x == def2_arg1
	      || x == def2_arg2)
	    {
	      gimple_assign_set_rhs_from_tree (gsi, x);
	      update_stmt (gsi_stmt (*gsi));
	      return true;
	    }
	  defcodefor_name (def2_arg1, &coden, &a1, NULL);
	  /* X & (~X | Y) -> X & Y */
	  /* X | (~X & Y) -> X | Y */
	  if (coden == BIT_NOT_EXPR && a1 == x)
	    {
	      gimple_assign_set_rhs_with_ops (gsi, code,
					      x, def2_arg2);
	      gcc_assert (gsi_stmt (*gsi) == stmt);
	      update_stmt (stmt);
	      return true;
	    }
	  defcodefor_name (def2_arg2, &coden, &a1, NULL);
	  /* X & (Y | ~X) -> X & Y */
	  /* X | (Y & ~X) -> X | Y */
	  if (coden == BIT_NOT_EXPR && a1 == x)
	    {
	      gimple_assign_set_rhs_with_ops (gsi, code,
					      x, def2_arg1);
	      gcc_assert (gsi_stmt (*gsi) == stmt);
	      update_stmt (stmt);
	      return true;
	    }
	}
    }

  return false;
}
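
/* E.g. the absorption rules above simplify (made-up SSA names)

     t_1 = x_2 | y_3;
     r_4 = t_1 & x_2;

   to r_4 = x_2, and with ~x_2 instead of x_2 in the inner operation
   to r_4 = x_2 & y_3.  */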

/* Perform re-associations of the plus or minus statement at *GSI that
   are always permitted.  Returns true if the CFG was changed.  */

static bool
associate_plusminus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool changed;

  /* We can't reassociate at all for saturating types.  */
  if (TYPE_SATURATING (TREE_TYPE (rhs1)))
    return false;

  /* First contract negates.  */
  do
    {
      changed = false;

      /* A +- (-B) -> A -+ B.  */
      if (TREE_CODE (rhs2) == SSA_NAME)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
	      && can_propagate_from (def_stmt))
	    {
	      code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}

      /* (-A) + B -> B - A.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	  && code == PLUS_EXPR)
	{
	  gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
	  if (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
	      && can_propagate_from (def_stmt))
	    {
	      code = MINUS_EXPR;
	      gimple_assign_set_rhs_code (stmt, code);
	      rhs1 = rhs2;
	      gimple_assign_set_rhs1 (stmt, rhs1);
	      rhs2 = gimple_assign_rhs1 (def_stmt);
	      gimple_assign_set_rhs2 (stmt, rhs2);
	      gimple_set_modified (stmt, true);
	      changed = true;
	    }
	}
    }
  while (changed);

  /* We can't reassociate floating-point or fixed-point plus or minus
     because of saturation to +-Inf.  */
  if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
      || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
    goto out;
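
  /* For instance, the negate contraction above turns (made-up SSA
     names)

       t_1 = -b_2;
       r_3 = a_4 - t_1;

     into r_3 = a_4 + b_2, and a_4 + t_1 into a_4 - b_2.  */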

  /* Second match patterns that allow contracting a plus-minus pair
     irrespective of overflow issues.

	(A +- B) - A       ->  +- B
	(A +- B) -+ B      ->  A
	(CST +- A) +- CST  ->  CST +- A
	(A + CST) +- CST   ->  A + CST
	~A + A             ->  -1
	~A + 1             ->  -A
	A - (A +- B)       ->  -+ B
	A +- (B +- A)      ->  +- B
	CST +- (CST +- A)  ->  CST +- A
	CST +- (A +- CST)  ->  CST +- A
	A + ~A             ->  -1

     via commutating the addition and contracting operations to zero
     by reassociation.  */

  if (TREE_CODE (rhs1) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs2, 0)
		  && code == MINUS_EXPR)
		{
		  /* (A +- B) - A -> +- B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs2, 0)
		       && code != def_code)
		{
		  /* (A +- B) -+ B -> A.  */
		  code = TREE_CODE (def_rhs1);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (TREE_CODE (rhs2) == INTEGER_CST
		       && TREE_CODE (def_rhs1) == INTEGER_CST)
		{
		  /* (CST +- A) +- CST -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs1),
					  def_rhs1, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = def_code;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (TREE_CODE (rhs2) == INTEGER_CST
		       && TREE_CODE (def_rhs2) == INTEGER_CST
		       && def_code == PLUS_EXPR)
		{
		  /* (A + CST) +- CST -> A + CST.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs1),
					  def_rhs2, rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = PLUS_EXPR;
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = def_rhs1;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = cst;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR
		   && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (code == PLUS_EXPR
		  && operand_equal_p (def_rhs1, rhs2, 0))
		{
		  /* ~A + A -> -1.  */
		  code = INTEGER_CST;
		  rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (code == PLUS_EXPR
		       && integer_onep (rhs2))
		{
		  /* ~A + 1 -> -A.  RHS1 is the BIT_NOT_EXPR result here,
		     so it is RHS2 that must be the constant one.  */
		  code = NEGATE_EXPR;
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }
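
  /* E.g. the patterns above rewrite (made-up SSA names)

       t_1 = a_2 + b_3;
       r_4 = t_1 - a_2;

     to r_4 = b_3, and ~a_2 + 1 to -a_2.  */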

  if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
      if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  if (def_code == PLUS_EXPR
	      || def_code == MINUS_EXPR)
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
	      if (operand_equal_p (def_rhs1, rhs1, 0)
		  && code == MINUS_EXPR)
		{
		  /* A - (A +- B) -> -+ B.  */
		  code = ((def_code == PLUS_EXPR)
			  ? NEGATE_EXPR : TREE_CODE (def_rhs2));
		  rhs1 = def_rhs2;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (operand_equal_p (def_rhs2, rhs1, 0)
		       && code != def_code)
		{
		  /* A +- (B +- A) -> +- B.  */
		  code = ((code == PLUS_EXPR)
			  ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
		  rhs1 = def_rhs1;
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	      else if (TREE_CODE (rhs1) == INTEGER_CST
		       && TREE_CODE (def_rhs1) == INTEGER_CST)
		{
		  /* CST +- (CST +- A) -> CST +- A.  */
		  tree cst = fold_binary (code, TREE_TYPE (rhs2),
					  rhs1, def_rhs1);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
		      gimple_assign_set_rhs_code (stmt, code);
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs2;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	      else if (TREE_CODE (rhs1) == INTEGER_CST
		       && TREE_CODE (def_rhs2) == INTEGER_CST)
		{
		  /* CST +- (A +- CST) -> CST +- A.  */
		  tree cst = fold_binary (def_code == code
					  ? PLUS_EXPR : MINUS_EXPR,
					  TREE_TYPE (rhs2),
					  rhs1, def_rhs2);
		  if (cst && !TREE_OVERFLOW (cst))
		    {
		      rhs1 = cst;
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      rhs2 = def_rhs1;
		      gimple_assign_set_rhs2 (stmt, rhs2);
		      gimple_set_modified (stmt, true);
		    }
		}
	    }
	  else if (def_code == BIT_NOT_EXPR
		   && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
	    {
	      tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
	      if (code == PLUS_EXPR
		  && operand_equal_p (def_rhs1, rhs1, 0))
		{
		  /* A + ~A -> -1.  */
		  code = INTEGER_CST;
		  rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
		  rhs2 = NULL_TREE;
		  gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
		  gcc_assert (gsi_stmt (*gsi) == stmt);
		  gimple_set_modified (stmt, true);
		}
	    }
	}
    }

out:
  if (gimple_modified_p (stmt))
    {
      fold_stmt_inplace (gsi);
      update_stmt (stmt);
      if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	return true;
    }

  return false;
}

/* Associate operands of a POINTER_PLUS_EXPR assignment at *GSI.  Returns
   true if anything changed, false otherwise.  */

static bool
associate_pointerplus (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree ptr, rhs, algn;

  /* Pattern match
       tem = (sizetype) ptr;
       tem = tem & algn;
       tem = -tem;
       ... = ptr p+ tem;
     and produce the simpler form, which is also easier to analyze with
     respect to alignment:
       ... = ptr & ~algn;  */
  ptr = gimple_assign_rhs1 (stmt);
  rhs = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != NEGATE_EXPR)
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || gimple_assign_rhs_code (def_stmt) != BIT_AND_EXPR)
    return false;
  rhs = gimple_assign_rhs1 (def_stmt);
  algn = gimple_assign_rhs2 (def_stmt);
  if (TREE_CODE (rhs) != SSA_NAME
      || TREE_CODE (algn) != INTEGER_CST)
    return false;
  def_stmt = SSA_NAME_DEF_STMT (rhs);
  if (!is_gimple_assign (def_stmt)
      || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    return false;
  if (gimple_assign_rhs1 (def_stmt) != ptr)
    return false;

  algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
  gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
  fold_stmt_inplace (gsi);
  update_stmt (stmt);

  return true;
}
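
/* For illustration, the pattern above matches alignment code such as
   (made-up SSA names)

     t_1 = (sizetype) ptr_2;
     t_3 = t_1 & 15;
     t_4 = -t_3;
     r_5 = ptr_2 p+ t_4;

   and rewrites it to r_5 = ptr_2 & ~15, i.e. rounding ptr_2 down to
   a 16-byte boundary.  */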

/* Combine two conversions in a row for the second conversion at *GSI.
   Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
   run.  Else it returns 0.  */

static int
combine_conversions (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, lhs;
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum tree_code code2;

  gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
		       || code == FLOAT_EXPR
		       || code == FIX_TRUNC_EXPR);

  lhs = gimple_assign_lhs (stmt);
  op0 = gimple_assign_rhs1 (stmt);
  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
    {
      gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
      return 1;
    }

  if (TREE_CODE (op0) != SSA_NAME)
    return 0;

  def_stmt = SSA_NAME_DEF_STMT (op0);
  if (!is_gimple_assign (def_stmt))
    return 0;

  code2 = gimple_assign_rhs_code (def_stmt);

  if (CONVERT_EXPR_CODE_P (code2) || code2 == FLOAT_EXPR)
    {
      tree defop0 = gimple_assign_rhs1 (def_stmt);
      tree type = TREE_TYPE (lhs);
      tree inside_type = TREE_TYPE (defop0);
      tree inter_type = TREE_TYPE (op0);
      int inside_int = INTEGRAL_TYPE_P (inside_type);
      int inside_ptr = POINTER_TYPE_P (inside_type);
      int inside_float = FLOAT_TYPE_P (inside_type);
      int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
      unsigned int inside_prec = TYPE_PRECISION (inside_type);
      int inside_unsignedp = TYPE_UNSIGNED (inside_type);
      int inter_int = INTEGRAL_TYPE_P (inter_type);
      int inter_ptr = POINTER_TYPE_P (inter_type);
      int inter_float = FLOAT_TYPE_P (inter_type);
      int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
      unsigned int inter_prec = TYPE_PRECISION (inter_type);
      int inter_unsignedp = TYPE_UNSIGNED (inter_type);
      int final_int = INTEGRAL_TYPE_P (type);
      int final_ptr = POINTER_TYPE_P (type);
      int final_float = FLOAT_TYPE_P (type);
      int final_vec = TREE_CODE (type) == VECTOR_TYPE;
      unsigned int final_prec = TYPE_PRECISION (type);
      int final_unsignedp = TYPE_UNSIGNED (type);

      /* Don't propagate ssa names that occur in abnormal phis.  */
      if (TREE_CODE (defop0) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (defop0))
	return 0;

      /* In addition to the cases of two conversions in a row
	 handled below, if we are converting something to its own
	 type via an object of identical or wider precision, neither
	 conversion is needed.  */
      if (useless_type_conversion_p (type, inside_type)
	  && (((inter_int || inter_ptr) && final_int)
	      || (inter_float && final_float))
	  && inter_prec >= final_prec)
	{
	  gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
	  gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* Likewise, if the intermediate and initial types are either both
	 float or both integer, we don't need the middle conversion if the
	 former is wider than the latter and doesn't change the signedness
	 (for integers).  Avoid this if the final type is a pointer since
	 then we sometimes need the middle conversion.  Likewise if the
	 final type has a precision not equal to the size of its mode.  */
      if (((inter_int && inside_int) || (inter_float && inside_float))
	  && (final_int || final_float)
	  && inter_prec >= inside_prec
	  && (inter_float || inter_unsignedp == inside_unsignedp)
	  && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type)))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}
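
      /* E.g. for a char c_1 (made-up SSA names)

	   t_2 = (int) c_1;
	   r_3 = (long) t_2;

	 the intermediate widening to int is dropped above and the
	 statement becomes r_3 = (long) c_1.  */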

      /* If we have a sign-extension of a zero-extended value, we can
	 replace that by a single zero-extension.  Likewise if the
	 final conversion does not change precision we can drop the
	 intermediate conversion.  */
      if (inside_int && inter_int && final_int
	  && ((inside_prec < inter_prec && inter_prec < final_prec
	       && inside_unsignedp && !inter_unsignedp)
	      || final_prec == inter_prec))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* Two conversions in a row are not needed unless:
	 - some conversion is floating-point (overstrict for now), or
	 - some conversion is a vector (overstrict for now), or
	 - the intermediate type is narrower than both initial and
	   final, or
	 - the intermediate type and innermost type differ in signedness,
	   and the outermost type is wider than the intermediate, or
	 - the initial type is a pointer type and the precisions of the
	   intermediate and final types differ, or
	 - the final type is a pointer type and the precisions of the
	   initial and intermediate types differ.  */
      if (! inside_float && ! inter_float && ! final_float
	  && ! inside_vec && ! inter_vec && ! final_vec
	  && (inter_prec >= inside_prec || inter_prec >= final_prec)
	  && ! (inside_int && inter_int
		&& inter_unsignedp != inside_unsignedp
		&& inter_prec < final_prec)
	  && ((inter_unsignedp && inter_prec > inside_prec)
	      == (final_unsignedp && final_prec > inter_prec))
	  && ! (inside_ptr && inter_prec != final_prec)
	  && ! (final_ptr && inside_prec != inter_prec)
	  && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		&& TYPE_MODE (type) == TYPE_MODE (inter_type)))
	{
	  gimple_assign_set_rhs1 (stmt, defop0);
	  update_stmt (stmt);
	  return remove_prop_source_from_use (op0) ? 2 : 1;
	}

      /* A truncation to an unsigned type should be canonicalized as
	 bitwise and of a mask.  */
      if (final_int && inter_int && inside_int
	  && final_prec == inside_prec
	  && final_prec > inter_prec
	  && inter_unsignedp)
	{
	  tree tem;
	  tem = fold_build2 (BIT_AND_EXPR, inside_type,
			     defop0,
			     double_int_to_tree
			       (inside_type, double_int::mask (inter_prec)));
	  if (!useless_type_conversion_p (type, inside_type))
	    {
	      tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
					      GSI_SAME_STMT);
	      gimple_assign_set_rhs1 (stmt, tem);
	    }
	  else
	    gimple_assign_set_rhs_from_tree (gsi, tem);
	  update_stmt (gsi_stmt (*gsi));
	  return 1;
	}
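
      /* E.g. a truncation through a narrower unsigned type (made-up
	 SSA names, int i_1)

	   t_2 = (unsigned short) i_1;
	   r_3 = (int) t_2;

	 is canonicalized above to r_3 = i_1 & 65535.  */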

      /* If we are converting an integer to a floating-point that can
	 represent it exactly and back to an integer, we can skip the
	 floating-point conversion.  */
      if (inside_int && inter_float && final_int
	  && (unsigned) significand_size (TYPE_MODE (inter_type))
	     >= inside_prec - !inside_unsignedp)
	{
	  if (useless_type_conversion_p (type, inside_type))
	    {
	      gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
	      gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
	      update_stmt (stmt);
	      return remove_prop_source_from_use (op0) ? 2 : 1;
	    }
	  else
	    {
	      gimple_assign_set_rhs1 (stmt, defop0);
	      gimple_assign_set_rhs_code (stmt, CONVERT_EXPR);
	      update_stmt (stmt);
	      return remove_prop_source_from_use (op0) ? 2 : 1;
	    }
	}
    }

  return 0;
}

/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx, n, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
			       gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
	return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  n = TREE_INT_CST_LOW (op1) / size;
  if (n != 1)
    return false;
  idx = TREE_INT_CST_LOW (op2) / size;

  if (code == VEC_PERM_EXPR)
    {
      tree p, m, index, tem;
      unsigned nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST)
	return false;
      nelts = VECTOR_CST_NELTS (m);
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
	p = gimple_assign_rhs1 (def_stmt);
      else
	{
	  p = gimple_assign_rhs2 (def_stmt);
	  idx -= nelts;
	}
      index = build_int_cst (TREE_TYPE (TREE_TYPE (m)), idx * size);
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
		    unshare_expr (p), op1, index);
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}

/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the inputs.  */

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned int nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
		       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  gcc_assert (TREE_CODE (mask) == VECTOR_CST);

  nelts = VECTOR_CST_NELTS (mask);
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
	maybe_identity2 = false;
      else if (j == i + nelts)
	maybe_identity1 = false;
      else
	return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}
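
/* For example, two successive reversals of a 4-element vector,
   MASK1 = MASK2 = { 3, 2, 1, 0 }, combine to { 0, 1, 2, 3 }, so the
   function above returns 1 and the first input can be used directly.  */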

/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
	return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
	return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
	return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
			  : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
	{
	  if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
	    return 0;

	  if (TREE_CODE (op1) == VECTOR_CST)
	    arg1 = op1;
	  else if (TREE_CODE (op1) == SSA_NAME)
	    {
	      enum tree_code code2;

	      gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
	      if (!def_stmt2 || !can_propagate_from (def_stmt2))
		return 0;

	      code2 = gimple_assign_rhs_code (def_stmt2);
	      if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
		return 0;
	      arg1 = gimple_assign_rhs1 (def_stmt2);
	    }
	  else
	    return 0;
	}
      else
	{
	  /* Already used twice in this statement.  */
	  if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
	    return 0;
	  arg1 = arg0;
	}
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
	  || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
	return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
	ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
	ret |= remove_prop_source_from_use (op1);
      return ret ? 2 : 1;
    }

  return 0;
}
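
/* E.g. the two-shuffle case above combines (made-up SSA names)

     v_1 = VEC_PERM_EXPR <a_2, a_2, { 3, 2, 1, 0 }>;
     v_3 = VEC_PERM_EXPR <v_1, v_1, { 3, 2, 1, 0 }>;

   into plain v_3 = a_2.  */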

/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */

static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple def_stmt;
  tree op, op2, orig, type, elem_type;
  unsigned elem_size, nelts, i;
  enum tree_code code;
  constructor_elt *elt;
  unsigned char *sel;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  nelts = TYPE_VECTOR_SUBPARTS (type);
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  sel = XALLOCAVEC (unsigned char, nelts);
  orig = NULL;
  maybe_ident = true;
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;

      if (i >= nelts)
	return false;

      if (TREE_CODE (elt->value) != SSA_NAME)
	return false;
      def_stmt = get_prop_source_stmt (elt->value, false, NULL);
      if (!def_stmt)
	return false;
      code = gimple_assign_rhs_code (def_stmt);
      if (code != BIT_FIELD_REF)
	return false;
      op1 = gimple_assign_rhs1 (def_stmt);
      ref = TREE_OPERAND (op1, 0);
      if (orig)
	{
	  if (ref != orig)
	    return false;
	}
      else
	{
	  if (TREE_CODE (ref) != SSA_NAME)
	    return false;
	  if (!useless_type_conversion_p (type, TREE_TYPE (ref)))
	    return false;
	  orig = ref;
	}
      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
	return false;
      sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
      if (sel[i] != i)
	maybe_ident = false;
    }
  if (i < nelts)
    return false;

  if (maybe_ident)
    gimple_assign_set_rhs_from_tree (gsi, orig);
  else
    {
      tree mask_type, *mask_elts;

      if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
	return false;
      mask_type
	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
			     nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
	  || GET_MODE_SIZE (TYPE_MODE (mask_type))
	     != GET_MODE_SIZE (TYPE_MODE (type)))
	return false;
      mask_elts = XALLOCAVEC (tree, nelts);
      for (i = 0; i < nelts; i++)
	mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
      op2 = build_vector (mask_type, mask_elts);
      gimple_assign_set_rhs_with_ops_1 (gsi, VEC_PERM_EXPR, orig, orig, op2);
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
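
/* E.g. a constructor whose elements extract all lanes of a vector
   v_1 in order (made-up SSA names, 32-bit elements):

     e_2 = BIT_FIELD_REF <v_1, 32, 0>;
     e_3 = BIT_FIELD_REF <v_1, 32, 32>;
     ...
     r_4 = { e_2, e_3, ... };

   is replaced above by r_4 = v_1; out-of-order extracts become a
   single VEC_PERM_EXPR when the target supports the mask.  */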

/* Main entry point for the forward propagation and statement combine
   optimizer.  */

static unsigned int
ssa_forward_propagate_and_combine (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs))
		{
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ??? Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off)))))
		{
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE_CLASS (code) == tcc_comparison)
	    {
	      if (forward_propagate_comparison (&gsi))
		cfg_changed = true;
	    }
	  else
	    gsi_next (&gsi);
	}

      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool changed = false;

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_ASSIGN:
	      {
		tree rhs1 = gimple_assign_rhs1 (stmt);
		enum tree_code code = gimple_assign_rhs_code (stmt);

		if ((code == BIT_NOT_EXPR
		     || code == NEGATE_EXPR)
		    && TREE_CODE (rhs1) == SSA_NAME)
		  changed = simplify_not_neg_expr (&gsi);
		else if (code == COND_EXPR
			 || code == VEC_COND_EXPR)
		  {
		    /* In this case the entire COND_EXPR is in rhs1.  */
		    if (forward_propagate_into_cond (&gsi)
			|| combine_cond_exprs (&gsi))
		      {
			changed = true;
			stmt = gsi_stmt (gsi);
		      }
		  }
		else if (TREE_CODE_CLASS (code) == tcc_comparison)
		  {
		    int did_something;
		    did_something = forward_propagate_into_comparison (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_AND_EXPR
			 || code == BIT_IOR_EXPR
			 || code == BIT_XOR_EXPR)
		  changed = simplify_bitwise_binary (&gsi);
		else if (code == PLUS_EXPR
			 || code == MINUS_EXPR)
		  changed = associate_plusminus (&gsi);
		else if (code == POINTER_PLUS_EXPR)
		  changed = associate_pointerplus (&gsi);
		else if (CONVERT_EXPR_CODE_P (code)
			 || code == FLOAT_EXPR
			 || code == FIX_TRUNC_EXPR)
		  {
		    int did_something = combine_conversions (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == VEC_PERM_EXPR)
		  {
		    int did_something = simplify_permutation (&gsi);
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		  }
		else if (code == BIT_FIELD_REF)
		  changed = simplify_bitfield_ref (&gsi);
		else if (code == CONSTRUCTOR
			 && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
		  changed = simplify_vector_constructor (&gsi);
		break;
	      }

	    case GIMPLE_SWITCH:
	      changed = simplify_gimple_switch (stmt);
	      break;

	    case GIMPLE_COND:
	      {
		int did_something;
		did_something = forward_propagate_into_gimple_cond (stmt);
		if (did_something == 2)
		  cfg_changed = true;
		changed = did_something != 0;
		break;
	      }

	    case GIMPLE_CALL:
	      {
		tree callee = gimple_call_fndecl (stmt);
		if (callee != NULL_TREE
		    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
		  changed = simplify_builtin_call (&gsi, callee);
		break;
	      }

	    default:;
	    }

	  if (changed)
	    {
	      /* If the stmt changed then re-visit it and the statements
		 inserted before it.  */
	      for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		  break;
	      if (gsi_end_p (gsi))
		gsi = gsi_start_bb (bb);
	      else
		gsi_next (&gsi);
	    }
	  else
	    {
	      /* Stmt no longer needs to be revisited.  */
	      gimple_set_plf (stmt, GF_PLF_1, true);
	      gsi_next (&gsi);
	    }
	}
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}


static bool
gate_forwprop (void)
{
  return flag_tree_forwprop;
}

struct gimple_opt_pass pass_forwprop =
{
 {
  GIMPLE_PASS,
  "forwprop",			/* name */
  OPTGROUP_NONE,		/* optinfo_flags */
  gate_forwprop,		/* gate */
  ssa_forward_propagate_and_combine,	/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_FORWPROP,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa		/* todo_flags_finish */
 }
};