/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  "Dead statements" have no impact
   on the program's output, while "necessary statements" may have
   impact on the output.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
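/* As an illustrative (non-normative) example of control dependence:
   in

     if (p_1)          <-- block B ends in this condition
       x_2 = f ();     <-- block C
     y_3 = g ();       <-- block D

   block C is control dependent on the edge out of B taken when p_1 is
   true, because that edge decides whether C executes at all, while
   block D executes on either path and is control dependent on neither
   edge out of B.  */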
/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* When non-NULL holds map from basic block index into the postorder.  */
static int *bb_postorder;


/* True if we should treat any stmt with a vdef as necessary.  */

static inline bool
keep_all_vdefs_p ()
{
  return optimize_debug;
}

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
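/* A small, hypothetical example of how necessity propagates: if the
   statement

     a_2 = b_1 + 1;

   has been marked necessary, mark_operand_necessary (b_1) marks the
   definition of b_1 and pushes it on the worklist, so whatever computes
   b_1 is in turn kept alive, transitively.  */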
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (stmt_unremovable_because_of_non_call_eh_p (cfun, stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            CASE_BUILT_IN_ALLOCA:
            case BUILT_IN_STRDUP:
            case BUILT_IN_STRNDUP:
              return;

            default:;
            }

        if (callee != NULL_TREE
            && flag_allocation_dce
            && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
          return;

        /* Most, but not all, function calls are required.  Function calls
           that produce no result and have no side effects (i.e. const pure
           functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        /* IFN_GOACC_LOOP calls are necessary in that they are used to
           represent a parameter (i.e. step, bound) of a lowered OpenACC
           partitioned loop.  But this kind of partitioned loop might not
           survive aggressive loop removal, since it has a loop exit and
           is assumed to be finite.  Therefore, we need to explicitly mark
           these calls.  (An example is libgomp.oacc-c-c++-common/pr84955.c.)  */
        if (gimple_call_internal_p (stmt, IFN_GOACC_LOOP))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (gimple_debug_nonbind_marker_p (stmt)
          || !gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (gimple_vdef (stmt) && keep_all_vdefs_p ())
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
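/* The allocation calls deliberately left unmarked above are what enables
   allocation/free pair removal.  A hypothetical GIMPLE sketch:

     p_1 = __builtin_malloc (n_2);   <-- not obviously necessary
     __builtin_free (p_1);

   If p_1 has no other uses, propagate_necessity below refrains from
   marking the malloc necessary through the free's argument, and
   eliminate_unnecessary_stmts can then drop the free together with the
   dead allocation.  */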
/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple *stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  */
  if (aggressive)
    {
      class loop *loop;
      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n", loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
    }
}
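/* For instance (an illustrative example, not from the testsuite), a loop
   such as

     while (p_1 != 0)
       p_1 = p_1->next;

   has no visible side effects, yet it may loop forever on a circular
   list.  Unless finite_loop_p can prove termination, the aggressive
   mode above keeps the loop's controlling branch alive by marking the
   control dependences of its latch as necessary.  */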
/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (cfun, def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      poly_int64 size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
        = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (known_eq (size, max_size)
              && known_subrange_p (ref->offset, ref->max_size, offset, size))
            return true;
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}
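/* To illustrate the kill logic above (hypothetical GIMPLE, with `a'
   a non-aliased local):

     a = 1;
     a = 2;      <-- marked necessary, and a kill for `a'
     x_3 = a;    <-- the load whose reaching defs are walked

   Walking the virtual use-def chain upward from the load first reaches
   `a = 2', marks it necessary, and stops there because the store fully
   covers the reference; `a = 1' is never visited and may stay dead.  */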
static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());

  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          CASE_BUILT_IN_ALLOCA:
          case BUILT_IN_FREE:
            return false;

          default:;
          }

      if (callee != NULL_TREE
          && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
              || DECL_IS_REPLACEABLE_OPERATOR_DELETE_P (callee)))
        return false;
    }

  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true if PHI has a single argument, or all of its arguments
   are identical, so that it can be removed.  */

static bool
degenerate_phi_p (gimple *phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
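/* For example, the (hypothetical) virtual PHI

     .MEM_4 = PHI <.MEM_2(3), .MEM_2(4)>

   is degenerate: every argument is .MEM_2, so all uses of .MEM_4 can
   simply use .MEM_2 instead and the PHI node can go away (see
   remove_dead_phis below).  */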
/* Return true if NEW_CALL and DELETE_CALL are a valid pair of new
   and delete operators.  */

static bool
valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
{
  tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
  tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
  const char *new_name = IDENTIFIER_POINTER (new_asm);
  const char *delete_name = IDENTIFIER_POINTER (delete_asm);
  unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
  unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);

  if (new_len < 5 || delete_len < 6)
    return false;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (new_len < 4 || delete_len < 5)
    return false;
  /* *_len is now just the length after initial underscores.  */
  if (new_name[0] != 'Z' || new_name[1] != 'n')
    return false;
  if (delete_name[0] != 'Z' || delete_name[1] != 'd')
    return false;
  /* _Znw must match _Zdl, _Zna must match _Zda.  */
  if ((new_name[2] != 'w' || delete_name[2] != 'l')
      && (new_name[2] != 'a' || delete_name[2] != 'a'))
    return false;
  /* 'j', 'm' and 'y' correspond to size_t.  */
  if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
    return false;
  if (delete_name[3] != 'P' || delete_name[4] != 'v')
    return false;
  if (new_len == 4
      || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
    {
      /* _ZnXY or _ZnXYRKSt9nothrow_t matches
         _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
      if (delete_len == 5)
        return true;
      if (delete_len == 6 && delete_name[5] == new_name[3])
        return true;
      if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
        return true;
    }
  else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
           || (new_len == 33
               && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
    {
      /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
         _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
         _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
      if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
        return true;
      if (delete_len == 21
          && delete_name[5] == new_name[3]
          && !memcmp (delete_name + 6, "St11align_val_t", 15))
        return true;
      if (delete_len == 34
          && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
        return true;
    }
  return false;
}
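/* For concreteness, on an LP64 target (size_t mangled as 'm') the
   matcher above accepts pairs such as:

     _Znwm / _ZdlPv    operator new (size_t) / operator delete (void *)
     _Znam / _ZdaPv    operator new[] (size_t) / operator delete[] (void *)
     _Znwm / _ZdlPvm   ... / sized operator delete (void *, size_t)

   while rejecting mismatches like _Znwm / _ZdaPv (scalar new paired
   with array delete).  */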
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   If AGGRESSIVE is false, control dependences are not taken into
   account.  */

static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             to the BB.  Consider the following example:

             a = exp1;
             b = exp2;
             if (test)
                ;
             else
                ;
             c = PHI (a, b)

             We need to mark the control dependence of the empty basic
             blocks, since they contain the computation of the PHI operands.

             Doing so is too restrictive in the case the predecessor block
             is in a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i < 1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for J in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (that is within the empty loop) also contains the block that
             determines the number of iterations of the loop, which would
             prevent removing the empty loop.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass, we identify how the
             control dependence would look like if the edge were split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the modified
             CFG, C' is always a child of C.  There are two cases how the
             children of C' can look like:

             1) C' is a leaf.

                In this case the only basic block C' is control dependent
                on is B.

             2) C' has a single child that is B.

                In this case the control dependence of C' is the same as
                the control dependence of B in the original CFG, except
                for block B itself (since C' postdominates B in the
                modified CFG).

             Now how do we decide which case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor
                of B that is not postdominated by B.  Testing this condition
                is somewhat expensive, because we need to iterate over all
                successors of B.  We are safe to assume that this does not
                happen: we will mark B as needed when processing the other
                path from B to C that is control dependent on B, and marking
                the control dependencies of B itself is harmless because
                they will be processed anyway after processing the control
                statement in B.

             b) C does not postdominate B.  Then case 1 always happens,
                since there is a path from C to the exit that does not go
                through B and thus also not through C'.  */
          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function do not mark that necessary through
             processing the argument.  */
          bool is_delete_operator
            = (is_gimple_call (stmt)
               && gimple_call_replaceable_operator_delete_p
                    (as_a <gcall *> (stmt)));
          if (is_delete_operator
              || gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple *def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && ((DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                       && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                      || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (def_callee)))
                {
                  if (is_delete_operator)
                    {
                      if (!valid_new_delete_pair_p (def_stmt, stmt))
                        mark_operand_necessary (gimple_call_arg (stmt, 0));

                      /* Delete operators can have alignment and (or) size
                         as next arguments.  When being a SSA_NAME, they
                         must be marked as necessary.  */
                      if (gimple_call_num_args (stmt) >= 2)
                        for (unsigned i = 1; i < gimple_call_num_args (stmt);
                             i++)
                          {
                            tree arg = gimple_call_arg (stmt, i);
                            if (TREE_CODE (arg) == SSA_NAME)
                              mark_operand_necessary (arg);
                          }
                    }

                  continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* No need to search for vdefs if we intrinsically keep them all.  */
          if (keep_all_vdefs_p ())
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but non-aliased ones which we handle
             in 1).  By keeping a global visited bitmap for references
             we walk for 2) we avoid quadratic behavior for those.  */
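          /* Case 1) above covers, e.g., a load `tmp_1 = local' from a
             non-aliased local, while case 2) covers, e.g., `tmp_1 = global'
             or `tmp_1 = *ptr_2' (an illustrative distinction, mirroring
             ref_may_be_aliased above).  */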
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              if (callee != NULL_TREE
                  && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
                      || DECL_IS_REPLACEABLE_OPERATOR_DELETE_P (callee)))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back to
             super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple *use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}
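/* When remove_dead_stmt below deletes a dead GIMPLE_COND, one outgoing
   edge must survive.  As an illustration (hypothetical CFG):

     if (x_1)       <-- dead condition
       goto latch;  <-- back edge of a loop
     else
       goto exit;

   we keep the edge closer to the exit: keeping the back edge instead
   could close a provably finite loop into an unconditional infinite
   one, which is why the inverted post order below is used to pick the
   surviving edge.  */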
/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
                  vec<edge> &to_remove_edges)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we need to update
     the CFG.  We can choose any of the edges out of BB as long as we are sure
     to not close infinite loops.  This is done by always choosing the edge
     closer to exit in inverted_post_order_compute order.  */
  if (is_ctrl_stmt (stmt))
    {
      edge_iterator ei;
      edge e = NULL, e2;

      /* See if there is only one non-abnormal edge.  */
      if (single_succ_p (bb))
        e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to a bb with a live statement
         in it.  To be able to choose one, we compute the inverted post
         order starting from all BBs with live statements.  */
      if (!e)
        {
          if (!bb_postorder)
            {
              auto_vec<int, 20> postorder;
              inverted_post_order_compute (&postorder,
                                           &bb_contains_live_stmts);
              bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
              for (unsigned int i = 0; i < postorder.length (); ++i)
                bb_postorder[postorder[i]] = i;
            }
          FOR_EACH_EDGE (e2, ei, bb->succs)
            if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
                || bb_postorder [e->dest->index]
                   < bb_postorder [e2->dest->index])
              e = e2;
        }
      gcc_assert (e);
      e->probability = profile_probability::always ();

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.
         We are also safe to drop EH/ABNORMAL flags and turn them into
         normal control flow, because we know that all the destinations
         (including those odd edges) are equivalent for program execution.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      FOR_EACH_EDGE (e2, ei, bb->succs)
        if (e != e2)
          {
            /* If we made a BB unconditionally exit a loop or removed
               an entry into an irreducible region, then this transform
               alters the set of BBs in the loop.  Schedule a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e)
                || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
              loops_state_set (LOOPS_NEED_FIXUP);
            to_remove_edges.safe_push (e2);
          }
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_BIND_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Helper for maybe_optimize_arith_overflow.  Find in *TP whether there are
   any uses of DATA (an SSA_NAME) other than through a REALPART_EXPR
   referencing it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}

/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */
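/* For instance (a hypothetical GIMPLE sketch), when only the sum is used:

     _1 = ADD_OVERFLOW (x_2, y_3);
     _4 = REALPART_EXPR <_1>;       <-- used
     (no use of IMAGPART_EXPR <_1>)

   the call can be rewritten as a plain unsigned addition producing
   COMPLEX_EXPR <x_2 + y_3, 0>, whose dead imaginary part then falls to
   ordinary DCE.  */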
static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple *stmt;
  tree call;
  vec<basic_block> h;
  auto_vec<edge> to_remove_edges;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();
  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      auto_bitmap debug_seen;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
                  || (is_gimple_call (stmt)
                      && gimple_call_replaceable_operator_delete_p
                           (as_a <gcall *> (stmt)))))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
            }

          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep clobbers that we can keep live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;
                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    {
                      bitmap_clear (debug_seen);
                      continue;
                    }
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb, to_remove_edges);
              continue;
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (as_a <gcall *> (stmt));

              /* When the LHS of var = call (); is dead, simplify it into
                 call (); to save one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary; it would confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || ((DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                           || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
                               && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                               && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                               && !ALLOCA_FUNCTION_CODE_P
                                     (DECL_FUNCTION_CODE (call))))
                          && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (call))))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);

                  /* GOMP_SIMD_LANE (unless it is the three-argument form
                     whose third argument is not a nonzero constant) or
                     ASAN_POISON without an LHS is not needed.  */
                  if (gimple_call_internal_p (stmt))
                    switch (gimple_call_internal_fn (stmt))
                      {
                      case IFN_GOMP_SIMD_LANE:
                        if (gimple_call_num_args (stmt) >= 3
                            && !integer_nonzerop (gimple_call_arg (stmt, 2)))
                          break;
                        /* FALLTHRU */
                      case IFN_ASAN_POISON:
                        remove_dead_stmt (&gsi, bb, to_remove_edges);
                        break;
                      default:
                        break;
                      }
                }
              else if (gimple_call_internal_p (stmt))
                switch (gimple_call_internal_fn (stmt))
                  {
                  case IFN_ADD_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
                    break;
                  case IFN_SUB_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
                    break;
                  case IFN_MUL_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
                    break;
                  default:
                    break;
                  }
            }
          else if (gimple_debug_bind_p (stmt))
            {
              /* We are only keeping the last debug-bind of a
                 non-DEBUG_EXPR_DECL variable in a series of
                 debug-bind stmts.  */
              tree var = gimple_debug_bind_get_var (stmt);
              if (TREE_CODE (var) != DEBUG_EXPR_DECL
                  && !bitmap_set_bit (debug_seen, DECL_UID (var)))
                remove_dead_stmt (&gsi, bb, to_remove_edges);
              continue;
            }
          bitmap_clear (debug_seen);
        }

      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (!to_remove_edges.is_empty ())
    {
      basic_block prev_bb;

      /* Remove edges.  We've delayed this to not get bogus debug stmts
         during PHI node removal.  */
      for (unsigned i = 0; i < to_remove_edges.length (); ++i)
        remove_edge (to_remove_edges[i]);
      cfg_altered = true;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }

  if (bb_postorder)
    free (bb_postorder);
  bb_postorder = NULL;

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve chances that a loop will
     be proved to be finite in testcases such as in loop-15.c
     and loop-24.c.  */
  bool in_loop_pipeline = scev_initialized_p ();
  if (aggressive && ! in_loop_pipeline)
    {
      scev_initialize ();
      loop_optimizer_init (LOOPS_NORMAL
                           | LOOPS_HAVE_RECORDED_EXITS);
    }

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences ();

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive && ! in_loop_pipeline)
    {
      loop_optimizer_finalize ();
      scev_finalize ();
    }

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates (cfun);
      if (in_loop_pipeline)
        scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}
/* Pass entry points.  */

static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}


/* A cheap DCE interface.  WORKLIST is a list of possibly dead stmts and
   is consumed by this function.  The function has linear complexity in
   the number of dead stmts with a constant factor like the average SSA
   use operands number.  */
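/* A hypothetical caller's sketch: seed WORKLIST with the SSA versions of
   definitions another pass has just made useless, then let this function
   cascade through their operands, e.g.

     auto_bitmap wl;
     bitmap_set_bit (wl, SSA_NAME_VERSION (lhs));
     simple_dce_from_worklist (wl);  */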
void
simple_dce_from_worklist (bitmap worklist)
{
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.  */
      if (! def || ! has_zero_uses (def))
        continue;

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
        continue;

      /* Add uses to the worklist.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) == SSA_NAME
              && ! SSA_NAME_IS_DEFAULT_DEF (use))
            bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
        }

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt:");
          print_gimple_stmt (dump_file, t, 0);
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          gsi_remove (&gsi, true);
          release_defs (t);
        }
    }
}