/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "cfghooks.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "except.h"
#include "stmt.h"

static void make_edges (basic_block, basic_block, int);
static void make_label_edge (sbitmap, basic_block, rtx, int);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);

/* Return true if INSN is something that should be contained inside a
   basic block.  */

bool
inside_basic_block_p (const rtx_insn *insn)
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jump tables.  */
      return (NEXT_INSN (insn) == 0
	      || ! JUMP_TABLE_DATA_P (NEXT_INSN (insn)));

    case JUMP_INSN:
    case CALL_INSN:
    case INSN:
    case DEBUG_INSN:
      return true;

    case JUMP_TABLE_DATA:
    case BARRIER:
    case NOTE:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Return true if INSN may cause control flow transfer, so it should be last
   in the basic block.  */

bool
control_flow_insn_p (const rtx_insn *insn)
{
  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
    case DEBUG_INSN:
      return false;

    case JUMP_INSN:
      return true;

    case CALL_INSN:
      /* Noreturn and sibling call instructions terminate the basic block
	 (but only if they happen unconditionally).  */
      if ((SIBLING_CALL_P (insn)
	   || find_reg_note (insn, REG_NORETURN, 0))
	  && GET_CODE (PATTERN (insn)) != COND_EXEC)
	return true;

      /* Call insn may return to the nonlocal goto handler.  */
      if (can_nonlocal_goto (insn))
	return true;
      break;

    case INSN:
      /* Treat trap instructions like noreturn calls (same provision).  */
      if (GET_CODE (PATTERN (insn)) == TRAP_IF
	  && XEXP (PATTERN (insn), 0) == const1_rtx)
	return true;
      if (!cfun->can_throw_non_call_exceptions)
	return false;
      break;

    case JUMP_TABLE_DATA:
    case BARRIER:
      /* It is nonsense to reach this point when looking for the end of a
	 basic block, but it may happen before dead code is eliminated.  */
      return false;

    default:
      gcc_unreachable ();
    }

  return can_throw_internal (insn);
}
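
/* A minimal usage sketch (illustrative only; not used by this file) of
   how the two predicates above cooperate when scanning a block:

     rtx_insn *insn;
     FOR_BB_INSNS (bb, insn)
       if (control_flow_insn_p (insn))
	 break;

   Such an INSN must end its basic block; the next insn for which
   inside_basic_block_p returns true starts a new one.  */
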
/* Create an edge from SRC to the basic block containing LABEL.  FLAGS are
   auxiliary information about the edge that is accumulated between
   calls.  */

static void
make_label_edge (sbitmap edge_cache, basic_block src, rtx label, int flags)
{
  gcc_assert (LABEL_P (label));

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the exception handling edges, if any, generated by INSN going
   out of basic block SRC.  */

void
rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
{
  eh_landing_pad lp = get_eh_landing_pad_from_rtx (insn);

  if (lp)
    {
      rtx_insn *label = lp->landing_pad;

      /* During initial rtl generation, use the post_landing_pad.  */
      if (label == NULL)
	{
	  gcc_assert (lp->post_landing_pad);
	  label = label_rtx (lp->post_landing_pad);
	}

      make_label_edge (edge_cache, src, label,
		       EDGE_ABNORMAL | EDGE_EH
		       | (CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0));
    }
}

/* States of basic block as seen by find_many_sub_basic_blocks.  */
enum state {
  /* Basic blocks created via split_block belong to this state.
     make_edges will examine these basic blocks to see if we need to
     create edges going out of them.  */
  BLOCK_NEW = 0,

  /* Basic blocks that do not need examining belong to this state.
     These blocks will be left intact.  In particular, make_edges will
     not create edges going out of these basic blocks.  */
  BLOCK_ORIGINAL,

  /* Basic blocks that may need splitting (due to a label appearing in
     the middle, etc.) belong to this state.  After splitting them,
     make_edges will create edges going out of them as needed.  */
  BLOCK_TO_SPLIT
};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
#define BLOCK_USED_BY_TABLEJUMP	32
#define FULL_STATE(BB) ((size_t) (BB)->aux)
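
/* A minimal sketch (illustrative only) of the state protocol built on
   the macros above:

     SET_STATE (bb, BLOCK_TO_SPLIT);
     ...
     basic_block new_bb = split_block (bb, insn)->dest;
     gcc_assert (STATE (new_bb) == BLOCK_NEW);

   The assertion holds because split_block leaves the AUX field of the
   new block null (assuming no pass has set it meanwhile), and
   BLOCK_NEW is zero.  find_many_sub_basic_blocks at the end of this
   file drives exactly this protocol.  */
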
/* Identify the edges going out of basic blocks between MIN and MAX,
   inclusive, that have their states set to BLOCK_NEW or
   BLOCK_TO_SPLIT.

   UPDATE_P should be nonzero if we are updating the CFG and zero if
   we are building the CFG from scratch.  */

static void
make_edges (basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap edge_cache = NULL;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (!vec_safe_is_empty (forced_labels)
      || cfun->cfg->max_jumptable_ents > 100)
    edge_cache = sbitmap_alloc (last_basic_block_for_fn (cfun));

  /* By the nature of the way blocks get numbered, the block following
     the entry block is always the first real basic block; if MIN is
     that block, it receives the fallthru edge from the entry block.  */
  if (min == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx_insn *insn;
      enum rtx_code code;
      edge e;
      edge_iterator ei;

      if (STATE (bb) == BLOCK_ORIGINAL)
	continue;

      /* If we have an edge cache, cache edges going out of BB.  */
      if (edge_cache)
	{
	  bitmap_clear (edge_cache);
	  if (update_p)
	    {
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		  bitmap_set_bit (edge_cache, e->dest->index);
	    }
	}

      if (LABEL_P (BB_HEAD (bb))
	  && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
	cached_make_edge (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;
	  rtx_jump_table_data *table;

	  /* Recognize a non-local goto as a branch outside the
	     current function.  */
	  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
	    ;

	  /* Recognize a tablejump and do the right thing.  */
	  else if (tablejump_p (insn, NULL, &table))
	    {
	      rtvec vec = table->get_labels ();
	      int j;

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (e.g., ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 label_ref_label (XEXP (SET_SRC (tmp), 2)), 0);
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      rtx_insn *label;
	      unsigned int i;
	      FOR_EACH_VEC_SAFE_ELT (forced_labels, i, label)
		make_label_edge (edge_cache, bb, label, EDGE_ABNORMAL);
	    }

	  /* A return creates an edge to the exit block.  */
	  else if (returnjump_p (insn))
	    cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);

	  /* Recognize asm goto and do the right thing.  */
	  else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
	    {
	      int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
	      for (i = 0; i < n; ++i)
		make_label_edge (edge_cache, bb,
				 XEXP (ASM_OPERANDS_LABEL (tmp, i), 0), 0);
	    }

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      gcc_assert (JUMP_LABEL (insn));
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}
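
      /* For reference, the conditional dispatch shape tested for in the
	 tablejump case above looks roughly like this (an illustrative
	 sketch; the exact operands vary by target):

	   (set (pc)
		(if_then_else (condition)
			      (mem ...)	       <- table dispatch
			      (label_ref L)))  <- out-of-range label

	 i.e., a single_set of the PC from an IF_THEN_ELSE whose third
	 operand is the LABEL_REF of the out-of-range target.  */
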
      /* If this is a sibling call insn, then this is in effect a combined call
	 and return, and so we need an edge to the exit block.  No need to
	 worry about EH edges, since we wouldn't have created the sibling call
	 in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling non-call
	 exceptions then any insn can reach any of the active handlers.
	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || cfun->can_throw_non_call_exceptions)
	{
	  /* Add any appropriate EH edges.  */
	  rtl_make_eh_edge (edge_cache, bb, insn);

	  if (code == CALL_INSN)
	    {
	      if (can_nonlocal_goto (insn))
		{
		  /* ??? This could be made smarter: in some cases it's
		     possible to tell that certain calls will not do a
		     nonlocal goto.  For example, if the nested functions
		     that do the nonlocal gotos do not have their addresses
		     taken, then only calls to those functions or to other
		     nested functions that use them could possibly do
		     nonlocal gotos.  */
		  for (rtx_insn_list *x = nonlocal_goto_handler_labels;
		       x;
		       x = x->next ())
		    make_label_edge (edge_cache, bb, x->insn (),
				     EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
		}

	      if (flag_tm)
		{
		  rtx note;
		  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
		    if (REG_NOTE_KIND (note) == REG_TM)
		      make_label_edge (edge_cache, bb, XEXP (note, 0),
				       EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
		}
	    }
	}

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
      if (e && e->flags & EDGE_FALLTHRU)
	insn = NULL;

      while (insn
	     && NOTE_P (insn)
	     && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
	insn = NEXT_INSN (insn);

      if (!insn)
	cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  if (insn == BB_HEAD (bb->next_bb))
	    cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
	}
    }

  if (edge_cache)
    sbitmap_free (edge_cache);
}

/* Mark the basic block containing LABEL as used by a tablejump.  */

static void
mark_tablejump_edge (rtx label)
{
  basic_block bb;

  gcc_assert (LABEL_P (label));
  /* See comment in make_label_edge.  */
  if (INSN_UID (label) == 0)
    return;
  bb = BLOCK_FOR_INSN (label);
  SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
}

/* Remove the outgoing edges of tablejump block BB that are no longer
   reached from TABLE.  */

static void
purge_dead_tablejump_edges (basic_block bb, rtx_jump_table_data *table)
{
  rtx_insn *insn = BB_END (bb);
  rtx tmp;
  rtvec vec;
  int j;
  edge_iterator ei;
  edge e;

  vec = table->get_labels ();

  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
    mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));

  /* Some targets (e.g., ARM) emit a conditional jump that also
     contains the out-of-range target.  Scan for these and
     add an edge if necessary.  */
  if ((tmp = single_set (insn)) != NULL
      && SET_DEST (tmp) == pc_rtx
      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
    mark_tablejump_edge (label_ref_label (XEXP (SET_SRC (tmp), 2)));

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
	SET_STATE (e->dest, FULL_STATE (e->dest)
		   & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
      else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  remove_edge (e);
	  continue;
	}
      ei_next (&ei);
    }
}
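
/* An illustrative example of why splitting is needed at all.  After
   expansion, a block marked BLOCK_TO_SPLIT may contain, say

	(call_insn ...)		;; noreturn: control_flow_insn_p is true
	(barrier)
	(code_label L)		;; must start a new basic block
	(insn ...)

   find_bb_boundaries below ends the original block at the call and
   continues with a fresh block headed by L.  */
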
/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (basic_block bb)
{
  basic_block orig_bb = bb;
  rtx_insn *insn = BB_HEAD (bb);
  rtx_insn *end = BB_END (bb), *x;
  rtx_jump_table_data *table;
  rtx_insn *flow_transfer_insn = NULL;
  rtx_insn *debug_insn = NULL;
  edge fallthru = NULL;

  if (insn == end)
    return;

  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      if (code == DEBUG_INSN)
	{
	  if (flow_transfer_insn && !debug_insn)
	    debug_insn = insn;
	}
      /* In case we've previously seen an insn that effects a control
	 flow transfer, split the block.  */
      else if ((flow_transfer_insn || code == CODE_LABEL)
	       && inside_basic_block_p (insn))
	{
	  rtx_insn *prev = PREV_INSN (insn);

	  /* If the first non-debug inside_basic_block_p insn after a control
	     flow transfer is not a label, split the block before the debug
	     insn instead of before the non-debug insn, so that the debug
	     insns are not lost.  */
	  if (debug_insn && code != CODE_LABEL && code != BARRIER)
	    prev = PREV_INSN (debug_insn);
	  fallthru = split_block (bb, prev);
	  if (flow_transfer_insn)
	    {
	      BB_END (bb) = flow_transfer_insn;

	      rtx_insn *next;
	      /* Clean up the bb field for the insns between the blocks.  */
	      for (x = NEXT_INSN (flow_transfer_insn);
		   x != BB_HEAD (fallthru->dest);
		   x = next)
		{
		  next = NEXT_INSN (x);
		  /* Debug insns should not be in between basic blocks,
		     drop them on the floor.  */
		  if (DEBUG_INSN_P (x))
		    delete_insn (x);
		  else if (!BARRIER_P (x))
		    set_block_for_insn (x, NULL);
		}
	    }

	  bb = fallthru->dest;
	  remove_edge (fallthru);
	  flow_transfer_insn = NULL;
	  debug_insn = NULL;
	  if (code == CODE_LABEL && LABEL_ALT_ENTRY_P (insn))
	    make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);
	}
      else if (code == BARRIER)
	{
	  /* __builtin_unreachable () may cause a barrier to be emitted in
	     the middle of a BB.  We need to split it in the same manner as
	     if the barrier were preceded by a control_flow_insn_p insn.  */
	  if (!flow_transfer_insn)
	    flow_transfer_insn = prev_nonnote_insn_bb (insn);
	}

      if (control_flow_insn_p (insn))
	flow_transfer_insn = insn;
      if (insn == end)
	break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminating
     with a return and a barrier, or possibly some other sequence not
     behaving like an ordinary jump, we need to take care to move the
     basic block boundary.  */
  if (flow_transfer_insn && flow_transfer_insn != end)
    {
      BB_END (bb) = flow_transfer_insn;

      /* Clean up the bb field for the insns that do not belong to BB.  */
      rtx_insn *next;
      for (x = NEXT_INSN (flow_transfer_insn); ; x = next)
	{
	  next = NEXT_INSN (x);
	  /* Debug insns should not be in between basic blocks,
	     drop them on the floor.  */
	  if (DEBUG_INSN_P (x))
	    delete_insn (x);
	  else if (!BARRIER_P (x))
	    set_block_for_insn (x, NULL);
	  if (x == end)
	    break;
	}
    }

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by cleanup on the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);

  /* purge_dead_edges doesn't handle tablejumps, but if we have split the
     basic block, we might need to kill some edges.  */
  if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
    purge_dead_tablejump_edges (bb, table);
}

/* Assume that the frequency of basic block B is known.  Compute frequencies
   and probabilities of outgoing edges.  */
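
/* A worked example with assumed numbers: if B->count is 1000 and the
   REG_BR_PROB note holds 9000 (REG_BR_PROB_BASE being 10000), then the
   branch edge gets probability 9000 and count
   apply_probability (1000, 9000) == 900, while the fallthru edge gets
   probability 10000 - 9000 == 1000 and count 1000 - 900 == 100.  */
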
static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;
  edge_iterator ei;

  if (EDGE_COUNT (b->succs) == 2)
    {
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;

      if (note)
	{
	  probability = XINT (note, 0);
	  e = BRANCH_EDGE (b);
	  e->probability = probability;
	  e->count = apply_probability (b->count, probability);
	  f = FALLTHRU_EDGE (b);
	  f->probability = REG_BR_PROB_BASE - probability;
	  f->count = b->count - e->count;
	  return;
	}
      else
	guess_outgoing_edge_probabilities (b);
    }
  else if (single_succ_p (b))
    {
      e = single_succ_edge (b);
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
      return;
    }
  else
    {
      /* We rely on BBs with more than two successors to have sane
	 probabilities and do not guess them here.  For BBs terminated by
	 switch statements expanded to jump-table jumps, we have done the
	 right thing during expansion.  For EH edges, we still guess the
	 probabilities here.  */
      bool complex_edge = false;
      FOR_EACH_EDGE (e, ei, b->succs)
	if (e->flags & EDGE_COMPLEX)
	  {
	    complex_edge = true;
	    break;
	  }
      if (complex_edge)
	guess_outgoing_edge_probabilities (b);
    }

  if (b->count)
    FOR_EACH_EDGE (e, ei, b->succs)
      e->count = apply_probability (b->count, e->probability);
}

/* Assume that some pass has inserted labels or control flow
   instructions within a basic block.  Split basic blocks as needed
   and create edges.  */

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;

  /* Mark the blocks the caller wants split; everything else is left
     intact.  */
  FOR_EACH_BB_FN (bb, cfun)
    SET_STATE (bb,
	       bitmap_bit_p (blocks, bb->index)
	       ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB_FN (bb, cfun)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB_FN (bb, cfun)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR_FOR_FN (cfun); bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to have been created, with only forward edges.  */
  if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      {
	edge e;
	edge_iterator ei;

	if (STATE (bb) == BLOCK_ORIGINAL)
	  continue;
	if (STATE (bb) == BLOCK_NEW)
	  {
	    bb->count = 0;
	    bb->frequency = 0;
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      {
		bb->count += e->count;
		bb->frequency += EDGE_FREQUENCY (e);
	      }
	  }

	compute_outgoing_frequencies (bb);
      }

  FOR_EACH_BB_FN (bb, cfun)
    SET_STATE (bb, 0);
}
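
/* A typical caller sketch (illustrative only; real callers differ in
   how they choose the blocks): request re-discovery of sub basic
   blocks within a single block BB.

     sbitmap blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
     bitmap_clear (blocks);
     bitmap_set_bit (blocks, bb->index);
     find_many_sub_basic_blocks (blocks);
     sbitmap_free (blocks);

   On return, the edges and probabilities of the affected blocks have
   been rebuilt.  */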