/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "expr.h"
#include "toplev.h"
#include "timevar.h"

static void make_edges (basic_block, basic_block, int);
static void make_label_edge (sbitmap, basic_block, rtx, int);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);

/* Return true if INSN is something that should be contained inside a
   basic block.  */

bool
inside_basic_block_p (const_rtx insn)
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jumptables.  */
      return (NEXT_INSN (insn) == 0
              || !JUMP_P (NEXT_INSN (insn))
              || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
                  && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));

    case JUMP_INSN:
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
              && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
    case INSN:
    case DEBUG_INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Return true if INSN may cause a control flow transfer, so it should be
   the last insn in its basic block.  */

bool
control_flow_insn_p (const_rtx insn)
{
  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
    case DEBUG_INSN:
      return false;

    case JUMP_INSN:
      /* A jump insn always causes a control transfer, except for
         tablejumps.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
              && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
      /* Noreturn and sibling call instructions terminate the basic blocks
         (but only if they happen unconditionally).  */
      if ((SIBLING_CALL_P (insn)
           || find_reg_note (insn, REG_NORETURN, 0))
          && GET_CODE (PATTERN (insn)) != COND_EXEC)
        return true;

      /* A call insn may return to the nonlocal goto handler.  */
      if (can_nonlocal_goto (insn))
        return true;
      break;

    case INSN:
      /* Treat trap instructions like noreturn calls (same provision).  */
      if (GET_CODE (PATTERN (insn)) == TRAP_IF
          && XEXP (PATTERN (insn), 0) == const1_rtx)
        return true;
      if (!flag_non_call_exceptions)
        return false;
      break;

    case BARRIER:
      /* It is nonsense to reach a barrier when looking for the end of a
         basic block, but before dead code is eliminated this may
         happen.  */
      return false;

    default:
      gcc_unreachable ();
    }

  return can_throw_internal (insn);
}
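/* Illustrative note (not relied upon by the code below): with
   -fnon-call-exceptions, control_flow_insn_p falls through to
   can_throw_internal even for a plain INSN, so e.g. a memory load such
   as

        (insn 7 6 8 2 (set (reg:SI 60) (mem:SI (reg/f:SI 59))) ...)

   ends its basic block whenever its EH region has a landing pad inside
   the current function.  The insn numbers and modes above are made up
   for the example.  */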
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (sbitmap edge_cache, basic_block src, rtx label, int flags)
{
  gcc_assert (LABEL_P (label));

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the EH edges generated by INSN.  */

void
rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
{
  eh_landing_pad lp = get_eh_landing_pad_from_rtx (insn);

  if (lp)
    {
      rtx label = lp->landing_pad;

      /* During initial rtl generation, use the post_landing_pad.  */
      if (label == NULL)
        {
          gcc_assert (lp->post_landing_pad);
          label = label_rtx (lp->post_landing_pad);
        }

      make_label_edge (edge_cache, src, label,
                       EDGE_ABNORMAL | EDGE_EH
                       | (CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0));
    }
}

/* States of basic block as seen by find_many_sub_basic_blocks.  */
enum state {
  /* Basic blocks created via split_block belong to this state.
     make_edges will examine these basic blocks to see if we need to
     create edges going out of them.  */
  BLOCK_NEW = 0,

  /* Basic blocks that do not need examining belong to this state.
     These blocks will be left intact.  In particular, make_edges will
     not create edges going out of these basic blocks.  */
  BLOCK_ORIGINAL,

  /* Basic blocks that may need splitting (due to a label appearing in
     the middle, etc.) belong to this state.  After splitting them,
     make_edges will create edges going out of them as needed.  */
  BLOCK_TO_SPLIT
};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
#define BLOCK_USED_BY_TABLEJUMP	32
#define FULL_STATE(BB) ((size_t) (BB)->aux)
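/* A sketch of a typical caller (illustrative only; the variable names are
   made up): a pass that has emitted new labels or control flow inside
   existing basic blocks records those blocks in an sbitmap and asks for
   the boundaries and edges to be recomputed:

        sbitmap blocks = sbitmap_alloc (last_basic_block);
        sbitmap_zero (blocks);
        SET_BIT (blocks, bb->index);
        find_many_sub_basic_blocks (blocks);
        sbitmap_free (blocks);

   find_many_sub_basic_blocks (below) then drives the BLOCK_* states:
   marked blocks start as BLOCK_TO_SPLIT, blocks created by splitting
   come out as BLOCK_NEW (their aux field is zero), and everything else
   stays BLOCK_ORIGINAL.  */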
/* Identify the edges going out of basic blocks between MIN and MAX,
   inclusive, that have their states set to BLOCK_NEW or
   BLOCK_TO_SPLIT.

   UPDATE_P should be nonzero if we are updating CFG and zero if we
   are building CFG from scratch.  */

static void
make_edges (basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap edge_cache = NULL;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || cfun->cfg->max_jumptable_ents > 100)
    edge_cache = sbitmap_alloc (last_basic_block);

  /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
     is always the entry.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    make_edge (ENTRY_BLOCK_PTR, min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      edge e;
      edge_iterator ei;

      if (STATE (bb) == BLOCK_ORIGINAL)
        continue;

      /* If we have an edge cache, cache edges going out of BB.  */
      if (edge_cache)
        {
          sbitmap_zero (edge_cache);
          if (update_p)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (e->dest != EXIT_BLOCK_PTR)
                  SET_BIT (edge_cache, e->dest->index);
            }
        }

      if (LABEL_P (BB_HEAD (bb))
          && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
        cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
         ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
        {
          rtx tmp;

          /* Recognize a non-local goto as a branch outside the
             current function.  */
          if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
            ;

          /* Recognize a tablejump and do the right thing.  */
          else if (tablejump_p (insn, NULL, &tmp))
            {
              rtvec vec;
              int j;

              if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
                vec = XVEC (PATTERN (tmp), 0);
              else
                vec = XVEC (PATTERN (tmp), 1);

              for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
                make_label_edge (edge_cache, bb,
                                 XEXP (RTVEC_ELT (vec, j), 0), 0);

              /* Some targets (e.g. ARM) emit a conditional jump that also
                 contains the out-of-range target.  Scan for these and
                 add an edge if necessary.  */
              if ((tmp = single_set (insn)) != NULL
                  && SET_DEST (tmp) == pc_rtx
                  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
                  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
                make_label_edge (edge_cache, bb,
                                 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
            }

          /* If this is a computed jump, then mark it as reaching
             everything on the forced_labels list.  */
          else if (computed_jump_p (insn))
            {
              for (x = forced_labels; x; x = XEXP (x, 1))
                make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
            }

          /* A return creates an edge to the exit block.  */
          else if (returnjump_p (insn))
            cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

          /* Recognize asm goto and do the right thing.  */
          else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
            {
              int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
              for (i = 0; i < n; ++i)
                make_label_edge (edge_cache, bb,
                                 XEXP (ASM_OPERANDS_LABEL (tmp, i), 0), 0);
            }

          /* Otherwise, we have a plain conditional or unconditional jump.  */
          else
            {
              gcc_assert (JUMP_LABEL (insn));
              make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
            }
        }

      /* If this is a sibling call insn, then this is in effect a combined call
         and return, and so we need an edge to the exit block.  No need to
         worry about EH edges, since we wouldn't have created the sibling call
         in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
                          EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
         handler for this CALL_INSN.  If we're handling non-call
         exceptions then any insn can reach any of the active handlers.
         Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
        {
          /* Add any appropriate EH edges.  */
          rtl_make_eh_edge (edge_cache, bb, insn);

          if (code == CALL_INSN && nonlocal_goto_handler_labels)
            {
              /* ??? This could be made smarter: in some cases it's possible
                 to tell that certain calls will not do a nonlocal goto.
                 For example, if the nested functions that do the nonlocal
                 gotos do not have their addresses taken, then only calls to
                 those functions or to other nested functions that use them
                 could possibly do nonlocal gotos.  */
              if (can_nonlocal_goto (insn))
                for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
                  make_label_edge (edge_cache, bb, XEXP (x, 0),
                                   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
            }
        }

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      e = find_edge (bb, EXIT_BLOCK_PTR);
      if (e && e->flags & EDGE_FALLTHRU)
        insn = NULL;

      while (insn
             && NOTE_P (insn)
             && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
        insn = NEXT_INSN (insn);

      if (!insn)
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
        {
          if (insn == BB_HEAD (bb->next_bb))
            cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
        }
    }

  if (edge_cache)
    sbitmap_free (edge_cache);
}
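/* The two helpers below clean up edges left over from splitting a block
   that ends in a tablejump.  For reference, a dispatch table is a
   separate JUMP_INSN whose pattern is either

        (addr_vec [(label_ref L1) (label_ref L2) ...])

   or, for relative tables,

        (addr_diff_vec (label_ref Lbase) [(label_ref L1) ...] ...)

   which is why the label vector is operand 0 of an ADDR_VEC but
   operand 1 of an ADDR_DIFF_VEC, both here and in make_edges above.
   (The RTL above is schematic, not a template for any particular
   target.)  */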
/* Mark the basic block containing LABEL as used by a tablejump.  */

static void
mark_tablejump_edge (rtx label)
{
  basic_block bb;

  gcc_assert (LABEL_P (label));
  /* See comment in make_label_edge.  */
  if (INSN_UID (label) == 0)
    return;
  bb = BLOCK_FOR_INSN (label);
  SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
}

/* BB ends in a tablejump whose dispatch table is TABLE.  Remove any
   outgoing edges of BB that do not correspond to an entry in the table,
   keeping abnormal and EH edges.  */

static void
purge_dead_tablejump_edges (basic_block bb, rtx table)
{
  rtx insn = BB_END (bb), tmp;
  rtvec vec;
  int j;
  edge_iterator ei;
  edge e;

  if (GET_CODE (PATTERN (table)) == ADDR_VEC)
    vec = XVEC (PATTERN (table), 0);
  else
    vec = XVEC (PATTERN (table), 1);

  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
    mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));

  /* Some targets (e.g. ARM) emit a conditional jump that also
     contains the out-of-range target.  Scan for these so that their
     edges are preserved as well.  */
  if ((tmp = single_set (insn)) != NULL
      && SET_DEST (tmp) == pc_rtx
      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
    mark_tablejump_edge (XEXP (XEXP (SET_SRC (tmp), 2), 0));

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
        SET_STATE (e->dest, FULL_STATE (e->dest)
                   & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
      else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          remove_edge (e);
          continue;
        }
      ei_next (&ei);
    }
}
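/* As a concrete (made-up) example of what find_bb_boundaries below has
   to handle, suppose a pass left this insn stream inside one block:

        (insn 10 ...)                 ;; ordinary arithmetic
        (call_insn 11 ...)            ;; call to a noreturn function
        (barrier)
        (code_label 12)
        (insn 13 ...)

   The call satisfies control_flow_insn_p, so the block must end there;
   the label starts a new block containing insn 13.  find_bb_boundaries
   performs that split and leaves edge creation to make_edges.  */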
/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (basic_block bb)
{
  basic_block orig_bb = bb;
  rtx insn = BB_HEAD (bb);
  rtx end = BB_END (bb), x;
  rtx table;
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == BB_END (bb))
    return;

  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  /* Scan the insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* In case we've previously seen an insn that effects a control
         flow transfer, split the block.  */
      if ((flow_transfer_insn || code == CODE_LABEL)
          && inside_basic_block_p (insn))
        {
          fallthru = split_block (bb, PREV_INSN (insn));
          if (flow_transfer_insn)
            {
              BB_END (bb) = flow_transfer_insn;

              /* Clean up the bb field for the insns between the blocks.  */
              for (x = NEXT_INSN (flow_transfer_insn);
                   x != BB_HEAD (fallthru->dest);
                   x = NEXT_INSN (x))
                if (!BARRIER_P (x))
                  set_block_for_insn (x, NULL);
            }

          bb = fallthru->dest;
          remove_edge (fallthru);
          flow_transfer_insn = NULL_RTX;
          if (code == CODE_LABEL && LABEL_ALT_ENTRY_P (insn))
            make_edge (ENTRY_BLOCK_PTR, bb, 0);
        }
      else if (code == BARRIER)
        {
          /* __builtin_unreachable () may cause a barrier to be emitted in
             the middle of a BB.  We need to split it in the same manner as
             if the barrier were preceded by a control_flow_insn_p insn.  */
          if (!flow_transfer_insn)
            flow_transfer_insn = prev_nonnote_insn_bb (insn);
        }

      if (control_flow_insn_p (insn))
        flow_transfer_insn = insn;
      if (insn == end)
        break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminated
     by a return and a barrier, or possibly some other sequence that does
     not behave like an ordinary jump, we need to take care and move the
     basic block boundary.  */
  if (flow_transfer_insn)
    {
      BB_END (bb) = flow_transfer_insn;

      /* Clean up the bb field for the insns that do not belong to BB.  */
      x = flow_transfer_insn;
      while (x != end)
        {
          x = NEXT_INSN (x);
          if (!BARRIER_P (x))
            set_block_for_insn (x, NULL);
        }
    }

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);

  /* purge_dead_edges doesn't handle tablejumps, but if we have split the
     basic block, we might need to kill some edges.  */
  if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
    purge_dead_tablejump_edges (bb, table);
}

/* Assume that the frequency of basic block B is known.  Compute the
   frequencies and probabilities of its outgoing edges.  */

static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;
  edge_iterator ei;

  if (EDGE_COUNT (b->succs) == 2)
    {
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;

      if (note)
        {
          probability = INTVAL (XEXP (note, 0));
          e = BRANCH_EDGE (b);
          e->probability = probability;
          e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
                      / REG_BR_PROB_BASE);
          f = FALLTHRU_EDGE (b);
          f->probability = REG_BR_PROB_BASE - probability;
          f->count = b->count - e->count;
          return;
        }
    }

  if (single_succ_p (b))
    {
      e = single_succ_edge (b);
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
      return;
    }
  guess_outgoing_edge_probabilities (b);
  if (b->count)
    FOR_EACH_EDGE (e, ei, b->succs)
      e->count = ((b->count * e->probability + REG_BR_PROB_BASE / 2)
                  / REG_BR_PROB_BASE);
}
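/* A quick numeric illustration of the arithmetic above (the numbers are
   arbitrary): REG_BR_PROB_BASE is 10000, so with b->count == 1000 and a
   REG_BR_PROB note of 3000, the branch edge gets

        e->count = (1000 * 3000 + 5000) / 10000 = 300

   i.e. the count scaled by the probability and rounded, while the
   fallthru edge gets the remaining 700 with probability
   10000 - 3000 = 7000.  */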
/* Assume that some pass has inserted labels or control flow
   instructions within a basic block.  Split basic blocks as needed
   and create edges.  */

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;

  FOR_EACH_BB (bb)
    SET_STATE (bb,
               TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  if (profile_status != PROFILE_ABSENT)
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      {
        edge e;
        edge_iterator ei;

        if (STATE (bb) == BLOCK_ORIGINAL)
          continue;
        if (STATE (bb) == BLOCK_NEW)
          {
            bb->count = 0;
            bb->frequency = 0;
            FOR_EACH_EDGE (e, ei, bb->preds)
              {
                bb->count += e->count;
                bb->frequency += EDGE_FREQUENCY (e);
              }
          }

        compute_outgoing_frequencies (bb);
      }

  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}