/* Standard problems for dataflow support routines.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
   2008, 2009 Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
             and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
#include "df.h"
#include "except.h"
#include "dce.h"
#include "vecprim.h"

/* Note that turning REG_DEAD_DEBUGGING on will cause
   gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints
   addresses in the dumps.  */
#if 0
#define REG_DEAD_DEBUGGING
#endif

#define DF_SPARSE_THRESHOLD 32

static bitmap seen_in_block = NULL;
static bitmap seen_in_insn = NULL;


/*----------------------------------------------------------------------------
   Public access functions for the dataflow problems.
----------------------------------------------------------------------------*/

/* Get the live out set for BB no matter what problem happens to be
   defined.  This function is used by the register allocators, which
   choose different dataflow problems depending on the optimization
   level.  */

bitmap
df_get_live_out (basic_block bb)
{
  gcc_assert (df_lr);

  if (df_live)
    return DF_LIVE_OUT (bb);
  else
    return DF_LR_OUT (bb);
}

/* Get the live in set for BB no matter what problem happens to be
   defined.  This function is used by the register allocators, which
   choose different dataflow problems depending on the optimization
   level.  */

bitmap
df_get_live_in (basic_block bb)
{
  gcc_assert (df_lr);

  if (df_live)
    return DF_LIVE_IN (bb);
  else
    return DF_LR_IN (bb);
}

/*----------------------------------------------------------------------------
   Utility functions.
----------------------------------------------------------------------------*/

/* Generic versions to get the void* version of the block info.  Only
   used inside the problem instance vectors.  */

/* Grow the bb_info array.  */
*/ 104 105 void 106 df_grow_bb_info (struct dataflow *dflow) 107 { 108 unsigned int new_size = last_basic_block + 1; 109 if (dflow->block_info_size < new_size) 110 { 111 new_size += new_size / 4; 112 dflow->block_info = XRESIZEVEC (void *, dflow->block_info, new_size); 113 memset (dflow->block_info + dflow->block_info_size, 0, 114 (new_size - dflow->block_info_size) *sizeof (void *)); 115 dflow->block_info_size = new_size; 116 } 117 } 118 119 /* Dump a def-use or use-def chain for REF to FILE. */ 120 121 void 122 df_chain_dump (struct df_link *link, FILE *file) 123 { 124 fprintf (file, "{ "); 125 for (; link; link = link->next) 126 { 127 fprintf (file, "%c%d(bb %d insn %d) ", 128 DF_REF_REG_DEF_P (link->ref) 129 ? 'd' 130 : (DF_REF_FLAGS (link->ref) & DF_REF_IN_NOTE) ? 'e' : 'u', 131 DF_REF_ID (link->ref), 132 DF_REF_BBNO (link->ref), 133 DF_REF_IS_ARTIFICIAL (link->ref) 134 ? -1 : DF_REF_INSN_UID (link->ref)); 135 } 136 fprintf (file, "}"); 137 } 138 139 140 /* Print some basic block info as part of df_dump. */ 141 142 void 143 df_print_bb_index (basic_block bb, FILE *file) 144 { 145 edge e; 146 edge_iterator ei; 147 148 fprintf (file, "\n( "); 149 FOR_EACH_EDGE (e, ei, bb->preds) 150 { 151 basic_block pred = e->src; 152 fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : ""); 153 } 154 fprintf (file, ")->[%d]->( ", bb->index); 155 FOR_EACH_EDGE (e, ei, bb->succs) 156 { 157 basic_block succ = e->dest; 158 fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : ""); 159 } 160 fprintf (file, ")\n"); 161 } 162 163 164 /*---------------------------------------------------------------------------- 165 REACHING DEFINITIONS 166 167 Find the locations in the function where each definition site for a 168 pseudo reaches. In and out bitvectors are built for each basic 169 block. The id field in the ref is used to index into these sets. 170 See df.h for details. 171 ----------------------------------------------------------------------------*/ 172 173 /* This problem plays a large number of games for the sake of 174 efficiency. 175 176 1) The order of the bits in the bitvectors. After the scanning 177 phase, all of the defs are sorted. All of the defs for the reg 0 178 are first, followed by all defs for reg 1 and so on. 179 180 2) There are two kill sets, one if the number of defs is less or 181 equal to DF_SPARSE_THRESHOLD and another if the number of defs is 182 greater. 183 184 <= : Data is built directly in the kill set. 185 186 > : One level of indirection is used to keep from generating long 187 strings of 1 bits in the kill sets. Bitvectors that are indexed 188 by the regnum are used to represent that there is a killing def 189 for the register. The confluence and transfer functions use 190 these along with the bitmap_clear_range call to remove ranges of 191 bits without actually generating a knockout vector. 192 193 The kill and sparse_kill and the dense_invalidated_by_call and 194 sparse_invalidated_by_call both play this game. */ 195 196 /* Private data used to compute the solution for this problem. These 197 data structures are not accessible outside of this module. */ 198 struct df_rd_problem_data 199 { 200 /* The set of defs to regs invalidated by call. */ 201 bitmap sparse_invalidated_by_call; 202 /* The set of defs to regs invalidate by call for rd. */ 203 bitmap dense_invalidated_by_call; 204 /* An obstack for the bitmaps we need for this problem. */ 205 bitmap_obstack rd_bitmaps; 206 }; 207 208 /* Set basic block info. 

/* Set basic block info.  */

static void
df_rd_set_bb_info (unsigned int index,
                   struct df_rd_bb_info *bb_info)
{
  gcc_assert (df_rd);
  gcc_assert (index < df_rd->block_info_size);
  df_rd->block_info[index] = bb_info;
}


/* Free basic block info.  */

static void
df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
                    void *vbb_info)
{
  struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
  if (bb_info)
    {
      BITMAP_FREE (bb_info->kill);
      BITMAP_FREE (bb_info->sparse_kill);
      BITMAP_FREE (bb_info->gen);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (df_rd->block_pool, bb_info);
    }
}


/* Allocate or reset bitmaps for DF_RD blocks.  The solution bits are
   not touched unless the block is new.  */

static void
df_rd_alloc (bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  struct df_rd_problem_data *problem_data;

  if (!df_rd->block_pool)
    df_rd->block_pool = create_alloc_pool ("df_rd_block pool",
                                           sizeof (struct df_rd_bb_info), 50);

  if (df_rd->problem_data)
    {
      problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
      bitmap_clear (problem_data->sparse_invalidated_by_call);
      bitmap_clear (problem_data->dense_invalidated_by_call);
    }
  else
    {
      problem_data = XNEW (struct df_rd_problem_data);
      df_rd->problem_data = problem_data;

      bitmap_obstack_initialize (&problem_data->rd_bitmaps);
      problem_data->sparse_invalidated_by_call
        = BITMAP_ALLOC (&problem_data->rd_bitmaps);
      problem_data->dense_invalidated_by_call
        = BITMAP_ALLOC (&problem_data->rd_bitmaps);
    }

  df_grow_bb_info (df_rd);

  /* Because of the clustering of all use sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
      if (bb_info)
        {
          bitmap_clear (bb_info->kill);
          bitmap_clear (bb_info->sparse_kill);
          bitmap_clear (bb_info->gen);
        }
      else
        {
          bb_info = (struct df_rd_bb_info *) pool_alloc (df_rd->block_pool);
          df_rd_set_bb_info (bb_index, bb_info);
          bb_info->kill = BITMAP_ALLOC (&problem_data->rd_bitmaps);
          bb_info->sparse_kill = BITMAP_ALLOC (&problem_data->rd_bitmaps);
          bb_info->gen = BITMAP_ALLOC (&problem_data->rd_bitmaps);
          bb_info->in = BITMAP_ALLOC (&problem_data->rd_bitmaps);
          bb_info->out = BITMAP_ALLOC (&problem_data->rd_bitmaps);
        }
    }
  df_rd->optional_p = true;
}


/* Add the effect of the top artificial defs of BB to the reaching definitions
   bitmap LOCAL_RD.  */

void
df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd)
{
  int bb_index = bb->index;
  df_ref *def_rec;
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
        {
          unsigned int dregno = DF_REF_REGNO (def);
          if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
            bitmap_clear_range (local_rd,
                                DF_DEFS_BEGIN (dregno),
                                DF_DEFS_COUNT (dregno));
          bitmap_set_bit (local_rd, DF_REF_ID (def));
        }
    }
}
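
/* Usage sketch (this is the pattern df_chain_create_bb follows later in
   this file): seed LOCAL_RD with the block's RD in-set, apply the
   artificial defs at the top, then walk the insns forwards:

     bitmap_copy (local_rd, df_rd_get_bb_info (bb->index)->in);
     df_rd_simulate_artificial_defs_at_top (bb, local_rd);
     FOR_BB_INSNS (bb, insn)
       if (INSN_P (insn))
         df_rd_simulate_one_insn (bb, insn, local_rd);

   After each step LOCAL_RD holds exactly the defs that reach that
   program point.  */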

/* Add the effect of the defs of INSN to the reaching definitions bitmap
   LOCAL_RD.  */

void
df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
                         bitmap local_rd)
{
  unsigned uid = INSN_UID (insn);
  df_ref *def_rec;

  for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      unsigned int dregno = DF_REF_REGNO (def);
      if ((!(df->changeable_flags & DF_NO_HARD_REGS))
          || (dregno >= FIRST_PSEUDO_REGISTER))
        {
          if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
            bitmap_clear_range (local_rd,
                                DF_DEFS_BEGIN (dregno),
                                DF_DEFS_COUNT (dregno));
          if (!(DF_REF_FLAGS (def)
                & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
            bitmap_set_bit (local_rd, DF_REF_ID (def));
        }
    }
}

/* Process a list of DEFs for df_rd_bb_local_compute.  This is a bit
   more complicated than just simulating, because we must produce the
   gen and kill sets and hence deal with the two possible representations
   of kill sets.  */

static void
df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
                                    df_ref *def_rec,
                                    int top_flag)
{
  while (*def_rec)
    {
      df_ref def = *def_rec;
      if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
        {
          unsigned int regno = DF_REF_REGNO (def);
          unsigned int begin = DF_DEFS_BEGIN (regno);
          unsigned int n_defs = DF_DEFS_COUNT (regno);

          if ((!(df->changeable_flags & DF_NO_HARD_REGS))
              || (regno >= FIRST_PSEUDO_REGISTER))
            {
              /* Only the last def(s) for a regno in the block have any
                 effect.  */
              if (!bitmap_bit_p (seen_in_block, regno))
                {
                  /* The first def for regno in the insn gets to knock out
                     the defs from other instructions.  */
                  if ((!bitmap_bit_p (seen_in_insn, regno))
                      /* If the def is to only part of the reg, it does
                         not kill the other defs that reach here.  */
                      && (!(DF_REF_FLAGS (def)
                            & (DF_REF_PARTIAL | DF_REF_CONDITIONAL
                               | DF_REF_MAY_CLOBBER))))
                    {
                      if (n_defs > DF_SPARSE_THRESHOLD)
                        {
                          bitmap_set_bit (bb_info->sparse_kill, regno);
                          bitmap_clear_range (bb_info->gen, begin, n_defs);
                        }
                      else
                        {
                          bitmap_set_range (bb_info->kill, begin, n_defs);
                          bitmap_clear_range (bb_info->gen, begin, n_defs);
                        }
                    }

                  bitmap_set_bit (seen_in_insn, regno);
                  /* All defs for regno in the instruction may be put into
                     the gen set.  */
                  if (!(DF_REF_FLAGS (def)
                        & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
                    bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
                }
            }
        }
      def_rec++;
    }
}

/* Compute local reaching def info for basic block BB.  */

static void
df_rd_bb_local_compute (unsigned int bb_index)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
  rtx insn;

  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  /* Artificials are only hard regs.  */
  if (!(df->changeable_flags & DF_NO_HARD_REGS))
    df_rd_bb_local_compute_process_def (bb_info,
                                        df_get_artificial_defs (bb_index),
                                        0);

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);

      if (!INSN_P (insn))
        continue;

      df_rd_bb_local_compute_process_def (bb_info,
                                          DF_INSN_UID_DEFS (uid), 0);

      /* This complex dance with the two bitmaps is required because
         instructions can assign twice to the same pseudo.  This
         generally happens with calls that will have one def for the
         result and another def for the clobber.  If only one vector
         is used and the clobber goes first, the result will be
         lost.  */
      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);
    }

  /* Process the artificial defs at the top of the block last since we
     are going backwards through the block and these are logically at
     the start.  */
  if (!(df->changeable_flags & DF_NO_HARD_REGS))
    df_rd_bb_local_compute_process_def (bb_info,
                                        df_get_artificial_defs (bb_index),
                                        DF_REF_AT_TOP);
}


/* Compute local reaching def info for each basic block within BLOCKS.  */

static void
df_rd_local_compute (bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  unsigned int regno;
  struct df_rd_problem_data *problem_data
    = (struct df_rd_problem_data *) df_rd->problem_data;
  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;

  seen_in_block = BITMAP_ALLOC (&df_bitmap_obstack);
  seen_in_insn = BITMAP_ALLOC (&df_bitmap_obstack);

  df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      df_rd_bb_local_compute (bb_index);
    }

  /* Set up the knockout bit vectors to be applied across EH_EDGES.  */
  EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
    {
      if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
        bitmap_set_bit (sparse_invalidated, regno);
      else
        bitmap_set_range (dense_invalidated,
                          DF_DEFS_BEGIN (regno),
                          DF_DEFS_COUNT (regno));
    }

  BITMAP_FREE (seen_in_block);
  BITMAP_FREE (seen_in_insn);
}


/* Initialize the solution bit vectors for problem.  */

static void
df_rd_init_solution (bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);

      bitmap_copy (bb_info->out, bb_info->gen);
      bitmap_clear (bb_info->in);
    }
}

/* The in set of the target gets the union (OR) of the out sets of the
   sources.  */

static void
df_rd_confluence_n (edge e)
{
  bitmap op1 = df_rd_get_bb_info (e->dest->index)->in;
  bitmap op2 = df_rd_get_bb_info (e->src->index)->out;

  if (e->flags & EDGE_FAKE)
    return;

  if (e->flags & EDGE_EH)
    {
      struct df_rd_problem_data *problem_data
        = (struct df_rd_problem_data *) df_rd->problem_data;
      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
      bitmap_iterator bi;
      unsigned int regno;
      bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);

      bitmap_copy (tmp, op2);
      bitmap_and_compl_into (tmp, dense_invalidated);

      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
        {
          bitmap_clear_range (tmp,
                              DF_DEFS_BEGIN (regno),
                              DF_DEFS_COUNT (regno));
        }
      bitmap_ior_into (op1, tmp);
      BITMAP_FREE (tmp);
    }
  else
    bitmap_ior_into (op1, op2);
}
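
/* For reference, the system being solved is the classic forward
   reaching-definitions problem (modulo the EH-edge knockouts applied
   above):

     in(bb)  = union over predecessors p of out(p)
     out(bb) = gen(bb) | (in(bb) & ~kill(bb))

   where the regs recorded in sparse_kill stand for whole
   DF_DEFS_BEGIN/DF_DEFS_COUNT ranges of the kill set.  */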

/* Transfer function.  */

static bool
df_rd_transfer_function (int bb_index)
{
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
  unsigned int regno;
  bitmap_iterator bi;
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;
  bitmap sparse_kill = bb_info->sparse_kill;

  if (bitmap_empty_p (sparse_kill))
    return bitmap_ior_and_compl (out, gen, in, kill);
  else
    {
      struct df_rd_problem_data *problem_data;
      bool changed = false;
      bitmap tmp;

      /* Note that TMP is _not_ a temporary bitmap if we end up replacing
         OUT with TMP.  Therefore, allocate TMP in the RD bitmaps obstack.  */
      problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
      tmp = BITMAP_ALLOC (&problem_data->rd_bitmaps);

      bitmap_copy (tmp, in);
      EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
        {
          bitmap_clear_range (tmp,
                              DF_DEFS_BEGIN (regno),
                              DF_DEFS_COUNT (regno));
        }
      bitmap_and_compl_into (tmp, kill);
      bitmap_ior_into (tmp, gen);
      changed = !bitmap_equal_p (tmp, out);
      if (changed)
        {
          BITMAP_FREE (out);
          bb_info->out = tmp;
        }
      else
        BITMAP_FREE (tmp);
      return changed;
    }
}


/* Free all storage associated with the problem.  */

static void
df_rd_free (void)
{
  struct df_rd_problem_data *problem_data
    = (struct df_rd_problem_data *) df_rd->problem_data;

  if (problem_data)
    {
      free_alloc_pool (df_rd->block_pool);
      bitmap_obstack_release (&problem_data->rd_bitmaps);

      df_rd->block_info_size = 0;
      free (df_rd->block_info);
      free (df_rd->problem_data);
    }
  free (df_rd);
}


/* Debugging info.  */

static void
df_rd_start_dump (FILE *file)
{
  struct df_rd_problem_data *problem_data
    = (struct df_rd_problem_data *) df_rd->problem_data;
  unsigned int m = DF_REG_SIZE (df);
  unsigned int regno;

  if (!df_rd->block_info)
    return;

  fprintf (file, ";; Reaching defs:\n\n");

  fprintf (file, " sparse invalidated \t");
  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
  fprintf (file, " dense invalidated \t");
  dump_bitmap (file, problem_data->dense_invalidated_by_call);

  for (regno = 0; regno < m; regno++)
    if (DF_DEFS_COUNT (regno))
      fprintf (file, "%d[%d,%d] ", regno,
               DF_DEFS_BEGIN (regno),
               DF_DEFS_COUNT (regno));
  fprintf (file, "\n");
}


/* Debugging info at top of bb.  */

static void
df_rd_top_dump (basic_block bb, FILE *file)
{
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
  if (!bb_info || !bb_info->in)
    return;

  fprintf (file, ";; rd in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
  dump_bitmap (file, bb_info->in);
  fprintf (file, ";; rd gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
  dump_bitmap (file, bb_info->gen);
  fprintf (file, ";; rd kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
  dump_bitmap (file, bb_info->kill);
}


/* Debugging info at bottom of bb.  */

static void
df_rd_bottom_dump (basic_block bb, FILE *file)
{
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
  if (!bb_info || !bb_info->out)
    return;

  fprintf (file, ";; rd out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
  dump_bitmap (file, bb_info->out);
}

/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_RD =
{
  DF_RD,                      /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_rd_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_rd_free_bb_info,         /* Free basic block info.  */
  df_rd_local_compute,        /* Local compute function.  */
  df_rd_init_solution,        /* Init the solution specific data.  */
  df_worklist_dataflow,       /* Worklist solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_rd_confluence_n,         /* Confluence operator n.  */
  df_rd_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_rd_free,                 /* Free all of the problem information.  */
  df_rd_free,                 /* Remove this problem from the stack of dataflow problems.  */
  df_rd_start_dump,           /* Debugging.  */
  df_rd_top_dump,             /* Debugging start block.  */
  df_rd_bottom_dump,          /* Debugging end block.  */
  NULL,                       /* Incremental solution verify start.  */
  NULL,                       /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  TV_DF_RD,                   /* Timing variable.  */
  true                        /* Reset blocks on dropping out of blocks_to_analyze.  */
};


/* Create a new RD instance and add it to the existing instance
   of DF.  */

void
df_rd_add_problem (void)
{
  df_add_problem (&problem_RD);
}


/*----------------------------------------------------------------------------
   LIVE REGISTERS

   Find the locations in the function where any use of a pseudo can
   reach in the backwards direction.  In and out bitvectors are built
   for each basic block.  The regno is used to index into these sets.
   See df.h for details.
----------------------------------------------------------------------------*/

/* Private data used to verify the solution for this problem.  */
struct df_lr_problem_data
{
  bitmap *in;
  bitmap *out;
};


/* Set basic block info.  */

static void
df_lr_set_bb_info (unsigned int index,
                   struct df_lr_bb_info *bb_info)
{
  gcc_assert (df_lr);
  gcc_assert (index < df_lr->block_info_size);
  df_lr->block_info[index] = bb_info;
}


/* Free basic block info.  */

static void
df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
                    void *vbb_info)
{
  struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
  if (bb_info)
    {
      BITMAP_FREE (bb_info->use);
      BITMAP_FREE (bb_info->def);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (df_lr->block_pool, bb_info);
    }
}
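
/* Note on incremental updating (a summary of the scheme used below):
   df_lr->out_of_date_transfer_functions records the indices of blocks
   whose USE/DEF sets are stale.  df_lr_alloc and df_lr_local_compute
   only revisit those blocks, so a small change to one block does not
   force every transfer function to be recomputed.  */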

/* Allocate or reset bitmaps for DF_LR blocks.  The solution bits are
   not touched unless the block is new.  */

static void
df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  if (!df_lr->block_pool)
    df_lr->block_pool = create_alloc_pool ("df_lr_block pool",
                                           sizeof (struct df_lr_bb_info), 50);

  df_grow_bb_info (df_lr);

  EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
    {
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
      if (bb_info)
        {
          bitmap_clear (bb_info->def);
          bitmap_clear (bb_info->use);
        }
      else
        {
          bb_info = (struct df_lr_bb_info *) pool_alloc (df_lr->block_pool);
          df_lr_set_bb_info (bb_index, bb_info);
          bb_info->use = BITMAP_ALLOC (NULL);
          bb_info->def = BITMAP_ALLOC (NULL);
          bb_info->in = BITMAP_ALLOC (NULL);
          bb_info->out = BITMAP_ALLOC (NULL);
        }
    }

  df_lr->optional_p = false;
}


/* Reset the global solution for recalculation.  */

static void
df_lr_reset (bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
      gcc_assert (bb_info);
      bitmap_clear (bb_info->in);
      bitmap_clear (bb_info->out);
    }
}


/* Compute local live register info for basic block BB.  */

static void
df_lr_bb_local_compute (unsigned int bb_index)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
  rtx insn;
  df_ref *def_rec;
  df_ref *use_rec;

  /* Process the registers set in an exception handler.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
        {
          unsigned int dregno = DF_REF_REGNO (def);
          bitmap_set_bit (bb_info->def, dregno);
          bitmap_clear_bit (bb_info->use, dregno);
        }
    }

  /* Process the hardware registers that are always live.  */
  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      df_ref use = *use_rec;
      /* Add use to set of uses in this BB.  */
      if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
        bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
    }

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);

      if (!NONDEBUG_INSN_P (insn))
        continue;

      for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
        {
          df_ref def = *def_rec;
          /* If the def is to only part of the reg, it does
             not kill the other defs that reach here.  */
          if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
            {
              unsigned int dregno = DF_REF_REGNO (def);
              bitmap_set_bit (bb_info->def, dregno);
              bitmap_clear_bit (bb_info->use, dregno);
            }
        }

      for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
        {
          df_ref use = *use_rec;
          /* Add use to set of uses in this BB.  */
          bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
        }
    }
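
  /* The net effect of the loop above on a single insn is the usual
     backwards liveness rule: USE := (USE - whole-register defs) | uses,
     DEF := DEF | defs.  For example, a hypothetical insn
     (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102))) adds 100 to
     DEF, removes 100 from USE, and adds 101 and 102 to USE.  */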

  /* Process the registers set in an exception handler or the hard
     frame pointer if this block is the target of a non local
     goto.  */
  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
        {
          unsigned int dregno = DF_REF_REGNO (def);
          bitmap_set_bit (bb_info->def, dregno);
          bitmap_clear_bit (bb_info->use, dregno);
        }
    }

#ifdef EH_USES
  /* Process the uses that are live into an exception handler.  */
  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      df_ref use = *use_rec;
      /* Add use to set of uses in this BB.  */
      if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
        bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
    }
#endif

  /* If the df_live problem is not defined, such as at -O0 and -O1, we
     still need to keep the luids up to date.  This is normally done
     in the df_live problem since this problem has a forwards
     scan.  */
  if (!df_live)
    df_recompute_luids (bb);
}


/* Compute local live register info for each basic block within BLOCKS.  */

static void
df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  bitmap_clear (df->hardware_regs_used);

  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);

  /* Before reload, there are a few registers that must be forced
     live everywhere -- which might not already be the case for
     blocks within infinite loops.  */
  if (!reload_completed)
    {
      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
#endif

      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
    }

  EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
    {
      if (bb_index == EXIT_BLOCK)
        {
          /* The exit block is special for this problem and its bits are
             computed from thin air.  */
          struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
          bitmap_copy (bb_info->use, df->exit_block_uses);
        }
      else
        df_lr_bb_local_compute (bb_index);
    }

  bitmap_clear (df_lr->out_of_date_transfer_functions);
}


/* Initialize the solution vectors.  */

static void
df_lr_init (bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
      bitmap_copy (bb_info->in, bb_info->use);
      bitmap_clear (bb_info->out);
    }
}
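
/* For reference, LR solves the standard backwards liveness system:

     out(bb) = union over successors s of in(s), plus hardware_regs_used
     in(bb)  = use(bb) | (out(bb) - def(bb))

   with call-clobbered registers removed on EH edges; the two confluence
   functions below implement the out(bb) part.  */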

/* Confluence function that processes infinite loops.  This might be a
   noreturn function that throws.  And even if it isn't, getting the
   unwind info right helps debugging.  */
static void
df_lr_confluence_0 (basic_block bb)
{
  bitmap op1 = df_lr_get_bb_info (bb->index)->out;
  if (bb != EXIT_BLOCK_PTR)
    bitmap_copy (op1, df->hardware_regs_used);
}


/* Confluence function that ignores fake edges.  */

static void
df_lr_confluence_n (edge e)
{
  bitmap op1 = df_lr_get_bb_info (e->src->index)->out;
  bitmap op2 = df_lr_get_bb_info (e->dest->index)->in;

  /* Call-clobbered registers die across exception and call edges.  */
  /* ??? Abnormal call edges ignored for the moment, as this gets
     confused by sibling call edges, which crashes reg-stack.  */
  if (e->flags & EDGE_EH)
    bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
  else
    bitmap_ior_into (op1, op2);

  bitmap_ior_into (op1, df->hardware_regs_used);
}


/* Transfer function.  */

static bool
df_lr_transfer_function (int bb_index)
{
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap use = bb_info->use;
  bitmap def = bb_info->def;

  return bitmap_ior_and_compl (in, use, out, def);
}


/* Run the fast dce as a side effect of building LR.  */

static void
df_lr_finalize (bitmap all_blocks)
{
  df_lr->solutions_dirty = false;
  if (df->changeable_flags & DF_LR_RUN_DCE)
    {
      run_fast_df_dce ();

      /* If dce deletes some instructions, we need to recompute the lr
         solution before proceeding further.  The problem is that fast
         dce is a pessimistic dataflow algorithm.  In the case where
         it deletes a statement S inside of a loop, the uses inside of
         S may not be deleted from the dataflow solution because they
         were carried around the loop.  While it is conservatively
         correct to leave these extra bits, the standards of df
         require that we maintain the best possible (least fixed
         point) solution.  The only way to do that is to redo the
         iteration from the beginning.  See PR35805 for an
         example.  */
      if (df_lr->solutions_dirty)
        {
          df_clear_flags (DF_LR_RUN_DCE);
          df_lr_alloc (all_blocks);
          df_lr_local_compute (all_blocks);
          df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);
          df_lr_finalize (all_blocks);
          df_set_flags (DF_LR_RUN_DCE);
        }
    }
}


/* Free all storage associated with the problem.  */

static void
df_lr_free (void)
{
  if (df_lr->block_info)
    {
      unsigned int i;
      for (i = 0; i < df_lr->block_info_size; i++)
        {
          struct df_lr_bb_info *bb_info = df_lr_get_bb_info (i);
          if (bb_info)
            {
              BITMAP_FREE (bb_info->use);
              BITMAP_FREE (bb_info->def);
              BITMAP_FREE (bb_info->in);
              BITMAP_FREE (bb_info->out);
            }
        }
      free_alloc_pool (df_lr->block_pool);

      df_lr->block_info_size = 0;
      free (df_lr->block_info);
    }

  BITMAP_FREE (df_lr->out_of_date_transfer_functions);
  free (df_lr);
}


/* Debugging info at top of bb.  */

static void
df_lr_top_dump (basic_block bb, FILE *file)
{
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
  struct df_lr_problem_data *problem_data;
  if (!bb_info || !bb_info->in)
    return;

  fprintf (file, ";; lr in \t");
  df_print_regset (file, bb_info->in);
  if (df_lr->problem_data)
    {
      problem_data = (struct df_lr_problem_data *) df_lr->problem_data;
      fprintf (file, ";; old in \t");
      df_print_regset (file, problem_data->in[bb->index]);
    }
  fprintf (file, ";; lr use \t");
  df_print_regset (file, bb_info->use);
  fprintf (file, ";; lr def \t");
  df_print_regset (file, bb_info->def);
}


/* Debugging info at bottom of bb.  */

static void
df_lr_bottom_dump (basic_block bb, FILE *file)
{
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
  struct df_lr_problem_data *problem_data;
  if (!bb_info || !bb_info->out)
    return;

  fprintf (file, ";; lr out \t");
  df_print_regset (file, bb_info->out);
  if (df_lr->problem_data)
    {
      problem_data = (struct df_lr_problem_data *) df_lr->problem_data;
      fprintf (file, ";; old out \t");
      df_print_regset (file, problem_data->out[bb->index]);
    }
}


/* Build the datastructure to verify that the solution to the dataflow
   equations is not dirty.  */

static void
df_lr_verify_solution_start (void)
{
  basic_block bb;
  struct df_lr_problem_data *problem_data;
  if (df_lr->solutions_dirty)
    {
      df_lr->problem_data = NULL;
      return;
    }

  /* Set it true so that the solution is recomputed.  */
  df_lr->solutions_dirty = true;

  problem_data = XNEW (struct df_lr_problem_data);
  df_lr->problem_data = problem_data;
  problem_data->in = XNEWVEC (bitmap, last_basic_block);
  problem_data->out = XNEWVEC (bitmap, last_basic_block);

  FOR_ALL_BB (bb)
    {
      problem_data->in[bb->index] = BITMAP_ALLOC (NULL);
      problem_data->out[bb->index] = BITMAP_ALLOC (NULL);
      bitmap_copy (problem_data->in[bb->index], DF_LR_IN (bb));
      bitmap_copy (problem_data->out[bb->index], DF_LR_OUT (bb));
    }
}


/* Compare the saved datastructure and the new solution to the dataflow
   equations.  */

static void
df_lr_verify_solution_end (void)
{
  struct df_lr_problem_data *problem_data;
  basic_block bb;

  if (df_lr->problem_data == NULL)
    return;

  problem_data = (struct df_lr_problem_data *) df_lr->problem_data;

  if (df_lr->solutions_dirty)
    /* Do not check if the solution is still dirty.  See the comment
       in df_lr_finalize for details.  */
    df_lr->solutions_dirty = false;
  else
    FOR_ALL_BB (bb)
      {
        if ((!bitmap_equal_p (problem_data->in[bb->index], DF_LR_IN (bb)))
            || (!bitmap_equal_p (problem_data->out[bb->index], DF_LR_OUT (bb))))
          {
            /*df_dump (stderr);*/
            gcc_unreachable ();
          }
      }

  /* Cannot delete them immediately because you may want to dump them
     if the comparison fails.  */
  FOR_ALL_BB (bb)
    {
      BITMAP_FREE (problem_data->in[bb->index]);
      BITMAP_FREE (problem_data->out[bb->index]);
    }

  free (problem_data->in);
  free (problem_data->out);
  free (problem_data);
  df_lr->problem_data = NULL;
}
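
/* The two routines above implement the incremental verification
   protocol: df_lr_verify_solution_start snapshots the current IN/OUT
   sets and deliberately marks the solution dirty so that it gets
   recomputed, and df_lr_verify_solution_end then asserts that the
   recomputed sets match the snapshot.  The df_live problem below uses
   the same scheme.  */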


/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_LR =
{
  DF_LR,                      /* Problem id.  */
  DF_BACKWARD,                /* Direction.  */
  df_lr_alloc,                /* Allocate the problem specific data.  */
  df_lr_reset,                /* Reset global information.  */
  df_lr_free_bb_info,         /* Free basic block info.  */
  df_lr_local_compute,        /* Local compute function.  */
  df_lr_init,                 /* Init the solution specific data.  */
  df_worklist_dataflow,       /* Worklist solver.  */
  df_lr_confluence_0,         /* Confluence operator 0.  */
  df_lr_confluence_n,         /* Confluence operator n.  */
  df_lr_transfer_function,    /* Transfer function.  */
  df_lr_finalize,             /* Finalize function.  */
  df_lr_free,                 /* Free all of the problem information.  */
  NULL,                       /* Remove this problem from the stack of dataflow problems.  */
  NULL,                       /* Debugging.  */
  df_lr_top_dump,             /* Debugging start block.  */
  df_lr_bottom_dump,          /* Debugging end block.  */
  df_lr_verify_solution_start,/* Incremental solution verify start.  */
  df_lr_verify_solution_end,  /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  TV_DF_LR,                   /* Timing variable.  */
  false                       /* Reset blocks on dropping out of blocks_to_analyze.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_lr_add_problem (void)
{
  df_add_problem (&problem_LR);
  /* These will be initialized when df_scan_blocks processes each
     block.  */
  df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
}


/* Verify that all of the lr related info is consistent and
   correct.  */

void
df_lr_verify_transfer_functions (void)
{
  basic_block bb;
  bitmap saved_def;
  bitmap saved_use;
  bitmap saved_adef;
  bitmap saved_ause;
  bitmap all_blocks;

  if (!df)
    return;

  saved_def = BITMAP_ALLOC (NULL);
  saved_use = BITMAP_ALLOC (NULL);
  saved_adef = BITMAP_ALLOC (NULL);
  saved_ause = BITMAP_ALLOC (NULL);
  all_blocks = BITMAP_ALLOC (NULL);

  FOR_ALL_BB (bb)
    {
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
      bitmap_set_bit (all_blocks, bb->index);

      if (bb_info)
        {
          /* Make a copy of the transfer functions and then compute
             new ones to see if the transfer functions have
             changed.  */
          if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
                             bb->index))
            {
              bitmap_copy (saved_def, bb_info->def);
              bitmap_copy (saved_use, bb_info->use);
              bitmap_clear (bb_info->def);
              bitmap_clear (bb_info->use);

              df_lr_bb_local_compute (bb->index);
              gcc_assert (bitmap_equal_p (saved_def, bb_info->def));
              gcc_assert (bitmap_equal_p (saved_use, bb_info->use));
            }
        }
      else
        {
          /* If we do not have basic block info, the block must be in
             the list of dirty blocks or else someone has added a
             block behind our backs.  */
          gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
                                    bb->index));
        }
      /* Make sure no one created a block without following
         procedures.  */
      gcc_assert (df_scan_get_bb_info (bb->index));
    }

  /* Make sure there are no dirty bits in blocks that have been deleted.  */
  gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
                                         all_blocks));

  BITMAP_FREE (saved_def);
  BITMAP_FREE (saved_use);
  BITMAP_FREE (saved_adef);
  BITMAP_FREE (saved_ause);
  BITMAP_FREE (all_blocks);
}


/*----------------------------------------------------------------------------
   LIVE AND MUST-INITIALIZED REGISTERS.

   This problem first computes the IN and OUT bitvectors for the
   must-initialized registers problem, which is a forward problem.
   It gives the set of registers for which we MUST have an available
   definition on any path from the entry block to the entry/exit of
   a basic block.  Sets generate a definition, while clobbers kill
   a definition.

   In and out bitvectors are built for each basic block and are indexed by
   regnum (see df.h for details).  The in and out bitvectors in struct
   df_live_bb_info actually refer to the must-initialized problem.

   Then, the in and out sets for the LIVE problem itself are computed.
   These are the logical AND of the IN and OUT sets from the LR problem
   and the must-initialized problem.
----------------------------------------------------------------------------*/

/* Private data used to verify the solution for this problem.  */
struct df_live_problem_data
{
  bitmap *in;
  bitmap *out;
};

/* Scratch var used by transfer functions.  This is used to implement
   an optimization to reduce the amount of space used to compute the
   combined lr and live analysis.  */
static bitmap df_live_scratch;

/* Set basic block info.  */

static void
df_live_set_bb_info (unsigned int index,
                     struct df_live_bb_info *bb_info)
{
  gcc_assert (df_live);
  gcc_assert (index < df_live->block_info_size);
  df_live->block_info[index] = bb_info;
}


/* Free basic block info.  */

static void
df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
                      void *vbb_info)
{
  struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
  if (bb_info)
    {
      BITMAP_FREE (bb_info->gen);
      BITMAP_FREE (bb_info->kill);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (df_live->block_pool, bb_info);
    }
}
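
/* In equation form, the must-initialized subproblem computed below is
   the forward system

     in(bb)  = union over predecessors p of out(p)
     out(bb) = gen(bb) | (in(bb) - kill(bb))

   and the final DF_LIVE_IN/DF_LIVE_OUT sets are the bitwise AND of
   these with the corresponding DF_LR sets (see df_live_finalize; the
   transfer function also applies the LR trimming eagerly as a space
   optimization).  */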

/* Allocate or reset bitmaps for DF_LIVE blocks.  The solution bits are
   not touched unless the block is new.  */

static void
df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  if (!df_live->block_pool)
    df_live->block_pool = create_alloc_pool ("df_live_block pool",
                                             sizeof (struct df_live_bb_info), 100);
  if (!df_live_scratch)
    df_live_scratch = BITMAP_ALLOC (NULL);

  df_grow_bb_info (df_live);

  EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi)
    {
      struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
      if (bb_info)
        {
          bitmap_clear (bb_info->kill);
          bitmap_clear (bb_info->gen);
        }
      else
        {
          bb_info = (struct df_live_bb_info *) pool_alloc (df_live->block_pool);
          df_live_set_bb_info (bb_index, bb_info);
          bb_info->kill = BITMAP_ALLOC (NULL);
          bb_info->gen = BITMAP_ALLOC (NULL);
          bb_info->in = BITMAP_ALLOC (NULL);
          bb_info->out = BITMAP_ALLOC (NULL);
        }
    }
  df_live->optional_p = (optimize <= 1);
}


/* Reset the global solution for recalculation.  */

static void
df_live_reset (bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
      gcc_assert (bb_info);
      bitmap_clear (bb_info->in);
      bitmap_clear (bb_info->out);
    }
}


/* Compute local uninitialized register info for basic block BB.  */

static void
df_live_bb_local_compute (unsigned int bb_index)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
  rtx insn;
  df_ref *def_rec;
  int luid = 0;

  FOR_BB_INSNS (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);
      struct df_insn_info *insn_info = DF_INSN_UID_GET (uid);

      /* Inserting labels does not always trigger the incremental
         rescanning.  */
      if (!insn_info)
        {
          gcc_assert (!INSN_P (insn));
          insn_info = df_insn_create_insn_record (insn);
        }

      DF_INSN_INFO_LUID (insn_info) = luid;
      if (!INSN_P (insn))
        continue;

      luid++;
      for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
        {
          df_ref def = *def_rec;
          unsigned int regno = DF_REF_REGNO (def);

          if (DF_REF_FLAGS_IS_SET (def,
                                   DF_REF_PARTIAL | DF_REF_CONDITIONAL))
            /* All partial or conditional defs
               seen are included in the gen set.  */
            bitmap_set_bit (bb_info->gen, regno);
          else if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
            /* Only must clobbers for the entire reg destroy the
               value.  */
            bitmap_set_bit (bb_info->kill, regno);
          else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
            bitmap_set_bit (bb_info->gen, regno);
        }
    }

  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
    }
}
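
/* Example of the classification above (hypothetical insns): a plain
   (set (reg 100) ...) puts 100 in GEN; a (clobber (reg 100)) that must
   clobber the entire register puts 100 in KILL; a def flagged
   DF_REF_PARTIAL or DF_REF_CONDITIONAL (for instance a write to only
   part of a multi-word register, or a def under a COND_EXEC) only
   GENs, because the rest of the register may still hold an older
   value.  */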

/* Compute local uninitialized register info.  */

static void
df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  df_grow_insn_info ();

  EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
                            0, bb_index, bi)
    {
      df_live_bb_local_compute (bb_index);
    }

  bitmap_clear (df_live->out_of_date_transfer_functions);
}


/* Initialize the solution vectors.  */

static void
df_live_init (bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);

      /* No register may reach a location where it is not used.  Thus
         we trim the rr result to the places where it is used.  */
      bitmap_and (bb_info->out, bb_info->gen, bb_lr_info->out);
      bitmap_clear (bb_info->in);
    }
}

/* Forward confluence function that ignores fake edges.  */

static void
df_live_confluence_n (edge e)
{
  bitmap op1 = df_live_get_bb_info (e->dest->index)->in;
  bitmap op2 = df_live_get_bb_info (e->src->index)->out;

  if (e->flags & EDGE_FAKE)
    return;

  bitmap_ior_into (op1, op2);
}


/* Transfer function for the forwards must-initialized problem.  */

static bool
df_live_transfer_function (int bb_index)
{
  struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
  struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;

  /* We need to use a scratch set here so that the value returned from this
     function invocation properly reflects whether the sets changed in a
     significant way; i.e. not just because the lr set was anded in.  */
  bitmap_and (df_live_scratch, gen, bb_lr_info->out);
  /* No register may reach a location where it is not used.  Thus
     we trim the rr result to the places where it is used.  */
  bitmap_and_into (in, bb_lr_info->in);

  return bitmap_ior_and_compl (out, df_live_scratch, in, kill);
}


/* And the LR info with the must-initialized registers, to produce the
   LIVE info.  */

static void
df_live_finalize (bitmap all_blocks)
{
  if (df_live->solutions_dirty)
    {
      bitmap_iterator bi;
      unsigned int bb_index;

      EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
        {
          struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
          struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);

          /* No register may reach a location where it is not used.  Thus
             we trim the rr result to the places where it is used.  */
          bitmap_and_into (bb_live_info->in, bb_lr_info->in);
          bitmap_and_into (bb_live_info->out, bb_lr_info->out);
        }

      df_live->solutions_dirty = false;
    }
}


/* Free all storage associated with the problem.  */

static void
df_live_free (void)
{
  if (df_live->block_info)
    {
      unsigned int i;

      for (i = 0; i < df_live->block_info_size; i++)
        {
          struct df_live_bb_info *bb_info = df_live_get_bb_info (i);
          if (bb_info)
            {
              BITMAP_FREE (bb_info->gen);
              BITMAP_FREE (bb_info->kill);
              BITMAP_FREE (bb_info->in);
              BITMAP_FREE (bb_info->out);
            }
        }

      free_alloc_pool (df_live->block_pool);
      df_live->block_info_size = 0;
      free (df_live->block_info);

      if (df_live_scratch)
        BITMAP_FREE (df_live_scratch);
    }
  BITMAP_FREE (df_live->out_of_date_transfer_functions);
  free (df_live);
}


/* Debugging info at top of bb.  */

static void
df_live_top_dump (basic_block bb, FILE *file)
{
  struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
  struct df_live_problem_data *problem_data;

  if (!bb_info || !bb_info->in)
    return;

  fprintf (file, ";; live in \t");
  df_print_regset (file, bb_info->in);
  if (df_live->problem_data)
    {
      problem_data = (struct df_live_problem_data *) df_live->problem_data;
      fprintf (file, ";; old in \t");
      df_print_regset (file, problem_data->in[bb->index]);
    }
  fprintf (file, ";; live gen \t");
  df_print_regset (file, bb_info->gen);
  fprintf (file, ";; live kill\t");
  df_print_regset (file, bb_info->kill);
}


/* Debugging info at bottom of bb.  */

static void
df_live_bottom_dump (basic_block bb, FILE *file)
{
  struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
  struct df_live_problem_data *problem_data;

  if (!bb_info || !bb_info->out)
    return;

  fprintf (file, ";; live out \t");
  df_print_regset (file, bb_info->out);
  if (df_live->problem_data)
    {
      problem_data = (struct df_live_problem_data *) df_live->problem_data;
      fprintf (file, ";; old out \t");
      df_print_regset (file, problem_data->out[bb->index]);
    }
}


/* Build the datastructure to verify that the solution to the dataflow
   equations is not dirty.  */

static void
df_live_verify_solution_start (void)
{
  basic_block bb;
  struct df_live_problem_data *problem_data;
  if (df_live->solutions_dirty)
    {
      df_live->problem_data = NULL;
      return;
    }

  /* Set it true so that the solution is recomputed.  */
  df_live->solutions_dirty = true;

  problem_data = XNEW (struct df_live_problem_data);
  df_live->problem_data = problem_data;
  problem_data->in = XNEWVEC (bitmap, last_basic_block);
  problem_data->out = XNEWVEC (bitmap, last_basic_block);

  FOR_ALL_BB (bb)
    {
      problem_data->in[bb->index] = BITMAP_ALLOC (NULL);
      problem_data->out[bb->index] = BITMAP_ALLOC (NULL);
      bitmap_copy (problem_data->in[bb->index], DF_LIVE_IN (bb));
      bitmap_copy (problem_data->out[bb->index], DF_LIVE_OUT (bb));
    }
}


/* Compare the saved datastructure and the new solution to the dataflow
   equations.  */

static void
df_live_verify_solution_end (void)
{
  struct df_live_problem_data *problem_data;
  basic_block bb;

  if (df_live->problem_data == NULL)
    return;

  problem_data = (struct df_live_problem_data *) df_live->problem_data;

  FOR_ALL_BB (bb)
    {
      if ((!bitmap_equal_p (problem_data->in[bb->index], DF_LIVE_IN (bb)))
          || (!bitmap_equal_p (problem_data->out[bb->index], DF_LIVE_OUT (bb))))
        {
          /*df_dump (stderr);*/
          gcc_unreachable ();
        }
    }

  /* Cannot delete them immediately because you may want to dump them
     if the comparison fails.  */
  FOR_ALL_BB (bb)
    {
      BITMAP_FREE (problem_data->in[bb->index]);
      BITMAP_FREE (problem_data->out[bb->index]);
    }

  free (problem_data->in);
  free (problem_data->out);
  free (problem_data);
  df_live->problem_data = NULL;
}


/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_LIVE =
{
  DF_LIVE,                      /* Problem id.  */
  DF_FORWARD,                   /* Direction.  */
  df_live_alloc,                /* Allocate the problem specific data.  */
  df_live_reset,                /* Reset global information.  */
  df_live_free_bb_info,         /* Free basic block info.  */
  df_live_local_compute,        /* Local compute function.  */
  df_live_init,                 /* Init the solution specific data.  */
  df_worklist_dataflow,         /* Worklist solver.  */
  NULL,                         /* Confluence operator 0.  */
  df_live_confluence_n,         /* Confluence operator n.  */
  df_live_transfer_function,    /* Transfer function.  */
  df_live_finalize,             /* Finalize function.  */
  df_live_free,                 /* Free all of the problem information.  */
  df_live_free,                 /* Remove this problem from the stack of dataflow problems.  */
  NULL,                         /* Debugging.  */
  df_live_top_dump,             /* Debugging start block.  */
  df_live_bottom_dump,          /* Debugging end block.  */
  df_live_verify_solution_start,/* Incremental solution verify start.  */
  df_live_verify_solution_end,  /* Incremental solution verify end.  */
  &problem_LR,                  /* Dependent problem.  */
  TV_DF_LIVE,                   /* Timing variable.  */
  false                         /* Reset blocks on dropping out of blocks_to_analyze.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_live_add_problem (void)
{
  df_add_problem (&problem_LIVE);
  /* These will be initialized when df_scan_blocks processes each
     block.  */
  df_live->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
}


/* Set all of the blocks as dirty.  This needs to be done if this
   problem is added after all of the insns have been scanned.  */

void
df_live_set_all_dirty (void)
{
  basic_block bb;
  FOR_ALL_BB (bb)
    bitmap_set_bit (df_live->out_of_date_transfer_functions,
                    bb->index);
}


/* Verify that all of the live related info is consistent and
   correct.  */

void
df_live_verify_transfer_functions (void)
{
  basic_block bb;
  bitmap saved_gen;
  bitmap saved_kill;
  bitmap all_blocks;

  if (!df)
    return;

  saved_gen = BITMAP_ALLOC (NULL);
  saved_kill = BITMAP_ALLOC (NULL);
  all_blocks = BITMAP_ALLOC (NULL);

  df_grow_insn_info ();

  FOR_ALL_BB (bb)
    {
      struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
      bitmap_set_bit (all_blocks, bb->index);

      if (bb_info)
        {
          /* Make a copy of the transfer functions and then compute
             new ones to see if the transfer functions have
             changed.  */
          if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
                             bb->index))
            {
              bitmap_copy (saved_gen, bb_info->gen);
              bitmap_copy (saved_kill, bb_info->kill);
              bitmap_clear (bb_info->gen);
              bitmap_clear (bb_info->kill);

              df_live_bb_local_compute (bb->index);
              gcc_assert (bitmap_equal_p (saved_gen, bb_info->gen));
              gcc_assert (bitmap_equal_p (saved_kill, bb_info->kill));
            }
        }
      else
        {
          /* If we do not have basic block info, the block must be in
             the list of dirty blocks or else someone has added a
             block behind our backs.  */
          gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
                                    bb->index));
        }
      /* Make sure no one created a block without following
         procedures.  */
      gcc_assert (df_scan_get_bb_info (bb->index));
    }

  /* Make sure there are no dirty bits in blocks that have been deleted.  */
  gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
                                         all_blocks));
  BITMAP_FREE (saved_gen);
  BITMAP_FREE (saved_kill);
  BITMAP_FREE (all_blocks);
}

/*----------------------------------------------------------------------------
   CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS

   Link either the defs to the uses and / or the uses to the defs.

   These problems are set up like the other dataflow problems so that
   they nicely fit into the framework.  They are much simpler and only
   involve a single traversal of instructions and an examination of
   the reaching defs information (the dependent problem).
----------------------------------------------------------------------------*/

#define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))

/* Create a du or ud chain from SRC to DST and link it into SRC.  */

struct df_link *
df_chain_create (df_ref src, df_ref dst)
{
  struct df_link *head = DF_REF_CHAIN (src);
  struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);

  DF_REF_CHAIN (src) = link;
  link->next = head;
  link->ref = dst;
  return link;
}


/* Delete any du or ud chains that start at REF and point to
   TARGET.  */
static void
df_chain_unlink_1 (df_ref ref, df_ref target)
{
  struct df_link *chain = DF_REF_CHAIN (ref);
  struct df_link *prev = NULL;

  while (chain)
    {
      if (chain->ref == target)
        {
          if (prev)
            prev->next = chain->next;
          else
            DF_REF_CHAIN (ref) = chain->next;
          pool_free (df_chain->block_pool, chain);
          return;
        }
      prev = chain;
      chain = chain->next;
    }
}
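
/* The chains built by this problem are plain singly linked lists of
   df_link nodes hanging off each ref.  For a hypothetical sequence

     insn 10: (set (reg 100) ...)    <- def D
     insn 11: (use (reg 100))        <- use U1
     insn 12: (use (reg 100))        <- use U2

   a DU chain links D -> { U1, U2 } while UD chains link U1 -> { D }
   and U2 -> { D }; df_chain_create adds one df_link per such edge and
   df_chain_unlink removes both directions at once.  */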
*/ 1945 1946 void 1947 df_chain_unlink (df_ref ref) 1948 { 1949 struct df_link *chain = DF_REF_CHAIN (ref); 1950 while (chain) 1951 { 1952 struct df_link *next = chain->next; 1953 /* Delete the other side if it exists. */ 1954 df_chain_unlink_1 (chain->ref, ref); 1955 pool_free (df_chain->block_pool, chain); 1956 chain = next; 1957 } 1958 DF_REF_CHAIN (ref) = NULL; 1959 } 1960 1961 1962 /* Copy the du or ud chain starting at FROM_REF and attach it to 1963 TO_REF. */ 1964 1965 void 1966 df_chain_copy (df_ref to_ref, 1967 struct df_link *from_ref) 1968 { 1969 while (from_ref) 1970 { 1971 df_chain_create (to_ref, from_ref->ref); 1972 from_ref = from_ref->next; 1973 } 1974 } 1975 1976 1977 /* Remove this problem from the stack of dataflow problems. */ 1978 1979 static void 1980 df_chain_remove_problem (void) 1981 { 1982 bitmap_iterator bi; 1983 unsigned int bb_index; 1984 1985 /* Wholesale destruction of the old chains. */ 1986 if (df_chain->block_pool) 1987 free_alloc_pool (df_chain->block_pool); 1988 1989 EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi) 1990 { 1991 rtx insn; 1992 df_ref *def_rec; 1993 df_ref *use_rec; 1994 basic_block bb = BASIC_BLOCK (bb_index); 1995 1996 if (df_chain_problem_p (DF_DU_CHAIN)) 1997 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++) 1998 DF_REF_CHAIN (*def_rec) = NULL; 1999 if (df_chain_problem_p (DF_UD_CHAIN)) 2000 for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++) 2001 DF_REF_CHAIN (*use_rec) = NULL; 2002 2003 FOR_BB_INSNS (bb, insn) 2004 { 2005 unsigned int uid = INSN_UID (insn); 2006 2007 if (INSN_P (insn)) 2008 { 2009 if (df_chain_problem_p (DF_DU_CHAIN)) 2010 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) 2011 DF_REF_CHAIN (*def_rec) = NULL; 2012 if (df_chain_problem_p (DF_UD_CHAIN)) 2013 { 2014 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) 2015 DF_REF_CHAIN (*use_rec) = NULL; 2016 for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++) 2017 DF_REF_CHAIN (*use_rec) = NULL; 2018 } 2019 } 2020 } 2021 } 2022 2023 bitmap_clear (df_chain->out_of_date_transfer_functions); 2024 df_chain->block_pool = NULL; 2025 } 2026 2027 2028 /* Remove the chain problem completely. */ 2029 2030 static void 2031 df_chain_fully_remove_problem (void) 2032 { 2033 df_chain_remove_problem (); 2034 BITMAP_FREE (df_chain->out_of_date_transfer_functions); 2035 free (df_chain); 2036 } 2037 2038 2039 /* Create def-use or use-def chains. */ 2040 2041 static void 2042 df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) 2043 { 2044 df_chain_remove_problem (); 2045 df_chain->block_pool = create_alloc_pool ("df_chain_block pool", 2046 sizeof (struct df_link), 50); 2047 df_chain->optional_p = true; 2048 } 2049 2050 2051 /* Reset all of the chains when the set of basic blocks changes. */ 2052 2053 static void 2054 df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED) 2055 { 2056 df_chain_remove_problem (); 2057 } 2058 2059 2060 /* Create the chains for a list of USEs. */ 2061 2062 static void 2063 df_chain_create_bb_process_use (bitmap local_rd, 2064 df_ref *use_rec, 2065 int top_flag) 2066 { 2067 bitmap_iterator bi; 2068 unsigned int def_index; 2069 2070 while (*use_rec) 2071 { 2072 df_ref use = *use_rec; 2073 unsigned int uregno = DF_REF_REGNO (use); 2074 if ((!(df->changeable_flags & DF_NO_HARD_REGS)) 2075 || (uregno >= FIRST_PSEUDO_REGISTER)) 2076 { 2077 /* Do not want to go through this for an uninitialized var. 
*/ 2078 int count = DF_DEFS_COUNT (uregno); 2079 if (count) 2080 { 2081 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP)) 2082 { 2083 unsigned int first_index = DF_DEFS_BEGIN (uregno); 2084 unsigned int last_index = first_index + count - 1; 2085 2086 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi) 2087 { 2088 df_ref def; 2089 if (def_index > last_index) 2090 break; 2091 2092 def = DF_DEFS_GET (def_index); 2093 if (df_chain_problem_p (DF_DU_CHAIN)) 2094 df_chain_create (def, use); 2095 if (df_chain_problem_p (DF_UD_CHAIN)) 2096 df_chain_create (use, def); 2097 } 2098 } 2099 } 2100 } 2101 2102 use_rec++; 2103 } 2104 } 2105 2106 2107 /* Create chains from reaching defs bitmaps for basic block BB. */ 2108 2109 static void 2110 df_chain_create_bb (unsigned int bb_index) 2111 { 2112 basic_block bb = BASIC_BLOCK (bb_index); 2113 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index); 2114 rtx insn; 2115 bitmap cpy = BITMAP_ALLOC (NULL); 2116 2117 bitmap_copy (cpy, bb_info->in); 2118 bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index); 2119 2120 /* Since we are going forwards, process the artificial uses first 2121 then the artificial defs second. */ 2122 2123 #ifdef EH_USES 2124 /* Create the chains for the artificial uses from the EH_USES at the 2125 beginning of the block. */ 2126 2127 /* Artificials are only hard regs. */ 2128 if (!(df->changeable_flags & DF_NO_HARD_REGS)) 2129 df_chain_create_bb_process_use (cpy, 2130 df_get_artificial_uses (bb->index), 2131 DF_REF_AT_TOP); 2132 #endif 2133 2134 df_rd_simulate_artificial_defs_at_top (bb, cpy); 2135 2136 /* Process the regular instructions next. */ 2137 FOR_BB_INSNS (bb, insn) 2138 if (INSN_P (insn)) 2139 { 2140 unsigned int uid = INSN_UID (insn); 2141 2142 /* First scan the uses and link them up with the defs that remain 2143 in the cpy vector. */ 2144 df_chain_create_bb_process_use (cpy, DF_INSN_UID_USES (uid), 0); 2145 if (df->changeable_flags & DF_EQ_NOTES) 2146 df_chain_create_bb_process_use (cpy, DF_INSN_UID_EQ_USES (uid), 0); 2147 2148 /* Since we are going forwards, process the defs second. */ 2149 df_rd_simulate_one_insn (bb, insn, cpy); 2150 } 2151 2152 /* Create the chains for the artificial uses of the hard registers 2153 at the end of the block. */ 2154 if (!(df->changeable_flags & DF_NO_HARD_REGS)) 2155 df_chain_create_bb_process_use (cpy, 2156 df_get_artificial_uses (bb->index), 2157 0); 2158 2159 BITMAP_FREE (cpy); 2160 } 2161 2162 /* Create def-use chains from reaching use bitmaps for basic blocks 2163 in BLOCKS. */ 2164 2165 static void 2166 df_chain_finalize (bitmap all_blocks) 2167 { 2168 unsigned int bb_index; 2169 bitmap_iterator bi; 2170 2171 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) 2172 { 2173 df_chain_create_bb (bb_index); 2174 } 2175 } 2176 2177 2178 /* Free all storage associated with the problem. */ 2179 2180 static void 2181 df_chain_free (void) 2182 { 2183 free_alloc_pool (df_chain->block_pool); 2184 BITMAP_FREE (df_chain->out_of_date_transfer_functions); 2185 free (df_chain); 2186 } 2187 2188 2189 /* Debugging info. 
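
   For orientation when reading RTL dumps, the output of the two dump
   routines below looks roughly like this (the register, block and insn
   numbers are invented for the example):

       ;; DU chains for insn luid 7 uid 23
       ;; reg 58 { u12(bb 3 insn 45) u15(bb 5 insn 61) }

   i.e. for each def (or use) the chain is printed in the format of
   df_chain_dump: 'd' for defs, 'u' for uses and 'e' for equiv-note
   uses, followed by the ref id and the block and insn it occurs in.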
*/ 2190 2191 static void 2192 df_chain_top_dump (basic_block bb, FILE *file) 2193 { 2194 if (df_chain_problem_p (DF_DU_CHAIN)) 2195 { 2196 rtx insn; 2197 df_ref *def_rec = df_get_artificial_defs (bb->index); 2198 if (*def_rec) 2199 { 2200 2201 fprintf (file, ";; DU chains for artificial defs\n"); 2202 while (*def_rec) 2203 { 2204 df_ref def = *def_rec; 2205 fprintf (file, ";; reg %d ", DF_REF_REGNO (def)); 2206 df_chain_dump (DF_REF_CHAIN (def), file); 2207 fprintf (file, "\n"); 2208 def_rec++; 2209 } 2210 } 2211 2212 FOR_BB_INSNS (bb, insn) 2213 { 2214 if (INSN_P (insn)) 2215 { 2216 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn); 2217 def_rec = DF_INSN_INFO_DEFS (insn_info); 2218 if (*def_rec) 2219 { 2220 fprintf (file, ";; DU chains for insn luid %d uid %d\n", 2221 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn)); 2222 2223 while (*def_rec) 2224 { 2225 df_ref def = *def_rec; 2226 fprintf (file, ";; reg %d ", DF_REF_REGNO (def)); 2227 if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE) 2228 fprintf (file, "read/write "); 2229 df_chain_dump (DF_REF_CHAIN (def), file); 2230 fprintf (file, "\n"); 2231 def_rec++; 2232 } 2233 } 2234 } 2235 } 2236 } 2237 } 2238 2239 2240 static void 2241 df_chain_bottom_dump (basic_block bb, FILE *file) 2242 { 2243 if (df_chain_problem_p (DF_UD_CHAIN)) 2244 { 2245 rtx insn; 2246 df_ref *use_rec = df_get_artificial_uses (bb->index); 2247 2248 if (*use_rec) 2249 { 2250 fprintf (file, ";; UD chains for artificial uses\n"); 2251 while (*use_rec) 2252 { 2253 df_ref use = *use_rec; 2254 fprintf (file, ";; reg %d ", DF_REF_REGNO (use)); 2255 df_chain_dump (DF_REF_CHAIN (use), file); 2256 fprintf (file, "\n"); 2257 use_rec++; 2258 } 2259 } 2260 2261 FOR_BB_INSNS (bb, insn) 2262 { 2263 if (INSN_P (insn)) 2264 { 2265 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn); 2266 df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info); 2267 use_rec = DF_INSN_INFO_USES (insn_info); 2268 if (*use_rec || *eq_use_rec) 2269 { 2270 fprintf (file, ";; UD chains for insn luid %d uid %d\n", 2271 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn)); 2272 2273 while (*use_rec) 2274 { 2275 df_ref use = *use_rec; 2276 fprintf (file, ";; reg %d ", DF_REF_REGNO (use)); 2277 if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE) 2278 fprintf (file, "read/write "); 2279 df_chain_dump (DF_REF_CHAIN (use), file); 2280 fprintf (file, "\n"); 2281 use_rec++; 2282 } 2283 while (*eq_use_rec) 2284 { 2285 df_ref use = *eq_use_rec; 2286 fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use)); 2287 df_chain_dump (DF_REF_CHAIN (use), file); 2288 fprintf (file, "\n"); 2289 eq_use_rec++; 2290 } 2291 } 2292 } 2293 } 2294 } 2295 } 2296 2297 2298 static struct df_problem problem_CHAIN = 2299 { 2300 DF_CHAIN, /* Problem id. */ 2301 DF_NONE, /* Direction. */ 2302 df_chain_alloc, /* Allocate the problem specific data. */ 2303 df_chain_reset, /* Reset global information. */ 2304 NULL, /* Free basic block info. */ 2305 NULL, /* Local compute function. */ 2306 NULL, /* Init the solution specific data. */ 2307 NULL, /* Iterative solver. */ 2308 NULL, /* Confluence operator 0. */ 2309 NULL, /* Confluence operator n. */ 2310 NULL, /* Transfer function. */ 2311 df_chain_finalize, /* Finalize function. */ 2312 df_chain_free, /* Free all of the problem information. */ 2313 df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */ 2314 NULL, /* Debugging. */ 2315 df_chain_top_dump, /* Debugging start block. */ 2316 df_chain_bottom_dump, /* Debugging end block. 
*/
  NULL,                         /* Incremental solution verify start.  */
  NULL,                         /* Incremental solution verify end.  */
  &problem_RD,                  /* Dependent problem.  */
  TV_DF_CHAIN,                  /* Timing variable.  */
  false                         /* Reset blocks on dropping out of blocks_to_analyze.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_chain_add_problem (unsigned int chain_flags)
{
  df_add_problem (&problem_CHAIN);
  df_chain->local_flags = chain_flags;
  df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
}

#undef df_chain_problem_p


/*----------------------------------------------------------------------------
   BYTE LEVEL LIVE REGISTERS

   Find the locations in the function where any use of a pseudo can
   reach in the backwards direction.  In and out bitvectors are built
   for each basic block.  There are two mapping functions,
   df_byte_lr_get_regno_start and df_byte_lr_get_regno_len that are
   used to map regnos into bit vector positions.

   This problem differs from the regular df_lr function in the way
   that subregs, *_extracts and strict_low_parts are handled.  In lr
   these are considered partial kills; here, the exact set of bytes is
   modeled.  Note that any reg that has none of these operations is
   only modeled with a single bit since all operations access the
   entire register.

   This problem is more brittle than the regular lr.  It currently can
   be used in dce incrementally, but cannot be used in an environment
   where insns are created or modified.  The problem is that the
   mapping of regnos to bitmap positions is relatively compact, in
   that if a pseudo does not do any of the byte wise operations, only
   one slot is allocated, rather than a slot for each byte.  If insns
   are created, where a subreg is used for a reg that had no subregs,
   the mapping would be wrong.  Likewise, there are no checks to see
   that new pseudos have been added.  These issues could be addressed
   by adding a problem specific flag to not use the compact mapping,
   if there was a need to do so.

----------------------------------------------------------------------------*/

/* Private data used to verify the solution for this problem.  */
struct df_byte_lr_problem_data
{
  /* Expanded versions of bitvectors used in lr.  */
  bitmap invalidated_by_call;
  bitmap hardware_regs_used;

  /* Indexed by regno, this is true if there are subregs, extracts or
     strict_low_parts for this regno.  */
  bitmap needs_expansion;

  /* The start position and len for each regno in the various bit
     vectors.  */
  unsigned int* regno_start;
  unsigned int* regno_len;
  /* An obstack for the bitmaps we need for this problem.  */
  bitmap_obstack byte_lr_bitmaps;
};


/* Get the starting location for REGNO in the df_byte_lr bitmaps.  */

int
df_byte_lr_get_regno_start (unsigned int regno)
{
  struct df_byte_lr_problem_data *problem_data
    = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
  return problem_data->regno_start[regno];
}


/* Get the len for REGNO in the df_byte_lr bitmaps.
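
   For example, a pseudo that is only ever read and written as a whole
   occupies a single bit no matter how wide it is, while a 4 byte pseudo
   that also appears inside a subreg gets 4 consecutive bits, one per
   byte.  A client turns a regno into its range of bit positions with
   the two accessors, along the lines of this sketch (mirroring what
   the simulate routines further down do internally):

       unsigned int start = df_byte_lr_get_regno_start (regno);
       unsigned int len = df_byte_lr_get_regno_len (regno);
       bitmap_set_range (live, start, len);

   which marks every byte of REGNO live in LIVE.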
*/ 2402 2403 int 2404 df_byte_lr_get_regno_len (unsigned int regno) 2405 { 2406 struct df_byte_lr_problem_data *problem_data 2407 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;; 2408 return problem_data->regno_len[regno]; 2409 } 2410 2411 2412 /* Set basic block info. */ 2413 2414 static void 2415 df_byte_lr_set_bb_info (unsigned int index, 2416 struct df_byte_lr_bb_info *bb_info) 2417 { 2418 gcc_assert (df_byte_lr); 2419 gcc_assert (index < df_byte_lr->block_info_size); 2420 df_byte_lr->block_info[index] = bb_info; 2421 } 2422 2423 2424 /* Free basic block info. */ 2425 2426 static void 2427 df_byte_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, 2428 void *vbb_info) 2429 { 2430 struct df_byte_lr_bb_info *bb_info = (struct df_byte_lr_bb_info *) vbb_info; 2431 if (bb_info) 2432 { 2433 BITMAP_FREE (bb_info->use); 2434 BITMAP_FREE (bb_info->def); 2435 BITMAP_FREE (bb_info->in); 2436 BITMAP_FREE (bb_info->out); 2437 pool_free (df_byte_lr->block_pool, bb_info); 2438 } 2439 } 2440 2441 2442 /* Check all of the refs in REF_REC to see if any of them are 2443 extracts, subregs or strict_low_parts. */ 2444 2445 static void 2446 df_byte_lr_check_regs (df_ref *ref_rec) 2447 { 2448 struct df_byte_lr_problem_data *problem_data 2449 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; 2450 2451 for (; *ref_rec; ref_rec++) 2452 { 2453 df_ref ref = *ref_rec; 2454 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT 2455 | DF_REF_ZERO_EXTRACT 2456 | DF_REF_STRICT_LOW_PART) 2457 || GET_CODE (DF_REF_REG (ref)) == SUBREG) 2458 bitmap_set_bit (problem_data->needs_expansion, DF_REF_REGNO (ref)); 2459 } 2460 } 2461 2462 2463 /* Expand bitmap SRC which is indexed by regno to DEST which is indexed by 2464 regno_start and regno_len. */ 2465 2466 static void 2467 df_byte_lr_expand_bitmap (bitmap dest, bitmap src) 2468 { 2469 struct df_byte_lr_problem_data *problem_data 2470 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; 2471 bitmap_iterator bi; 2472 unsigned int i; 2473 2474 bitmap_clear (dest); 2475 EXECUTE_IF_SET_IN_BITMAP (src, 0, i, bi) 2476 { 2477 bitmap_set_range (dest, problem_data->regno_start[i], 2478 problem_data->regno_len[i]); 2479 } 2480 } 2481 2482 2483 /* Allocate or reset bitmaps for DF_BYTE_LR blocks. The solution bits are 2484 not touched unless the block is new. */ 2485 2486 static void 2487 df_byte_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) 2488 { 2489 unsigned int bb_index; 2490 bitmap_iterator bi; 2491 basic_block bb; 2492 unsigned int regno; 2493 unsigned int index = 0; 2494 unsigned int max_reg = max_reg_num(); 2495 struct df_byte_lr_problem_data *problem_data 2496 = XNEW (struct df_byte_lr_problem_data); 2497 2498 df_byte_lr->problem_data = problem_data; 2499 2500 if (!df_byte_lr->block_pool) 2501 df_byte_lr->block_pool = create_alloc_pool ("df_byte_lr_block pool", 2502 sizeof (struct df_byte_lr_bb_info), 50); 2503 2504 df_grow_bb_info (df_byte_lr); 2505 2506 /* Create the mapping from regnos to slots. This does not change 2507 unless the problem is destroyed and recreated. In particular, if 2508 we end up deleting the only insn that used a subreg, we do not 2509 want to redo the mapping because this would invalidate everything 2510 else. 
*/ 2511 2512 bitmap_obstack_initialize (&problem_data->byte_lr_bitmaps); 2513 problem_data->regno_start = XNEWVEC (unsigned int, max_reg); 2514 problem_data->regno_len = XNEWVEC (unsigned int, max_reg); 2515 problem_data->hardware_regs_used = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps); 2516 problem_data->invalidated_by_call = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps); 2517 problem_data->needs_expansion = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps); 2518 2519 /* Discover which regno's use subregs, extracts or 2520 strict_low_parts. */ 2521 FOR_EACH_BB (bb) 2522 { 2523 rtx insn; 2524 FOR_BB_INSNS (bb, insn) 2525 { 2526 if (INSN_P (insn)) 2527 { 2528 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn); 2529 df_byte_lr_check_regs (DF_INSN_INFO_DEFS (insn_info)); 2530 df_byte_lr_check_regs (DF_INSN_INFO_USES (insn_info)); 2531 } 2532 } 2533 bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, bb->index); 2534 } 2535 2536 bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, ENTRY_BLOCK); 2537 bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, EXIT_BLOCK); 2538 2539 /* Allocate the slots for each regno. */ 2540 for (regno = 0; regno < max_reg; regno++) 2541 { 2542 int len; 2543 problem_data->regno_start[regno] = index; 2544 if (bitmap_bit_p (problem_data->needs_expansion, regno)) 2545 len = GET_MODE_SIZE (GET_MODE (regno_reg_rtx[regno])); 2546 else 2547 len = 1; 2548 2549 problem_data->regno_len[regno] = len; 2550 index += len; 2551 } 2552 2553 df_byte_lr_expand_bitmap (problem_data->hardware_regs_used, 2554 df->hardware_regs_used); 2555 df_byte_lr_expand_bitmap (problem_data->invalidated_by_call, 2556 regs_invalidated_by_call_regset); 2557 2558 EXECUTE_IF_SET_IN_BITMAP (df_byte_lr->out_of_date_transfer_functions, 0, bb_index, bi) 2559 { 2560 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index); 2561 if (bb_info) 2562 { 2563 bitmap_clear (bb_info->def); 2564 bitmap_clear (bb_info->use); 2565 } 2566 else 2567 { 2568 bb_info = (struct df_byte_lr_bb_info *) pool_alloc (df_byte_lr->block_pool); 2569 df_byte_lr_set_bb_info (bb_index, bb_info); 2570 bb_info->use = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps); 2571 bb_info->def = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps); 2572 bb_info->in = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps); 2573 bb_info->out = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps); 2574 } 2575 } 2576 2577 df_byte_lr->optional_p = true; 2578 } 2579 2580 2581 /* Reset the global solution for recalculation. */ 2582 2583 static void 2584 df_byte_lr_reset (bitmap all_blocks) 2585 { 2586 unsigned int bb_index; 2587 bitmap_iterator bi; 2588 2589 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) 2590 { 2591 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index); 2592 gcc_assert (bb_info); 2593 bitmap_clear (bb_info->in); 2594 bitmap_clear (bb_info->out); 2595 } 2596 } 2597 2598 2599 /* Compute local live register info for basic block BB. */ 2600 2601 static void 2602 df_byte_lr_bb_local_compute (unsigned int bb_index) 2603 { 2604 struct df_byte_lr_problem_data *problem_data 2605 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; 2606 basic_block bb = BASIC_BLOCK (bb_index); 2607 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index); 2608 rtx insn; 2609 df_ref *def_rec; 2610 df_ref *use_rec; 2611 2612 /* Process the registers set in an exception handler. 
*/ 2613 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) 2614 { 2615 df_ref def = *def_rec; 2616 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0) 2617 { 2618 unsigned int dregno = DF_REF_REGNO (def); 2619 unsigned int start = problem_data->regno_start[dregno]; 2620 unsigned int len = problem_data->regno_len[dregno]; 2621 bitmap_set_range (bb_info->def, start, len); 2622 bitmap_clear_range (bb_info->use, start, len); 2623 } 2624 } 2625 2626 /* Process the hardware registers that are always live. */ 2627 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) 2628 { 2629 df_ref use = *use_rec; 2630 /* Add use to set of uses in this BB. */ 2631 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0) 2632 { 2633 unsigned int uregno = DF_REF_REGNO (use); 2634 unsigned int start = problem_data->regno_start[uregno]; 2635 unsigned int len = problem_data->regno_len[uregno]; 2636 bitmap_set_range (bb_info->use, start, len); 2637 } 2638 } 2639 2640 FOR_BB_INSNS_REVERSE (bb, insn) 2641 { 2642 unsigned int uid = INSN_UID (insn); 2643 2644 if (!INSN_P (insn)) 2645 continue; 2646 2647 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) 2648 { 2649 df_ref def = *def_rec; 2650 /* If the def is to only part of the reg, it does 2651 not kill the other defs that reach here. */ 2652 if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL))) 2653 { 2654 unsigned int dregno = DF_REF_REGNO (def); 2655 unsigned int start = problem_data->regno_start[dregno]; 2656 unsigned int len = problem_data->regno_len[dregno]; 2657 unsigned int sb; 2658 unsigned int lb; 2659 if (!df_compute_accessed_bytes (def, DF_MM_MUST, &sb, &lb)) 2660 { 2661 start += sb; 2662 len = lb - sb; 2663 } 2664 if (len) 2665 { 2666 bitmap_set_range (bb_info->def, start, len); 2667 bitmap_clear_range (bb_info->use, start, len); 2668 } 2669 } 2670 } 2671 2672 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) 2673 { 2674 df_ref use = *use_rec; 2675 unsigned int uregno = DF_REF_REGNO (use); 2676 unsigned int start = problem_data->regno_start[uregno]; 2677 unsigned int len = problem_data->regno_len[uregno]; 2678 unsigned int sb; 2679 unsigned int lb; 2680 if (!df_compute_accessed_bytes (use, DF_MM_MAY, &sb, &lb)) 2681 { 2682 start += sb; 2683 len = lb - sb; 2684 } 2685 /* Add use to set of uses in this BB. */ 2686 if (len) 2687 bitmap_set_range (bb_info->use, start, len); 2688 } 2689 } 2690 2691 /* Process the registers set in an exception handler or the hard 2692 frame pointer if this block is the target of a non local 2693 goto. */ 2694 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) 2695 { 2696 df_ref def = *def_rec; 2697 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) 2698 { 2699 unsigned int dregno = DF_REF_REGNO (def); 2700 unsigned int start = problem_data->regno_start[dregno]; 2701 unsigned int len = problem_data->regno_len[dregno]; 2702 bitmap_set_range (bb_info->def, start, len); 2703 bitmap_clear_range (bb_info->use, start, len); 2704 } 2705 } 2706 2707 #ifdef EH_USES 2708 /* Process the uses that are live into an exception handler. */ 2709 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) 2710 { 2711 df_ref use = *use_rec; 2712 /* Add use to set of uses in this BB. 
*/ 2713 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP) 2714 { 2715 unsigned int uregno = DF_REF_REGNO (use); 2716 unsigned int start = problem_data->regno_start[uregno]; 2717 unsigned int len = problem_data->regno_len[uregno]; 2718 bitmap_set_range (bb_info->use, start, len); 2719 } 2720 } 2721 #endif 2722 } 2723 2724 2725 /* Compute local live register info for each basic block within BLOCKS. */ 2726 2727 static void 2728 df_byte_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED) 2729 { 2730 unsigned int bb_index; 2731 bitmap_iterator bi; 2732 2733 EXECUTE_IF_SET_IN_BITMAP (df_byte_lr->out_of_date_transfer_functions, 0, bb_index, bi) 2734 { 2735 if (bb_index == EXIT_BLOCK) 2736 { 2737 /* The exit block is special for this problem and its bits are 2738 computed from thin air. */ 2739 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (EXIT_BLOCK); 2740 df_byte_lr_expand_bitmap (bb_info->use, df->exit_block_uses); 2741 } 2742 else 2743 df_byte_lr_bb_local_compute (bb_index); 2744 } 2745 2746 bitmap_clear (df_byte_lr->out_of_date_transfer_functions); 2747 } 2748 2749 2750 /* Initialize the solution vectors. */ 2751 2752 static void 2753 df_byte_lr_init (bitmap all_blocks) 2754 { 2755 unsigned int bb_index; 2756 bitmap_iterator bi; 2757 2758 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) 2759 { 2760 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index); 2761 bitmap_copy (bb_info->in, bb_info->use); 2762 bitmap_clear (bb_info->out); 2763 } 2764 } 2765 2766 2767 /* Confluence function that processes infinite loops. This might be a 2768 noreturn function that throws. And even if it isn't, getting the 2769 unwind info right helps debugging. */ 2770 static void 2771 df_byte_lr_confluence_0 (basic_block bb) 2772 { 2773 struct df_byte_lr_problem_data *problem_data 2774 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; 2775 bitmap op1 = df_byte_lr_get_bb_info (bb->index)->out; 2776 if (bb != EXIT_BLOCK_PTR) 2777 bitmap_copy (op1, problem_data->hardware_regs_used); 2778 } 2779 2780 2781 /* Confluence function that ignores fake edges. */ 2782 2783 static void 2784 df_byte_lr_confluence_n (edge e) 2785 { 2786 struct df_byte_lr_problem_data *problem_data 2787 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; 2788 bitmap op1 = df_byte_lr_get_bb_info (e->src->index)->out; 2789 bitmap op2 = df_byte_lr_get_bb_info (e->dest->index)->in; 2790 2791 /* Call-clobbered registers die across exception and call edges. */ 2792 /* ??? Abnormal call edges ignored for the moment, as this gets 2793 confused by sibling call edges, which crashes reg-stack. */ 2794 if (e->flags & EDGE_EH) 2795 bitmap_ior_and_compl_into (op1, op2, problem_data->invalidated_by_call); 2796 else 2797 bitmap_ior_into (op1, op2); 2798 2799 bitmap_ior_into (op1, problem_data->hardware_regs_used); 2800 } 2801 2802 2803 /* Transfer function. */ 2804 2805 static bool 2806 df_byte_lr_transfer_function (int bb_index) 2807 { 2808 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index); 2809 bitmap in = bb_info->in; 2810 bitmap out = bb_info->out; 2811 bitmap use = bb_info->use; 2812 bitmap def = bb_info->def; 2813 2814 return bitmap_ior_and_compl (in, use, out, def); 2815 } 2816 2817 2818 /* Free all storage associated with the problem. 
*/ 2819 2820 static void 2821 df_byte_lr_free (void) 2822 { 2823 struct df_byte_lr_problem_data *problem_data 2824 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; 2825 2826 2827 if (df_byte_lr->block_info) 2828 { 2829 free_alloc_pool (df_byte_lr->block_pool); 2830 df_byte_lr->block_info_size = 0; 2831 free (df_byte_lr->block_info); 2832 } 2833 2834 BITMAP_FREE (df_byte_lr->out_of_date_transfer_functions); 2835 bitmap_obstack_release (&problem_data->byte_lr_bitmaps); 2836 free (problem_data->regno_start); 2837 free (problem_data->regno_len); 2838 free (problem_data); 2839 free (df_byte_lr); 2840 } 2841 2842 2843 /* Debugging info at top of bb. */ 2844 2845 static void 2846 df_byte_lr_top_dump (basic_block bb, FILE *file) 2847 { 2848 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb->index); 2849 if (!bb_info || !bb_info->in) 2850 return; 2851 2852 fprintf (file, ";; blr in \t"); 2853 df_print_byte_regset (file, bb_info->in); 2854 fprintf (file, ";; blr use \t"); 2855 df_print_byte_regset (file, bb_info->use); 2856 fprintf (file, ";; blr def \t"); 2857 df_print_byte_regset (file, bb_info->def); 2858 } 2859 2860 2861 /* Debugging info at bottom of bb. */ 2862 2863 static void 2864 df_byte_lr_bottom_dump (basic_block bb, FILE *file) 2865 { 2866 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb->index); 2867 if (!bb_info || !bb_info->out) 2868 return; 2869 2870 fprintf (file, ";; blr out \t"); 2871 df_print_byte_regset (file, bb_info->out); 2872 } 2873 2874 2875 /* All of the information associated with every instance of the problem. */ 2876 2877 static struct df_problem problem_BYTE_LR = 2878 { 2879 DF_BYTE_LR, /* Problem id. */ 2880 DF_BACKWARD, /* Direction. */ 2881 df_byte_lr_alloc, /* Allocate the problem specific data. */ 2882 df_byte_lr_reset, /* Reset global information. */ 2883 df_byte_lr_free_bb_info, /* Free basic block info. */ 2884 df_byte_lr_local_compute, /* Local compute function. */ 2885 df_byte_lr_init, /* Init the solution specific data. */ 2886 df_worklist_dataflow, /* Worklist solver. */ 2887 df_byte_lr_confluence_0, /* Confluence operator 0. */ 2888 df_byte_lr_confluence_n, /* Confluence operator n. */ 2889 df_byte_lr_transfer_function, /* Transfer function. */ 2890 NULL, /* Finalize function. */ 2891 df_byte_lr_free, /* Free all of the problem information. */ 2892 df_byte_lr_free, /* Remove this problem from the stack of dataflow problems. */ 2893 NULL, /* Debugging. */ 2894 df_byte_lr_top_dump, /* Debugging start block. */ 2895 df_byte_lr_bottom_dump, /* Debugging end block. */ 2896 NULL, /* Incremental solution verify start. */ 2897 NULL, /* Incremental solution verify end. */ 2898 NULL, /* Dependent problem. */ 2899 TV_DF_BYTE_LR, /* Timing variable. */ 2900 false /* Reset blocks on dropping out of blocks_to_analyze. */ 2901 }; 2902 2903 2904 /* Create a new DATAFLOW instance and add it to an existing instance 2905 of DF. The returned structure is what is used to get at the 2906 solution. */ 2907 2908 void 2909 df_byte_lr_add_problem (void) 2910 { 2911 df_add_problem (&problem_BYTE_LR); 2912 /* These will be initialized when df_scan_blocks processes each 2913 block. */ 2914 df_byte_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL); 2915 } 2916 2917 2918 /* Simulate the effects of the defs of INSN on LIVE. 
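
   Together with df_byte_lr_simulate_uses and the artificial-ref helpers
   below, this can be driven in the same style as the word-at-a-time
   df_simulate_* routines later in this file.  A backwards scan over a
   block BB might look like the following sketch (INSN is an rtx
   iterator; LIVE must be the caller's own copy, and the caller frees it):

       bitmap live = BITMAP_ALLOC (NULL);
       bitmap_copy (live, df_byte_lr_get_bb_info (bb->index)->out);
       df_byte_lr_simulate_artificial_refs_at_end (bb, live);
       FOR_BB_INSNS_REVERSE (bb, insn)
         if (INSN_P (insn))
           {
             df_byte_lr_simulate_defs (insn, live);
             df_byte_lr_simulate_uses (insn, live);
           }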
*/ 2919 2920 void 2921 df_byte_lr_simulate_defs (rtx insn, bitmap live) 2922 { 2923 struct df_byte_lr_problem_data *problem_data 2924 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; 2925 df_ref *def_rec; 2926 unsigned int uid = INSN_UID (insn); 2927 2928 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) 2929 { 2930 df_ref def = *def_rec; 2931 2932 /* If the def is to only part of the reg, it does 2933 not kill the other defs that reach here. */ 2934 if (!(DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)) 2935 { 2936 unsigned int dregno = DF_REF_REGNO (def); 2937 unsigned int start = problem_data->regno_start[dregno]; 2938 unsigned int len = problem_data->regno_len[dregno]; 2939 unsigned int sb; 2940 unsigned int lb; 2941 if (!df_compute_accessed_bytes (def, DF_MM_MUST, &sb, &lb)) 2942 { 2943 start += sb; 2944 len = lb - sb; 2945 } 2946 2947 if (len) 2948 bitmap_clear_range (live, start, len); 2949 } 2950 } 2951 } 2952 2953 2954 /* Simulate the effects of the uses of INSN on LIVE. */ 2955 2956 void 2957 df_byte_lr_simulate_uses (rtx insn, bitmap live) 2958 { 2959 struct df_byte_lr_problem_data *problem_data 2960 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; 2961 df_ref *use_rec; 2962 unsigned int uid = INSN_UID (insn); 2963 2964 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) 2965 { 2966 df_ref use = *use_rec; 2967 unsigned int uregno = DF_REF_REGNO (use); 2968 unsigned int start = problem_data->regno_start[uregno]; 2969 unsigned int len = problem_data->regno_len[uregno]; 2970 unsigned int sb; 2971 unsigned int lb; 2972 2973 if (!df_compute_accessed_bytes (use, DF_MM_MAY, &sb, &lb)) 2974 { 2975 start += sb; 2976 len = lb - sb; 2977 } 2978 2979 /* Add use to set of uses in this BB. */ 2980 if (len) 2981 bitmap_set_range (live, start, len); 2982 } 2983 } 2984 2985 2986 /* Apply the artificial uses and defs at the top of BB in a forwards 2987 direction. */ 2988 2989 void 2990 df_byte_lr_simulate_artificial_refs_at_top (basic_block bb, bitmap live) 2991 { 2992 struct df_byte_lr_problem_data *problem_data 2993 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; 2994 df_ref *def_rec; 2995 #ifdef EH_USES 2996 df_ref *use_rec; 2997 #endif 2998 int bb_index = bb->index; 2999 3000 #ifdef EH_USES 3001 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) 3002 { 3003 df_ref use = *use_rec; 3004 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP) 3005 { 3006 unsigned int uregno = DF_REF_REGNO (use); 3007 unsigned int start = problem_data->regno_start[uregno]; 3008 unsigned int len = problem_data->regno_len[uregno]; 3009 bitmap_set_range (live, start, len); 3010 } 3011 } 3012 #endif 3013 3014 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) 3015 { 3016 df_ref def = *def_rec; 3017 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) 3018 { 3019 unsigned int dregno = DF_REF_REGNO (def); 3020 unsigned int start = problem_data->regno_start[dregno]; 3021 unsigned int len = problem_data->regno_len[dregno]; 3022 bitmap_clear_range (live, start, len); 3023 } 3024 } 3025 } 3026 3027 3028 /* Apply the artificial uses and defs at the end of BB in a backwards 3029 direction. 
*/

void
df_byte_lr_simulate_artificial_refs_at_end (basic_block bb, bitmap live)
{
  struct df_byte_lr_problem_data *problem_data
    = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
  df_ref *def_rec;
  df_ref *use_rec;
  int bb_index = bb->index;

  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
        {
          unsigned int dregno = DF_REF_REGNO (def);
          unsigned int start = problem_data->regno_start[dregno];
          unsigned int len = problem_data->regno_len[dregno];
          bitmap_clear_range (live, start, len);
        }
    }

  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      df_ref use = *use_rec;
      if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
        {
          unsigned int uregno = DF_REF_REGNO (use);
          unsigned int start = problem_data->regno_start[uregno];
          unsigned int len = problem_data->regno_len[uregno];
          bitmap_set_range (live, start, len);
        }
    }
}



/*----------------------------------------------------------------------------
   This problem computes REG_DEAD and REG_UNUSED notes.
----------------------------------------------------------------------------*/

static void
df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  df_note->optional_p = true;
}

#ifdef REG_DEAD_DEBUGGING
static void
df_print_note (const char *prefix, rtx insn, rtx note)
{
  if (dump_file)
    {
      fprintf (dump_file, "%s %d ", prefix, INSN_UID (insn));
      print_rtl (dump_file, note);
      fprintf (dump_file, "\n");
    }
}
#endif


/* After reg-stack, the x86 floating point stack regs are difficult to
   analyze because of all of the pushes, pops and rotations.  Thus, we
   just leave the notes alone.  */

#ifdef STACK_REGS
static inline bool
df_ignore_stack_reg (int regno)
{
  return regstack_completed
    && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG);
}
#else
static inline bool
df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
{
  return false;
}
#endif


/* Remove all of the REG_DEAD or REG_UNUSED notes from INSN and add
   them to OLD_DEAD_NOTES and OLD_UNUSED_NOTES.  */

static void
df_kill_notes (rtx insn, rtx *old_dead_notes, rtx *old_unused_notes)
{
  rtx *pprev = &REG_NOTES (insn);
  rtx link = *pprev;
  rtx dead = NULL;
  rtx unused = NULL;

  while (link)
    {
      switch (REG_NOTE_KIND (link))
        {
        case REG_DEAD:
          /* After reg-stack, we need to ignore any death notes
             for the stack registers.  */
          if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
            {
              pprev = &XEXP (link, 1);
              link = *pprev;
            }
          else
            {
              rtx next = XEXP (link, 1);
#ifdef REG_DEAD_DEBUGGING
              df_print_note ("deleting: ", insn, link);
#endif
              XEXP (link, 1) = dead;
              dead = link;
              *pprev = link = next;
            }
          break;

        case REG_UNUSED:
          /* After reg-stack, we need to ignore any unused notes
             for the stack registers.
*/ 3149 if (df_ignore_stack_reg (REGNO (XEXP (link, 0)))) 3150 { 3151 pprev = &XEXP (link, 1); 3152 link = *pprev; 3153 } 3154 else 3155 { 3156 rtx next = XEXP (link, 1); 3157 #ifdef REG_DEAD_DEBUGGING 3158 df_print_note ("deleting: ", insn, link); 3159 #endif 3160 XEXP (link, 1) = unused; 3161 unused = link; 3162 *pprev = link = next; 3163 } 3164 break; 3165 3166 default: 3167 pprev = &XEXP (link, 1); 3168 link = *pprev; 3169 break; 3170 } 3171 } 3172 3173 *old_dead_notes = dead; 3174 *old_unused_notes = unused; 3175 } 3176 3177 3178 /* Set a NOTE_TYPE note for REG in INSN. Try to pull it from the OLD 3179 list, otherwise create a new one. */ 3180 3181 static inline rtx 3182 df_set_note (enum reg_note note_type, rtx insn, rtx old, rtx reg) 3183 { 3184 rtx curr = old; 3185 rtx prev = NULL; 3186 3187 gcc_assert (!DEBUG_INSN_P (insn)); 3188 3189 while (curr) 3190 if (XEXP (curr, 0) == reg) 3191 { 3192 if (prev) 3193 XEXP (prev, 1) = XEXP (curr, 1); 3194 else 3195 old = XEXP (curr, 1); 3196 XEXP (curr, 1) = REG_NOTES (insn); 3197 REG_NOTES (insn) = curr; 3198 return old; 3199 } 3200 else 3201 { 3202 prev = curr; 3203 curr = XEXP (curr, 1); 3204 } 3205 3206 /* Did not find the note. */ 3207 add_reg_note (insn, note_type, reg); 3208 return old; 3209 } 3210 3211 /* A subroutine of df_set_unused_notes_for_mw, with a selection of its 3212 arguments. Return true if the register value described by MWS's 3213 mw_reg is known to be completely unused, and if mw_reg can therefore 3214 be used in a REG_UNUSED note. */ 3215 3216 static bool 3217 df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws, 3218 bitmap live, bitmap artificial_uses) 3219 { 3220 unsigned int r; 3221 3222 /* If MWS describes a partial reference, create REG_UNUSED notes for 3223 individual hard registers. */ 3224 if (mws->flags & DF_REF_PARTIAL) 3225 return false; 3226 3227 /* Likewise if some part of the register is used. */ 3228 for (r = mws->start_regno; r <= mws->end_regno; r++) 3229 if (bitmap_bit_p (live, r) 3230 || bitmap_bit_p (artificial_uses, r)) 3231 return false; 3232 3233 gcc_assert (REG_P (mws->mw_reg)); 3234 return true; 3235 } 3236 3237 /* Set the REG_UNUSED notes for the multiword hardreg defs in INSN 3238 based on the bits in LIVE. Do not generate notes for registers in 3239 artificial uses. DO_NOT_GEN is updated so that REG_DEAD notes are 3240 not generated if the reg is both read and written by the 3241 instruction. 3242 */ 3243 3244 static rtx 3245 df_set_unused_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, 3246 bitmap live, bitmap do_not_gen, 3247 bitmap artificial_uses) 3248 { 3249 unsigned int r; 3250 3251 #ifdef REG_DEAD_DEBUGGING 3252 if (dump_file) 3253 fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n", 3254 mws->start_regno, mws->end_regno); 3255 #endif 3256 3257 if (df_whole_mw_reg_unused_p (mws, live, artificial_uses)) 3258 { 3259 unsigned int regno = mws->start_regno; 3260 old = df_set_note (REG_UNUSED, insn, old, mws->mw_reg); 3261 3262 #ifdef REG_DEAD_DEBUGGING 3263 df_print_note ("adding 1: ", insn, REG_NOTES (insn)); 3264 #endif 3265 bitmap_set_bit (do_not_gen, regno); 3266 /* Only do this if the value is totally dead. 
*/ 3267 } 3268 else 3269 for (r = mws->start_regno; r <= mws->end_regno; r++) 3270 { 3271 if (!bitmap_bit_p (live, r) 3272 && !bitmap_bit_p (artificial_uses, r)) 3273 { 3274 old = df_set_note (REG_UNUSED, insn, old, regno_reg_rtx[r]); 3275 #ifdef REG_DEAD_DEBUGGING 3276 df_print_note ("adding 2: ", insn, REG_NOTES (insn)); 3277 #endif 3278 } 3279 bitmap_set_bit (do_not_gen, r); 3280 } 3281 return old; 3282 } 3283 3284 3285 /* A subroutine of df_set_dead_notes_for_mw, with a selection of its 3286 arguments. Return true if the register value described by MWS's 3287 mw_reg is known to be completely dead, and if mw_reg can therefore 3288 be used in a REG_DEAD note. */ 3289 3290 static bool 3291 df_whole_mw_reg_dead_p (struct df_mw_hardreg *mws, 3292 bitmap live, bitmap artificial_uses, 3293 bitmap do_not_gen) 3294 { 3295 unsigned int r; 3296 3297 /* If MWS describes a partial reference, create REG_DEAD notes for 3298 individual hard registers. */ 3299 if (mws->flags & DF_REF_PARTIAL) 3300 return false; 3301 3302 /* Likewise if some part of the register is not dead. */ 3303 for (r = mws->start_regno; r <= mws->end_regno; r++) 3304 if (bitmap_bit_p (live, r) 3305 || bitmap_bit_p (artificial_uses, r) 3306 || bitmap_bit_p (do_not_gen, r)) 3307 return false; 3308 3309 gcc_assert (REG_P (mws->mw_reg)); 3310 return true; 3311 } 3312 3313 /* Set the REG_DEAD notes for the multiword hardreg use in INSN based 3314 on the bits in LIVE. DO_NOT_GEN is used to keep REG_DEAD notes 3315 from being set if the instruction both reads and writes the 3316 register. */ 3317 3318 static rtx 3319 df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, 3320 bitmap live, bitmap do_not_gen, 3321 bitmap artificial_uses, bool *added_notes_p) 3322 { 3323 unsigned int r; 3324 bool is_debug = *added_notes_p; 3325 3326 *added_notes_p = false; 3327 3328 #ifdef REG_DEAD_DEBUGGING 3329 if (dump_file) 3330 { 3331 fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =", 3332 mws->start_regno, mws->end_regno); 3333 df_print_regset (dump_file, do_not_gen); 3334 fprintf (dump_file, " live ="); 3335 df_print_regset (dump_file, live); 3336 fprintf (dump_file, " artificial uses ="); 3337 df_print_regset (dump_file, artificial_uses); 3338 } 3339 #endif 3340 3341 if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen)) 3342 { 3343 /* Add a dead note for the entire multi word register. */ 3344 if (is_debug) 3345 { 3346 *added_notes_p = true; 3347 return old; 3348 } 3349 old = df_set_note (REG_DEAD, insn, old, mws->mw_reg); 3350 #ifdef REG_DEAD_DEBUGGING 3351 df_print_note ("adding 1: ", insn, REG_NOTES (insn)); 3352 #endif 3353 } 3354 else 3355 { 3356 for (r = mws->start_regno; r <= mws->end_regno; r++) 3357 if (!bitmap_bit_p (live, r) 3358 && !bitmap_bit_p (artificial_uses, r) 3359 && !bitmap_bit_p (do_not_gen, r)) 3360 { 3361 if (is_debug) 3362 { 3363 *added_notes_p = true; 3364 return old; 3365 } 3366 old = df_set_note (REG_DEAD, insn, old, regno_reg_rtx[r]); 3367 #ifdef REG_DEAD_DEBUGGING 3368 df_print_note ("adding 2: ", insn, REG_NOTES (insn)); 3369 #endif 3370 } 3371 } 3372 return old; 3373 } 3374 3375 3376 /* Create a REG_UNUSED note if necessary for DEF in INSN updating 3377 LIVE. Do not generate notes for registers in ARTIFICIAL_USES. 
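
   Roughly speaking: if INSN contains (set (reg 100) ...) and reg 100 is
   neither live after INSN, part of a multiword hardreg, mentioned in
   ARTIFICIAL_USES, nor an ignored stack reg, then a REG_UNUSED note for
   (reg 100) is attached to INSN, telling later passes that the value
   computed here is never read.  (Reg 100 is just an invented example.)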
*/ 3378 3379 static rtx 3380 df_create_unused_note (rtx insn, rtx old, df_ref def, 3381 bitmap live, bitmap artificial_uses) 3382 { 3383 unsigned int dregno = DF_REF_REGNO (def); 3384 3385 #ifdef REG_DEAD_DEBUGGING 3386 if (dump_file) 3387 { 3388 fprintf (dump_file, " regular looking at def "); 3389 df_ref_debug (def, dump_file); 3390 } 3391 #endif 3392 3393 if (!(bitmap_bit_p (live, dregno) 3394 || (DF_REF_FLAGS (def) & DF_REF_MW_HARDREG) 3395 || bitmap_bit_p (artificial_uses, dregno) 3396 || df_ignore_stack_reg (dregno))) 3397 { 3398 rtx reg = (DF_REF_LOC (def)) 3399 ? *DF_REF_REAL_LOC (def): DF_REF_REG (def); 3400 old = df_set_note (REG_UNUSED, insn, old, reg); 3401 #ifdef REG_DEAD_DEBUGGING 3402 df_print_note ("adding 3: ", insn, REG_NOTES (insn)); 3403 #endif 3404 } 3405 3406 return old; 3407 } 3408 3409 3410 /* Recompute the REG_DEAD and REG_UNUSED notes and compute register 3411 info: lifetime, bb, and number of defs and uses for basic block 3412 BB. The three bitvectors are scratch regs used here. */ 3413 3414 static void 3415 df_note_bb_compute (unsigned int bb_index, 3416 bitmap live, bitmap do_not_gen, bitmap artificial_uses) 3417 { 3418 basic_block bb = BASIC_BLOCK (bb_index); 3419 rtx insn; 3420 df_ref *def_rec; 3421 df_ref *use_rec; 3422 3423 bitmap_copy (live, df_get_live_out (bb)); 3424 bitmap_clear (artificial_uses); 3425 3426 #ifdef REG_DEAD_DEBUGGING 3427 if (dump_file) 3428 { 3429 fprintf (dump_file, "live at bottom "); 3430 df_print_regset (dump_file, live); 3431 } 3432 #endif 3433 3434 /* Process the artificial defs and uses at the bottom of the block 3435 to begin processing. */ 3436 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) 3437 { 3438 df_ref def = *def_rec; 3439 #ifdef REG_DEAD_DEBUGGING 3440 if (dump_file) 3441 fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def)); 3442 #endif 3443 3444 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0) 3445 bitmap_clear_bit (live, DF_REF_REGNO (def)); 3446 } 3447 3448 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) 3449 { 3450 df_ref use = *use_rec; 3451 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0) 3452 { 3453 unsigned int regno = DF_REF_REGNO (use); 3454 bitmap_set_bit (live, regno); 3455 3456 /* Notes are not generated for any of the artificial registers 3457 at the bottom of the block. */ 3458 bitmap_set_bit (artificial_uses, regno); 3459 } 3460 } 3461 3462 #ifdef REG_DEAD_DEBUGGING 3463 if (dump_file) 3464 { 3465 fprintf (dump_file, "live before artificials out "); 3466 df_print_regset (dump_file, live); 3467 } 3468 #endif 3469 3470 FOR_BB_INSNS_REVERSE (bb, insn) 3471 { 3472 unsigned int uid = INSN_UID (insn); 3473 struct df_mw_hardreg **mws_rec; 3474 rtx old_dead_notes; 3475 rtx old_unused_notes; 3476 int debug_insn; 3477 3478 if (!INSN_P (insn)) 3479 continue; 3480 3481 debug_insn = DEBUG_INSN_P (insn); 3482 3483 bitmap_clear (do_not_gen); 3484 df_kill_notes (insn, &old_dead_notes, &old_unused_notes); 3485 3486 /* Process the defs. */ 3487 if (CALL_P (insn)) 3488 { 3489 #ifdef REG_DEAD_DEBUGGING 3490 if (dump_file) 3491 { 3492 fprintf (dump_file, "processing call %d\n live =", INSN_UID (insn)); 3493 df_print_regset (dump_file, live); 3494 } 3495 #endif 3496 /* We only care about real sets for calls. Clobbers cannot 3497 be depended on to really die. 
*/ 3498 mws_rec = DF_INSN_UID_MWS (uid); 3499 while (*mws_rec) 3500 { 3501 struct df_mw_hardreg *mws = *mws_rec; 3502 if ((DF_MWS_REG_DEF_P (mws)) 3503 && !df_ignore_stack_reg (mws->start_regno)) 3504 old_unused_notes 3505 = df_set_unused_notes_for_mw (insn, old_unused_notes, 3506 mws, live, do_not_gen, 3507 artificial_uses); 3508 mws_rec++; 3509 } 3510 3511 /* All of the defs except the return value are some sort of 3512 clobber. This code is for the return. */ 3513 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) 3514 { 3515 df_ref def = *def_rec; 3516 unsigned int dregno = DF_REF_REGNO (def); 3517 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)) 3518 { 3519 old_unused_notes 3520 = df_create_unused_note (insn, old_unused_notes, 3521 def, live, artificial_uses); 3522 bitmap_set_bit (do_not_gen, dregno); 3523 } 3524 3525 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL)) 3526 bitmap_clear_bit (live, dregno); 3527 } 3528 } 3529 else 3530 { 3531 /* Regular insn. */ 3532 mws_rec = DF_INSN_UID_MWS (uid); 3533 while (*mws_rec) 3534 { 3535 struct df_mw_hardreg *mws = *mws_rec; 3536 if (DF_MWS_REG_DEF_P (mws)) 3537 old_unused_notes 3538 = df_set_unused_notes_for_mw (insn, old_unused_notes, 3539 mws, live, do_not_gen, 3540 artificial_uses); 3541 mws_rec++; 3542 } 3543 3544 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) 3545 { 3546 df_ref def = *def_rec; 3547 unsigned int dregno = DF_REF_REGNO (def); 3548 old_unused_notes 3549 = df_create_unused_note (insn, old_unused_notes, 3550 def, live, artificial_uses); 3551 3552 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)) 3553 bitmap_set_bit (do_not_gen, dregno); 3554 3555 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL)) 3556 bitmap_clear_bit (live, dregno); 3557 } 3558 } 3559 3560 /* Process the uses. */ 3561 mws_rec = DF_INSN_UID_MWS (uid); 3562 while (*mws_rec) 3563 { 3564 struct df_mw_hardreg *mws = *mws_rec; 3565 if ((DF_MWS_REG_DEF_P (mws)) 3566 && !df_ignore_stack_reg (mws->start_regno)) 3567 { 3568 bool really_add_notes = debug_insn != 0; 3569 3570 old_dead_notes 3571 = df_set_dead_notes_for_mw (insn, old_dead_notes, 3572 mws, live, do_not_gen, 3573 artificial_uses, 3574 &really_add_notes); 3575 3576 if (really_add_notes) 3577 debug_insn = -1; 3578 } 3579 mws_rec++; 3580 } 3581 3582 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) 3583 { 3584 df_ref use = *use_rec; 3585 unsigned int uregno = DF_REF_REGNO (use); 3586 3587 #ifdef REG_DEAD_DEBUGGING 3588 if (dump_file && !debug_insn) 3589 { 3590 fprintf (dump_file, " regular looking at use "); 3591 df_ref_debug (use, dump_file); 3592 } 3593 #endif 3594 if (!bitmap_bit_p (live, uregno)) 3595 { 3596 if (debug_insn) 3597 { 3598 debug_insn = -1; 3599 break; 3600 } 3601 3602 if ( (!(DF_REF_FLAGS (use) & DF_REF_MW_HARDREG)) 3603 && (!bitmap_bit_p (do_not_gen, uregno)) 3604 && (!bitmap_bit_p (artificial_uses, uregno)) 3605 && (!(DF_REF_FLAGS (use) & DF_REF_READ_WRITE)) 3606 && (!df_ignore_stack_reg (uregno))) 3607 { 3608 rtx reg = (DF_REF_LOC (use)) 3609 ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use); 3610 old_dead_notes = df_set_note (REG_DEAD, insn, old_dead_notes, reg); 3611 3612 #ifdef REG_DEAD_DEBUGGING 3613 df_print_note ("adding 4: ", insn, REG_NOTES (insn)); 3614 #endif 3615 } 3616 /* This register is now live. 
*/ 3617 bitmap_set_bit (live, uregno); 3618 } 3619 } 3620 3621 while (old_unused_notes) 3622 { 3623 rtx next = XEXP (old_unused_notes, 1); 3624 free_EXPR_LIST_node (old_unused_notes); 3625 old_unused_notes = next; 3626 } 3627 while (old_dead_notes) 3628 { 3629 rtx next = XEXP (old_dead_notes, 1); 3630 free_EXPR_LIST_node (old_dead_notes); 3631 old_dead_notes = next; 3632 } 3633 3634 if (debug_insn == -1) 3635 { 3636 /* ??? We could probably do better here, replacing dead 3637 registers with their definitions. */ 3638 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC (); 3639 df_insn_rescan_debug_internal (insn); 3640 } 3641 } 3642 } 3643 3644 3645 /* Compute register info: lifetime, bb, and number of defs and uses. */ 3646 static void 3647 df_note_compute (bitmap all_blocks) 3648 { 3649 unsigned int bb_index; 3650 bitmap_iterator bi; 3651 bitmap live = BITMAP_ALLOC (&df_bitmap_obstack); 3652 bitmap do_not_gen = BITMAP_ALLOC (&df_bitmap_obstack); 3653 bitmap artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack); 3654 3655 #ifdef REG_DEAD_DEBUGGING 3656 if (dump_file) 3657 print_rtl_with_bb (dump_file, get_insns()); 3658 #endif 3659 3660 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) 3661 { 3662 df_note_bb_compute (bb_index, live, do_not_gen, artificial_uses); 3663 } 3664 3665 BITMAP_FREE (live); 3666 BITMAP_FREE (do_not_gen); 3667 BITMAP_FREE (artificial_uses); 3668 } 3669 3670 3671 /* Free all storage associated with the problem. */ 3672 3673 static void 3674 df_note_free (void) 3675 { 3676 free (df_note); 3677 } 3678 3679 3680 /* All of the information associated every instance of the problem. */ 3681 3682 static struct df_problem problem_NOTE = 3683 { 3684 DF_NOTE, /* Problem id. */ 3685 DF_NONE, /* Direction. */ 3686 df_note_alloc, /* Allocate the problem specific data. */ 3687 NULL, /* Reset global information. */ 3688 NULL, /* Free basic block info. */ 3689 df_note_compute, /* Local compute function. */ 3690 NULL, /* Init the solution specific data. */ 3691 NULL, /* Iterative solver. */ 3692 NULL, /* Confluence operator 0. */ 3693 NULL, /* Confluence operator n. */ 3694 NULL, /* Transfer function. */ 3695 NULL, /* Finalize function. */ 3696 df_note_free, /* Free all of the problem information. */ 3697 df_note_free, /* Remove this problem from the stack of dataflow problems. */ 3698 NULL, /* Debugging. */ 3699 NULL, /* Debugging start block. */ 3700 NULL, /* Debugging end block. */ 3701 NULL, /* Incremental solution verify start. */ 3702 NULL, /* Incremental solution verify end. */ 3703 &problem_LR, /* Dependent problem. */ 3704 TV_DF_NOTE, /* Timing variable. */ 3705 false /* Reset blocks on dropping out of blocks_to_analyze. */ 3706 }; 3707 3708 3709 /* Create a new DATAFLOW instance and add it to an existing instance 3710 of DF. The returned structure is what is used to get at the 3711 solution. */ 3712 3713 void 3714 df_note_add_problem (void) 3715 { 3716 df_add_problem (&problem_NOTE); 3717 } 3718 3719 3720 3721 3722 /*---------------------------------------------------------------------------- 3723 Functions for simulating the effects of single insns. 3724 3725 You can either simulate in the forwards direction, starting from 3726 the top of a block or the backwards direction from the end of the 3727 block. If you go backwards, defs are examined first to clear bits, 3728 then uses are examined to set bits. If you go forwards, defs are 3729 examined first to set bits, then REG_DEAD and REG_UNUSED notes 3730 are examined to clear bits. 
   In either case, the result of examining a def can be undone
   (respectively by a use or a REG_UNUSED note).

   If you start at the top of the block, use one of DF_LIVE_IN or
   DF_LR_IN.  If you start at the bottom of the block use one of
   DF_LIVE_OUT or DF_LR_OUT.  BE SURE TO PASS A COPY OF THESE SETS,
   THEY WILL BE DESTROYED.
----------------------------------------------------------------------------*/


/* Find the set of DEFs for INSN.  */

void
df_simulate_find_defs (rtx insn, bitmap defs)
{
  df_ref *def_rec;
  unsigned int uid = INSN_UID (insn);

  for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      bitmap_set_bit (defs, DF_REF_REGNO (def));
    }
}

/* Find the set of real DEFs, which are not clobbers, for INSN.  */

void
df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
{
  df_ref *def_rec;
  unsigned int uid = INSN_UID (insn);

  for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
        bitmap_set_bit (defs, DF_REF_REGNO (def));
    }
}


/* Simulate the effects of the defs of INSN on LIVE.  */

void
df_simulate_defs (rtx insn, bitmap live)
{
  df_ref *def_rec;
  unsigned int uid = INSN_UID (insn);

  for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      unsigned int dregno = DF_REF_REGNO (def);

      /* If the def is to only part of the reg, it does
         not kill the other defs that reach here.  */
      if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
        bitmap_clear_bit (live, dregno);
    }
}


/* Simulate the effects of the uses of INSN on LIVE.  */

void
df_simulate_uses (rtx insn, bitmap live)
{
  df_ref *use_rec;
  unsigned int uid = INSN_UID (insn);

  if (DEBUG_INSN_P (insn))
    return;

  for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
    {
      df_ref use = *use_rec;
      /* Add use to set of uses in this BB.  */
      bitmap_set_bit (live, DF_REF_REGNO (use));
    }
}


/* Add back the always live regs in BB to LIVE.  */

static inline void
df_simulate_fixup_sets (basic_block bb, bitmap live)
{
  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  */
  if (bb_has_eh_pred (bb))
    bitmap_ior_into (live, df->eh_block_artificial_uses);
  else
    bitmap_ior_into (live, df->regular_block_artificial_uses);
}


/*----------------------------------------------------------------------------
   The following three functions are used only for BACKWARDS scanning:
   i.e. they process the defs before the uses.

   df_simulate_initialize_backwards should be called first with a
   bitvector copied from DF_LIVE_OUT or DF_LR_OUT.  Then
   df_simulate_one_insn_backwards should be called for each insn in
   the block, starting with the last one.  Finally,
   df_simulate_finalize_backwards can be called to get a new value
   of the sets at the top of the block (this is rarely used).
----------------------------------------------------------------------------*/

/* Apply the artificial uses and defs at the end of BB in a backwards
   direction.
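
   Putting the three backwards functions together, a typical scan over
   a block BB looks like this sketch (INSN is an rtx iterator; LIVE must
   be the caller's own copy, as warned above, and the final call is only
   needed if the live set at the top of the block is wanted):

       bitmap live = BITMAP_ALLOC (NULL);
       bitmap_copy (live, DF_LR_OUT (bb));
       df_simulate_initialize_backwards (bb, live);
       FOR_BB_INSNS_REVERSE (bb, insn)
         df_simulate_one_insn_backwards (bb, insn, live);
       df_simulate_finalize_backwards (bb, live);
       BITMAP_FREE (live);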
*/

void
df_simulate_initialize_backwards (basic_block bb, bitmap live)
{
  df_ref *def_rec;
  df_ref *use_rec;
  int bb_index = bb->index;

  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
        bitmap_clear_bit (live, DF_REF_REGNO (def));
    }

  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      df_ref use = *use_rec;
      if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
        bitmap_set_bit (live, DF_REF_REGNO (use));
    }
}


/* Simulate the backwards effects of INSN on the bitmap LIVE.  */

void
df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
{
  if (!NONDEBUG_INSN_P (insn))
    return;

  df_simulate_defs (insn, live);
  df_simulate_uses (insn, live);
  df_simulate_fixup_sets (bb, live);
}


/* Apply the artificial uses and defs at the top of BB in a backwards
   direction.  */

void
df_simulate_finalize_backwards (basic_block bb, bitmap live)
{
  df_ref *def_rec;
#ifdef EH_USES
  df_ref *use_rec;
#endif
  int bb_index = bb->index;

  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
        bitmap_clear_bit (live, DF_REF_REGNO (def));
    }

#ifdef EH_USES
  for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
    {
      df_ref use = *use_rec;
      if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
        bitmap_set_bit (live, DF_REF_REGNO (use));
    }
#endif
}

/*----------------------------------------------------------------------------
   The following two functions are used only for FORWARDS scanning:
   i.e. they process the defs and the REG_DEAD and REG_UNUSED notes.
   Thus it is important to add the DF_NOTES problem to the stack of
   problems computed before using these functions.

   df_simulate_initialize_forwards should be called first with a
   bitvector copied from DF_LIVE_IN or DF_LR_IN.  Then
   df_simulate_one_insn_forwards should be called for each insn in
   the block, starting with the first one.
----------------------------------------------------------------------------*/

/* Initialize the LIVE bitmap, which should be copied from DF_LIVE_IN or
   DF_LR_IN for basic block BB, for forward scanning by marking artificial
   defs live.  */

void
df_simulate_initialize_forwards (basic_block bb, bitmap live)
{
  df_ref *def_rec;
  int bb_index = bb->index;

  for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
    {
      df_ref def = *def_rec;
      if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
        bitmap_set_bit (live, DF_REF_REGNO (def));
    }
}

/* Simulate the forwards effects of INSN on the bitmap LIVE.  */

void
df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
{
  rtx link;
  if (! INSN_P (insn))
    return;

  /* Make sure that DF_NOTE really is an active df problem.  */
  gcc_assert (df_note);

  /* Note that this is the opposite of how the problem is defined, because
     in the LR problem defs _kill_ liveness.  However, they do so backwards,
     while here the scan is performed forwards!
So, first assume that the 3952 def is live, and if this is not true REG_UNUSED notes will rectify the 3953 situation. */ 3954 df_simulate_find_noclobber_defs (insn, live); 3955 3956 /* Clear all of the registers that go dead. */ 3957 for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) 3958 { 3959 switch (REG_NOTE_KIND (link)) 3960 { 3961 case REG_DEAD: 3962 case REG_UNUSED: 3963 { 3964 rtx reg = XEXP (link, 0); 3965 int regno = REGNO (reg); 3966 if (regno < FIRST_PSEUDO_REGISTER) 3967 { 3968 int n = hard_regno_nregs[regno][GET_MODE (reg)]; 3969 while (--n >= 0) 3970 bitmap_clear_bit (live, regno + n); 3971 } 3972 else 3973 bitmap_clear_bit (live, regno); 3974 } 3975 break; 3976 default: 3977 break; 3978 } 3979 } 3980 df_simulate_fixup_sets (bb, live); 3981 } 3982 3983 3984 3985 /*---------------------------------------------------------------------------- 3986 MULTIPLE DEFINITIONS 3987 3988 Find the locations in the function reached by multiple definition sites 3989 for a live pseudo. In and out bitvectors are built for each basic 3990 block. They are restricted for efficiency to live registers. 3991 3992 The gen and kill sets for the problem are obvious. Together they 3993 include all defined registers in a basic block; the gen set includes 3994 registers where a partial or conditional or may-clobber definition is 3995 last in the BB, while the kill set includes registers with a complete 3996 definition coming last. However, the computation of the dataflow 3997 itself is interesting. 3998 3999 The idea behind it comes from SSA form's iterated dominance frontier 4000 criterion for inserting PHI functions. Just like in that case, we can use 4001 the dominance frontier to find places where multiple definitions meet; 4002 a register X defined in a basic block BB1 has multiple definitions in 4003 basic blocks in BB1's dominance frontier. 4004 4005 So, the in-set of a basic block BB2 is not just the union of the 4006 out-sets of BB2's predecessors, but includes some more bits that come 4007 from the basic blocks of whose dominance frontier BB2 is part (BB1 in 4008 the previous paragraph). I called this set the init-set of BB2. 4009 4010 (Note: I actually use the kill-set only to build the init-set. 4011 gen bits are anyway propagated from BB1 to BB2 by dataflow). 4012 4013 For example, if you have 4014 4015 BB1 : r10 = 0 4016 r11 = 0 4017 if <...> goto BB2 else goto BB3; 4018 4019 BB2 : r10 = 1 4020 r12 = 1 4021 goto BB3; 4022 4023 BB3 : 4024 4025 you have BB3 in BB2's dominance frontier but not in BB1's, so that the 4026 init-set of BB3 includes r10 and r12, but not r11. Note that we do 4027 not need to iterate the dominance frontier, because we do not insert 4028 anything like PHI functions there! Instead, dataflow will take care of 4029 propagating the information to BB3's successors. 4030 ---------------------------------------------------------------------------*/ 4031 4032 /* Scratch var used by transfer functions. This is used to do md analysis 4033 only for live registers. */ 4034 static bitmap df_md_scratch; 4035 4036 /* Set basic block info. 
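   The INDEX'th entry of DF_MD's block_info vector is simply made to
   point to BB_INFO.  The routines below, and any pass that uses this
   problem, read the per-block bitmaps back through the corresponding
   getter, for example:

       struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);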
   */
4037
4038 static void
4039 df_md_set_bb_info (unsigned int index,
4040                    struct df_md_bb_info *bb_info)
4041 {
4042   gcc_assert (df_md);
4043   gcc_assert (index < df_md->block_info_size);
4044   df_md->block_info[index] = bb_info;
4045 }
4046
4047
4048 static void
4049 df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
4050                     void *vbb_info)
4051 {
4052   struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info;
4053   if (bb_info)
4054     {
4055       BITMAP_FREE (bb_info->kill);
4056       BITMAP_FREE (bb_info->gen);
4057       BITMAP_FREE (bb_info->init);
4058       BITMAP_FREE (bb_info->in);
4059       BITMAP_FREE (bb_info->out);
4060       pool_free (df_md->block_pool, bb_info);
4061     }
4062 }
4063
4064
4065 /* Allocate or reset bitmaps for DF_MD.  For blocks that already have
4066    block info, every bitvector, including the solution, is cleared.  */
4067
4068 static void
4069 df_md_alloc (bitmap all_blocks)
4070 {
4071   unsigned int bb_index;
4072   bitmap_iterator bi;
4073
4074   if (!df_md->block_pool)
4075     df_md->block_pool = create_alloc_pool ("df_md_block pool",
4076                                            sizeof (struct df_md_bb_info), 50);
4077
4078   df_grow_bb_info (df_md);
4079   df_md_scratch = BITMAP_ALLOC (NULL);
4080
4081   EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4082     {
4083       struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4084       if (bb_info)
4085         {
4086           bitmap_clear (bb_info->init);
4087           bitmap_clear (bb_info->gen);
4088           bitmap_clear (bb_info->kill);
4089           bitmap_clear (bb_info->in);
4090           bitmap_clear (bb_info->out);
4091         }
4092       else
4093         {
4094           bb_info = (struct df_md_bb_info *) pool_alloc (df_md->block_pool);
4095           df_md_set_bb_info (bb_index, bb_info);
4096           bb_info->init = BITMAP_ALLOC (NULL);
4097           bb_info->gen = BITMAP_ALLOC (NULL);
4098           bb_info->kill = BITMAP_ALLOC (NULL);
4099           bb_info->in = BITMAP_ALLOC (NULL);
4100           bb_info->out = BITMAP_ALLOC (NULL);
4101         }
4102     }
4103
4104   df_md->optional_p = true;
4105 }
4106
4107 /* Add the effect of the top artificial defs of BB to the multiple definitions
4108    bitmap LOCAL_MD.  */
4109
4110 void
4111 df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md)
4112 {
4113   int bb_index = bb->index;
4114   df_ref *def_rec;
4115   for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
4116     {
4117       df_ref def = *def_rec;
4118       if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
4119         {
4120           unsigned int dregno = DF_REF_REGNO (def);
4121           if (DF_REF_FLAGS (def)
4122               & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4123             bitmap_set_bit (local_md, dregno);
4124           else
4125             bitmap_clear_bit (local_md, dregno);
4126         }
4127     }
4128 }
4129
4130
4131 /* Add the effect of the defs of INSN to the multiple definitions bitmap
4132    LOCAL_MD.
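   Together with df_md_simulate_artificial_defs_at_top this lets a pass
   track, insn by insn, which registers may currently have more than one
   reaching definition.  A rough sketch of the expected use (LOCAL_MD is
   the caller's own bitmap, and the DF_MD problem must have been added
   and df_analyze called beforehand):

       bitmap local_md = BITMAP_ALLOC (NULL);
       rtx insn;

       bitmap_copy (local_md, df_md_get_bb_info (bb->index)->in);
       df_md_simulate_artificial_defs_at_top (bb, local_md);
       FOR_BB_INSNS (bb, insn)
         if (INSN_P (insn))
           df_md_simulate_one_insn (bb, insn, local_md);
       BITMAP_FREE (local_md);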
   */
4133
4134 void
4135 df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
4136                          bitmap local_md)
4137 {
4138   unsigned uid = INSN_UID (insn);
4139   df_ref *def_rec;
4140
4141   for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
4142     {
4143       df_ref def = *def_rec;
4144       unsigned int dregno = DF_REF_REGNO (def);
4145       if ((!(df->changeable_flags & DF_NO_HARD_REGS))
4146           || (dregno >= FIRST_PSEUDO_REGISTER))
4147         {
4148           if (DF_REF_FLAGS (def)
4149               & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4150             bitmap_set_bit (local_md, dregno);
4151           else
4152             bitmap_clear_bit (local_md, dregno);
4153         }
4154     }
4155 }
4156
4157 static void
4158 df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
4159                                     df_ref *def_rec,
4160                                     int top_flag)
4161 {
4162   df_ref def;
4163   bitmap_clear (seen_in_insn);
4164
4165   while ((def = *def_rec++) != NULL)
4166     {
4167       unsigned int dregno = DF_REF_REGNO (def);
4168       if (((!(df->changeable_flags & DF_NO_HARD_REGS))
4169            || (dregno >= FIRST_PSEUDO_REGISTER))
4170           && top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
4171         {
4172           if (!bitmap_bit_p (seen_in_insn, dregno))
4173             {
4174               if (DF_REF_FLAGS (def)
4175                   & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4176                 {
4177                   bitmap_set_bit (bb_info->gen, dregno);
4178                   bitmap_clear_bit (bb_info->kill, dregno);
4179                 }
4180               else
4181                 {
4182                   /* When we find a clobber and a regular def,
4183                      make sure the regular def wins.  */
4184                   bitmap_set_bit (seen_in_insn, dregno);
4185                   bitmap_set_bit (bb_info->kill, dregno);
4186                   bitmap_clear_bit (bb_info->gen, dregno);
4187                 }
4188             }
4189         }
4190     }
4191 }
4192
4193
4194 /* Compute local multiple def info for basic block BB.  */
4195
4196 static void
4197 df_md_bb_local_compute (unsigned int bb_index)
4198 {
4199   basic_block bb = BASIC_BLOCK (bb_index);
4200   struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4201   rtx insn;
4202
4203   /* Artificials are only hard regs.  */
4204   if (!(df->changeable_flags & DF_NO_HARD_REGS))
4205     df_md_bb_local_compute_process_def (bb_info,
4206                                         df_get_artificial_defs (bb_index),
4207                                         DF_REF_AT_TOP);
4208
4209   FOR_BB_INSNS (bb, insn)
4210     {
4211       unsigned int uid = INSN_UID (insn);
4212       if (!INSN_P (insn))
4213         continue;
4214
4215       df_md_bb_local_compute_process_def (bb_info, DF_INSN_UID_DEFS (uid), 0);
4216     }
4217
4218   if (!(df->changeable_flags & DF_NO_HARD_REGS))
4219     df_md_bb_local_compute_process_def (bb_info,
4220                                         df_get_artificial_defs (bb_index),
4221                                         0);
4222 }
4223
4224 /* Compute local multiple def info for each basic block within BLOCKS.  */
4225
4226 static void
4227 df_md_local_compute (bitmap all_blocks)
4228 {
4229   unsigned int bb_index, df_bb_index;
4230   bitmap_iterator bi1, bi2;
4231   basic_block bb;
4232   bitmap *frontiers;
4233
4234   seen_in_insn = BITMAP_ALLOC (NULL);
4235
4236   EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4237     {
4238       df_md_bb_local_compute (bb_index);
4239     }
4240
4241   BITMAP_FREE (seen_in_insn);
4242
4243   frontiers = XNEWVEC (bitmap, last_basic_block);
4244   FOR_ALL_BB (bb)
4245     frontiers[bb->index] = BITMAP_ALLOC (NULL);
4246
4247   compute_dominance_frontiers (frontiers);
4248
4249   /* Add each basic block's kills to the nodes in the frontier of the BB.
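      That is, for every block B in ALL_BLOCKS and every block B' in B's
      dominance frontier that is also in ALL_BLOCKS, the loop below
      computes

          init (B') |= kill (B) & live_in (B')

      which is the init-set construction described at the top of this
      problem, restricted to the registers live at the top of B'.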
*/ 4250 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1) 4251 { 4252 bitmap kill = df_md_get_bb_info (bb_index)->kill; 4253 EXECUTE_IF_SET_IN_BITMAP (frontiers[bb_index], 0, df_bb_index, bi2) 4254 { 4255 basic_block bb = BASIC_BLOCK (df_bb_index); 4256 if (bitmap_bit_p (all_blocks, df_bb_index)) 4257 bitmap_ior_and_into (df_md_get_bb_info (df_bb_index)->init, kill, 4258 df_get_live_in (bb)); 4259 } 4260 } 4261 4262 FOR_ALL_BB (bb) 4263 BITMAP_FREE (frontiers[bb->index]); 4264 free (frontiers); 4265 } 4266 4267 4268 /* Reset the global solution for recalculation. */ 4269 4270 static void 4271 df_md_reset (bitmap all_blocks) 4272 { 4273 unsigned int bb_index; 4274 bitmap_iterator bi; 4275 4276 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) 4277 { 4278 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index); 4279 gcc_assert (bb_info); 4280 bitmap_clear (bb_info->in); 4281 bitmap_clear (bb_info->out); 4282 } 4283 } 4284 4285 static bool 4286 df_md_transfer_function (int bb_index) 4287 { 4288 basic_block bb = BASIC_BLOCK (bb_index); 4289 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index); 4290 bitmap in = bb_info->in; 4291 bitmap out = bb_info->out; 4292 bitmap gen = bb_info->gen; 4293 bitmap kill = bb_info->kill; 4294 4295 /* We need to use a scratch set here so that the value returned from this 4296 function invocation properly reflects whether the sets changed in a 4297 significant way; i.e. not just because the live set was anded in. */ 4298 bitmap_and (df_md_scratch, gen, df_get_live_out (bb)); 4299 4300 /* Multiple definitions of a register are not relevant if it is not 4301 live. Thus we trim the result to the places where it is live. */ 4302 bitmap_and_into (in, df_get_live_in (bb)); 4303 4304 return bitmap_ior_and_compl (out, df_md_scratch, in, kill); 4305 } 4306 4307 /* Initialize the solution bit vectors for problem. */ 4308 4309 static void 4310 df_md_init (bitmap all_blocks) 4311 { 4312 unsigned int bb_index; 4313 bitmap_iterator bi; 4314 4315 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) 4316 { 4317 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index); 4318 4319 bitmap_copy (bb_info->in, bb_info->init); 4320 df_md_transfer_function (bb_index); 4321 } 4322 } 4323 4324 static void 4325 df_md_confluence_0 (basic_block bb) 4326 { 4327 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index); 4328 bitmap_copy (bb_info->in, bb_info->init); 4329 } 4330 4331 /* In of target gets or of out of source. */ 4332 4333 static void 4334 df_md_confluence_n (edge e) 4335 { 4336 bitmap op1 = df_md_get_bb_info (e->dest->index)->in; 4337 bitmap op2 = df_md_get_bb_info (e->src->index)->out; 4338 4339 if (e->flags & EDGE_FAKE) 4340 return; 4341 4342 if (e->flags & EDGE_EH) 4343 bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset); 4344 else 4345 bitmap_ior_into (op1, op2); 4346 } 4347 4348 /* Free all storage associated with the problem. 
*/ 4349 4350 static void 4351 df_md_free (void) 4352 { 4353 unsigned int i; 4354 for (i = 0; i < df_md->block_info_size; i++) 4355 { 4356 struct df_md_bb_info *bb_info = df_md_get_bb_info (i); 4357 if (bb_info) 4358 { 4359 BITMAP_FREE (bb_info->kill); 4360 BITMAP_FREE (bb_info->gen); 4361 BITMAP_FREE (bb_info->init); 4362 BITMAP_FREE (bb_info->in); 4363 BITMAP_FREE (bb_info->out); 4364 } 4365 } 4366 4367 BITMAP_FREE (df_md_scratch); 4368 free_alloc_pool (df_md->block_pool); 4369 4370 df_md->block_info_size = 0; 4371 free (df_md->block_info); 4372 free (df_md); 4373 } 4374 4375 4376 /* Debugging info at top of bb. */ 4377 4378 static void 4379 df_md_top_dump (basic_block bb, FILE *file) 4380 { 4381 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index); 4382 if (!bb_info || !bb_info->in) 4383 return; 4384 4385 fprintf (file, ";; md in \t"); 4386 df_print_regset (file, bb_info->in); 4387 fprintf (file, ";; md init \t"); 4388 df_print_regset (file, bb_info->init); 4389 fprintf (file, ";; md gen \t"); 4390 df_print_regset (file, bb_info->gen); 4391 fprintf (file, ";; md kill \t"); 4392 df_print_regset (file, bb_info->kill); 4393 } 4394 4395 /* Debugging info at bottom of bb. */ 4396 4397 static void 4398 df_md_bottom_dump (basic_block bb, FILE *file) 4399 { 4400 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index); 4401 if (!bb_info || !bb_info->out) 4402 return; 4403 4404 fprintf (file, ";; md out \t"); 4405 df_print_regset (file, bb_info->out); 4406 } 4407 4408 static struct df_problem problem_MD = 4409 { 4410 DF_MD, /* Problem id. */ 4411 DF_FORWARD, /* Direction. */ 4412 df_md_alloc, /* Allocate the problem specific data. */ 4413 df_md_reset, /* Reset global information. */ 4414 df_md_free_bb_info, /* Free basic block info. */ 4415 df_md_local_compute, /* Local compute function. */ 4416 df_md_init, /* Init the solution specific data. */ 4417 df_worklist_dataflow, /* Worklist solver. */ 4418 df_md_confluence_0, /* Confluence operator 0. */ 4419 df_md_confluence_n, /* Confluence operator n. */ 4420 df_md_transfer_function, /* Transfer function. */ 4421 NULL, /* Finalize function. */ 4422 df_md_free, /* Free all of the problem information. */ 4423 df_md_free, /* Remove this problem from the stack of dataflow problems. */ 4424 NULL, /* Debugging. */ 4425 df_md_top_dump, /* Debugging start block. */ 4426 df_md_bottom_dump, /* Debugging end block. */ 4427 NULL, /* Incremental solution verify start. */ 4428 NULL, /* Incremental solution verify end. */ 4429 NULL, /* Dependent problem. */ 4430 TV_DF_MD, /* Timing variable. */ 4431 false /* Reset blocks on dropping out of blocks_to_analyze. */ 4432 }; 4433 4434 /* Create a new MD instance and add it to the existing instance 4435 of DF. */ 4436 4437 void 4438 df_md_add_problem (void) 4439 { 4440 df_add_problem (&problem_MD); 4441 } 4442 4443 4444 4445
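/* A typical client of the MD problem sets it up and consumes it along
   these lines (a sketch under the usual df conventions, not code taken
   from any particular pass):

       df_md_add_problem ();
       df_analyze ();

       ... walk the function, reading the per-block solution through
           df_md_get_bb_info and simulating within a block with
           df_md_simulate_artificial_defs_at_top and
           df_md_simulate_one_insn ...

   Because df_md_alloc marks the problem as optional, it is removed
   again by df_finish_pass, or it can be dropped explicitly with
   df_remove_problem (df_md).  */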