/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999-2013 Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
   (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
   and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "dumpfile.h"
#include "tree.h"
#include "target.h"
#include "target-def.h"
#include "df.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */


typedef struct df_mw_hardreg *df_mw_hardreg_ptr;


#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

/* The following two macros free the vecs that hold either the refs or
   the mw refs.  They are a little tricky because a vec with 0
   elements is special and is not to be freed.  */
#define df_scan_free_ref_vec(V) \
  do { \
    if (V && *V) \
      free (V); \
  } while (0)

#define df_scan_free_mws_vec(V) \
  do { \
    if (V && *V) \
      free (V); \
  } while (0)

/* The set of hard registers in eliminables[i].from.  */

static HARD_REG_SET elim_reg_set;
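/* A note on the convention the two macros above rely on (as drawn
   from how the vectors are used later in this file): a ref or mw
   vector is a NULL-terminated array, and an "empty" vector is one
   whose first element is NULL.  Empty vectors may be the shared
   sentinels df_null_ref_rec/df_null_mw_rec defined below, which is
   why the macros only call free when the first element is non-NULL.
   A minimal sketch of a client-side emptiness test:  */
#if 0
  /* Hypothetical example, not part of GCC.  */
  df_ref *defs = DF_INSN_DEFS (insn);
  if (defs == NULL || *defs == NULL)
    ;  /* No defs recorded; DEFS may be the shared empty record.  */
#endif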
/* Initialize ur_in and ur_out as if all hard registers were partially
   available.  */

struct df_collection_rec
{
  vec<df_ref, va_stack> def_vec;
  vec<df_ref, va_stack> use_vec;
  vec<df_ref, va_stack> eq_use_vec;
  vec<df_mw_hardreg_ptr, va_stack> mw_vec;
};

static df_ref df_null_ref_rec[1];
static struct df_mw_hardreg * df_null_mw_rec[1];

static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
                           rtx, rtx *,
                           basic_block, struct df_insn_info *,
                           enum df_ref_type, int ref_flags);
static void df_def_record_1 (struct df_collection_rec *, rtx *,
                             basic_block, struct df_insn_info *,
                             int ref_flags);
static void df_defs_record (struct df_collection_rec *, rtx,
                            basic_block, struct df_insn_info *,
                            int ref_flags);
static void df_uses_record (struct df_collection_rec *,
                            rtx *, enum df_ref_type,
                            basic_block, struct df_insn_info *,
                            int ref_flags);

static void df_install_ref_incremental (df_ref);
static df_ref df_ref_create_structure (enum df_ref_class,
                                       struct df_collection_rec *, rtx, rtx *,
                                       basic_block, struct df_insn_info *,
                                       enum df_ref_type, int ref_flags);
static void df_insn_refs_collect (struct df_collection_rec *,
                                  basic_block, struct df_insn_info *);
static void df_canonize_collection_rec (struct df_collection_rec *);

static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_eh_block_artificial_uses (bitmap);

static void df_record_entry_block_defs (bitmap);
static void df_record_exit_block_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_ref_chain_delete_du_chain (df_ref *);
static void df_ref_chain_delete (df_ref *);

static void df_refs_add_to_chains (struct df_collection_rec *,
                                   basic_block, rtx);

static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
static void df_install_ref (df_ref, struct df_reg_info *,
                            struct df_ref_info *, bool);

static int df_ref_compare (const void *, const void *);
static int df_mw_compare (const void *, const void *);

/* Indexed by hardware reg number, is true if that register is ever
   used in the current function.

   In df-scan.c, this is set up to record the hard regs used
   explicitly.  Reload adds in the hard regs used for holding pseudo
   regs.  Final uses it to generate the code in the function prologue
   and epilogue to save and restore registers as needed.  */

static bool regs_ever_live[FIRST_PSEUDO_REGISTER];

/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/
/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  alloc_pool ref_base_pool;
  alloc_pool ref_artificial_pool;
  alloc_pool ref_regular_pool;
  alloc_pool insn_pool;
  alloc_pool reg_pool;
  alloc_pool mw_reg_pool;
  bitmap_obstack reg_bitmaps;
  bitmap_obstack insn_bitmaps;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;


/* Internal function to shut down the scanning problem.  */
static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  basic_block bb;

  /* The vectors that hold the refs are not pool allocated because
     they come in many sizes.  This makes them impossible to delete
     all at once.  */
  for (i = 0; i < DF_INSN_SIZE (); i++)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_GET (i);
      /* Skip the insns that have no insn_info or have been
         deleted.  */
      if (insn_info)
        {
          df_scan_free_ref_vec (insn_info->defs);
          df_scan_free_ref_vec (insn_info->uses);
          df_scan_free_ref_vec (insn_info->eq_uses);
          df_scan_free_mws_vec (insn_info->mw_hardregs);
        }
    }

  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      if (bb_info)
        {
          df_scan_free_ref_vec (bb_info->artificial_defs);
          df_scan_free_ref_vec (bb_info->artificial_uses);
        }
    }

  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE (df) = 0;

  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  bitmap_clear (&df->hardware_regs_used);
  bitmap_clear (&df->regular_block_artificial_uses);
  bitmap_clear (&df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  free_alloc_pool (problem_data->ref_base_pool);
  free_alloc_pool (problem_data->ref_artificial_pool);
  free_alloc_pool (problem_data->ref_regular_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}
/* Free basic block info.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;

  /* See if bb_info is initialized.  */
  if (bb_info->artificial_defs)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            /* Delete the scanning information recorded for INSN.  */
            df_insn_delete (bb, INSN_UID (insn));
        }

      if (bb_index < df_scan->block_info_size)
        bb_info = df_scan_get_bb_info (bb_index);

      /* Get rid of any artificial uses or defs.  */
      if (bb_info->artificial_defs)
        {
          df_ref_chain_delete_du_chain (bb_info->artificial_defs);
          df_ref_chain_delete_du_chain (bb_info->artificial_uses);
          df_ref_chain_delete (bb_info->artificial_defs);
          df_ref_chain_delete (bb_info->artificial_uses);
          bb_info->artificial_defs = NULL;
          bb_info->artificial_uses = NULL;
        }
    }
}


/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */
void
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 512;
  basic_block bb;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  problem_data = XNEW (struct df_scan_problem_data);
  df_scan->problem_data = problem_data;
  df_scan->computed = true;

  problem_data->ref_base_pool
    = create_alloc_pool ("df_scan ref base",
                         sizeof (struct df_base_ref), block_size);
  problem_data->ref_artificial_pool
    = create_alloc_pool ("df_scan ref artificial",
                         sizeof (struct df_artificial_ref), block_size);
  problem_data->ref_regular_pool
    = create_alloc_pool ("df_scan ref regular",
                         sizeof (struct df_regular_ref), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan insn",
                         sizeof (struct df_insn_info), block_size);
  problem_data->reg_pool
    = create_alloc_pool ("df_scan reg",
                         sizeof (struct df_reg_info), block_size);
  problem_data->mw_reg_pool
    = create_alloc_pool ("df_scan mw_reg",
                         sizeof (struct df_mw_hardreg), block_size / 16);

  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);

  insn_num += insn_num / 4;
  df_grow_reg_info ();

  df_grow_insn_info ();
  df_grow_bb_info (df_scan);

  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
  df_scan->optional_p = false;
}


/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (void)
{
  if (df_scan->problem_data)
    df_scan_free_internal ();

  if (df->blocks_to_analyze)
    {
      BITMAP_FREE (df->blocks_to_analyze);
      df->blocks_to_analyze = NULL;
    }

  free (df_scan);
}
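/* For orientation (a sketch, not a prescription): the usual life
   cycle of this problem, as driven from df-core.c, is roughly to
   register it with df_scan_add_problem (), set up the pools with
   df_scan_alloc (), do the initial scan with df_scan_blocks (), and
   tear everything down through df_scan_free () at the end.  */
#if 0
  /* Hypothetical client-side equivalent of that sequence.  */
  df_scan_add_problem ();
  df_scan_alloc (NULL);
  df_scan_blocks ();
  /* ... use the ref information ...  */
#endif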
/* Dump the preamble for DF_SCAN dump.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;
  int dcount = 0;
  int ucount = 0;
  int ecount = 0;
  int icount = 0;
  int ccount = 0;
  basic_block bb;
  rtx insn;

  fprintf (file, ";; invalidated by call \t");
  df_print_regset (file, regs_invalidated_by_call_regset);
  fprintf (file, ";; hardware regs used \t");
  df_print_regset (file, &df->hardware_regs_used);
  fprintf (file, ";; regular block artificial uses \t");
  df_print_regset (file, &df->regular_block_artificial_uses);
  fprintf (file, ";; eh block artificial uses \t");
  df_print_regset (file, &df->eh_block_artificial_uses);
  fprintf (file, ";; entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";; exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";; regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d[%s]", i, reg_names[i]);
  fprintf (file, "\n;; ref usage \t");

  for (i = 0; i < (int) df->regs_inited; i++)
    if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
      {
        const char *sep = "";

        fprintf (file, "r%d={", i);
        if (DF_REG_DEF_COUNT (i))
          {
            fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
            sep = ",";
            dcount += DF_REG_DEF_COUNT (i);
          }
        if (DF_REG_USE_COUNT (i))
          {
            fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
            sep = ",";
            ucount += DF_REG_USE_COUNT (i);
          }
        if (DF_REG_EQ_USE_COUNT (i))
          {
            fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
            ecount += DF_REG_EQ_USE_COUNT (i);
          }
        fprintf (file, "} ");
      }

  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        {
          if (CALL_P (insn))
            ccount++;
          else
            icount++;
        }

  fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
                 " in %d{%d regular + %d call} insns.\n",
           dcount + ucount + ecount, dcount, ucount, ecount,
           icount + ccount, icount, ccount);
}

/* Dump the bb_info for a given basic block.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  {
    rtx insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        df_insn_debug (insn, false, file);
  }
#endif
}
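/* Reading the ref-usage dump produced above (an illustrative,
   hypothetical line, not taken from a real dump): "r66={2d,3u,1e}"
   means that register 66 has 2 defs, 3 uses and 1 use inside a
   REG_EQUIV or REG_EQUAL note; the final line totals these over all
   registers and counts regular vs. call insns.  */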
static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  NULL,                       /* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,         /* Debugging.  */
  df_scan_start_block,        /* Debugging start block.  */
  NULL,                       /* Debugging end block.  */
  NULL,                       /* Debugging start insn.  */
  NULL,                       /* Debugging end insn.  */
  NULL,                       /* Incremental solution verify start.  */
  NULL,                       /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  sizeof (struct df_scan_bb_info), /* Size of entry of block_info array.  */
  TV_DF_SCAN,                 /* Timing variable.  */
  false                       /* Reset blocks on dropping out of blocks_to_analyze.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}


/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  if (df->regs_size < new_size)
    {
      new_size += new_size / 4;
      df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
      df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
      df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
                                    new_size);
      df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
      df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
      df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
      df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
      df->regs_size = new_size;
    }

  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}


/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
      memset (ref_info->refs + ref_info->refs_size, 0,
              (new_size - ref_info->refs_size) * sizeof (df_ref));
      ref_info->refs_size = new_size;
    }
}
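/* To make the 25% headroom policy above concrete: if max_reg_num ()
   returns 1000 and df->regs_size is, say, 800, the tables are
   reallocated to 1000 + 1000/4 = 1250 slots, so a fair number of
   further pseudos can be created before another reallocation is
   needed.  (The numbers are made up for illustration.)  */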
/* Check and grow the ref information if necessary.  This routine
   guarantees that the refs array has at least total_size +
   BITMAP_ADDEND entries.  It updates ref_info->refs_size only and
   does not change ref_info->total_size.  */

static void
df_check_and_grow_ref_info (struct df_ref_info *ref_info,
                            unsigned bitmap_addend)
{
  if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
    {
      int new_size = ref_info->total_size + bitmap_addend;
      new_size += ref_info->total_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }
}


/* Grow the insn information.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

void
df_grow_insn_info (void)
{
  unsigned int new_size = get_max_uid () + 1;
  if (DF_INSN_SIZE () < new_size)
    {
      new_size += new_size / 4;
      df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
      memset (df->insns + df->insns_size, 0,
              (new_size - DF_INSN_SIZE ()) * sizeof (struct df_insn_info *));
      DF_INSN_SIZE () = new_size;
    }
}




/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan all of the blocks_to_analyze, or all of the blocks in the
   function if blocks_to_analyze is NULL.  */

void
df_scan_blocks (void)
{
  basic_block bb;

  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);

  bitmap_ior_into (&df->eh_block_artificial_uses,
                   &df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));

  /* Regular blocks */
  FOR_EACH_BB (bb)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}

/* Create new refs under address LOC within INSN.  This function is
   only used externally.  REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
   depending on whether LOC is inside PATTERN (INSN) or a note.  */

void
df_uses_create (rtx *loc, rtx insn, int ref_flags)
{
  gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
  df_uses_record (NULL, loc, DF_REF_REG_USE,
                  BLOCK_FOR_INSN (insn),
                  DF_INSN_INFO_GET (insn),
                  ref_flags);
}
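/* A minimal sketch of how a pass might use df_uses_create above and
   df_ref_create below.  The insn, register and location are
   hypothetical stand-ins for whatever rtl the caller is editing, and
   the SET-shaped pattern is assumed for illustration.  */
#if 0
  /* Record the uses read through the source of INSN's SET pattern.  */
  df_uses_create (&SET_SRC (PATTERN (insn)), insn, 0);

  /* Or create a single def ref explicitly for a hypothetical REG.  */
  df_ref_create (reg, &SET_DEST (PATTERN (insn)), insn,
                 BLOCK_FOR_INSN (insn), DF_REF_REG_DEF, 0);
#endif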
/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  This function is only used externally.  */

df_ref
df_ref_create (rtx reg, rtx *loc, rtx insn,
               basic_block bb,
               enum df_ref_type ref_type,
               int ref_flags)
{
  enum df_ref_class cl;

  df_grow_reg_info ();

  /* You cannot hack artificial refs.  */
  gcc_assert (insn);

  if (loc)
    cl = DF_REF_REGULAR;
  else
    cl = DF_REF_BASE;

  return df_ref_create_structure (cl, NULL, reg, loc, bb,
                                  DF_INSN_INFO_GET (insn),
                                  ref_type, ref_flags);
}

static void
df_install_ref_incremental (df_ref ref)
{
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  df_ref *ref_rec;
  df_ref **ref_rec_ptr;
  unsigned int count = 0;
  bool add_to_table;

  rtx insn = DF_REF_INSN (ref);
  basic_block bb = BLOCK_FOR_INSN (insn);

  if (DF_REF_REG_DEF_P (ref))
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_rec_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_EQ_USES (insn);
      switch (ref_info->ref_order)
        {
        case DF_REF_ORDER_UNORDERED_WITH_NOTES:
        case DF_REF_ORDER_BY_REG_WITH_NOTES:
        case DF_REF_ORDER_BY_INSN_WITH_NOTES:
          add_to_table = true;
          break;
        default:
          add_to_table = false;
          break;
        }
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
        break;
      default:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED;
        break;
      }

  ref_rec = *ref_rec_ptr;
  while (*ref_rec)
    {
      count++;
      ref_rec++;
    }

  ref_rec = *ref_rec_ptr;
  if (count)
    {
      ref_rec = XRESIZEVEC (df_ref, ref_rec, count + 2);
      *ref_rec_ptr = ref_rec;
      ref_rec[count] = ref;
      ref_rec[count + 1] = NULL;
      qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
    }
  else
    {
      df_ref *ref_rec = XNEWVEC (df_ref, 2);
      ref_rec[0] = ref;
      ref_rec[1] = NULL;
      *ref_rec_ptr = ref_rec;
    }

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (bb);
}
/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/

static void
df_free_ref (df_ref ref)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  switch (DF_REF_CLASS (ref))
    {
    case DF_REF_BASE:
      pool_free (problem_data->ref_base_pool, ref);
      break;

    case DF_REF_ARTIFICIAL:
      pool_free (problem_data->ref_artificial_pool, ref);
      break;

    case DF_REF_REGULAR:
      pool_free (problem_data->ref_regular_pool, ref);
      break;
    }
}


/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  */

static void
df_reg_chain_unlink (df_ref ref)
{
  df_ref next = DF_REF_NEXT_REG (ref);
  df_ref prev = DF_REF_PREV_REG (ref);
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  df_ref *refs = NULL;

  if (DF_REF_REG_DEF_P (ref))
    {
      int regno = DF_REF_REGNO (ref);
      reg_info = DF_REG_DEF_GET (regno);
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
        {
          reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
          switch (df->use_info.ref_order)
            {
            case DF_REF_ORDER_UNORDERED_WITH_NOTES:
            case DF_REF_ORDER_BY_REG_WITH_NOTES:
            case DF_REF_ORDER_BY_INSN_WITH_NOTES:
              refs = df->use_info.refs;
              break;
            default:
              break;
            }
        }
      else
        {
          reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
          refs = df->use_info.refs;
        }
    }

  if (refs)
    {
      if (df->analyze_subset)
        {
          if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
            refs[id] = NULL;
        }
      else
        refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here.  It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  df_free_ref (ref);
}


/* Remove REF from VEC.  */

static void
df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
{
  df_ref *vec = *vec_ptr;

  if (vec[1])
    {
      while (*vec && *vec != ref)
        vec++;

      while (*vec)
        {
          *vec = *(vec + 1);
          vec++;
        }
    }
  else
    {
      free (vec);
      *vec_ptr = df_null_ref_rec;
    }
}
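/* Illustration of df_ref_compress_rec (with hypothetical refs A, B
   and C): compressing B out of the NULL-terminated vector
   {A, B, C, NULL} shifts the tail down to give {A, C, NULL};
   compressing the only element out of {A, NULL} frees the vector and
   installs the shared empty record df_null_ref_rec instead.  */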
/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (df_ref ref)
{
#if 0
  if (dump_file)
    {
      fprintf (dump_file, "removing ref ");
      df_ref_debug (ref, dump_file);
    }
#endif

  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BBNO (ref));
          df_ref_compress_rec (&bb_info->artificial_defs, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
          df_ref_compress_rec (&insn_rec->defs, ref);
        }
    }
  else
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BBNO (ref));
          df_ref_compress_rec (&bb_info->artificial_uses, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);

          if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
            df_ref_compress_rec (&insn_rec->eq_uses, ref);
          else
            df_ref_compress_rec (&insn_rec->uses, ref);
        }
    }

  /* By deleting the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (DF_REF_BB (ref));
  df_reg_chain_unlink (ref);
}


/* Create the insn record for INSN.  If there was one there, zero it
   out.  */

struct df_insn_info *
df_insn_create_insn_record (rtx insn)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_insn_info *insn_rec;

  df_grow_insn_info ();
  insn_rec = DF_INSN_INFO_GET (insn);
  if (!insn_rec)
    {
      insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
      DF_INSN_INFO_SET (insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));
  insn_rec->insn = insn;
  return insn_rec;
}


/* Delete the du chain (DF_REF_CHAIN ()) of each ref in the ref chain.  */

static void
df_ref_chain_delete_du_chain (df_ref *ref_rec)
{
  while (*ref_rec)
    {
      df_ref ref = *ref_rec;
      /* CHAIN is allocated by DF_CHAIN.  So make sure to
         pass df_scan instance for the problem.  */
      if (DF_REF_CHAIN (ref))
        df_chain_unlink (ref);
      ref_rec++;
    }
}


/* Delete all refs in the ref chain.  */

static void
df_ref_chain_delete (df_ref *ref_rec)
{
  df_ref *start = ref_rec;
  while (*ref_rec)
    {
      df_reg_chain_unlink (*ref_rec);
      ref_rec++;
    }

  /* If the list is empty, it has a special shared element that is not
     to be deleted.  */
  if (*start)
    free (start);
}


/* Delete the hardreg chain.  */

static void
df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
{
  struct df_scan_problem_data *problem_data;

  if (!hardregs)
    return;

  problem_data = (struct df_scan_problem_data *) df_scan->problem_data;

  while (*hardregs)
    {
      pool_free (problem_data->mw_reg_pool, *hardregs);
      hardregs++;
    }
}
/* Delete all of the refs information from INSN.  BB must be passed in
   except when this is called from df_process_deferred_rescans, so that
   the block can be marked as dirty.  */

void
df_insn_delete (basic_block bb, unsigned int uid)
{
  struct df_insn_info *insn_info = NULL;
  if (!df)
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.  */
  if (bb)
    df_set_bb_dirty (bb);

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (insn_info)
        {
          bitmap_clear_bit (&df->insns_to_rescan, uid);
          bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
          bitmap_set_bit (&df->insns_to_delete, uid);
        }
      if (dump_file)
        fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;

      /* In general, notes do not have the insn_info fields
         initialized.  However, combine deletes insns by changing them
         to notes.  How clever.  So we cannot just check if it is a
         valid insn before short circuiting this code, we need to see
         if we actually initialized it.  */
      if (insn_info->defs)
        {
          df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

          if (df_chain)
            {
              df_ref_chain_delete_du_chain (insn_info->defs);
              df_ref_chain_delete_du_chain (insn_info->uses);
              df_ref_chain_delete_du_chain (insn_info->eq_uses);
            }

          df_ref_chain_delete (insn_info->defs);
          df_ref_chain_delete (insn_info->uses);
          df_ref_chain_delete (insn_info->eq_uses);
          df_scan_free_mws_vec (insn_info->mw_hardregs);
        }
      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}


/* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */

static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
  unsigned int ix;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  df_ref ref;
  struct df_mw_hardreg *mw;

  FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (collection_rec->use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (collection_rec->eq_use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (collection_rec->mw_vec, ix, mw)
    pool_free (problem_data->mw_reg_pool, mw);

  collection_rec->def_vec.release ();
  collection_rec->use_vec.release ();
  collection_rec->eq_use_vec.release ();
  collection_rec->mw_vec.release ();
}
/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */

bool
df_insn_rescan (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;

  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
        fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = df_null_ref_rec;
          insn_info->uses = df_null_ref_rec;
          insn_info->eq_uses = df_null_ref_rec;
          insn_info->mw_hardregs = df_null_mw_rec;
        }
      if (dump_file)
        fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      bitmap_clear_bit (&df->insns_to_delete, uid);
      bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
      bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  vec_stack_alloc (df_ref, collection_rec.def_vec, 128);
  vec_stack_alloc (df_ref, collection_rec.use_vec, 32);
  vec_stack_alloc (df_ref, collection_rec.eq_use_vec, 32);
  vec_stack_alloc (df_mw_hardreg_ptr, collection_rec.mw_vec, 32);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      int luid;
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false.  */
      if (the_same)
        {
          df_free_collection_rec (&collection_rec);
          if (dump_file)
            fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
          return false;
        }
      if (dump_file)
        fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's a change, so we need to delete the existing info.
         Since the insn isn't moved, we can salvage its LUID.  */
      luid = DF_INSN_LUID (insn);
      df_insn_delete (NULL, uid);
      df_insn_create_insn_record (insn);
      DF_INSN_LUID (insn) = luid;
    }
  else
    {
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
        fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn);
  if (!DEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  collection_rec.def_vec.release ();
  collection_rec.use_vec.release ();
  collection_rec.eq_use_vec.release ();
  collection_rec.mw_vec.release ();

  return true;
}
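/* Typical use of df_insn_rescan (a sketch; the edited pattern is
   hypothetical): after changing an insn in place, rescan it so the
   df caches match the new rtl.  */
#if 0
  if (validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 0))
    df_insn_rescan (insn);
#endif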
/* Same as df_insn_rescan, but don't mark the basic block as
   dirty.  */

bool
df_insn_rescan_debug_internal (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info;

  gcc_assert (DEBUG_INSN_P (insn)
              && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));

  if (!df)
    return false;

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
  if (!insn_info)
    return false;

  if (dump_file)
    fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (!insn_info->defs)
    return false;

  if (insn_info->defs == df_null_ref_rec
      && insn_info->uses == df_null_ref_rec
      && insn_info->eq_uses == df_null_ref_rec
      && insn_info->mw_hardregs == df_null_mw_rec)
    return false;

  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);
  df_scan_free_mws_vec (insn_info->mw_hardregs);

  insn_info->defs = df_null_ref_rec;
  insn_info->uses = df_null_ref_rec;
  insn_info->eq_uses = df_null_ref_rec;
  insn_info->mw_hardregs = df_null_mw_rec;

  return true;
}


/* Rescan all of the insns in the function.  Note that the artificial
   uses and defs are not touched.  This function will destroy def-use
   or use-def chains.  */

void
df_insn_rescan_all (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  basic_block bb;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          df_insn_rescan (insn);
        }
    }

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);
}
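/* The deferred-rescan machinery below is driven by the changeable
   flags.  A sketch of the intended pattern (hypothetical pass code):
   batch up many insn edits under DF_DEFER_INSN_RESCAN, then let one
   call drain the accumulated work lists.  */
#if 0
  df_set_flags (DF_DEFER_INSN_RESCAN);
  /* ... edit many insns; each df_insn_rescan/df_insn_delete call
     only records the insn in a bitmap ...  */
  df_process_deferred_rescans ();
  df_clear_flags (DF_DEFER_INSN_RESCAN);
#endif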
/* Process all of the deferred rescans or deletions.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  bitmap_copy (&tmp, &df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_rescan (insn_info->insn);
    }

  bitmap_copy (&tmp, &df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}


/* Count the number of refs.  Include the defs if INCLUDE_DEFS.  Include
   the uses if INCLUDE_USES.  Include the eq_uses if
   INCLUDE_EQ_USES.  */

static unsigned int
df_count_refs (bool include_defs, bool include_uses,
               bool include_eq_uses)
{
  unsigned int regno;
  int size = 0;
  unsigned int m = df->regs_inited;

  for (regno = 0; regno < m; regno++)
    {
      if (include_defs)
        size += DF_REG_DEF_COUNT (regno);
      if (include_uses)
        size += DF_REG_USE_COUNT (regno);
      if (include_eq_uses)
        size += DF_REG_EQ_USE_COUNT (regno);
    }
  return size;
}
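/* A note on the two sizes used by the reorganizing code below:
   ref_info->total_size is the number of refs counted by
   df_count_refs, while ref_info->table_size is the number of slots
   actually filled in ref_info->refs once the refs have been laid
   out; the two can differ when hard regs are excluded
   (DF_NO_HARD_REGS) or when only a subset of blocks is analyzed.  */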
/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order
   which is likely to be best if processing the whole function.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
                                  bool include_defs,
                                  bool include_uses,
                                  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
        {
          df_ref ref = DF_REG_DEF_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      if (include_uses)
        {
          df_ref ref = DF_REG_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      if (include_eq_uses)
        {
          df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}
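/* After the routine above runs, the refs of register R occupy the
   contiguous slice refs[begin[R]] .. refs[begin[R] + count[R] - 1],
   so a client can walk one register's refs like this (sketch; R, i
   and process () are hypothetical):  */
#if 0
  for (i = ref_info->begin[R];
       i < ref_info->begin[R] + ref_info->count[R]; i++)
    process (ref_info->refs[i]);
#endif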
/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in insn order
   which is likely to be best if processing some segment of the
   function.  */

static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
                                   bool include_defs,
                                   bool include_uses,
                                   bool include_eq_uses)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  unsigned int m = df->regs_inited;
  unsigned int offset = 0;
  unsigned int r;
  unsigned int start
    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;

  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      df_ref *ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
            }
        }
    }

  for (r = start; r < m; r++)
    {
      ref_info->begin[r] = offset;
      offset += ref_info->count[r];
      ref_info->count[r] = 0;
    }

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      df_ref *ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            df_ref ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            df_ref ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    df_ref ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    df_ref ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    df_ref ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
            }
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */

  ref_info->table_size = offset;
}

/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  */

static void
df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
                           bool include_defs,
                           bool include_uses,
                           bool include_eq_uses)
{
  if (df->analyze_subset)
    df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
                                       include_uses, include_eq_uses);
  else
    df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
                                      include_uses, include_eq_uses);
}


/* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET.  */
static unsigned int
df_add_refs_to_table (unsigned int offset,
                      struct df_ref_info *ref_info,
                      df_ref *ref_vec)
{
  while (*ref_vec)
    {
      df_ref ref = *ref_vec;
      if ((!(df->changeable_flags & DF_NO_HARD_REGS))
          || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
        {
          ref_info->refs[offset] = ref;
          DF_REF_ID (*ref_vec) = offset++;
        }
      ref_vec++;
    }
  return offset;
}


/* Add the refs in all of the insns of BB to the table in REF_INFO,
   starting at OFFSET.  Include the defs if INCLUDE_DEFS.  Include the
   uses if INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static unsigned int
df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
                               struct df_ref_info *ref_info,
                               bool include_defs, bool include_uses,
                               bool include_eq_uses)
{
  rtx insn;

  if (include_defs)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_defs (bb->index));
  if (include_uses)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_uses (bb->index));

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
        unsigned int uid = INSN_UID (insn);
        if (include_defs)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_DEFS (uid));
        if (include_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_USES (uid));
        if (include_eq_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_EQ_USES (uid));
      }
  return offset;
}
/* Organize the refs by insn into the table in REF_INFO.  If
   blocks_to_analyze is defined, use that set, otherwise the entire
   program.  Include the defs if INCLUDE_DEFS.  Include the uses if
   INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
                            bool include_defs, bool include_uses,
                            bool include_eq_uses)
{
  basic_block bb;
  unsigned int offset = 0;

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);
  if (df->blocks_to_analyze)
    {
      bitmap_iterator bi;
      unsigned int index;

      EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
        {
          offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
                                                  include_defs, include_uses,
                                                  include_eq_uses);
        }

      ref_info->table_size = offset;
    }
  else
    {
      FOR_ALL_BB (bb)
        offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
                                                include_defs, include_uses,
                                                include_eq_uses);
      ref_info->table_size = offset;
    }
}


/* If the use refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}


/* If the def refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}
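/* A sketch of how a client asks for a particular ref ordering via
   df_maybe_reorganize_use_refs above and then walks the resulting
   flat table (using the df->use_info fields as seen throughout this
   file; the loop variable is hypothetical):  */
#if 0
  df_maybe_reorganize_use_refs (DF_REF_ORDER_BY_INSN);
  for (i = 0; i < df->use_info.table_size; i++)
    {
      df_ref use = df->use_info.refs[i];
      if (use)
        ;  /* ... client processing ...  */
    }
#endif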
/* Change all of the basic block references in INSN to use the insn's
   current basic block.  This function is called from routines that move
   instructions from one block to another.  */

void
df_insn_change_bb (rtx insn, basic_block new_bb)
{
  basic_block old_bb = BLOCK_FOR_INSN (insn);
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (old_bb == new_bb)
    return;

  set_block_for_insn (insn, new_bb);

  if (!df)
    return;

  if (dump_file)
    fprintf (dump_file, "changing bb of uid %d\n", uid);

  insn_info = DF_INSN_UID_SAFE_GET (uid);
  if (insn_info == NULL)
    {
      if (dump_file)
        fprintf (dump_file, " unscanned insn\n");
      df_insn_rescan (insn);
      return;
    }

  if (!INSN_P (insn))
    return;

  df_set_bb_dirty (new_bb);
  if (old_bb)
    {
      if (dump_file)
        fprintf (dump_file, " from %d to %d\n",
                 old_bb->index, new_bb->index);
      df_set_bb_dirty (old_bb);
    }
  else
    if (dump_file)
      fprintf (dump_file, " to %d\n", new_bb->index);
}


/* Helper function for df_ref_change_reg_with_loc.  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
                              struct df_reg_info *new_df,
                              int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  while (the_ref)
    {
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
          && DF_REF_LOC (the_ref)
          && (*DF_REF_LOC (the_ref) == loc))
        {
          df_ref next_ref = DF_REF_NEXT_REG (the_ref);
          df_ref prev_ref = DF_REF_PREV_REG (the_ref);
          df_ref *ref_vec, *ref_vec_t;
          struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
          unsigned int count = 0;

          DF_REF_REGNO (the_ref) = new_regno;
          DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

          /* Pull the_ref out of the old regno chain.  */
          if (prev_ref)
            DF_REF_NEXT_REG (prev_ref) = next_ref;
          else
            old_df->reg_chain = next_ref;
          if (next_ref)
            DF_REF_PREV_REG (next_ref) = prev_ref;
          old_df->n_refs--;

          /* Put the ref into the new regno chain.  */
          DF_REF_PREV_REG (the_ref) = NULL;
          DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
          if (new_df->reg_chain)
            DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
          new_df->reg_chain = the_ref;
          new_df->n_refs++;
          if (DF_REF_BB (the_ref))
            df_set_bb_dirty (DF_REF_BB (the_ref));

          /* The record the ref was in needs to be re-sorted, because
             the regno is a sorting key.  First, find the right
             record.  */
          if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
            ref_vec = insn_info->eq_uses;
          else
            ref_vec = insn_info->uses;
          if (dump_file)
            fprintf (dump_file, "changing reg in insn %d\n",
                     DF_REF_INSN_UID (the_ref));

          ref_vec_t = ref_vec;

          /* Find the length.  */
          while (*ref_vec_t)
            {
              count++;
              ref_vec_t++;
            }
          qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);

          the_ref = next_ref;
        }
      else
        the_ref = DF_REF_NEXT_REG (the_ref);
    }
}
/* Helper function for df_ref_change_reg_with_loc.  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
                              struct df_reg_info *new_df,
                              int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  while (the_ref)
    {
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
          && DF_REF_LOC (the_ref)
          && (*DF_REF_LOC (the_ref) == loc))
        {
          df_ref next_ref = DF_REF_NEXT_REG (the_ref);
          df_ref prev_ref = DF_REF_PREV_REG (the_ref);
          df_ref *ref_vec, *ref_vec_t;
          struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
          unsigned int count = 0;

          DF_REF_REGNO (the_ref) = new_regno;
          DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

          /* Pull the_ref out of the old regno chain.  */
          if (prev_ref)
            DF_REF_NEXT_REG (prev_ref) = next_ref;
          else
            old_df->reg_chain = next_ref;
          if (next_ref)
            DF_REF_PREV_REG (next_ref) = prev_ref;
          old_df->n_refs--;

          /* Put the ref into the new regno chain.  */
          DF_REF_PREV_REG (the_ref) = NULL;
          DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
          if (new_df->reg_chain)
            DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
          new_df->reg_chain = the_ref;
          new_df->n_refs++;
          if (DF_REF_BB (the_ref))
            df_set_bb_dirty (DF_REF_BB (the_ref));

          /* Need to sort the record again that the ref was in because
             the regno is a sorting key.  First, find the right
             record.  */
          if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
            ref_vec = insn_info->eq_uses;
          else
            ref_vec = insn_info->uses;
          if (dump_file)
            fprintf (dump_file, "changing reg in insn %d\n",
                     DF_REF_INSN_UID (the_ref));

          ref_vec_t = ref_vec;

          /* Find the length.  */
          while (*ref_vec_t)
            {
              count++;
              ref_vec_t++;
            }
          qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);

          the_ref = next_ref;
        }
      else
        the_ref = DF_REF_NEXT_REG (the_ref);
    }
}


/* Change the regno of all refs that contained LOC from OLD_REGNO to
   NEW_REGNO.  Refs that do not match LOC are not changed, which means
   that artificial refs are not changed, since they have no loc.  This
   call is to support the SET_REGNO macro.  */

void
df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
{
  if ((!df) || (old_regno == -1) || (old_regno == new_regno))
    return;

  df_grow_reg_info ();

  df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
                                DF_REG_DEF_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
                                DF_REG_USE_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
                                DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
}
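
/* Usage sketch (illustrative): this is the df half of the SET_REGNO
   macro.  Renaming a register rtx in place without going through the
   macro would leave the ref chains keyed on the stale regno, so the
   two steps always travel together, roughly:

     df_ref_change_reg_with_loc (REGNO (reg), new_regno, reg);
     ... then store NEW_REGNO into the rtx itself ...

   Note the df call must see the OLD regno, which is why it happens
   before the field write.  */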
/* Delete the mw_hardregs that point into the eq_notes.  */

static unsigned int
df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
{
  struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
  unsigned int deleted = 0;
  unsigned int count = 0;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  if (!*mw_vec)
    return 0;

  while (*mw_vec)
    {
      if ((*mw_vec)->flags & DF_REF_IN_NOTE)
        {
          struct df_mw_hardreg **temp_vec = mw_vec;

          pool_free (problem_data->mw_reg_pool, *mw_vec);
          /* Shove the remaining ones down one to fill the gap.  While
             this looks n**2, it is highly unusual to have any mw regs
             in eq_notes and the chances of more than one are almost
             nonexistent.  */
          while (*temp_vec)
            {
              *temp_vec = *(temp_vec + 1);
              temp_vec++;
            }
          deleted++;
        }
      else
        {
          mw_vec++;
          count++;
        }
    }

  if (count == 0)
    {
      df_scan_free_mws_vec (insn_info->mw_hardregs);
      insn_info->mw_hardregs = df_null_mw_rec;
      return 0;
    }
  return deleted;
}


/* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN.  */

void
df_notes_rescan (rtx insn)
{
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (!df)
    return;

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return;

  /* Do nothing if the insn hasn't been emitted yet.  */
  if (!BLOCK_FOR_INSN (insn))
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = df_null_ref_rec;
          insn_info->uses = df_null_ref_rec;
          insn_info->eq_uses = df_null_ref_rec;
          insn_info->mw_hardregs = df_null_mw_rec;
        }

      bitmap_clear_bit (&df->insns_to_delete, uid);
      /* If the insn is set to be rescanned, it does not also need its
         notes rescanned.  */
      if (!bitmap_bit_p (&df->insns_to_rescan, uid))
        bitmap_set_bit (&df->insns_to_notes_rescan, uid);
      return;
    }

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (insn_info)
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      rtx note;
      struct df_collection_rec collection_rec;
      unsigned int num_deleted;
      unsigned int mw_len;

      memset (&collection_rec, 0, sizeof (struct df_collection_rec));
      vec_stack_alloc (df_ref, collection_rec.eq_use_vec, 32);
      vec_stack_alloc (df_mw_hardreg_ptr, collection_rec.mw_vec, 32);

      num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
      df_ref_chain_delete (insn_info->eq_uses);
      insn_info->eq_uses = NULL;

      /* Process REG_EQUIV/REG_EQUAL notes.  */
      for (note = REG_NOTES (insn); note;
           note = XEXP (note, 1))
        {
          switch (REG_NOTE_KIND (note))
            {
            case REG_EQUIV:
            case REG_EQUAL:
              df_uses_record (&collection_rec,
                              &XEXP (note, 0), DF_REF_REG_USE,
                              bb, insn_info, DF_REF_IN_NOTE);
            default:
              break;
            }
        }

      /* Find some place to put any new mw_hardregs.  */
      df_canonize_collection_rec (&collection_rec);
      mw_len = collection_rec.mw_vec.length ();
      if (mw_len)
        {
          unsigned int count = 0;
          struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
          while (*mw_rec)
            {
              count++;
              mw_rec++;
            }

          if (count)
            {
              /* Append to the end of the existing record after
                 expanding it if necessary.  */
              if (mw_len > num_deleted)
                {
                  insn_info->mw_hardregs =
                    XRESIZEVEC (struct df_mw_hardreg *,
                                insn_info->mw_hardregs,
                                count + 1 + mw_len);
                }
              memcpy (&insn_info->mw_hardregs[count],
                      collection_rec.mw_vec.address (),
                      mw_len * sizeof (struct df_mw_hardreg *));
              insn_info->mw_hardregs[count + mw_len] = NULL;
              qsort (insn_info->mw_hardregs, count + mw_len,
                     sizeof (struct df_mw_hardreg *), df_mw_compare);
            }
          else
            {
              /* No vector there.  */
              insn_info->mw_hardregs
                = XNEWVEC (struct df_mw_hardreg *, 1 + mw_len);
              memcpy (insn_info->mw_hardregs,
                      collection_rec.mw_vec.address (),
                      mw_len * sizeof (struct df_mw_hardreg *));
              insn_info->mw_hardregs[mw_len] = NULL;
            }
        }
      /* Get rid of the mw_vec so that df_refs_add_to_chains will
         ignore it.  */
      collection_rec.mw_vec.release ();
      df_refs_add_to_chains (&collection_rec, bb, insn);
      collection_rec.eq_use_vec.release ();
    }
  else
    df_insn_rescan (insn);
}
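
/* Usage sketch (illustrative): code that adds or rewrites a
   REG_EQUAL/REG_EQUIV note by hand is expected to follow it with

     df_notes_rescan (insn);

   so that only the eq_uses of INSN are rebuilt, which is much cheaper
   than a full df_insn_rescan of the whole pattern.  */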

/*----------------------------------------------------------------------------
   Hard core instruction scanning code.  No external interfaces here,
   just a lot of routines that look inside insns.
----------------------------------------------------------------------------*/


/* Return true if the contents of two df_ref's are identical.
   It ignores DF_REF_MARKER.  */

static bool
df_ref_equal_p (df_ref ref1, df_ref ref2)
{
  if (!ref2)
    return false;

  if (ref1 == ref2)
    return true;

  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
      || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
      || DF_REF_REG (ref1) != DF_REF_REG (ref2)
      || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
      || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
          != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
      || DF_REF_BB (ref1) != DF_REF_BB (ref2)
      || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
    return false;

  switch (DF_REF_CLASS (ref1))
    {
    case DF_REF_ARTIFICIAL:
    case DF_REF_BASE:
      return true;

    case DF_REF_REGULAR:
      return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);

    default:
      gcc_unreachable ();
    }
  return false;
}


/* Compare REF1 and REF2 for sorting.  This is only called from places
   where all of the refs are of the same type, in the same insn, and
   have the same bb.  So these fields are not checked.  */

static int
df_ref_compare (const void *r1, const void *r2)
{
  const df_ref ref1 = *(const df_ref *) r1;
  const df_ref ref2 = *(const df_ref *) r2;

  if (ref1 == ref2)
    return 0;

  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
    return (int) DF_REF_CLASS (ref1) - (int) DF_REF_CLASS (ref2);

  if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
    return (int) DF_REF_REGNO (ref1) - (int) DF_REF_REGNO (ref2);

  if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
    return (int) DF_REF_TYPE (ref1) - (int) DF_REF_TYPE (ref2);

  if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
    return (int) DF_REF_ORDER (ref1) - (int) DF_REF_ORDER (ref2);

  /* Cannot look at the LOC field on artificial refs.  */
  if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
      && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
    return (int) DF_REF_ORDER (ref1) - (int) DF_REF_ORDER (ref2);

  if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
    {
      /* If two refs are identical except that one of them is from a mw
         and one is not, we need to have the one with the mw first.  */
      if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
          DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
        return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
      else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
        return -1;
      else
        return 1;
    }

  return (int) DF_REF_ORDER (ref1) - (int) DF_REF_ORDER (ref2);
}
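
/* Ordering sketch (illustrative): the effective sort key is roughly
   (class, regno, type, flags), with DF_REF_ORDER as a stable
   tie-breaker and DF_REF_MW_HARDREG refs sorting first among
   otherwise-equal refs.  So a multiword def of hard regs 4 and 5
   sorts as

     def (4, mw), def (5, mw)

   ahead of an otherwise identical non-mw ref of reg 4.  Placing equal
   refs adjacent is what lets df_sort_and_compress_refs below
   deduplicate in one linear pass.  */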

static void
df_swap_refs (vec<df_ref, va_stack> *ref_vec, int i, int j)
{
  df_ref tmp = (*ref_vec)[i];
  (*ref_vec)[i] = (*ref_vec)[j];
  (*ref_vec)[j] = tmp;
}

/* Sort and compress a set of refs.  */

static void
df_sort_and_compress_refs (vec<df_ref, va_stack> *ref_vec)
{
  unsigned int count;
  unsigned int i;
  unsigned int dist = 0;

  count = ref_vec->length ();

  /* If there are 1 or 0 elements, there is nothing to do.  */
  if (count < 2)
    return;
  else if (count == 2)
    {
      df_ref r0 = (*ref_vec)[0];
      df_ref r1 = (*ref_vec)[1];
      if (df_ref_compare (&r0, &r1) > 0)
        df_swap_refs (ref_vec, 0, 1);
    }
  else
    {
      for (i = 0; i < count - 1; i++)
        {
          df_ref r0 = (*ref_vec)[i];
          df_ref r1 = (*ref_vec)[i + 1];
          if (df_ref_compare (&r0, &r1) >= 0)
            break;
        }
      /* If the array is already strictly ordered, which is the most
         common case for large COUNT (this happens for CALL INSNs),
         there is no need to sort and filter out duplicates.
         Simply return the count.  Make sure DF_GET_ADD_REFS adds refs
         in the increasing order of DF_REF_COMPARE.  */
      if (i == count - 1)
        return;
      ref_vec->qsort (df_ref_compare);
    }

  for (i = 0; i < count - dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
             && df_ref_equal_p ((*ref_vec)[i],
                                (*ref_vec)[i + dist + 1]))
        {
          df_free_ref ((*ref_vec)[i + dist + 1]);
          dist++;
        }
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
        (*ref_vec)[i + 1] = (*ref_vec)[i + dist + 1];
    }

  count -= dist;
  ref_vec->truncate (count);
}


/* Return true if the contents of two df_mw_hardregs are identical.  */

static bool
df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
{
  if (!mw2)
    return false;
  return (mw1 == mw2)
         || (mw1->mw_reg == mw2->mw_reg
             && mw1->type == mw2->type
             && mw1->flags == mw2->flags
             && mw1->start_regno == mw2->start_regno
             && mw1->end_regno == mw2->end_regno);
}


/* Compare MW1 and MW2 for sorting.  */

static int
df_mw_compare (const void *m1, const void *m2)
{
  const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const *) m1;
  const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const *) m2;

  if (mw1 == mw2)
    return 0;

  if (mw1->type != mw2->type)
    return mw1->type - mw2->type;

  if (mw1->flags != mw2->flags)
    return mw1->flags - mw2->flags;

  if (mw1->start_regno != mw2->start_regno)
    return mw1->start_regno - mw2->start_regno;

  if (mw1->end_regno != mw2->end_regno)
    return mw1->end_regno - mw2->end_regno;

  if (mw1->mw_reg != mw2->mw_reg)
    return mw1->mw_order - mw2->mw_order;

  return 0;
}

/* Sort and compress a set of mws.  */

static void
df_sort_and_compress_mws (vec<df_mw_hardreg_ptr, va_stack> *mw_vec)
{
  unsigned int count;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  unsigned int dist = 0;

  count = mw_vec->length ();
  if (count < 2)
    return;
  else if (count == 2)
    {
      struct df_mw_hardreg *m0 = (*mw_vec)[0];
      struct df_mw_hardreg *m1 = (*mw_vec)[1];
      if (df_mw_compare (&m0, &m1) > 0)
        {
          struct df_mw_hardreg *tmp = (*mw_vec)[0];
          (*mw_vec)[0] = (*mw_vec)[1];
          (*mw_vec)[1] = tmp;
        }
    }
  else
    mw_vec->qsort (df_mw_compare);

  for (i = 0; i < count - dist; i++)
    {
      /* Find the next mw that is not equal to the current one.  */
      while (i + dist + 1 < count
             && df_mw_equal_p ((*mw_vec)[i], (*mw_vec)[i + dist + 1]))
        {
          pool_free (problem_data->mw_reg_pool,
                     (*mw_vec)[i + dist + 1]);
          dist++;
        }
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
        (*mw_vec)[i + 1] = (*mw_vec)[i + dist + 1];
    }

  count -= dist;
  mw_vec->truncate (count);
}


/* Sort and remove duplicates from the COLLECTION_REC.  */

static void
df_canonize_collection_rec (struct df_collection_rec *collection_rec)
{
  df_sort_and_compress_refs (&collection_rec->def_vec);
  df_sort_and_compress_refs (&collection_rec->use_vec);
  df_sort_and_compress_refs (&collection_rec->eq_use_vec);
  df_sort_and_compress_mws (&collection_rec->mw_vec);
}


/* Add the new df_ref to appropriate reg_info/ref_info chains.  */

static void
df_install_ref (df_ref this_ref,
                struct df_reg_info *reg_info,
                struct df_ref_info *ref_info,
                bool add_to_table)
{
  unsigned int regno = DF_REF_REGNO (this_ref);
  /* Add the ref to the reg_{def,use,eq_use} chain.  */
  df_ref head = reg_info->reg_chain;

  reg_info->reg_chain = this_ref;
  reg_info->n_refs++;

  if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[regno]++;
    }

  gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
                       && DF_REF_PREV_REG (this_ref) == NULL);

  DF_REF_NEXT_REG (this_ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (this_ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = this_ref;

  if (add_to_table)
    {
      gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
      df_check_and_grow_ref_info (ref_info, 1);
      DF_REF_ID (this_ref) = ref_info->table_size;
      /* Add the ref to the big array of defs.  */
      ref_info->refs[ref_info->table_size] = this_ref;
      ref_info->table_size++;
    }
  else
    DF_REF_ID (this_ref) = -1;

  ref_info->total_size++;
}
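
/* Usage sketch (illustrative): the per-register chains built here are
   what clients walk via the accessors in df.h, e.g. visiting every
   def of REGNO:

     df_ref ref;
     for (ref = DF_REG_DEF_CHAIN (regno); ref; ref = DF_REF_NEXT_REG (ref))
       {
         rtx insn = DF_REF_INSN (ref);
         ...
       }

   The chain is doubly linked (DF_REF_PREV_REG) precisely so that
   df_ref_change_reg_with_loc_1 above can unlink a ref in O(1).  */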

/* This function takes one of the groups of refs (defs, uses or
   eq_uses) and installs the entire group into the insn.  It also adds
   each of these refs into the appropriate chains.  */

static df_ref *
df_install_refs (basic_block bb,
                 vec<df_ref, va_stack> old_vec,
                 struct df_reg_info **reg_info,
                 struct df_ref_info *ref_info,
                 bool is_notes)
{
  unsigned int count;

  count = old_vec.length ();
  if (count)
    {
      df_ref *new_vec = XNEWVEC (df_ref, count + 1);
      bool add_to_table;
      df_ref this_ref;
      unsigned int ix;

      switch (ref_info->ref_order)
        {
        case DF_REF_ORDER_UNORDERED_WITH_NOTES:
        case DF_REF_ORDER_BY_REG_WITH_NOTES:
        case DF_REF_ORDER_BY_INSN_WITH_NOTES:
          ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
          add_to_table = true;
          break;
        case DF_REF_ORDER_UNORDERED:
        case DF_REF_ORDER_BY_REG:
        case DF_REF_ORDER_BY_INSN:
          ref_info->ref_order = DF_REF_ORDER_UNORDERED;
          add_to_table = !is_notes;
          break;
        default:
          add_to_table = false;
          break;
        }

      /* Do not add if ref is not in the right blocks.  */
      if (add_to_table && df->analyze_subset)
        add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

      FOR_EACH_VEC_ELT (old_vec, ix, this_ref)
        {
          new_vec[ix] = this_ref;
          df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
                          ref_info, add_to_table);
        }

      new_vec[count] = NULL;
      return new_vec;
    }
  else
    return df_null_ref_rec;
}


/* This function takes the mws and installs the entire group into the
   insn.  */

static struct df_mw_hardreg **
df_install_mws (vec<df_mw_hardreg_ptr, va_stack> old_vec)
{
  unsigned int count;

  count = old_vec.length ();
  if (count)
    {
      struct df_mw_hardreg **new_vec
        = XNEWVEC (struct df_mw_hardreg *, count + 1);
      memcpy (new_vec, old_vec.address (),
              sizeof (struct df_mw_hardreg *) * count);
      new_vec[count] = NULL;
      return new_vec;
    }
  else
    return df_null_mw_rec;
}


/* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
   chains and update other necessary information.  */

static void
df_refs_add_to_chains (struct df_collection_rec *collection_rec,
                       basic_block bb, rtx insn)
{
  if (insn)
    {
      struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
      /* If there is a vector in the collection rec, add it to the
         insn.  A null rec is a signal that the caller will handle the
         chain specially.  */
      if (collection_rec->def_vec.exists ())
        {
          df_scan_free_ref_vec (insn_rec->defs);
          insn_rec->defs
            = df_install_refs (bb, collection_rec->def_vec,
                               df->def_regs,
                               &df->def_info, false);
        }
      if (collection_rec->use_vec.exists ())
        {
          df_scan_free_ref_vec (insn_rec->uses);
          insn_rec->uses
            = df_install_refs (bb, collection_rec->use_vec,
                               df->use_regs,
                               &df->use_info, false);
        }
      if (collection_rec->eq_use_vec.exists ())
        {
          df_scan_free_ref_vec (insn_rec->eq_uses);
          insn_rec->eq_uses
            = df_install_refs (bb, collection_rec->eq_use_vec,
                               df->eq_use_regs,
                               &df->use_info, true);
        }
      if (collection_rec->mw_vec.exists ())
        {
          df_scan_free_mws_vec (insn_rec->mw_hardregs);
          insn_rec->mw_hardregs
            = df_install_mws (collection_rec->mw_vec);
        }
    }
  else
    {
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);

      df_scan_free_ref_vec (bb_info->artificial_defs);
      bb_info->artificial_defs
        = df_install_refs (bb, collection_rec->def_vec,
                           df->def_regs,
                           &df->def_info, false);
      df_scan_free_ref_vec (bb_info->artificial_uses);
      bb_info->artificial_uses
        = df_install_refs (bb, collection_rec->use_vec,
                           df->use_regs,
                           &df->use_info, false);
    }
}

/* Allocate a ref and initialize its fields.  */

static df_ref
df_ref_create_structure (enum df_ref_class cl,
                         struct df_collection_rec *collection_rec,
                         rtx reg, rtx *loc,
                         basic_block bb, struct df_insn_info *info,
                         enum df_ref_type ref_type,
                         int ref_flags)
{
  df_ref this_ref = NULL;
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  switch (cl)
    {
    case DF_REF_BASE:
      this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_ARTIFICIAL:
      this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
      this_ref->artificial_ref.bb = bb;
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_REGULAR:
      this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
      this_ref->regular_ref.loc = loc;
      gcc_checking_assert (loc);
      break;
    }

  DF_REF_CLASS (this_ref) = cl;
  DF_REF_ID (this_ref) = -1;
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_INSN_INFO (this_ref) = info;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_NEXT_REG (this_ref) = NULL;
  DF_REF_PREV_REG (this_ref) = NULL;
  DF_REF_ORDER (this_ref) = df->ref_order++;

  /* We need to clear this bit because fwprop, and in the future
     possibly other optimizations, sometimes create new refs using old
     refs as the model.  */
  DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);

  /* See if this ref needs to have DF_HARD_REG_LIVE bit set.  */
  if (regno < FIRST_PSEUDO_REGISTER
      && !DF_REF_IS_ARTIFICIAL (this_ref)
      && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
    {
      if (DF_REF_REG_DEF_P (this_ref))
        {
          if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
            DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
        }
      else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
                 && (regno == FRAME_POINTER_REGNUM
                     || regno == ARG_POINTER_REGNUM)))
        DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
    }

  if (collection_rec)
    {
      if (DF_REF_REG_DEF_P (this_ref))
        collection_rec->def_vec.safe_push (this_ref);
      else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
        collection_rec->eq_use_vec.safe_push (this_ref);
      else
        collection_rec->use_vec.safe_push (this_ref);
    }
  else
    df_install_ref_incremental (this_ref);

  return this_ref;
}

/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  */

static void
df_ref_record (enum df_ref_class cl,
               struct df_collection_rec *collection_rec,
               rtx reg, rtx *loc,
               basic_block bb, struct df_insn_info *insn_info,
               enum df_ref_type ref_type,
               int ref_flags)
{
  unsigned int regno;

  gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      struct df_mw_hardreg *hardreg = NULL;
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;
      unsigned int i;
      unsigned int endregno;
      df_ref ref;

      if (GET_CODE (reg) == SUBREG)
        {
          regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
                                        SUBREG_BYTE (reg), GET_MODE (reg));
          endregno = regno + subreg_nregs (reg);
        }
      else
        endregno = END_HARD_REGNO (reg);

      /* If this is a multiword hardreg, we create some extra
         datastructures that will enable us to easily build REG_DEAD
         and REG_UNUSED notes.  */
      if (collection_rec
          && (endregno != regno + 1) && insn_info)
        {
          /* Sets to a subreg of a multiword register are partial.
             Sets to a non-subreg of a multiword register are not.  */
          if (GET_CODE (reg) == SUBREG)
            ref_flags |= DF_REF_PARTIAL;
          ref_flags |= DF_REF_MW_HARDREG;

          hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
          hardreg->type = ref_type;
          hardreg->flags = ref_flags;
          hardreg->mw_reg = reg;
          hardreg->start_regno = regno;
          hardreg->end_regno = endregno - 1;
          hardreg->mw_order = df->ref_order++;
          collection_rec->mw_vec.safe_push (hardreg);
        }

      for (i = regno; i < endregno; i++)
        {
          ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
                                         bb, insn_info, ref_type, ref_flags);

          gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
        }
    }
  else
    {
      df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
                               ref_type, ref_flags);
    }
}


/* A set to a non-paradoxical SUBREG for which the number of word_mode units
   covered by the outer mode is smaller than that covered by the inner mode,
   is a read-modify-write operation.
   This function returns true iff the SUBREG X is such a SUBREG.  */

bool
df_read_modify_subreg_p (rtx x)
{
  unsigned int isize, osize;
  if (GET_CODE (x) != SUBREG)
    return false;
  isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
  osize = GET_MODE_SIZE (GET_MODE (x));
  return isize > osize
         && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
}
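
/* Example (illustrative): on a target where word_mode is SImode and
   REGMODE_NATURAL_SIZE is the word size, a store to

     (subreg:SI (reg:DI d) 0)

   writes only the low word, so the insn must also be treated as
   reading (reg:DI d): df_read_modify_subreg_p returns true and the
   def is flagged DF_REF_READ_WRITE | DF_REF_PARTIAL.  A full store to
   (reg:DI d) itself is not read-modify-write.  */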

/* Process all the registers defined in the rtx pointed to by LOC.
   Autoincrement/decrement definitions will be picked up by df_uses_record.
   Any change here has to be matched in df_find_hard_reg_defs_1.  */

static void
df_def_record_1 (struct df_collection_rec *collection_rec,
                 rtx *loc, basic_block bb, struct df_insn_info *insn_info,
                 int flags)
{
  rtx dst = *loc;

  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
        {
          rtx temp = XVECEXP (dst, 0, i);
          gcc_assert (GET_CODE (temp) == EXPR_LIST);
          df_def_record_1 (collection_rec, &XEXP (temp, 0),
                           bb, insn_info, flags);
        }
      return;
    }

  if (GET_CODE (dst) == STRICT_LOW_PART)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  if (GET_CODE (dst) == ZERO_EXTRACT)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst))
    {
      df_ref_record (DF_REF_REGULAR, collection_rec,
                     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);

      /* We want to keep sp alive everywhere - by making all
         writes to sp also uses of sp.  */
      if (REGNO (dst) == STACK_POINTER_REGNUM)
        df_ref_record (DF_REF_BASE, collection_rec,
                       dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
    }
  else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
    {
      if (df_read_modify_subreg_p (dst))
        flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;

      flags |= DF_REF_SUBREG;

      df_ref_record (DF_REF_REGULAR, collection_rec,
                     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
    }
}


/* Process all the registers defined in the pattern rtx, X.  Any change
   here has to be matched in df_find_hard_reg_defs.  */

static void
df_defs_record (struct df_collection_rec *collection_rec,
                rtx x, basic_block bb, struct df_insn_info *insn_info,
                int flags)
{
  RTX_CODE code = GET_CODE (x);
  int i;

  switch (code)
    {
    case SET:
      df_def_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
      break;

    case CLOBBER:
      flags |= DF_REF_MUST_CLOBBER;
      df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
      break;

    case COND_EXEC:
      df_defs_record (collection_rec, COND_EXEC_CODE (x),
                      bb, insn_info, DF_REF_CONDITIONAL);
      break;

    case PARALLEL:
      for (i = 0; i < XVECLEN (x, 0); i++)
        df_defs_record (collection_rec, XVECEXP (x, 0, i),
                        bb, insn_info, flags);
      break;
    default:
      /* No DEFs to record in other cases.  */
      break;
    }
}

/* Set bits in *DEFS for hard registers found in the rtx DST, which is the
   destination of a set or clobber.  This has to match the logic in
   df_def_record_1.  */

static void
df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
{
  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
        {
          rtx temp = XVECEXP (dst, 0, i);
          gcc_assert (GET_CODE (temp) == EXPR_LIST);
          df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
        }
      return;
    }

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    dst = XEXP (dst, 0);

  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst) && HARD_REGISTER_P (dst))
    SET_HARD_REG_BIT (*defs, REGNO (dst));
  else if (GET_CODE (dst) == SUBREG
           && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
    SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
}

/* Set bits in *DEFS for hard registers defined in the pattern X.  This
   has to match the logic in df_defs_record.  */

static void
df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
{
  RTX_CODE code = GET_CODE (x);
  int i;

  switch (code)
    {
    case SET:
      df_find_hard_reg_defs_1 (SET_DEST (x), defs);
      break;

    case CLOBBER:
      df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
      break;

    case COND_EXEC:
      df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
      break;

    case PARALLEL:
      for (i = 0; i < XVECLEN (x, 0); i++)
        df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
      break;
    default:
      /* No DEFs to record in other cases.  */
      break;
    }
}

/* Process all the registers used in the rtx at address LOC.  */

static void
df_uses_record (struct df_collection_rec *collection_rec,
                rtx *loc, enum df_ref_type ref_type,
                basic_block bb, struct df_insn_info *insn_info,
                int flags)
{
  RTX_CODE code;
  rtx x;

 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
        df_uses_record (collection_rec,
                        &XEXP (XEXP (x, 0), 0),
                        DF_REF_REG_MEM_STORE,
                        bb, insn_info,
                        flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      df_uses_record (collection_rec,
                      &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
                      bb, insn_info, flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
        {
          loc = &SUBREG_REG (x);
          df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
          return;
        }
      /* ... Fall through ...  */

    case REG:
      df_ref_record (DF_REF_REGULAR, collection_rec,
                     x, loc, bb, insn_info,
                     ref_type, flags);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      {
        df_uses_record (collection_rec,
                        &XEXP (x, 1), ref_type, bb, insn_info, flags);
        df_uses_record (collection_rec,
                        &XEXP (x, 2), ref_type, bb, insn_info, flags);

        /* If the parameters to the zero or sign extract are
           constants, strip them off and recurse, otherwise there is
           no information that we can gain from this operation.  */
        if (code == ZERO_EXTRACT)
          flags |= DF_REF_ZERO_EXTRACT;
        else
          flags |= DF_REF_SIGN_EXTRACT;

        df_uses_record (collection_rec,
                        &XEXP (x, 0), ref_type, bb, insn_info, flags);
        return;
      }
      break;

    case SET:
      {
        rtx dst = SET_DEST (x);
        gcc_assert (!(flags & DF_REF_IN_NOTE));
        df_uses_record (collection_rec,
                        &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);

        switch (GET_CODE (dst))
          {
          case SUBREG:
            if (df_read_modify_subreg_p (dst))
              {
                df_uses_record (collection_rec, &SUBREG_REG (dst),
                                DF_REF_REG_USE, bb, insn_info,
                                flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
                break;
              }
            /* Fall through.  */
          case REG:
          case PARALLEL:
          case SCRATCH:
          case PC:
          case CC0:
            break;
          case MEM:
            df_uses_record (collection_rec, &XEXP (dst, 0),
                            DF_REF_REG_MEM_STORE, bb, insn_info, flags);
            break;
          case STRICT_LOW_PART:
            {
              rtx *temp = &XEXP (dst, 0);
              /* A strict_low_part uses the whole REG and not just the
                 SUBREG.  */
              dst = XEXP (dst, 0);
              df_uses_record (collection_rec,
                              (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
                              DF_REF_REG_USE, bb, insn_info,
                              DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
            }
            break;
          case ZERO_EXTRACT:
            {
              df_uses_record (collection_rec, &XEXP (dst, 1),
                              DF_REF_REG_USE, bb, insn_info, flags);
              df_uses_record (collection_rec, &XEXP (dst, 2),
                              DF_REF_REG_USE, bb, insn_info, flags);
              if (GET_CODE (XEXP (dst, 0)) == MEM)
                df_uses_record (collection_rec, &XEXP (dst, 0),
                                DF_REF_REG_USE, bb, insn_info,
                                flags);
              else
                df_uses_record (collection_rec, &XEXP (dst, 0),
                                DF_REF_REG_USE, bb, insn_info,
                                DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
            }
            break;

          default:
            gcc_unreachable ();
          }
        return;
      }

    case RETURN:
    case SIMPLE_RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
        /* Traditional and volatile asm instructions must be
           considered to use and clobber all hard registers, all
           pseudo-registers and all of memory.  So must TRAP_IF and
           UNSPEC_VOLATILE operations.

           Consider for instance a volatile asm that changes the fpu
           rounding mode.  An insn should not be moved across this
           even if it only uses pseudo-regs because it might give an
           incorrectly rounded result.

           However, flow.c's liveness computation did *not* do this,
           giving the reasoning as " ?!? Unfortunately, marking all
           hard registers as live causes massive problems for the
           register allocator and marking all pseudos as live creates
           mountains of uninitialized variable warnings."

           In order to maintain the status quo with regard to liveness
           and uses, we do what flow.c did and just mark any regs we
           can find in ASM_OPERANDS as used.  Global asm insns are
           scanned separately and regs_asm_clobbered is filled out.

           For all ASM_OPERANDS, we must traverse the vector of input
           operands.  We can not just fall through here since then we
           would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
           which do not indicate traditional asms unlike their normal
           usage.  */
        if (code == ASM_OPERANDS)
          {
            int j;

            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
              df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
                              DF_REF_REG_USE, bb, insn_info, flags);
            return;
          }
        break;
      }

    case VAR_LOCATION:
      df_uses_record (collection_rec,
                      &PAT_VAR_LOCATION_LOC (x),
                      DF_REF_REG_USE, bb, insn_info, flags);
      return;

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      gcc_assert (!DEBUG_INSN_P (insn_info->insn));
      /* Catch the def of the register being modified.  */
      df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
                     bb, insn_info,
                     DF_REF_REG_DEF,
                     flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            /* Tail recursive case: save a function call level.  */
            if (i == 0)
              {
                loc = &XEXP (x, 0);
                goto retry;
              }
            df_uses_record (collection_rec, &XEXP (x, i), ref_type,
                            bb, insn_info, flags);
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              df_uses_record (collection_rec,
                              &XVECEXP (x, i, j), ref_type,
                              bb, insn_info, flags);
          }
      }
  }

  return;
}

/* For all DF_REF_CONDITIONAL defs, add a corresponding use.  */

static void
df_get_conditional_uses (struct df_collection_rec *collection_rec)
{
  unsigned int ix;
  df_ref ref;

  FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
    {
      if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
        {
          df_ref use;

          use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
                                         DF_REF_LOC (ref), DF_REF_BB (ref),
                                         DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
                                         DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
          DF_REF_REGNO (use) = DF_REF_REGNO (ref);
        }
    }
}
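
/* Example (illustrative): for a conditionally executed set such as

     (cond_exec (ne (reg:CC cc) (const_int 0))
                (set (reg:SI r) (reg:SI s)))

   the def of r may not happen at runtime, so r's previous value can
   flow through the insn.  df_get_conditional_uses therefore pairs the
   DF_REF_CONDITIONAL def of r with an extra use of r in the same
   insn.  */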

/* Get call's extra defs and uses (track caller-saved registers).  */

static void
df_get_call_refs (struct df_collection_rec *collection_rec,
                  basic_block bb,
                  struct df_insn_info *insn_info,
                  int flags)
{
  rtx note;
  bool is_sibling_call;
  unsigned int i;
  HARD_REG_SET defs_generated;

  CLEAR_HARD_REG_SET (defs_generated);
  df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
  is_sibling_call = SIBLING_CALL_P (insn_info->insn);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (i == STACK_POINTER_REGNUM)
        /* The stack ptr is used (honorarily) by a CALL insn.  */
        df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
                       NULL, bb, insn_info, DF_REF_REG_USE,
                       DF_REF_CALL_STACK_USAGE | flags);
      else if (global_regs[i])
        {
          /* Calls to const functions cannot access any global registers and
             calls to pure functions cannot set them.  All other calls may
             reference any of the global registers, so they are recorded as
             used.  */
          if (!RTL_CONST_CALL_P (insn_info->insn))
            {
              df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
                             NULL, bb, insn_info, DF_REF_REG_USE, flags);
              if (!RTL_PURE_CALL_P (insn_info->insn))
                df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
                               NULL, bb, insn_info, DF_REF_REG_DEF, flags);
            }
        }
      else if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)
               /* no clobbers for regs that are the result of the call */
               && !TEST_HARD_REG_BIT (defs_generated, i)
               && (!is_sibling_call
                   || !bitmap_bit_p (df->exit_block_uses, i)
                   || refers_to_regno_p (i, i + 1,
                                         crtl->return_rtx, NULL)))
        df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
                       NULL, bb, insn_info, DF_REF_REG_DEF,
                       DF_REF_MAY_CLOBBER | flags);
    }

  /* Record the registers used to pass arguments, and those explicitly
     noted as clobbered.  */
  for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      if (GET_CODE (XEXP (note, 0)) == USE)
        df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
                        DF_REF_REG_USE, bb, insn_info, flags);
      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
        {
          if (REG_P (XEXP (XEXP (note, 0), 0)))
            {
              unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
              if (!TEST_HARD_REG_BIT (defs_generated, regno))
                df_defs_record (collection_rec, XEXP (note, 0), bb,
                                insn_info, flags);
            }
          else
            df_uses_record (collection_rec, &XEXP (note, 0),
                            DF_REF_REG_USE, bb, insn_info, flags);
        }
    }

  return;
}

/* Collect all refs in the INSN.  This function is free of any
   side-effects - it will create and return a list of df_ref's in the
   COLLECTION_REC without putting those refs into the existing ref
   chains and reg chains.  */

static void
df_insn_refs_collect (struct df_collection_rec *collection_rec,
                      basic_block bb, struct df_insn_info *insn_info)
{
  rtx note;
  bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);

  /* Clear out the collection record.  */
  collection_rec->def_vec.truncate (0);
  collection_rec->use_vec.truncate (0);
  collection_rec->eq_use_vec.truncate (0);
  collection_rec->mw_vec.truncate (0);

  /* Process REG_EQUIV/REG_EQUAL notes.  */
  for (note = REG_NOTES (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EQUIV:
        case REG_EQUAL:
          df_uses_record (collection_rec,
                          &XEXP (note, 0), DF_REF_REG_USE,
                          bb, insn_info, DF_REF_IN_NOTE);
          break;
        case REG_NON_LOCAL_GOTO:
          /* The frame ptr is used by a non-local goto.  */
          df_ref_record (DF_REF_BASE, collection_rec,
                         regno_reg_rtx[FRAME_POINTER_REGNUM],
                         NULL, bb, insn_info,
                         DF_REF_REG_USE, 0);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
          df_ref_record (DF_REF_BASE, collection_rec,
                         regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
                         NULL, bb, insn_info,
                         DF_REF_REG_USE, 0);
#endif
          break;
        default:
          break;
        }
    }

  /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
     uses from CALL_INSN_FUNCTION_USAGE.  */
  if (CALL_P (insn_info->insn))
    df_get_call_refs (collection_rec, bb, insn_info,
                      (is_cond_exec) ? DF_REF_CONDITIONAL : 0);

  /* Record other defs.  These should be mostly for DF_REF_REGULAR, so
     that a qsort on the defs is unnecessary in most cases.  */
  df_defs_record (collection_rec,
                  PATTERN (insn_info->insn), bb, insn_info, 0);

  /* Record the register uses.  */
  df_uses_record (collection_rec,
                  &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);

  /* DF_REF_CONDITIONAL needs corresponding USES.  */
  if (is_cond_exec)
    df_get_conditional_uses (collection_rec);

  df_canonize_collection_rec (collection_rec);
}
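
/* Flow sketch (illustrative): rescanning one insn is essentially

     df_insn_refs_collect (&collection_rec, bb, insn_info);
     df_refs_add_to_chains (&collection_rec, bb, insn);

   with the collection rec's vectors stack-allocated around the pair of
   calls; this is the pattern df_insn_rescan (earlier in this file) and
   df_bb_refs_record (below) both follow.  The collection rec is purely
   a scratch area between the side-effect-free collect step and the
   install step.  */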

/* Recompute the luids for the insns in BB.  */

void
df_recompute_luids (basic_block bb)
{
  rtx insn;
  int luid = 0;

  df_grow_insn_info ();

  /* Scan the block an insn at a time from beginning to end.  */
  FOR_BB_INSNS (bb, insn)
    {
      struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
      /* Inserting labels does not always trigger the incremental
         rescanning.  */
      if (!insn_info)
        {
          gcc_assert (!INSN_P (insn));
          insn_info = df_insn_create_insn_record (insn);
        }

      DF_INSN_INFO_LUID (insn_info) = luid;
      if (INSN_P (insn))
        luid++;
    }
}


/* Collect all artificial refs at the block level for BB and add them
   to COLLECTION_REC.  */

static void
df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
{
  collection_rec->def_vec.truncate (0);
  collection_rec->use_vec.truncate (0);
  collection_rec->eq_use_vec.truncate (0);
  collection_rec->mw_vec.truncate (0);

  if (bb->index == ENTRY_BLOCK)
    {
      df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
      return;
    }
  else if (bb->index == EXIT_BLOCK)
    {
      df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
      return;
    }

#ifdef EH_RETURN_DATA_REGNO
  if (bb_has_eh_pred (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
        {
          unsigned regno = EH_RETURN_DATA_REGNO (i);
          if (regno == INVALID_REGNUM)
            break;
          df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
                         bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
        }
    }
#endif

  /* Add the hard_frame_pointer if this block is the target of a
     non-local goto.  */
  if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
                   bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);

  /* Add the artificial uses.  */
  if (bb->index >= NUM_FIXED_BLOCKS)
    {
      bitmap_iterator bi;
      unsigned int regno;
      bitmap au = bb_has_eh_pred (bb)
                  ? &df->eh_block_artificial_uses
                  : &df->regular_block_artificial_uses;

      EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
        {
          df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
                         bb, NULL, DF_REF_REG_USE, 0);
        }
    }

  df_canonize_collection_rec (collection_rec);
}

/* Record all the refs within the basic block BB_INDEX and scan the
   instructions if SCAN_INSNS.  */

void
df_bb_refs_record (int bb_index, bool scan_insns)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  rtx insn;
  int luid = 0;
  struct df_collection_rec collection_rec;

  if (!df)
    return;

  df_grow_bb_info (df_scan);
  vec_stack_alloc (df_ref, collection_rec.def_vec, 128);
  vec_stack_alloc (df_ref, collection_rec.use_vec, 32);
  vec_stack_alloc (df_ref, collection_rec.eq_use_vec, 32);
  vec_stack_alloc (df_mw_hardreg_ptr, collection_rec.mw_vec, 32);

  if (scan_insns)
    /* Scan the block an insn at a time from beginning to end.  */
    FOR_BB_INSNS (bb, insn)
      {
        struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
        gcc_assert (!insn_info);

        insn_info = df_insn_create_insn_record (insn);
        if (INSN_P (insn))
          {
            /* Record refs within INSN.  */
            DF_INSN_INFO_LUID (insn_info) = luid++;
            df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
            df_refs_add_to_chains (&collection_rec, bb, insn);
          }
        DF_INSN_INFO_LUID (insn_info) = luid;
      }

  /* Other block level artificial refs.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_add_to_chains (&collection_rec, bb, NULL);

  collection_rec.def_vec.release ();
  collection_rec.use_vec.release ();
  collection_rec.eq_use_vec.release ();
  collection_rec.mw_vec.release ();

  /* Now that the block has been processed, set the block as dirty so
     LR and LIVE will get it processed.  */
  df_set_bb_dirty (bb);
}


/* Get the artificial use set for a regular (i.e. non-exit/non-entry)
   block.  */

static void
df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
{
#ifdef EH_USES
  unsigned int i;
#endif

  bitmap_clear (regular_block_artificial_uses);

  if (reload_completed)
    {
      if (frame_pointer_needed)
        bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
    }
  else
    /* Before reload, there are a few registers that must be forced
       live everywhere -- which might not already be the case for
       blocks within infinite loops.  */
    {
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);

#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
#endif

      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if (picreg != INVALID_REGNUM
          && fixed_regs[picreg])
        bitmap_set_bit (regular_block_artificial_uses, picreg);
    }
  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);

#ifdef EH_USES
  /* EH_USES registers are used:
     1) at all insns that might throw (calls or with -fnon-call-exceptions
        trapping insns)
     2) in all EH edges
     3) to support backtraces and/or debugging, anywhere between their
        initialization and where the saved registers are restored from
        them, including the cases where we don't reach the epilogue
        (noreturn call or infinite loop).  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (EH_USES (i))
      bitmap_set_bit (regular_block_artificial_uses, i);
#endif
}

/* Get the artificial use set for an eh block.  */

static void
df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
{
  bitmap_clear (eh_block_artificial_uses);

  /* The following code (down through the arg_pointer setting) APPEARS
     to be necessary because there is nothing that actually describes
     what the exception handling code may actually need to keep
     alive.  */
  if (reload_completed)
    {
      if (frame_pointer_needed)
        {
          bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
          bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
#endif
        }
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
#endif
    }
}



/*----------------------------------------------------------------------------
   Specialized hard register scanning functions.
----------------------------------------------------------------------------*/


/* Mark a register in SET.  Hard registers in large modes get all
   of their component registers set as well.  */

static void
df_mark_reg (rtx reg, void *vset)
{
  bitmap set = (bitmap) vset;
  int regno = REGNO (reg);

  gcc_assert (GET_MODE (reg) != BLKmode);

  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int n = hard_regno_nregs[regno][GET_MODE (reg)];
      bitmap_set_range (set, regno, n);
    }
  else
    bitmap_set_bit (set, regno);
}

/* Set the bit for regs that are considered being defined at the entry.  */

static void
df_get_entry_block_def_set (bitmap entry_block_defs)
{
  rtx r;
  int i;

  bitmap_clear (entry_block_defs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (global_regs[i])
        bitmap_set_bit (entry_block_defs, i);
      if (FUNCTION_ARG_REGNO_P (i))
        bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
    }

  /* The always important stack pointer.  */
  bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (HAVE_prologue && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
         pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
          bitmap_set_bit (entry_block_defs, i);
    }

  r = targetm.calls.struct_value_rtx (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  /* If the function has an incoming STATIC_CHAIN, it has to show up
     in the entry def set.  */
  r = targetm.calls.static_chain (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  if ((!reload_completed) || frame_pointer_needed)
    {
      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
#endif
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
#ifdef PIC_OFFSET_TABLE_REGNUM
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
#endif

#ifdef PIC_OFFSET_TABLE_REGNUM
      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if (picreg != INVALID_REGNUM
          && fixed_regs[picreg])
        bitmap_set_bit (entry_block_defs, picreg);
#endif
    }

#ifdef INCOMING_RETURN_ADDR_RTX
  if (REG_P (INCOMING_RETURN_ADDR_RTX))
    bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

  targetm.extra_live_on_entry (entry_block_defs);
}


/* Collect the (conservative) set of hard registers that are defined on
   entry to the function.  It uses df->entry_block_defs to determine
   which register references to include.  */

static void
df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
                             bitmap entry_block_defs)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
    {
      df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
                     ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
    }

  df_canonize_collection_rec (collection_rec);
}


/* Record the (conservative) set of hard registers that are defined on
   entry to the function.  */

static void
df_record_entry_block_defs (bitmap entry_block_defs)
{
  struct df_collection_rec collection_rec;
  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  vec_stack_alloc (df_ref, collection_rec.def_vec, FIRST_PSEUDO_REGISTER);
  df_entry_block_defs_collect (&collection_rec, entry_block_defs);

  /* Process bb_refs chain.  */
  df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
  collection_rec.def_vec.release ();
}


/* Update the defs in the entry block.  */

void
df_update_entry_block_defs (void)
{
  bitmap_head refs;
  bool changed = false;

  bitmap_initialize (&refs, &df_bitmap_obstack);
  df_get_entry_block_def_set (&refs);
  if (df->entry_block_defs)
    {
      if (!bitmap_equal_p (df->entry_block_defs, &refs))
        {
          struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
          df_ref_chain_delete_du_chain (bb_info->artificial_defs);
          df_ref_chain_delete (bb_info->artificial_defs);
          bb_info->artificial_defs = NULL;
          changed = true;
        }
    }
  else
    {
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;
      gcc_unreachable ();
      df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
      changed = true;
    }

  if (changed)
    {
      df_record_entry_block_defs (&refs);
      bitmap_copy (df->entry_block_defs, &refs);
      df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
    }
  bitmap_clear (&refs);
}

/* Set the bit for regs that are considered being used at the exit.  */

static void
df_get_exit_block_use_set (bitmap exit_block_uses)
{
  unsigned int i;
  unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

  bitmap_clear (exit_block_uses);

  /* Stack pointer is always live at the exit.  */
  bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if ((!reload_completed) || frame_pointer_needed)
    {
      bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
    }

  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      && picreg != INVALID_REGNUM
      && fixed_regs[picreg])
    bitmap_set_bit (exit_block_uses, picreg);

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (exit_block_uses, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
            && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          bitmap_set_bit (exit_block_uses, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && crtl->calls_eh_return)
    for (i = 0; ; ++i)
      {
        unsigned regno = EH_RETURN_DATA_REGNO (i);
        if (regno == INVALID_REGNUM)
          break;
        bitmap_set_bit (exit_block_uses, regno);
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
  if ((!HAVE_epilogue || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  if ((!HAVE_epilogue || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, exit_block_uses);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) exit_block_uses);
}

/* Collect the refs of hard registers that are used in the exit block
   into COLLECTION_REC.  It uses EXIT_BLOCK_USES to determine which
   registers to include.  */

static void
df_exit_block_uses_collect (struct df_collection_rec *collection_rec,
			    bitmap exit_block_uses)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
		   EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
  /* It is deliberate that this is not put in the exit block uses but
     I do not know why.  */
  if (reload_completed
      && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
      && bb_has_eh_pred (EXIT_BLOCK_PTR)
      && fixed_regs[ARG_POINTER_REGNUM])
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec,
		   regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
		   EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
#endif

  df_canonize_collection_rec (collection_rec);
}


/* Record the set of hard registers that are used in the exit block.
   It uses df->exit_block_uses to determine which bits to include.  */

static void
df_record_exit_block_uses (bitmap exit_block_uses)
{
  struct df_collection_rec collection_rec;
  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  vec_stack_alloc (df_ref, collection_rec.use_vec, FIRST_PSEUDO_REGISTER);
  df_exit_block_uses_collect (&collection_rec, exit_block_uses);

  /* Process bb_refs chain.  */
  df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
  collection_rec.use_vec.release ();
}


/* Update the uses in the exit block.  */

void
df_update_exit_block_uses (void)
{
  bitmap_head refs;
  bool changed = false;

  bitmap_initialize (&refs, &df_bitmap_obstack);
  df_get_exit_block_use_set (&refs);
  if (df->exit_block_uses)
    {
      if (!bitmap_equal_p (df->exit_block_uses, &refs))
	{
	  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
	  df_ref_chain_delete_du_chain (bb_info->artificial_uses);
	  df_ref_chain_delete (bb_info->artificial_uses);
	  bb_info->artificial_uses = NULL;
	  changed = true;
	}
    }
  else
    {
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;
      gcc_unreachable ();
      df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
      changed = true;
    }

  if (changed)
    {
      df_record_exit_block_uses (&refs);
      bitmap_copy (df->exit_block_uses, &refs);
      df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
    }
  bitmap_clear (&refs);
}

static bool initialized = false;


/* Initialize some platform specific structures.  */

void
df_hard_reg_init (void)
{
#ifdef ELIMINABLE_REGS
  int i;
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  if (initialized)
    return;

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  initialized = true;
}

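
/* For reference, ELIMINABLE_REGS is defined by each target as the
   initializer for an array of from/to register pairs.  A typical
   shape (hypothetical here; every target lists its own registers):

     #define ELIMINABLE_REGS					\
       {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },		\
	{ ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },	\
	{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },		\
	{ FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   df_hard_reg_init above only looks at the FROM side of each pair;
   any register that can be eliminated ends up in elim_reg_set.  */
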

/* Recompute the parts of scanning that are based on regs_ever_live
   because something changed in that array.  */

void
df_update_entry_exit_and_calls (void)
{
  basic_block bb;

  df_update_entry_block_defs ();
  df_update_exit_block_uses ();

  /* The call insns need to be rescanned because there may be changes
     in the set of registers clobbered across the call.  */
  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn) && CALL_P (insn))
	    df_insn_rescan (insn);
	}
    }
}


/* Return true if hard REG is actually used in some instruction.
   There are a fair number of conditions that affect the setting of
   this array.  See the comment in df.h for df->hard_regs_live_count
   for the conditions under which this array is set.  */

bool
df_hard_reg_used_p (unsigned int reg)
{
  return df->hard_regs_live_count[reg] != 0;
}


/* A count of the number of times REG is actually used in some
   instruction.  There are a fair number of conditions that affect
   the setting of this array.  See the comment in df.h for
   df->hard_regs_live_count for the conditions under which this array
   is set.  */

unsigned int
df_hard_reg_used_count (unsigned int reg)
{
  return df->hard_regs_live_count[reg];
}


/* Get the value of regs_ever_live[REGNO].  */

bool
df_regs_ever_live_p (unsigned int regno)
{
  return regs_ever_live[regno];
}


/* Set regs_ever_live[REGNO] to VALUE.  If this causes regs_ever_live
   to change, schedule that change for the next update.  */

void
df_set_regs_ever_live (unsigned int regno, bool value)
{
  if (regs_ever_live[regno] == value)
    return;

  regs_ever_live[regno] = value;
  if (df)
    df->redo_entry_and_exit = true;
}


/* Compute "regs_ever_live" information from the underlying df
   information.  Set the vector to all false if RESET.  */

void
df_compute_regs_ever_live (bool reset)
{
  unsigned int i;
  bool changed = df->redo_entry_and_exit;

  if (reset)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
      {
	regs_ever_live[i] = true;
	changed = true;
      }
  if (changed)
    df_update_entry_exit_and_calls ();
  df->redo_entry_and_exit = false;
}

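
/* A typical use of the interface above from a pass or backend:
   force a register to be treated as live and let df refresh the
   entry/exit information lazily.  A sketch only (the surrounding
   pass is hypothetical):

     if (!df_regs_ever_live_p (HARD_FRAME_POINTER_REGNUM))
       df_set_regs_ever_live (HARD_FRAME_POINTER_REGNUM, true);

   The change only sets df->redo_entry_and_exit; nothing is rescanned
   until the next call to df_compute_regs_ever_live, which notices
   the flag and reruns df_update_entry_exit_and_calls.  */
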

/*----------------------------------------------------------------------------
   Dataflow ref information verification functions.

   df_reg_chain_mark (refs, regno, is_def, is_eq_use)
   df_reg_chain_verify_unmarked (refs)
   df_refs_verify (vec<df_ref, va_stack>, ref*, bool)
   df_mws_verify (mw*, mw*, bool)
   df_insn_refs_verify (collection_rec, bb, insn, bool)
   df_bb_refs_verify (bb, refs, bool)
   df_bb_verify (bb)
   df_exit_block_bitmap_verify (bool)
   df_entry_block_bitmap_verify (bool)
   df_scan_verify ()
----------------------------------------------------------------------------*/


/* Mark all refs in the reg chain.  Verify that all of the registers
   are in the correct chain.  */

static unsigned int
df_reg_chain_mark (df_ref refs, unsigned int regno,
		   bool is_def, bool is_eq_use)
{
  unsigned int count = 0;
  df_ref ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    {
      gcc_assert (!DF_REF_IS_REG_MARKED (ref));

      /* If there are no def-use or use-def chains, make sure that all
	 of the chains are clear.  */
      if (!df_chain)
	gcc_assert (!DF_REF_CHAIN (ref));

      /* Check to make sure the ref is in the correct chain.  */
      gcc_assert (DF_REF_REGNO (ref) == regno);
      if (is_def)
	gcc_assert (DF_REF_REG_DEF_P (ref));
      else
	gcc_assert (!DF_REF_REG_DEF_P (ref));

      if (is_eq_use)
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
      else
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);

      if (DF_REF_NEXT_REG (ref))
	gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
      count++;
      DF_REF_REG_MARK (ref);
    }
  return count;
}


/* Verify that all of the registers in the chain are unmarked.  */

static void
df_reg_chain_verify_unmarked (df_ref refs)
{
  df_ref ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    gcc_assert (!DF_REF_IS_REG_MARKED (ref));
}


/* Verify that NEW_REC and OLD_REC have exactly the same members.  */

static bool
df_refs_verify (vec<df_ref, va_stack> new_rec, df_ref *old_rec,
		bool abort_if_fail)
{
  unsigned int ix;
  df_ref new_ref;

  FOR_EACH_VEC_ELT (new_rec, ix, new_ref)
    {
      if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}

      /* ABORT_IF_FAIL is set when we are called from the
	 function-level verifier.  In that context, record that this
	 ref has been seen by clearing its mark.  */
      if (abort_if_fail)
	{
	  gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
	  DF_REF_REG_UNMARK (*old_rec);
	}

      old_rec++;
    }

  if (abort_if_fail)
    gcc_assert (*old_rec == NULL);
  else
    return *old_rec == NULL;
  return false;
}


/* Verify that NEW_REC and OLD_REC have exactly the same members.  */

static bool
df_mws_verify (vec<df_mw_hardreg_ptr, va_stack> new_rec,
	       struct df_mw_hardreg **old_rec,
	       bool abort_if_fail)
{
  unsigned int ix;
  struct df_mw_hardreg *new_reg;

  FOR_EACH_VEC_ELT (new_rec, ix, new_reg)
    {
      if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}
      old_rec++;
    }

  if (abort_if_fail)
    gcc_assert (*old_rec == NULL);
  else
    return *old_rec == NULL;
  return false;
}

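
/* df_refs_verify and df_mws_verify above run in two modes, selected
   by ABORT_IF_FAIL.  The non-aborting mode is what lets a caller use
   df_insn_refs_verify (below) as a cheap "did anything change?"
   test.  A sketch of that use, loosely modeled on df_insn_rescan
   earlier in this file (details elided):

     struct df_collection_rec collection_rec;
     memset (&collection_rec, 0, sizeof (struct df_collection_rec));
     ...
     if (df_insn_refs_verify (&collection_rec, bb, insn, false))
       {
	 df_free_collection_rec (&collection_rec);
	 return false;   (refs unchanged, nothing to rescan)
       }

   In aborting mode the same functions double as assertions for the
   whole-function verifier at the end of this file.  */
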

/* Return true if the existing insn refs information for INSN is
   complete and correct.  Otherwise (i.e. if there are any missing or
   extra refs), abort if ABORT_IF_FAIL is set, else return false.

   If ABORT_IF_FAIL, leave the refs that are verified (already in the
   ref chain) as DF_REF_MARKED().  If it's false, then it's a per-insn
   verification mode instead of the whole function, so unmark
   everything.

   If ABORT_IF_FAIL is set, this function never returns false.  */

static bool
df_insn_refs_verify (struct df_collection_rec *collection_rec,
		     basic_block bb,
		     rtx insn,
		     bool abort_if_fail)
{
  bool ret1, ret2, ret3, ret4;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

  df_insn_refs_collect (collection_rec, bb, insn_info);

  if (!DF_INSN_UID_DEFS (uid))
    {
      /* The insn_rec was created but it was never filled out.  */
      if (abort_if_fail)
	gcc_assert (0);
      else
	return false;
    }

  /* Unfortunately we cannot bail out early if one of these is not
     right because the marks will not get cleared.  */
  ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
			 abort_if_fail);
  ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
			 abort_if_fail);
  ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
			 abort_if_fail);
  ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
			abort_if_fail);
  return (ret1 && ret2 && ret3 && ret4);
}


/* Return true if all refs in the basic block are correct and
   complete.  As a side effect of df_refs_verify, every ref that is
   verified has its DF_REF_MARK bit cleared.  */

static bool
df_bb_verify (basic_block bb)
{
  rtx insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  struct df_collection_rec collection_rec;

  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  vec_stack_alloc (df_ref, collection_rec.def_vec, 128);
  vec_stack_alloc (df_ref, collection_rec.use_vec, 32);
  vec_stack_alloc (df_ref, collection_rec.eq_use_vec, 32);
  vec_stack_alloc (df_mw_hardreg_ptr, collection_rec.mw_vec, 32);

  gcc_assert (bb_info);

  /* Scan the block, one insn at a time, from end to beginning.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
	continue;
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index),
		  true);
  df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index),
		  true);
  df_free_collection_rec (&collection_rec);

  return true;
}


/* Returns true if the entry block has a correct and complete df_ref
   set.  If not, it either aborts (if ABORT_IF_FAIL is true) or
   returns false.  */

static bool
df_entry_block_bitmap_verify (bool abort_if_fail)
{
  bitmap_head entry_block_defs;
  bool is_eq;

  bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
  df_get_entry_block_def_set (&entry_block_defs);

  is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);

  if (!is_eq && abort_if_fail)
    {
      fprintf (stderr, "entry_block_defs = ");
      df_print_regset (stderr, &entry_block_defs);
      fprintf (stderr, "df->entry_block_defs = ");
      df_print_regset (stderr, df->entry_block_defs);
      gcc_assert (0);
    }

  bitmap_clear (&entry_block_defs);

  return is_eq;
}


/* Returns true if the exit block has a correct and complete df_ref
   set.  If not, it either aborts (if ABORT_IF_FAIL is true) or
   returns false.  */

static bool
df_exit_block_bitmap_verify (bool abort_if_fail)
{
  bitmap_head exit_block_uses;
  bool is_eq;

  bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
  df_get_exit_block_use_set (&exit_block_uses);

  is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);

  if (!is_eq && abort_if_fail)
    {
      fprintf (stderr, "exit_block_uses = ");
      df_print_regset (stderr, &exit_block_uses);
      fprintf (stderr, "df->exit_block_uses = ");
      df_print_regset (stderr, df->exit_block_uses);
      gcc_assert (0);
    }

  bitmap_clear (&exit_block_uses);

  return is_eq;
}


/* Verify that the df_ref information for all insns in all blocks is
   correct and complete.  */

void
df_scan_verify (void)
{
  unsigned int i;
  basic_block bb;
  bitmap_head regular_block_artificial_uses;
  bitmap_head eh_block_artificial_uses;

  if (!df)
    return;

  /* Verification is a 4 step process.  */

  /* (1) All of the refs are marked by going through the reg chains.  */
  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
		  == DF_REG_DEF_COUNT (i));
      gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
		  == DF_REG_USE_COUNT (i));
      gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
		  == DF_REG_EQ_USE_COUNT (i));
    }

  /* (2) There are various bitmaps whose value may change over the
     course of the compilation.  This step recomputes them to make
     sure that they have not slipped out of date.  */
  bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
  bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);

  df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&eh_block_artificial_uses);

  bitmap_ior_into (&eh_block_artificial_uses,
		   &regular_block_artificial_uses);

  /* Check that the artificial_uses bitmaps did not change.  */
  gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
			      &df->regular_block_artificial_uses));
  gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
			      &df->eh_block_artificial_uses));

  bitmap_clear (&regular_block_artificial_uses);
  bitmap_clear (&eh_block_artificial_uses);

  /* Verify entry block and exit block.  These only verify the bitmaps,
     the refs are verified in df_bb_verify.  */
  df_entry_block_bitmap_verify (true);
  df_exit_block_bitmap_verify (true);

  /* (3) All of the insns in all of the blocks are traversed and the
     marks are cleared both in the artificial refs attached to the
     blocks and the real refs inside the insns.  It is a failure to
     clear a mark that has not been set as this means that the ref in
     the block or insn was not in the reg chain.  */

  FOR_ALL_BB (bb)
    df_bb_verify (bb);

  /* (4) All reg chains are traversed a second time.  This time a
     check is made that the marks are clear.  A set mark would come
     from a ref that is not in any insn or basic block.  */

  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
    }
}
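

/* The whole-function check above is a mark-and-sweep consistency
   test in miniature:

     pass 1: mark every ref reachable from the per-register chains
     pass 3: for every ref attached to an insn or block, assert the
	     mark is set, then clear it
     pass 4: assert every mark is clear again

   A mark that survives pass 3 is a chained ref attached to no insn
   or block; a missing mark in pass 3 is an attached ref absent from
   its chain.  df_scan_verify is normally reached through the df
   checking machinery in df-core.c, but it can also be called by hand
   (e.g. from a debugger) while hunting scan corruption.  */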