/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h" /* FIXME: For hashing DEBUG_EXPR & friends.  */
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "hashtab.h"
#include "input.h"
#include "function.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
#include "ggc.h"
#include "hash-table.h"
#include "dumpfile.h"
#include "cselib.h"
#include "predict.h"
#include "basic-block.h"
#include "valtrack.h"
#include "params.h"
#include "alloc-pool.h"
#include "target.h"
#include "bitmap.h"

/* A list of cselib_val structures.  */
struct elt_list {
  struct elt_list *next;
  cselib_val *elt;
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static bool cselib_any_perm_equivs;
static inline void promote_debug_loc (struct elt_loc_list *l);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static void remove_useless_values (void);
static int rtx_equal_for_cselib_1 (rtx, rtx, machine_mode, int);
static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx_insn *);

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */

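/* Illustrative example (not from the original sources): to look up
   (mem:SI (plus:SI (reg:SI 1) (const_int 4))), cselib first resolves
   (reg:SI 1) through REG_VALUES into a VALUE, hashes the address
   expression (plus:SI (value V1) (const_int 4)) to find (or create) its
   VALUE, and finally scans the addr_list of that address VALUE for an
   SImode MEM.  Only expressions that are neither REGs nor MEMs are
   looked up through the generic hash table walk.  */
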
struct cselib_hasher : typed_noop_remove <cselib_val>
{
  typedef cselib_val value_type;
  struct compare_type {
    /* The rtx value and its mode (needed separately for constant
       integers).  */
    machine_mode mode;
    rtx x;
    /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
    machine_mode memmode;
  };
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

inline hashval_t
cselib_hasher::hash (const value_type *v)
{
  return v->hash;
}

/* The equality test for our hash table.  The first argument V is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

inline bool
cselib_hasher::equal (const value_type *v, const compare_type *x_arg)
{
  struct elt_loc_list *l;
  rtx x = x_arg->x;
  machine_mode mode = x_arg->mode;
  machine_mode memmode = x_arg->memmode;

  if (mode != GET_MODE (v->val_rtx))
    return false;

  if (GET_CODE (x) == VALUE)
    return x == v->val_rtx;

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
      {
        promote_debug_loc (l);
        return true;
      }

  return false;
}

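/* Illustrative example (not from the original sources): a CONST_INT
   carries no mode, so the same (const_int 4) looked up as an SImode
   value and as a DImode value must not match a single table entry.
   Callers wrap such constants as described above, and the MODE field of
   compare_type, checked first in the equality test, keeps the two
   entries apart.  */
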
/* A table that enables us to look up elts by their value.  */
static hash_table<cselib_hasher> *cselib_hash_table;

/* A table to hold preserved values.  */
static hash_table<cselib_hasher> *cselib_preserved_hash_table;

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx_insn *cselib_current_insn;

/* The unique id that the next created value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug values is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever referenced
   in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
static int n_useless_values;
static int n_useless_debug_values;

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]

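/* Illustrative example (not from the original sources): after
   (set (reg:DI 100) ...), REG_VALUES (100) starts with a DImode entry
   for the setting value.  If the same register is later looked up in
   SImode, an SImode value is appended to the same chain, so a single
   regno can carry one value per mode.  */
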
/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain a memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;
static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
                                 int n_sets);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)

#define SP_BASED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)


/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  struct elt_list *el;
  el = (struct elt_list *) pool_alloc (elt_list_pool);
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static inline void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
                       || !DEBUG_INSN_P (next->setting_insn)
                       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
                           == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
        return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
        {
          /* Reverse the insertion.  */
          new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
          return;
        }

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
        {
          /* Bring all locs from LOC to VAL.  */
          for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
            {
              /* Adjust values that have LOC as canonical so that VAL
                 becomes their canonical.  */
              if (el->loc && GET_CODE (el->loc) == VALUE)
                {
                  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
                                       == loc);
                  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
                }
            }
          el->next = val->locs;
          next = val->locs = CSELIB_VAL_PTR (loc)->locs;
        }

      if (CSELIB_VAL_PTR (loc)->addr_list)
        {
          /* Bring the addr_list into the canonical node.  */
          struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
          while (last->next)
            last = last->next;
          last->next = val->addr_list;
          val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
          CSELIB_VAL_PTR (loc)->addr_list = NULL;
        }

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
          && val->next_containing_mem == NULL)
        {
          /* Add VAL to the containing_mem list after LOC.  LOC will
             be removed when we notice it doesn't contain any
             MEMs.  */
          val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
          CSELIB_VAL_PTR (loc)->next_containing_mem = val;
        }

      /* Chain LOC back to VAL.  */
      el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}

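/* Illustrative example (not from the original sources): recording the
   equivalence V3 == V1 with new_elt_loc_list attaches everything to the
   older value, the one with the smaller uid.  V1's loc list absorbs
   V3's locs and addr_list, while V3 keeps a single loc pointing back at
   V1, so canonical_cselib_val (V3) resolves to V1 from then on.  */
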
/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
        {
          gcc_assert (l->next->setting_insn
                      && DEBUG_INSN_P (l->next->setting_insn)
                      && !l->next->next);
          l->next->setting_insn = cselib_current_insn;
        }
      else
        gcc_assert (!l->next);
    }
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  pool_free (elt_list_pool, l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  pool_free (elt_loc_list_pool, l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  pool_free (cselib_val_pool, v);
}

/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}

/* Return TRUE if V is a constant, a function invariant or a VALUE
   equivalence; FALSE otherwise.  */

static bool
invariant_or_equiv_p (cselib_val *v)
{
  struct elt_loc_list *l;

  if (v == cfa_base_preserved_val)
    return true;

  /* Keep VALUE equivalences around.  */
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE)
      return true;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
          && (GET_CODE (v->locs->loc) != CONST
              || !references_value_p (v->locs->loc, 0)))
        return true;
      /* Although a debug expr may be bound to different expressions,
         we can preserve it as if it was constant, to get unification
         and proper merging within var-tracking.  */
      if (GET_CODE (v->locs->loc) == DEBUG_EXPR
          || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
          || GET_CODE (v->locs->loc) == ENTRY_VALUE
          || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
        return true;

      /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
      if (GET_CODE (v->locs->loc) == PLUS
          && CONST_INT_P (XEXP (v->locs->loc, 1))
          && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
          && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
        return true;
    }

  return false;
}

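/* Illustrative example (not from the original sources): if V1's only
   loc is (symbol_ref "x") and V2's only loc is
   (plus (value V1) (const_int 8)), then invariant_or_equiv_p is true
   for V1 directly and for V2 by the recursive PLUS rule, so both
   survive cselib_reset_table when constants are being preserved.  */
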
/* Remove from hash table all VALUEs except constants, function
   invariants and VALUE equivalences.  */

int
preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (invariant_or_equiv_p (v))
    {
      cselib_hasher::compare_type lookup = {
        GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
      };
      cselib_val **slot
        = cselib_preserved_hash_table->find_slot_with_hash (&lookup,
                                                            v->hash, INSERT);
      gcc_assert (!*slot);
      *slot = v;
    }

  cselib_hash_table->clear_slot (x);

  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
        if (used_regs[i] == regno)
          {
            new_used_regs = 1;
            continue;
          }
        else
          REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
        = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
        REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    cselib_hash_table->traverse <void *, preserve_constants_and_equivs>
      (NULL);
  else
    {
      cselib_hash_table->empty ();
      gcc_checking_assert (!cselib_any_perm_equivs);
    }

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  */

static cselib_val **
cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
                  enum insert_option insert, machine_mode memmode)
{
  cselib_val **slot = NULL;
  cselib_hasher::compare_type lookup = { mode, x, memmode };
  if (cselib_preserve_constants)
    slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
                                                             NO_INSERT);
  if (!slot)
    slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
  return slot;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless ||
          (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
        return 1;
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (references_value_p (XVECEXP (x, i, j), only_useless))
            return 1;
    }

  return 0;
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

int
discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
        unchain_one_elt_loc_list (p);
      else
        p = &(*p)->next;
    }

  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
        n_useless_debug_values++;
      else
        n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

int
discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (cselib_discard_hook)
        cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      cselib_hash_table->clear_slot (x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
        *p = v;
        p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  cselib_hash_table->traverse <void *, discard_useless_values> (NULL);

  gcc_assert (!n_useless_values);
}

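/* Illustrative example (not from the original sources): suppose V1 just
   lost its last location and V2's only loc is
   (plus (value V1) (const_int 4)).  The traversal above deletes V2's
   loc because it references the useless V1, making V2 useless in turn;
   values_became_useless is set, and the retry discards any locs that
   reference V2.  Only after this reaches a fixed point are the values
   actually removed.  */
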
/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Arrange for a value to be marked as based on stack pointer
   for find_base_term purposes.  */

void
cselib_set_value_sp_based (cselib_val *v)
{
  SP_BASED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is based on stack pointer for
   find_base_term purposes.  */

bool
cselib_sp_based_value_p (cselib_val *v)
{
  return SP_BASED_VALUE_P (v->val_rtx);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}

/* Return nonzero if we can prove that X and Y contain the same value, taking
   our gathered information into account.  */

int
rtx_equal_for_cselib_p (rtx x, rtx y)
{
  return rtx_equal_for_cselib_1 (x, y, VOIDmode, 0);
}

/* If x is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      return XEXP (x, 0);

    case PRE_DEC:
      if (memmode == VOIDmode)
        return x;

      *off = GEN_INT (-GET_MODE_SIZE (memmode));
      return XEXP (x, 0);

    case PRE_INC:
      if (memmode == VOIDmode)
        return x;

      *off = GEN_INT (GET_MODE_SIZE (memmode));
      return XEXP (x, 0);

    case PRE_MODIFY:
      return XEXP (x, 1);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return XEXP (x, 0);

    default:
      return x;
    }
}

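/* Illustrative example (not from the original sources): with MEMMODE ==
   SImode on a target where SImode is 4 bytes, autoinc_split turns
   (pre_dec (reg sp)) into the base (reg sp) with *OFF set to
   (const_int -4), so it can compare equal to the explicit form
   (plus (reg sp) (const_int -4)) in rtx_equal_for_cselib_1.  */
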
/* Return nonzero if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

static int
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
        x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
        y = e->val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
        return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      if (depth == 128)
        return 0;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          /* Avoid infinite recursion.  We know we have the canonical
             value, so we can just skip any values in the equivalence
             list.  */
          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
            return 1;
        }

      return 0;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      if (depth == 128)
        return 0;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
            return 1;
        }

      return 0;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  if (GET_CODE (x) != GET_CODE (y))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      if (!xoff != !yoff)
        return 0;

      if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
        return 0;

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
        return rtx_equal_for_cselib_1 (x, y, memmode, depth);

      return 0;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_UNIQUE:
    case DEBUG_EXPR:
      return 0;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
             == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
             == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, it is thus undesirable to
         use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
         using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
                                     depth);

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
                                          XVECEXP (y, i, j), memmode, depth))
              return 0;
          break;

        case 'e':
          if (i == 1
              && targetm.commutative_p (x, UNKNOWN)
              && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
                                         depth)
              && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
                                         depth))
            return 1;
          if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
                                        depth))
            return 0;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          gcc_unreachable ();
        }
    }
  return 1;
}

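/* Illustrative example (not from the original sources): for a
   commutative code, the 'e' case above tries the swapped operand order
   first, so (plus:SI (reg:SI 1) (reg:SI 2)) and
   (plus:SI (reg:SI 2) (reg:SI 1)) compare equal even though a plain
   structural walk would reject them.  */
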
/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for returning 0 are: the object is volatile, or we
   couldn't find a register or memory location in the table and CREATE is
   zero.  If CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a
   different strategy to avoid returning spurious 0, e.g. return
   ~(~0U >> 1).
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
    (set (reg:SI 1) (const_int))
    (plus:SI (reg:SI 2) (reg:SI 1))
   and
    (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

static unsigned int
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
  cselib_val *e;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
        return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
              + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
              + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
              + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
         recursing on argument if ENTRY_VALUE is one of the
         forms emitted by expand_debug_expr, otherwise
         ENTRY_VALUE hash would depend on the current value
         in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
        hash += (unsigned int) REG
                + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
                + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
               && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
        hash += (unsigned int) MEM
                + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
                + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
        hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
        hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
        hash += ((unsigned) CONST_DOUBLE_LOW (x)
                 + (unsigned) CONST_DOUBLE_HIGH (x));
      else
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += cselib_hash_rtx (elt, 0, memmode);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (LABEL_REF_LABEL (x)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      i = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        i = -i;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
         like (mem:MEMMODE (plus (reg) (const_int I))).  */
      hash += (unsigned) PLUS - (unsigned) code
              + cselib_hash_rtx (XEXP (x, 0), create, memmode)
              + cselib_hash_rtx (GEN_INT (i), create, memmode);
      return hash ? hash : 1 + (unsigned) PLUS;

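      /* Illustrative example (not from the original sources): for
         (mem:SI (pre_dec:SI (reg:SI sp))) on a target where SImode is
         4 bytes, the adjustment above makes the address hash exactly as
         (plus (reg:SI sp) (const_int -4)) would, matching the way
         autoinc_split canonicalizes such addresses for comparison.  */
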
    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 0;

      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          {
            rtx tem = XEXP (x, i);
            unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

            if (tem_hash == 0)
              return 0;

            hash += tem_hash;
          }
          break;
        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              unsigned int tem_hash
                = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

              if (tem_hash == 0)
                return 0;

              hash += tem_hash;
            }
          break;

        case 's':
          {
            const unsigned char *p = (const unsigned char *) XSTR (x, i);

            if (p)
              while (*p)
                hash += *p++;
            break;
          }

        case 'i':
          hash += XINT (x, i);
          break;

        case '0':
        case 't':
          /* unused */
          break;

        default:
          gcc_unreachable ();
        }
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}

/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
{
  cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx) pool_alloc (value_pool);
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
        fputs ("# ", dump_file);
      else
        fprintf (dump_file, "%p ", (void *) e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}

/* ADDR_ELT is a value that is used as an address.  MEM_ELT is the value
   that contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  struct elt_loc_list *l;

  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  for (l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
        && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
      {
        promote_debug_loc (l);
        return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
                    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  machine_mode mode = GET_MODE (x);
  machine_mode addr_mode;
  cselib_val **slot;
  cselib_val *addr;
  cselib_val *mem_elt;
  struct elt_list *l;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;

  addr = canonical_cselib_val (addr);
  /* Find a value that describes a value of our mode at that address.  */
  for (l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
        promote_debug_loc (l->elt->locs);
        return l->elt;
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
  *slot = mem_elt;
  return mem_elt;
}

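/* Illustrative example (not from the original sources): looking up
   (mem:SI (reg:SI 1)) first yields an address value V1 for (reg:SI 1);
   the MEM value V2 then gets the loc (mem:SI (value V1)) and is pushed
   onto V1's addr_list, so a later lookup of any SImode MEM whose
   address also resolves to V1 finds V2 without consulting the hash
   table.  */
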
/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results; we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
            int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
         expressions for frame pointer and vice versa, which is something
         that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
         for more details.  */
      if (REG_P (p->loc)
          && (REGNO (p->loc) == STACK_POINTER_REGNUM
              || REGNO (p->loc) == FRAME_POINTER_REGNUM
              || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
              || REGNO (p->loc) == cfa_base_preserved_regno))
        return p->loc;
      /* Avoid infinite recursion trying to expand a reg into
         the same reg.  */
      if ((REG_P (p->loc))
          && (REGNO (p->loc) < regno)
          && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
        {
          reg_result = p->loc;
          regno = REGNO (p->loc);
        }
      /* Avoid infinite recursion and do not try to expand the
         value.  */
      else if (GET_CODE (p->loc) == VALUE
               && CSELIB_VAL_PTR (p->loc)->locs == p_in)
        continue;
      else if (!REG_P (p->loc))
        {
          rtx result, note;
          if (dump_file && (dump_flags & TDF_CSELIB))
            {
              print_inline_rtx (dump_file, p->loc, 0);
              fprintf (dump_file, "\n");
            }
          if (GET_CODE (p->loc) == LO_SUM
              && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
              && p->setting_insn
              && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
              && XEXP (note, 0) == XEXP (p->loc, 1))
            return XEXP (p->loc, 1);
          result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
          if (result)
            return result;
        }

    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
        fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
        return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
        {
          print_inline_rtx (dump_file, reg_result, 0);
          fprintf (dump_file, "\n");
        }
      else
        fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}


/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression elimination.  Because
   local value numbering is such a weak optimization, the expanded
   expression is pretty much unique (not from a pointer equality point
   of view but from a tree shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
   It is clear on return.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = NULL;
  evd.callback_arg = NULL;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

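/* Illustrative usage sketch (hypothetical caller, not from the original
   sources):

     bitmap scratch = BITMAP_ALLOC (NULL);
     rtx exp = cselib_expand_value_rtx (addr, scratch, 5);
     ...
     BITMAP_FREE (scratch);

   The bitmap must be empty on entry; the function uses it to track the
   registers currently being expanded and leaves it clear on return.  */
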
/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                            cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                                  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}

/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
                           int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
        struct elt_list *l = REG_VALUES (REGNO (orig));

        if (l && l->elt == NULL)
          l = l->next;
        for (; l; l = l->next)
          if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
            {
              rtx result;
              unsigned regno = REGNO (orig);

              /* The only thing that we are not willing to do (this
                 is a requirement of dse and if other potential uses
                 need this function we should add a parm to control
                 it) is that we will not substitute the
                 STACK_POINTER_REGNUM, FRAME_POINTER or the
                 HARD_FRAME_POINTER.

                 These expansions confuse the code that notices that
                 stores into the frame go dead at the end of the
                 function and that the frame is not affected by calls
                 to subroutines.  If you allow the
                 STACK_POINTER_REGNUM substitution, then dse will
                 think that parameter pushing also goes dead which is
                 wrong.  If you allow the FRAME_POINTER or the
                 HARD_FRAME_POINTER then you lose the opportunity to
                 make the frame assumptions.  */
              if (regno == STACK_POINTER_REGNUM
                  || regno == FRAME_POINTER_REGNUM
                  || regno == HARD_FRAME_POINTER_REGNUM
                  || regno == cfa_base_preserved_regno)
                return orig;

              bitmap_set_bit (evd->regs_active, regno);

              if (dump_file && (dump_flags & TDF_CSELIB))
                fprintf (dump_file, "expanding: r%d into: ", regno);

              result = expand_loc (l->elt->locs, evd, max_depth);
              bitmap_clear_bit (evd->regs_active, regno);

              if (result)
                return result;
              else
                return orig;
            }
      }

    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH rtxes must be shared because they represent distinct
         values.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0))
          && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
        return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

    case SUBREG:
      {
        rtx subreg;

        if (evd->callback)
          {
            subreg = evd->callback (orig, evd->regs_active, max_depth,
                                    evd->callback_arg);
            if (subreg != orig)
              return subreg;
          }

        subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
                                            max_depth - 1);
        if (!subreg)
          return NULL;
        scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
                                     GET_MODE (SUBREG_REG (orig)),
                                     SUBREG_BYTE (orig));
        if (scopy == NULL
            || (GET_CODE (scopy) == SUBREG
                && !REG_P (SUBREG_REG (scopy))
                && !MEM_P (SUBREG_REG (scopy))))
          return NULL;

        return scopy;
      }

    case VALUE:
      {
        rtx result;

        if (dump_file && (dump_flags & TDF_CSELIB))
          {
            fputs ("\nexpanding ", dump_file);
            print_rtl_single (dump_file, orig);
            fputs (" into...", dump_file);
          }

        if (evd->callback)
          {
            result = evd->callback (orig, evd->regs_active, max_depth,
                                    evd->callback_arg);

            if (result != orig)
              return result;
          }

        result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
        return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
        return evd->callback (orig, evd->regs_active, max_depth,
                              evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          {
            rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
                                                    max_depth - 1);
            if (!result)
              return NULL;
            if (copy)
              XEXP (copy, i) = result;
          }
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) != NULL)
          {
            if (copy)
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (orig, i); j++)
              {
                rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
                                                        evd, max_depth - 1);
                if (!result)
                  return NULL;
                if (copy)
                  XVECEXP (copy, i, j) = result;
              }
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and the mode isn't derivable from the whole rtx's mode,
     try simplify_*_operation first with mode from original's operand
     and as a fallback wrap CONST_INT into gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (orig, 0)) != VOIDmode)
        {
          scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
                                            GET_MODE (XEXP (orig, 0)));
          if (scopy)
            return scopy;
        }
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's
         mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (orig, 0)) != VOIDmode)
        {
          scopy = simplify_ternary_operation (code, mode,
                                              GET_MODE (XEXP (orig, 0)),
                                              XEXP (copy, 0), XEXP (copy, 1),
                                              XEXP (copy, 2));
          if (scopy)
            return scopy;
        }
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (copy, 1)) == VOIDmode
          && (GET_MODE (XEXP (orig, 0)) != VOIDmode
              || GET_MODE (XEXP (orig, 1)) != VOIDmode))
        {
          scopy = simplify_relational_operation (code, mode,
                                                 (GET_MODE (XEXP (orig, 0))
                                                  != VOIDmode)
                                                 ? GET_MODE (XEXP (orig, 0))
                                                 : GET_MODE (XEXP (orig, 1)),
                                                 XEXP (copy, 0),
                                                 XEXP (copy, 1));
          if (scopy)
            return scopy;
        }
      break;
    default:
      break;
    }
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;
  return copy;
}

/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.
   If X is within a MEM, MEMMODE must be the mode of the MEM.  */

rtx
cselib_subst_to_values (rtx x, machine_mode memmode)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      if (l && l->elt == NULL)
        l = l->next;
      for (; l; l = l->next)
        if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
          return l->elt->val_rtx;

      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      /* This used to happen for autoincrements, but we deal with them
         properly now.  Remove the if stmt for the next release.  */
      if (! e)
        {
          /* Assign a value that doesn't match any other.  */
          e = new_cselib_val (next_uid, GET_MODE (x), x);
        }
      return e->val_rtx;

    case ENTRY_VALUE:
      e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (! e)
        break;
      return e->val_rtx;

    CASE_CONST_ANY:
      return x;

    case PRE_DEC:
    case PRE_INC:
      gcc_assert (memmode != VOIDmode);
      i = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        i = -i;
      return cselib_subst_to_values (plus_constant (GET_MODE (x),
                                                    XEXP (x, 0), i),
                                     memmode);

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 1), memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 0), memmode);

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx t = cselib_subst_to_values (XEXP (x, i), memmode);

          if (t != XEXP (x, i))
            {
              if (x == copy)
                copy = shallow_copy_rtx (x);
              XEXP (copy, i) = t;
            }
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = 0; j < XVECLEN (x, i); j++)
            {
              rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);

              if (t != XVECEXP (x, i, j))
                {
                  if (XVEC (x, i) == XVEC (copy, i))
                    {
                      if (x == copy)
                        copy = shallow_copy_rtx (x);
                      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
                    }
                  XVECEXP (copy, i, j) = t;
                }
            }
        }
    }

  return copy;
}

/* Wrapper for cselib_subst_to_values, that indicates X is in INSN.  */

rtx
cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
{
  rtx ret;
  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;
  ret = cselib_subst_to_values (x, memmode);
  cselib_current_insn = NULL;
  return ret;
}

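/* Illustrative example (not from the original sources): substituting
   (plus:SI (reg:SI 1) (mem:SI (reg:SI 2))) yields
   (plus:SI (value V1) (value V2)), where V1 and V2 are the values
   recorded for the register and the memory slot.  The result is stable
   across later assignments to (reg:SI 1) or stores to the MEM, which is
   what makes it usable as a hash-table key.  */
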
/* Look up the rtl expression X in our tables and return the value it
   has.  If CREATE is zero, we return NULL if we don't know the value.
   Otherwise, we create a new one if possible, using mode MODE if X
   doesn't have a mode (i.e. because it's a constant).  When X is part
   of an address, MEMMODE should be the mode of the enclosing MEM if
   we're tracking autoinc expressions.  */

static cselib_val *
cselib_lookup_1 (rtx x, machine_mode mode,
                 int create, machine_mode memmode)
{
  cselib_val **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      if (l && l->elt == NULL)
        l = l->next;
      for (; l; l = l->next)
        if (mode == GET_MODE (l->elt->val_rtx))
          {
            promote_debug_loc (l->elt->locs);
            return l->elt;
          }

      if (! create)
        return 0;

      if (i < FIRST_PSEUDO_REGISTER)
        {
          unsigned int n = hard_regno_nregs[i][mode];

          if (n > max_value_regs)
            max_value_regs = n;
        }

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      new_elt_loc_list (e, x);
      if (REG_VALUES (i) == 0)
        {
          /* Maintain the invariant that the first entry of
             REG_VALUES, if present, must be the value used to set the
             register, or NULL.  */
          used_regs[n_used_regs++] = i;
          REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
        }
      else if (cselib_preserve_constants
               && GET_MODE_CLASS (mode) == MODE_INT)
        {
          /* During var-tracking, try harder to find equivalences
             for SUBREGs.  If a setter sets say a DImode register
             and the user uses that register only in SImode, add a
             lowpart subreg location.  */
          struct elt_list *lwider = NULL;
          l = REG_VALUES (i);
          if (l && l->elt == NULL)
            l = l->next;
          for (; l; l = l->next)
            if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
                && GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
                   > GET_MODE_SIZE (mode)
                && (lwider == NULL
                    || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
                       < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
              {
                struct elt_loc_list *el;
                if (i < FIRST_PSEUDO_REGISTER
                    && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
                  continue;
                for (el = l->elt->locs; el; el = el->next)
                  if (!REG_P (el->loc))
                    break;
                if (el)
                  lwider = l;
              }
          if (lwider)
            {
              rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
                                        GET_MODE (lwider->elt->val_rtx));
              if (sub)
                new_elt_loc_list (e, sub);
            }
        }
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create, memmode);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = cselib_find_slot (mode, x, hashval,
                           create ? INSERT : NO_INSERT, memmode);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = e;
  new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
  return e;
}


/* Wrapper for cselib_subst_to_values, that indicates X is in INSN.  */

rtx
cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
{
  rtx ret;
  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;
  ret = cselib_subst_to_values (x, memmode);
  cselib_current_insn = NULL;
  return ret;
}

/* Look up the rtl expression X in our tables and return the value it
   has.  If CREATE is zero, we return NULL if we don't know the value.
   Otherwise, we create a new one if possible, using mode MODE if X
   doesn't have a mode (i.e. because it's a constant).  When X is part
   of an address, MEMMODE should be the mode of the enclosing MEM if
   we're tracking autoinc expressions.  */

static cselib_val *
cselib_lookup_1 (rtx x, machine_mode mode,
		 int create, machine_mode memmode)
{
  cselib_val **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (mode == GET_MODE (l->elt->val_rtx))
	  {
	    promote_debug_loc (l->elt->locs);
	    return l->elt;
	  }

      if (! create)
	return 0;

      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs[i][mode];

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      new_elt_loc_list (e, x);
      if (REG_VALUES (i) == 0)
	{
	  /* Maintain the invariant that the first entry of
	     REG_VALUES, if present, must be the value used to set the
	     register, or NULL.  */
	  used_regs[n_used_regs++] = i;
	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
	}
      else if (cselib_preserve_constants
	       && GET_MODE_CLASS (mode) == MODE_INT)
	{
	  /* During var-tracking, try harder to find equivalences
	     for SUBREGs.  If a setter sets say a DImode register
	     and the user uses that register only in SImode, add a
	     lowpart subreg location.  */
	  struct elt_list *lwider = NULL;
	  l = REG_VALUES (i);
	  if (l && l->elt == NULL)
	    l = l->next;
	  for (; l; l = l->next)
	    if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
		&& GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
		   > GET_MODE_SIZE (mode)
		&& (lwider == NULL
		    || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
		       < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
	      {
		struct elt_loc_list *el;
		if (i < FIRST_PSEUDO_REGISTER
		    && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
		  continue;
		for (el = l->elt->locs; el; el = el->next)
		  if (!REG_P (el->loc))
		    break;
		if (el)
		  lwider = l;
	      }
	  if (lwider)
	    {
	      rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
					GET_MODE (lwider->elt->val_rtx));
	      if (sub)
		new_elt_loc_list (e, sub);
	    }
	}
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create, memmode);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = cselib_find_slot (mode, x, hashval,
			   create ? INSERT : NO_INSERT, memmode);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = e;
  new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
  return e;
}
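
/* To make the lowpart-subreg logic in cselib_lookup_1 concrete, a
   purely illustrative example (little-endian target assumed): after

     (set (reg:DI 100) (plus:DI (reg:DI 101) (const_int 8)))

   the DImode value for register 100 carries a non-REG location, so a
   later lookup of (reg:SI 100) gives the fresh SImode value an extra
   location (subreg:SI (value:DI vN) 0), keeping the two uses
   related.  */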

/* Wrapper for cselib_lookup, that indicates X is in INSN.  */

cselib_val *
cselib_lookup_from_insn (rtx x, machine_mode mode,
			 int create, machine_mode memmode, rtx_insn *insn)
{
  cselib_val *ret;

  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;

  ret = cselib_lookup (x, mode, create, memmode);

  cselib_current_insn = NULL;

  return ret;
}

/* Wrapper for cselib_lookup_1, that logs the lookup result and
   maintains invariants related to debug insns.  */

cselib_val *
cselib_lookup (rtx x, machine_mode mode,
	       int create, machine_mode memmode)
{
  cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);

  /* ??? Should we return NULL if we're not to create an entry, the
     found loc is a debug loc and cselib_current_insn is not DEBUG?
     If so, we should also avoid converting val to non-DEBUG; probably
     easiest setting cselib_current_insn to NULL before the call
     above.  */

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fputs ("cselib lookup ", dump_file);
      print_inline_rtx (dump_file, x, 2);
      fprintf (dump_file, " => %u:%u\n",
	       ret ? ret->uid : 0,
	       ret ? ret->hash : 0);
    }

  return ret;
}

/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */

static void
cselib_invalidate_regno (unsigned int regno, machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
	      || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  bool had_locs;
	  rtx setting_insn;
	  struct elt_loc_list **p;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

	  if (this_last < regno || v == NULL
	      || (v == cfa_base_preserved_val
		  && i == cfa_base_preserved_regno))
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* We have an overlap.  */
	  if (*l == REG_VALUES (i))
	    {
	      /* Maintain the invariant that the first entry of
		 REG_VALUES, if present, must be the value used to set
		 the register, or NULL.  This is also nice because
		 then we won't push the same regno onto used_regs
		 multiple times.  */
	      (*l)->elt = NULL;
	      l = &(*l)->next;
	    }
	  else
	    unchain_one_elt_list (l);

	  v = canonical_cselib_val (v);

	  had_locs = v->locs != NULL;
	  setting_insn = v->locs ? v->locs->setting_insn : NULL;

	  /* Now, we clear the mapping from value to reg.  It must exist, so
	     this code will crash intentionally if it doesn't.  */
	  for (p = &v->locs; ; p = &(*p)->next)
	    {
	      rtx x = (*p)->loc;

	      if (REG_P (x) && REGNO (x) == i)
		{
		  unchain_one_elt_loc_list (p);
		  break;
		}
	    }

	  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
	    {
	      if (setting_insn && DEBUG_INSN_P (setting_insn))
		n_useless_debug_values++;
	      else
		n_useless_values++;
	    }
	}
    }
}
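
/* For instance (hypothetically), on a 32-bit target where DImode
   occupies two hard registers, a value recorded for (reg:DI 0) spans
   hard regs 0 and 1; an assignment to (reg:SI 1) must therefore
   invalidate it.  That is why the scan above starts as far back as
   max_value_regs before REGNO.  */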

/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
cselib_invalidate_mem (rtx mem_rtx)
{
  cselib_val **vp, *v, *next;
  int num_mems = 0;
  rtx mem_addr;

  mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
  mem_rtx = canon_rtx (mem_rtx);

  vp = &first_containing_mem;
  for (v = *vp; v != &dummy_val; v = next)
    {
      bool has_mem = false;
      struct elt_loc_list **p = &v->locs;
      bool had_locs = v->locs != NULL;
      rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;

      while (*p)
	{
	  rtx x = (*p)->loc;
	  cselib_val *addr;
	  struct elt_list **mem_chain;

	  /* MEMs may occur in locations only at the top level; below
	     that every MEM or REG is substituted by its VALUE.  */
	  if (!MEM_P (x))
	    {
	      p = &(*p)->next;
	      continue;
	    }
	  if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
	      && ! canon_anti_dependence (x, false, mem_rtx,
					  GET_MODE (mem_rtx), mem_addr))
	    {
	      has_mem = true;
	      num_mems++;
	      p = &(*p)->next;
	      continue;
	    }

	  /* This one overlaps.  */
	  /* We must have a mapping from this MEM's address to the
	     value (E).  Remove that, too.  */
	  addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
	  addr = canonical_cselib_val (addr);
	  gcc_checking_assert (v == canonical_cselib_val (v));
	  mem_chain = &addr->addr_list;
	  for (;;)
	    {
	      cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);

	      if (canon == v)
		{
		  unchain_one_elt_list (mem_chain);
		  break;
		}

	      /* Record canonicalized elt.  */
	      (*mem_chain)->elt = canon;

	      mem_chain = &(*mem_chain)->next;
	    }

	  unchain_one_elt_loc_list (p);
	}

      if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
	{
	  if (setting_insn && DEBUG_INSN_P (setting_insn))
	    n_useless_debug_values++;
	  else
	    n_useless_values++;
	}

      next = v->next_containing_mem;
      if (has_mem)
	{
	  *vp = v;
	  vp = &(*vp)->next_containing_mem;
	}
      else
	v->next_containing_mem = NULL;
    }
  *vp = &dummy_val;
}

/* Invalidate DEST, which is being assigned to or clobbered.  */

void
cselib_invalidate_rtx (rtx dest)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
  else if (MEM_P (dest))
    cselib_invalidate_mem (dest);
}

/* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
				   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest);
}
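
/* The note_stores wrapper above is how invalidation is normally
   driven; cselib_record_sets below does exactly

     note_stores (body, cselib_invalidate_rtx_note_stores, NULL);

   to wipe out every location written by an insn in one pass.  */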

/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;

  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (dreg >= 0)
    {
      if (dreg < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      if (REG_VALUES (dreg) == 0)
	{
	  used_regs[n_used_regs++] = dreg;
	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
	}
      else
	{
	  /* The register should have been invalidated.  */
	  gcc_assert (REG_VALUES (dreg)->elt == 0);
	  REG_VALUES (dreg)->elt = src_elt;
	}

      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
	n_useless_values--;
      new_elt_loc_list (src_elt, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
	   && cselib_record_memory)
    {
      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
	n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}

/* Make ELT and X's VALUE equivalent to each other at INSN.  */

void
cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
{
  cselib_val *nelt;
  rtx_insn *save_cselib_current_insn = cselib_current_insn;

  gcc_checking_assert (elt);
  gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
  gcc_checking_assert (!side_effects_p (x));

  cselib_current_insn = insn;

  nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);

  if (nelt != elt)
    {
      cselib_any_perm_equivs = true;

      if (!PRESERVED_VALUE_P (nelt->val_rtx))
	cselib_preserve_value (nelt);

      new_elt_loc_list (nelt, elt->val_rtx);
    }

  cselib_current_insn = save_cselib_current_insn;
}

/* Return TRUE if any permanent equivalences have been recorded since
   the table was last initialized.  */
bool
cselib_have_permanent_equivalences (void)
{
  return cselib_any_perm_equivs;
}

/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

struct cselib_record_autoinc_data
{
  struct cselib_set *sets;
  int n_sets;
};

/* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
   autoinc RTXs: SRC plus SRCOFF (if non-NULL) is stored in DEST.  */

static int
cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
			  rtx dest, rtx src, rtx srcoff, void *arg)
{
  struct cselib_record_autoinc_data *data;
  data = (struct cselib_record_autoinc_data *) arg;

  data->sets[data->n_sets].dest = dest;

  if (srcoff)
    data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
  else
    data->sets[data->n_sets].src = src;

  data->n_sets++;

  return 0;
}
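
/* For example (hypothetical operands): for an access like

     (mem:SI (post_inc:SI (reg:SI 1)))

   for_each_inc_dec invokes the callback with DEST = (reg:SI 1),
   SRC = (reg:SI 1) and SRCOFF = (const_int 4), so the recorded set is
   equivalent to (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4))),
   assuming a 4-byte SImode access.  */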

/* Record the effects of any sets and autoincs in INSN.  */
static void
cselib_record_sets (rtx_insn *insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx body = PATTERN (insn);
  rtx cond = 0;
  int n_sets_before_autoinc;
  struct cselib_record_autoinc_data data;

  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
	 set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x = XVECEXP (body, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      sets[n_sets].src = SET_SRC (x);
	      sets[n_sets].dest = SET_DEST (x);
	      n_sets++;
	    }
	}
    }

  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	sets[0].src = XEXP (note, 0);
    }

  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
	 the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
	sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	{
	  rtx src = sets[i].src;
	  if (cond)
	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
	  if (MEM_P (dest))
	    {
	      machine_mode address_mode = get_address_mode (dest);

	      sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
						     address_mode, 1,
						     GET_MODE (dest));
	    }
	  else
	    sets[i].dest_addr_elt = 0;
	}
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  for (i = n_sets_before_autoinc; i < n_sets; i++)
    cselib_invalidate_rtx (sets[i].dest);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the
     destination to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
	{
	  rtx dest = sets[i].dest;
	  if (REG_P (dest) || MEM_P (dest))
	    {
	      int j;
	      for (j = i + 1; j < n_sets; j++)
		if (rtx_equal_p (dest, sets[j].dest))
		  {
		    sets[i].dest = pc_rtx;
		    sets[j].dest = pc_rtx;
		  }
	    }
	}
    }
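
  /* An example of the duplicate-output situation handled above
     (purely illustrative source):

       int x;
       asm ("..." : "=r" (x), "=r" (x));

     Both outputs name the same lvalue, so neither recorded set can be
     trusted; smashing the destinations to pc_rtx drops them.  */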

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }
}

/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */

bool
fp_setter_insn (rtx insn)
{
  rtx expr, pat = NULL_RTX;

  if (!RTX_FRAME_RELATED_P (insn))
    return false;

  expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
  if (expr)
    pat = XEXP (expr, 0);
  if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
    return false;

  /* Don't return true for frame pointer restores in the epilogue.  */
  if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
    return false;
  return true;
}

/* Record the effects of INSN.  */

void
cselib_process_insn (rtx_insn *insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL or a setjmp.  */
  if ((LABEL_P (insn)
       || (CALL_P (insn)
	   && find_reg_note (insn, REG_SETJMP, NULL)))
      && !cselib_preserve_constants)
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i]
	    || (REG_VALUES (i) && REG_VALUES (i)->elt
		&& HARD_REGNO_CALL_PART_CLOBBERED
		     (i, GET_MODE (REG_VALUES (i)->elt->val_rtx))))
	  cselib_invalidate_regno (i, reg_raw_mode[i]);

      /* Since it is not clear how cselib is going to be used, be
	 conservative here and treat looping pure or const functions
	 as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
	  || !(RTL_CONST_OR_PURE_CALL_P (insn)))
	cselib_invalidate_mem (callmem);
    }

  cselib_record_sets (insn);

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    {
      for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	  cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
      /* Flush everything on setjmp.  */
      if (cselib_preserve_constants
	  && find_reg_note (insn, REG_SETJMP, NULL))
	{
	  cselib_preserve_only_values ();
	  cselib_reset_table (next_uid);
	}
    }

  /* On a setter of the hard frame pointer, if frame_pointer_needed,
     invalidate stack_pointer_rtx, so that sp and {,h}fp based
     VALUEs are distinct.  */
  if (reload_completed
      && frame_pointer_needed
      && fp_setter_insn (insn))
    cselib_invalidate_rtx (stack_pointer_rtx);

  cselib_current_insn = NULL;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
	 quadratic behavior for very large hashtables with very few
	 useless elements.  */
      && ((unsigned int) n_useless_values
	  > (cselib_hash_table->elements () - n_debug_values) / 4))
    remove_useless_values ();
}
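
/* A minimal sketch of how a client pass typically drives the routines
   above (a hypothetical caller; the real clients are passes such as
   DSE and var-tracking):

     cselib_init (CSELIB_RECORD_MEMORY);
     basic_block bb;
     rtx_insn *insn;
     FOR_EACH_BB_FN (bb, cfun)
       FOR_BB_INSNS (bb, insn)
	 cselib_process_insn (insn);
     cselib_finish ();

   Queries via cselib_lookup are interleaved with the insn walk;
   labels must be fed through cselib_process_insn too, since that is
   where the table reset at a CODE_LABEL happens.  */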

/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init (int record_what)
{
  elt_list_pool = create_alloc_pool ("elt_list",
				     sizeof (struct elt_list), 10);
  elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
					 sizeof (struct elt_loc_list), 10);
  cselib_val_pool = create_alloc_pool ("cselib_val_list",
				       sizeof (cselib_val), 10);
  value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
  cselib_any_perm_equivs = false;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values to allow expensive clearing of the whole thing.
     Reallocate it however if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Some space for newly emitted instructions so we don't end up
	 reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  cselib_hash_table = new hash_table<cselib_hasher> (31);
  if (cselib_preserve_constants)
    cselib_preserved_hash_table = new hash_table<cselib_hasher> (31);
  next_uid = 1;
}

/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  bool preserved = cselib_preserve_constants;
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cselib_any_perm_equivs = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  free_alloc_pool (elt_list_pool);
  free_alloc_pool (elt_loc_list_pool);
  free_alloc_pool (cselib_val_pool);
  free_alloc_pool (value_pool);
  cselib_clear_table ();
  delete cselib_hash_table;
  cselib_hash_table = NULL;
  if (preserved)
    delete cselib_preserved_hash_table;
  cselib_preserved_hash_table = NULL;
  free (used_regs);
  used_regs = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}
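
/* A note on the RECORD_WHAT flags taken by cselib_init above: a pass
   that only needs register equivalences can pass 0; one that wants
   MEM tracking ORs in CSELIB_RECORD_MEMORY, and var-tracking
   additionally passes CSELIB_PRESERVE_CONSTANTS so that
   constant-valued VALUEs survive table resets.  */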

/* Dump the cselib_val *X to FILE *OUT.  */

int
dump_cselib_val (cselib_val **x, FILE *out)
{
  cselib_val *v = *x;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" locs:", out);
      do
	{
	  if (l->setting_insn)
	    fprintf (out, "\n  from insn %i ",
		     INSN_UID (l->setting_insn));
	  else
	    fprintf (out, "\n   ");
	  print_inline_rtx (out, l->loc, 4);
	}
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" addr list:", out);
      do
	{
	  fputs ("\n  ", out);
	  print_inline_rtx (out, e->elt->val_rtx, 2);
	}
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}

/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
  fprintf (out, "cselib preserved hash table:\n");
  cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %u\n", next_uid);
}

#include "gt-cselib.h"