1 /* Common subexpression elimination library for GNU compiler.
2    Copyright (C) 1987-2017 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "df.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "regs.h"
31 #include "emit-rtl.h"
32 #include "dumpfile.h"
33 #include "cselib.h"
34 #include "params.h"
35 
36 /* A list of cselib_val structures.  */
37 struct elt_list
38 {
39   struct elt_list *next;
40   cselib_val *elt;
41 };
42 
43 static bool cselib_record_memory;
44 static bool cselib_preserve_constants;
45 static bool cselib_any_perm_equivs;
46 static inline void promote_debug_loc (struct elt_loc_list *l);
47 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
48 static void new_elt_loc_list (cselib_val *, rtx);
49 static void unchain_one_value (cselib_val *);
50 static void unchain_one_elt_list (struct elt_list **);
51 static void unchain_one_elt_loc_list (struct elt_loc_list **);
52 static void remove_useless_values (void);
53 static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
54 static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
55 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
56 static cselib_val *cselib_lookup_mem (rtx, int);
57 static void cselib_invalidate_regno (unsigned int, machine_mode);
58 static void cselib_invalidate_mem (rtx);
59 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
60 static void cselib_record_sets (rtx_insn *);
61 
62 struct expand_value_data
63 {
64   bitmap regs_active;
65   cselib_expand_callback callback;
66   void *callback_arg;
67   bool dummy;
68 };
69 
70 static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
71 
72 /* There are three ways in which cselib can look up an rtx:
73    - for a REG, the reg_values table (which is indexed by regno) is used
74    - for a MEM, we recursively look up its address and then follow the
75      addr_list of that value
76    - for everything else, we compute a hash value and go through the hash
77      table.  Since different rtx's can still have the same hash value,
78      this involves walking the table entries for a given value and comparing
79      the locations of the entries with the rtx we are looking up.  */
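/* For illustration only, here is a minimal sketch of how a client pass
   might drive the lookup interface; REG_RTX and MEM_RTX are hypothetical
   rtxes, while cselib_lookup and canonical_cselib_val are the real entry
   points declared in cselib.h:

     cselib_val *rv = cselib_lookup (reg_rtx, GET_MODE (reg_rtx),
				     0, VOIDmode);
     cselib_val *mv = cselib_lookup (mem_rtx, GET_MODE (mem_rtx),
				     0, VOIDmode);
     if (rv && mv
	 && canonical_cselib_val (rv) == canonical_cselib_val (mv))
       ... the register and the memory slot hold the same value ...

   Internally, the REG lookup goes through reg_values, the MEM lookup
   through the addr_list of its address value, and everything else
   through the hash table, as described above.  */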
80 
81 struct cselib_hasher : nofree_ptr_hash <cselib_val>
82 {
83   struct key {
84     /* The rtx value and its mode (needed separately for constant
85        integers).  */
86     machine_mode mode;
87     rtx x;
88     /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
89     machine_mode memmode;
90   };
91   typedef key *compare_type;
92   static inline hashval_t hash (const cselib_val *);
93   static inline bool equal (const cselib_val *, const key *);
94 };
95 
96 /* The hash function for our hash table.  The value is always computed with
97    cselib_hash_rtx when adding an element; this function just extracts the
98    hash value from a cselib_val structure.  */
99 
100 inline hashval_t
101 cselib_hasher::hash (const cselib_val *v)
102 {
103   return v->hash;
104 }
105 
106 /* The equality test for our hash table.  The first argument V is a table
107    element (i.e. a cselib_val), while the second arg X_ARG is a lookup
108    key carrying an rtx and its modes.  We know that all callers of
109    htab_find_slot_with_hash will wrap CONST_INTs into a CONST of an appropriate mode.  */
110 
111 inline bool
112 cselib_hasher::equal (const cselib_val *v, const key *x_arg)
113 {
114   struct elt_loc_list *l;
115   rtx x = x_arg->x;
116   machine_mode mode = x_arg->mode;
117   machine_mode memmode = x_arg->memmode;
118 
119   if (mode != GET_MODE (v->val_rtx))
120     return false;
121 
122   if (GET_CODE (x) == VALUE)
123     return x == v->val_rtx;
124 
125   /* We don't guarantee that distinct rtx's have different hash values,
126      so we need to do a comparison.  */
127   for (l = v->locs; l; l = l->next)
128     if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
129       {
130 	promote_debug_loc (l);
131 	return true;
132       }
133 
134   return false;
135 }
136 
137 /* A table that enables us to look up elts by their value.  */
138 static hash_table<cselib_hasher> *cselib_hash_table;
139 
140 /* A table to hold preserved values.  */
141 static hash_table<cselib_hasher> *cselib_preserved_hash_table;
142 
143 /* This is a global so we don't have to pass this through every function.
144    It is used in new_elt_loc_list to set SETTING_INSN.  */
145 static rtx_insn *cselib_current_insn;
146 
147 /* The unique id that the next created value will take.  */
148 static unsigned int next_uid;
149 
150 /* The number of registers we had when the varrays were last resized.  */
151 static unsigned int cselib_nregs;
152 
153 /* Count values without known locations, or with only locations that
154    wouldn't have been known except for debug insns.  Whenever this
155    grows too big, we remove these useless values from the table.
156 
157    Counting values with only debug values is a bit tricky.  We don't
158    want to increment n_useless_values when we create a value for a
159    debug insn, for this would get n_useless_values out of sync, but we
160    want to increment it if all locs in the list that were ever referenced
161    in nondebug insns are removed from the list.
162 
163    In the general case, once we do that, we'd have to stop accepting
164    nondebug expressions in the loc list, to avoid having two values
165    equivalent that, without debug insns, would have been made into
166    separate values.  However, because debug insns never introduce
167    equivalences themselves (no assignments), the only means for
168    growing loc lists is through nondebug assignments.  If the locs
169    also happen to be referenced in debug insns, it will work just fine.
170 
171    A consequence of this is that there's at most one debug-only loc in
172    each loc list.  If we keep it in the first entry, testing whether
173    we have a debug-only loc list takes O(1).
174 
175    Furthermore, since any additional entry in a loc list containing a
176    debug loc would have to come from an assignment (nondebug) that
177    references both the initial debug loc and the newly-equivalent loc,
178    the initial debug loc would be promoted to a nondebug loc, and the
179    loc list would not contain debug locs any more.
180 
181    So the only case we have to be careful with in order to keep
182    n_useless_values in sync between debug and nondebug compilations is
183    to avoid incrementing n_useless_values when removing the single loc
184    from a value that turns out to not appear outside debug values.  We
185    increment n_useless_debug_values instead, and leave such values
186    alone until, for other reasons, we garbage-collect useless
187    values.  */
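/* As an illustration of the invariant above, a loc list can contain at
   most one debug-only loc and it is always the first entry:

     [debug-only loc] -> nondebug loc -> nondebug loc -> ...

   so promote_debug_loc below only has to inspect the head of the list
   to decide in O(1) whether a debug-only entry exists.  */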
188 static int n_useless_values;
189 static int n_useless_debug_values;
190 
191 /* Count values whose locs have been taken exclusively from debug
192    insns for the entire life of the value.  */
193 static int n_debug_values;
194 
195 /* Number of useless values before we remove them from the hash table.  */
196 #define MAX_USELESS_VALUES 32
197 
198 /* This table maps from register number to values.  It does not
199    contain pointers to cselib_val structures, but rather elt_lists.
200    The purpose is to be able to refer to the same register in
201    different modes.  The first element of the list defines the mode in
202    which the register was set; if the mode is unknown or the value is
203    no longer valid in that mode, ELT will be NULL for the first
204    element.  */
205 static struct elt_list **reg_values;
206 static unsigned int reg_values_size;
207 #define REG_VALUES(i) reg_values[i]
208 
209 /* The largest number of hard regs used by any entry added to the
210    REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
211 static unsigned int max_value_regs;
212 
213 /* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
214    in cselib_clear_table() for fast emptying.  */
215 static unsigned int *used_regs;
216 static unsigned int n_used_regs;
217 
218 /* We pass this to cselib_invalidate_mem to invalidate all of
219    memory for a non-const call instruction.  */
220 static GTY(()) rtx callmem;
221 
222 /* Set by discard_useless_locs if it deleted the last location of any
223    value.  */
224 static int values_became_useless;
225 
226 /* Used as stop element of the containing_mem list so we can check
227    presence in the list by checking the next pointer.  */
228 static cselib_val dummy_val;
229 
230 /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
231    that is constant through the whole function and should never be
232    eliminated.  */
233 static cselib_val *cfa_base_preserved_val;
234 static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;
235 
236 /* Used to list all values that contain a memory reference.
237    May or may not contain the useless values - the list is compacted
238    each time memory is invalidated.  */
239 static cselib_val *first_containing_mem = &dummy_val;
240 
241 static object_allocator<elt_list> elt_list_pool ("elt_list");
242 static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
243 static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");
244 
245 static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));
246 
247 /* If nonnull, cselib will call this function before freeing useless
248    VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
249 void (*cselib_discard_hook) (cselib_val *);
250 
251 /* If nonnull, cselib will call this function before recording sets or
252    even clobbering outputs of INSN.  All the recorded sets will be
253    represented in the array sets[n_sets].  new_val_min can be used to
254    tell whether values present in sets are introduced by this
255    instruction.  */
256 void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
257 				 int n_sets);
258 
259 #define PRESERVED_VALUE_P(RTX) \
260   (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
261 
262 #define SP_BASED_VALUE_P(RTX) \
263   (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)
264 
265 
266 
267 /* Allocate a struct elt_list and fill in its two elements with the
268    arguments.  */
269 
270 static inline struct elt_list *
271 new_elt_list (struct elt_list *next, cselib_val *elt)
272 {
273   elt_list *el = elt_list_pool.allocate ();
274   el->next = next;
275   el->elt = elt;
276   return el;
277 }
278 
279 /* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
280    list.  */
281 
282 static inline void
283 new_elt_loc_list (cselib_val *val, rtx loc)
284 {
285   struct elt_loc_list *el, *next = val->locs;
286 
287   gcc_checking_assert (!next || !next->setting_insn
288 		       || !DEBUG_INSN_P (next->setting_insn)
289 		       || cselib_current_insn == next->setting_insn);
290 
291   /* If we're creating the first loc in a debug insn context, we've
292      just created a debug value.  Count it.  */
293   if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
294     n_debug_values++;
295 
296   val = canonical_cselib_val (val);
297   next = val->locs;
298 
299   if (GET_CODE (loc) == VALUE)
300     {
301       loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;
302 
303       gcc_checking_assert (PRESERVED_VALUE_P (loc)
304 			   == PRESERVED_VALUE_P (val->val_rtx));
305 
306       if (val->val_rtx == loc)
307 	return;
308       else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
309 	{
310 	  /* Reverse the insertion.  */
311 	  new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
312 	  return;
313 	}
314 
315       gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);
316 
317       if (CSELIB_VAL_PTR (loc)->locs)
318 	{
319 	  /* Bring all locs from LOC to VAL.  */
320 	  for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
321 	    {
322 	      /* Adjust values that have LOC as canonical so that VAL
323 		 becomes their canonical.  */
324 	      if (el->loc && GET_CODE (el->loc) == VALUE)
325 		{
326 		  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
327 				       == loc);
328 		  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
329 		}
330 	    }
331 	  el->next = val->locs;
332 	  next = val->locs = CSELIB_VAL_PTR (loc)->locs;
333 	}
334 
335       if (CSELIB_VAL_PTR (loc)->addr_list)
336 	{
337 	  /* Bring the addr_list into the canonical node.  */
338 	  struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
339 	  while (last->next)
340 	    last = last->next;
341 	  last->next = val->addr_list;
342 	  val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
343 	  CSELIB_VAL_PTR (loc)->addr_list = NULL;
344 	}
345 
346       if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
347 	  && val->next_containing_mem == NULL)
348 	{
349 	  /* Add VAL to the containing_mem list after LOC.  LOC will
350 	     be removed when we notice it doesn't contain any
351 	     MEMs.  */
352 	  val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
353 	  CSELIB_VAL_PTR (loc)->next_containing_mem = val;
354 	}
355 
356       /* Chain LOC back to VAL.  */
357       el = elt_loc_list_pool.allocate ();
358       el->loc = val->val_rtx;
359       el->setting_insn = cselib_current_insn;
360       el->next = NULL;
361       CSELIB_VAL_PTR (loc)->locs = el;
362     }
363 
364   el = elt_loc_list_pool.allocate ();
365   el->loc = loc;
366   el->setting_insn = cselib_current_insn;
367   el->next = next;
368   val->locs = el;
369 }
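/* A sketch of the merge performed above when LOC is itself a VALUE
   (the uids are illustrative): merging val(uid 3) with val(uid 5)
   keeps uid 3, the one created first, as the canonical node; the locs
   and addr_list of uid 5 are spliced into uid 3, and uid 5 is left
   with the single loc val(uid 3), so canonical_cselib_val resolves
   uid 5 to uid 3 from then on.  */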
370 
371 /* Promote loc L to a nondebug cselib_current_insn if L is marked as
372    originating from a debug insn, maintaining the debug values
373    count.  */
374 
375 static inline void
376 promote_debug_loc (struct elt_loc_list *l)
377 {
378   if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
379       && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
380     {
381       n_debug_values--;
382       l->setting_insn = cselib_current_insn;
383       if (cselib_preserve_constants && l->next)
384 	{
385 	  gcc_assert (l->next->setting_insn
386 		      && DEBUG_INSN_P (l->next->setting_insn)
387 		      && !l->next->next);
388 	  l->next->setting_insn = cselib_current_insn;
389 	}
390       else
391 	gcc_assert (!l->next);
392     }
393 }
394 
395 /* The elt_list at *PL is no longer needed.  Unchain it and free its
396    storage.  */
397 
398 static inline void
399 unchain_one_elt_list (struct elt_list **pl)
400 {
401   struct elt_list *l = *pl;
402 
403   *pl = l->next;
404   elt_list_pool.remove (l);
405 }
406 
407 /* Likewise for elt_loc_lists.  */
408 
409 static void
410 unchain_one_elt_loc_list (struct elt_loc_list **pl)
411 {
412   struct elt_loc_list *l = *pl;
413 
414   *pl = l->next;
415   elt_loc_list_pool.remove (l);
416 }
417 
418 /* Likewise for cselib_vals.  This also frees the addr_list associated with
419    V.  */
420 
421 static void
422 unchain_one_value (cselib_val *v)
423 {
424   while (v->addr_list)
425     unchain_one_elt_list (&v->addr_list);
426 
427   cselib_val_pool.remove (v);
428 }
429 
430 /* Remove all entries from the hash table.  Also used during
431    initialization.  */
432 
433 void
434 cselib_clear_table (void)
435 {
436   cselib_reset_table (1);
437 }
438 
439 /* Return TRUE if V is a constant, a function invariant or a VALUE
440    equivalence; FALSE otherwise.  */
441 
442 static bool
443 invariant_or_equiv_p (cselib_val *v)
444 {
445   struct elt_loc_list *l;
446 
447   if (v == cfa_base_preserved_val)
448     return true;
449 
450   /* Keep VALUE equivalences around.  */
451   for (l = v->locs; l; l = l->next)
452     if (GET_CODE (l->loc) == VALUE)
453       return true;
454 
455   if (v->locs != NULL
456       && v->locs->next == NULL)
457     {
458       if (CONSTANT_P (v->locs->loc)
459 	  && (GET_CODE (v->locs->loc) != CONST
460 	      || !references_value_p (v->locs->loc, 0)))
461 	return true;
462       /* Although a debug expr may be bound to different expressions,
463 	 we can preserve it as if it were constant, to get unification
464 	 and proper merging within var-tracking.  */
465       if (GET_CODE (v->locs->loc) == DEBUG_EXPR
466 	  || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
467 	  || GET_CODE (v->locs->loc) == ENTRY_VALUE
468 	  || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
469 	return true;
470 
471       /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
472       if (GET_CODE (v->locs->loc) == PLUS
473 	  && CONST_INT_P (XEXP (v->locs->loc, 1))
474 	  && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
475 	  && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
476 	return true;
477     }
478 
479   return false;
480 }
481 
482 /* Remove from hash table all VALUEs except constants, function
483    invariants and VALUE equivalences.  */
484 
485 int
486 preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
487 {
488   cselib_val *v = *x;
489 
490   if (invariant_or_equiv_p (v))
491     {
492       cselib_hasher::key lookup = {
493 	GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
494       };
495       cselib_val **slot
496 	= cselib_preserved_hash_table->find_slot_with_hash (&lookup,
497 							   v->hash, INSERT);
498       gcc_assert (!*slot);
499       *slot = v;
500     }
501 
502   cselib_hash_table->clear_slot (x);
503 
504   return 1;
505 }
506 
507 /* Remove all entries from the hash table, arranging for the next
508    value to be numbered NUM.  */
509 
510 void
511 cselib_reset_table (unsigned int num)
512 {
513   unsigned int i;
514 
515   max_value_regs = 0;
516 
517   if (cfa_base_preserved_val)
518     {
519       unsigned int regno = cfa_base_preserved_regno;
520       unsigned int new_used_regs = 0;
521       for (i = 0; i < n_used_regs; i++)
522 	if (used_regs[i] == regno)
523 	  {
524 	    new_used_regs = 1;
525 	    continue;
526 	  }
527 	else
528 	  REG_VALUES (used_regs[i]) = 0;
529       gcc_assert (new_used_regs == 1);
530       n_used_regs = new_used_regs;
531       used_regs[0] = regno;
532       max_value_regs
533 	= hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
534     }
535   else
536     {
537       for (i = 0; i < n_used_regs; i++)
538 	REG_VALUES (used_regs[i]) = 0;
539       n_used_regs = 0;
540     }
541 
542   if (cselib_preserve_constants)
543     cselib_hash_table->traverse <void *, preserve_constants_and_equivs>
544       (NULL);
545   else
546     {
547       cselib_hash_table->empty ();
548       gcc_checking_assert (!cselib_any_perm_equivs);
549     }
550 
551   n_useless_values = 0;
552   n_useless_debug_values = 0;
553   n_debug_values = 0;
554 
555   next_uid = num;
556 
557   first_containing_mem = &dummy_val;
558 }
559 
560 /* Return the number of the next value that will be generated.  */
561 
562 unsigned int
563 cselib_get_next_uid (void)
564 {
565   return next_uid;
566 }
567 
568 /* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
569    INSERTing if requested.  When X is part of the address of a MEM,
570    MEMMODE should specify the mode of the MEM.  */
571 
572 static cselib_val **
573 cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
574 		  enum insert_option insert, machine_mode memmode)
575 {
576   cselib_val **slot = NULL;
577   cselib_hasher::key lookup = { mode, x, memmode };
578   if (cselib_preserve_constants)
579     slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
580 							     NO_INSERT);
581   if (!slot)
582     slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
583   return slot;
584 }
585 
586 /* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
587    only return true for values which point to a cselib_val whose locs
588    field is null and that is not preserved, which implies the cselib_val
589    will be removed.  */
590 
591 int
592 references_value_p (const_rtx x, int only_useless)
593 {
594   const enum rtx_code code = GET_CODE (x);
595   const char *fmt = GET_RTX_FORMAT (code);
596   int i, j;
597 
598   if (GET_CODE (x) == VALUE
599       && (! only_useless ||
600 	  (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
601     return 1;
602 
603   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
604     {
605       if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
606 	return 1;
607       else if (fmt[i] == 'E')
608 	for (j = 0; j < XVECLEN (x, i); j++)
609 	  if (references_value_p (XVECEXP (x, i, j), only_useless))
610 	    return 1;
611     }
612 
613   return 0;
614 }
615 
616 /* For all locations found in X, delete locations that reference useless
617    values (i.e. values without any location).  Called through
618    htab_traverse.  */
619 
620 int
621 discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
622 {
623   cselib_val *v = *x;
624   struct elt_loc_list **p = &v->locs;
625   bool had_locs = v->locs != NULL;
626   rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
627 
628   while (*p)
629     {
630       if (references_value_p ((*p)->loc, 1))
631 	unchain_one_elt_loc_list (p);
632       else
633 	p = &(*p)->next;
634     }
635 
636   if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
637     {
638       if (setting_insn && DEBUG_INSN_P (setting_insn))
639 	n_useless_debug_values++;
640       else
641 	n_useless_values++;
642       values_became_useless = 1;
643     }
644   return 1;
645 }
646 
647 /* If X is a value with no locations, remove it from the hashtable.  */
648 
649 int
650 discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
651 {
652   cselib_val *v = *x;
653 
654   if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
655     {
656       if (cselib_discard_hook)
657 	cselib_discard_hook (v);
658 
659       CSELIB_VAL_PTR (v->val_rtx) = NULL;
660       cselib_hash_table->clear_slot (x);
661       unchain_one_value (v);
662       n_useless_values--;
663     }
664 
665   return 1;
666 }
667 
668 /* Clean out useless values (i.e. those which no longer have locations
669    associated with them) from the hash table.  */
670 
671 static void
672 remove_useless_values (void)
673 {
674   cselib_val **p, *v;
675 
676   /* First pass: eliminate locations that reference useless values.  That in
677      turn can make more values useless.  */
678   do
679     {
680       values_became_useless = 0;
681       cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
682     }
683   while (values_became_useless);
684 
685   /* Second pass: actually remove the values.  */
686 
687   p = &first_containing_mem;
688   for (v = *p; v != &dummy_val; v = v->next_containing_mem)
689     if (v->locs && v == canonical_cselib_val (v))
690       {
691 	*p = v;
692 	p = &(*p)->next_containing_mem;
693       }
694   *p = &dummy_val;
695 
696   n_useless_values += n_useless_debug_values;
697   n_debug_values -= n_useless_debug_values;
698   n_useless_debug_values = 0;
699 
700   cselib_hash_table->traverse <void *, discard_useless_values> (NULL);
701 
702   gcc_assert (!n_useless_values);
703 }
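/* The two passes above are needed because uselessness propagates: a
   VALUE whose only loc is, say, (plus (value V) (const_int 4)) becomes
   useless as soon as V does, which is why the discard_useless_locs
   traversal is repeated until values_became_useless stays clear before
   discard_useless_values frees anything.  */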
704 
705 /* Arrange for a value to not be removed from the hash table even if
706    it becomes useless.  */
707 
708 void
709 cselib_preserve_value (cselib_val *v)
710 {
711   PRESERVED_VALUE_P (v->val_rtx) = 1;
712 }
713 
714 /* Test whether a value is preserved.  */
715 
716 bool
717 cselib_preserved_value_p (cselib_val *v)
718 {
719   return PRESERVED_VALUE_P (v->val_rtx);
720 }
721 
722 /* Arrange for a REG value to be assumed constant through the whole function,
723    never invalidated and preserved across cselib_reset_table calls.  */
724 
725 void
726 cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
727 {
728   if (cselib_preserve_constants
729       && v->locs
730       && REG_P (v->locs->loc))
731     {
732       cfa_base_preserved_val = v;
733       cfa_base_preserved_regno = regno;
734     }
735 }
736 
737 /* Clean all non-constant expressions in the hash table, but retain
738    their values.  */
739 
740 void
741 cselib_preserve_only_values (void)
742 {
743   int i;
744 
745   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
746     cselib_invalidate_regno (i, reg_raw_mode[i]);
747 
748   cselib_invalidate_mem (callmem);
749 
750   remove_useless_values ();
751 
752   gcc_assert (first_containing_mem == &dummy_val);
753 }
754 
755 /* Arrange for a value to be marked as based on stack pointer
756    for find_base_term purposes.  */
757 
758 void
759 cselib_set_value_sp_based (cselib_val *v)
760 {
761   SP_BASED_VALUE_P (v->val_rtx) = 1;
762 }
763 
764 /* Test whether a value is based on stack pointer for
765    find_base_term purposes.  */
766 
767 bool
768 cselib_sp_based_value_p (cselib_val *v)
769 {
770   return SP_BASED_VALUE_P (v->val_rtx);
771 }
772 
773 /* Return the mode in which a register was last set.  If X is not a
774    register, return its mode.  If the mode in which the register was
775    set is not known, or the value was already clobbered, return
776    VOIDmode.  */
777 
778 machine_mode
779 cselib_reg_set_mode (const_rtx x)
780 {
781   if (!REG_P (x))
782     return GET_MODE (x);
783 
784   if (REG_VALUES (REGNO (x)) == NULL
785       || REG_VALUES (REGNO (x))->elt == NULL)
786     return VOIDmode;
787 
788   return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
789 }
790 
791 /* If X is a PLUS or an autoinc operation, expand the operation,
792    storing the offset, if any, in *OFF.  */
793 
794 static rtx
795 autoinc_split (rtx x, rtx *off, machine_mode memmode)
796 {
797   switch (GET_CODE (x))
798     {
799     case PLUS:
800       *off = XEXP (x, 1);
801       return XEXP (x, 0);
802 
803     case PRE_DEC:
804       if (memmode == VOIDmode)
805 	return x;
806 
807       *off = GEN_INT (-GET_MODE_SIZE (memmode));
808       return XEXP (x, 0);
809 
810     case PRE_INC:
811       if (memmode == VOIDmode)
812 	return x;
813 
814       *off = GEN_INT (GET_MODE_SIZE (memmode));
815       return XEXP (x, 0);
816 
817     case PRE_MODIFY:
818       return XEXP (x, 1);
819 
820     case POST_DEC:
821     case POST_INC:
822     case POST_MODIFY:
823       return XEXP (x, 0);
824 
825     default:
826       return x;
827     }
828 }
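/* For example, on a hypothetical target with 4-byte SImode, splitting
   the address of (mem:SI (pre_dec:P (reg SP))) with MEMMODE == SImode
   yields the base (reg SP) and *OFF = (const_int -4), so the address
   behaves like (plus (reg SP) (const_int -4)) for hashing and
   comparison; with MEMMODE == VOIDmode the autoinc rtx is returned
   unchanged.  */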
829 
830 /* Return nonzero if we can prove that X and Y contain the same value,
831    taking our gathered information into account.  MEMMODE holds the
832    mode of the enclosing MEM, if any, as required to deal with autoinc
833    addressing modes.  If X and Y are not (known to be) part of
834    addresses, MEMMODE should be VOIDmode.  */
835 
836 int
837 rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
838 {
839   enum rtx_code code;
840   const char *fmt;
841   int i;
842 
843   if (REG_P (x) || MEM_P (x))
844     {
845       cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);
846 
847       if (e)
848 	x = e->val_rtx;
849     }
850 
851   if (REG_P (y) || MEM_P (y))
852     {
853       cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);
854 
855       if (e)
856 	y = e->val_rtx;
857     }
858 
859   if (x == y)
860     return 1;
861 
862   if (GET_CODE (x) == VALUE)
863     {
864       cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
865       struct elt_loc_list *l;
866 
867       if (GET_CODE (y) == VALUE)
868 	return e == canonical_cselib_val (CSELIB_VAL_PTR (y));
869 
870       if (depth == 128)
871 	return 0;
872 
873       for (l = e->locs; l; l = l->next)
874 	{
875 	  rtx t = l->loc;
876 
877 	  /* Avoid infinite recursion.  We know we have the canonical
878 	     value, so we can just skip any values in the equivalence
879 	     list.  */
880 	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
881 	    continue;
882 	  else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
883 	    return 1;
884 	}
885 
886       return 0;
887     }
888   else if (GET_CODE (y) == VALUE)
889     {
890       cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
891       struct elt_loc_list *l;
892 
893       if (depth == 128)
894 	return 0;
895 
896       for (l = e->locs; l; l = l->next)
897 	{
898 	  rtx t = l->loc;
899 
900 	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
901 	    continue;
902 	  else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
903 	    return 1;
904 	}
905 
906       return 0;
907     }
908 
909   if (GET_MODE (x) != GET_MODE (y))
910     return 0;
911 
912   if (GET_CODE (x) != GET_CODE (y))
913     {
914       rtx xorig = x, yorig = y;
915       rtx xoff = NULL, yoff = NULL;
916 
917       x = autoinc_split (x, &xoff, memmode);
918       y = autoinc_split (y, &yoff, memmode);
919 
920       if (!xoff != !yoff)
921 	return 0;
922 
923       if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
924 	return 0;
925 
926       /* Don't recurse if nothing changed.  */
927       if (x != xorig || y != yorig)
928 	return rtx_equal_for_cselib_1 (x, y, memmode, depth);
929 
930       return 0;
931     }
932 
933   /* These won't be handled correctly by the code below.  */
934   switch (GET_CODE (x))
935     {
936     CASE_CONST_UNIQUE:
937     case DEBUG_EXPR:
938       return 0;
939 
940     case DEBUG_IMPLICIT_PTR:
941       return DEBUG_IMPLICIT_PTR_DECL (x)
942 	     == DEBUG_IMPLICIT_PTR_DECL (y);
943 
944     case DEBUG_PARAMETER_REF:
945       return DEBUG_PARAMETER_REF_DECL (x)
946 	     == DEBUG_PARAMETER_REF_DECL (y);
947 
948     case ENTRY_VALUE:
949       /* ENTRY_VALUEs are function invariant, so it is undesirable to
950 	 use rtx_equal_for_cselib_1 to compare the operands.  */
951       return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
952 
953     case LABEL_REF:
954       return label_ref_label (x) == label_ref_label (y);
955 
956     case REG:
957       return REGNO (x) == REGNO (y);
958 
959     case MEM:
960       /* We have to compare any autoinc operations in the addresses
961 	 using this MEM's mode.  */
962       return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
963 				     depth);
964 
965     default:
966       break;
967     }
968 
969   code = GET_CODE (x);
970   fmt = GET_RTX_FORMAT (code);
971 
972   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
973     {
974       int j;
975 
976       switch (fmt[i])
977 	{
978 	case 'w':
979 	  if (XWINT (x, i) != XWINT (y, i))
980 	    return 0;
981 	  break;
982 
983 	case 'n':
984 	case 'i':
985 	  if (XINT (x, i) != XINT (y, i))
986 	    return 0;
987 	  break;
988 
989 	case 'V':
990 	case 'E':
991 	  /* Two vectors must have the same length.  */
992 	  if (XVECLEN (x, i) != XVECLEN (y, i))
993 	    return 0;
994 
995 	  /* And the corresponding elements must match.  */
996 	  for (j = 0; j < XVECLEN (x, i); j++)
997 	    if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
998 					  XVECEXP (y, i, j), memmode, depth))
999 	      return 0;
1000 	  break;
1001 
1002 	case 'e':
1003 	  if (i == 1
1004 	      && targetm.commutative_p (x, UNKNOWN)
1005 	      && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
1006 					 depth)
1007 	      && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
1008 					 depth))
1009 	    return 1;
1010 	  if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
1011 					depth))
1012 	    return 0;
1013 	  break;
1014 
1015 	case 'S':
1016 	case 's':
1017 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
1018 	    return 0;
1019 	  break;
1020 
1021 	case 'u':
1022 	  /* These are just backpointers, so they don't matter.  */
1023 	  break;
1024 
1025 	case '0':
1026 	case 't':
1027 	  break;
1028 
1029 	  /* It is believed that rtx's at this level will never
1030 	     contain anything but integers and other rtx's,
1031 	     except for within LABEL_REFs and SYMBOL_REFs.  */
1032 	default:
1033 	  gcc_unreachable ();
1034 	}
1035     }
1036   return 1;
1037 }
1038 
1039 /* Hash an rtx.  Return 0 if we couldn't hash the rtx.
1040    For registers and memory locations, we look up their cselib_val structure
1041    and return its VALUE element.
1042    Possible reasons for returning 0 are: the object is volatile, or we couldn't
1043    find a register or memory location in the table and CREATE is zero.  If
1044    CREATE is nonzero, table elts are created for regs and mem.
1045    N.B. this hash function returns the same hash value for RTXes that
1046    differ only in the order of operands, thus it is suitable for comparisons
1047    that take commutativity into account.
1048    If we wanted to also support associative rules, we'd have to use a different
1049    strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1).
1050    MEMMODE indicates the mode of an enclosing MEM, and it's only
1051    used to compute autoinc values.
1052    We used to have a MODE argument for hashing for CONST_INTs, but that
1053    didn't make sense, since it caused spurious hash differences between
1054     (set (reg:SI 1) (const_int))
1055     (plus:SI (reg:SI 2) (reg:SI 1))
1056    and
1057     (plus:SI (reg:SI 2) (const_int))
1058    If the mode is important in any context, it must be checked specifically
1059    in a comparison anyway, since relying on hash differences is unsafe.  */
1060 
1061 static unsigned int
1062 cselib_hash_rtx (rtx x, int create, machine_mode memmode)
1063 {
1064   cselib_val *e;
1065   int i, j;
1066   enum rtx_code code;
1067   const char *fmt;
1068   unsigned int hash = 0;
1069 
1070   code = GET_CODE (x);
1071   hash += (unsigned) code + (unsigned) GET_MODE (x);
1072 
1073   switch (code)
1074     {
1075     case VALUE:
1076       e = CSELIB_VAL_PTR (x);
1077       return e->hash;
1078 
1079     case MEM:
1080     case REG:
1081       e = cselib_lookup (x, GET_MODE (x), create, memmode);
1082       if (! e)
1083 	return 0;
1084 
1085       return e->hash;
1086 
1087     case DEBUG_EXPR:
1088       hash += ((unsigned) DEBUG_EXPR << 7)
1089 	      + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
1090       return hash ? hash : (unsigned int) DEBUG_EXPR;
1091 
1092     case DEBUG_IMPLICIT_PTR:
1093       hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
1094 	      + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
1095       return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;
1096 
1097     case DEBUG_PARAMETER_REF:
1098       hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
1099 	      + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
1100       return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;
1101 
1102     case ENTRY_VALUE:
1103       /* ENTRY_VALUEs are function invariant, thus try to avoid
1104 	 recursing on the argument if ENTRY_VALUE is one of the
1105 	 forms emitted by expand_debug_expr, otherwise
1106 	 ENTRY_VALUE hash would depend on the current value
1107 	 in some register or memory.  */
1108       if (REG_P (ENTRY_VALUE_EXP (x)))
1109 	hash += (unsigned int) REG
1110 		+ (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
1111 		+ (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
1112       else if (MEM_P (ENTRY_VALUE_EXP (x))
1113 	       && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
1114 	hash += (unsigned int) MEM
1115 		+ (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
1116 		+ (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
1117       else
1118 	hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
1119       return hash ? hash : (unsigned int) ENTRY_VALUE;
1120 
1121     case CONST_INT:
1122       hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
1123       return hash ? hash : (unsigned int) CONST_INT;
1124 
1125     case CONST_WIDE_INT:
1126       for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
1127 	hash += CONST_WIDE_INT_ELT (x, i);
1128       return hash;
1129 
1130     case CONST_DOUBLE:
1131       /* This is like the general case, except that it only counts
1132 	 the integers representing the constant.  */
1133       hash += (unsigned) code + (unsigned) GET_MODE (x);
1134       if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
1135 	hash += ((unsigned) CONST_DOUBLE_LOW (x)
1136 		 + (unsigned) CONST_DOUBLE_HIGH (x));
1137       else
1138 	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
1139       return hash ? hash : (unsigned int) CONST_DOUBLE;
1140 
1141     case CONST_FIXED:
1142       hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1143       hash += fixed_hash (CONST_FIXED_VALUE (x));
1144       return hash ? hash : (unsigned int) CONST_FIXED;
1145 
1146     case CONST_VECTOR:
1147       {
1148 	int units;
1149 	rtx elt;
1150 
1151 	units = CONST_VECTOR_NUNITS (x);
1152 
1153 	for (i = 0; i < units; ++i)
1154 	  {
1155 	    elt = CONST_VECTOR_ELT (x, i);
1156 	    hash += cselib_hash_rtx (elt, 0, memmode);
1157 	  }
1158 
1159 	return hash;
1160       }
1161 
1162       /* Assume there is only one rtx object for any given label.  */
1163     case LABEL_REF:
1164       /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1165 	 differences and differences between each stage's debugging dumps.  */
1166       hash += (((unsigned int) LABEL_REF << 7)
1167 	       + CODE_LABEL_NUMBER (label_ref_label (x)));
1168       return hash ? hash : (unsigned int) LABEL_REF;
1169 
1170     case SYMBOL_REF:
1171       {
1172 	/* Don't hash on the symbol's address to avoid bootstrap differences.
1173 	   Different hash values may cause expressions to be recorded in
1174 	   different orders and thus different registers to be used in the
1175 	   final assembler.  This also avoids differences in the dump files
1176 	   between various stages.  */
1177 	unsigned int h = 0;
1178 	const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1179 
1180 	while (*p)
1181 	  h += (h << 7) + *p++; /* ??? revisit */
1182 
1183 	hash += ((unsigned int) SYMBOL_REF << 7) + h;
1184 	return hash ? hash : (unsigned int) SYMBOL_REF;
1185       }
1186 
1187     case PRE_DEC:
1188     case PRE_INC:
1189       /* We can't compute these without knowing the MEM mode.  */
1190       gcc_assert (memmode != VOIDmode);
1191       i = GET_MODE_SIZE (memmode);
1192       if (code == PRE_DEC)
1193 	i = -i;
1194       /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
1195 	 like (mem:MEMMODE (plus (reg) (const_int I))).  */
1196       hash += (unsigned) PLUS - (unsigned)code
1197 	+ cselib_hash_rtx (XEXP (x, 0), create, memmode)
1198 	+ cselib_hash_rtx (GEN_INT (i), create, memmode);
1199       return hash ? hash : 1 + (unsigned) PLUS;
1200 
1201     case PRE_MODIFY:
1202       gcc_assert (memmode != VOIDmode);
1203       return cselib_hash_rtx (XEXP (x, 1), create, memmode);
1204 
1205     case POST_DEC:
1206     case POST_INC:
1207     case POST_MODIFY:
1208       gcc_assert (memmode != VOIDmode);
1209       return cselib_hash_rtx (XEXP (x, 0), create, memmode);
1210 
1211     case PC:
1212     case CC0:
1213     case CALL:
1214     case UNSPEC_VOLATILE:
1215       return 0;
1216 
1217     case ASM_OPERANDS:
1218       if (MEM_VOLATILE_P (x))
1219 	return 0;
1220 
1221       break;
1222 
1223     default:
1224       break;
1225     }
1226 
1227   i = GET_RTX_LENGTH (code) - 1;
1228   fmt = GET_RTX_FORMAT (code);
1229   for (; i >= 0; i--)
1230     {
1231       switch (fmt[i])
1232 	{
1233 	case 'e':
1234 	  {
1235 	    rtx tem = XEXP (x, i);
1236 	    unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);
1237 
1238 	    if (tem_hash == 0)
1239 	      return 0;
1240 
1241 	    hash += tem_hash;
1242 	  }
1243 	  break;
1244 	case 'E':
1245 	  for (j = 0; j < XVECLEN (x, i); j++)
1246 	    {
1247 	      unsigned int tem_hash
1248 		= cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
1249 
1250 	      if (tem_hash == 0)
1251 		return 0;
1252 
1253 	      hash += tem_hash;
1254 	    }
1255 	  break;
1256 
1257 	case 's':
1258 	  {
1259 	    const unsigned char *p = (const unsigned char *) XSTR (x, i);
1260 
1261 	    if (p)
1262 	      while (*p)
1263 		hash += *p++;
1264 	    break;
1265 	  }
1266 
1267 	case 'i':
1268 	  hash += XINT (x, i);
1269 	  break;
1270 
1271 	case '0':
1272 	case 't':
1273 	  /* unused */
1274 	  break;
1275 
1276 	default:
1277 	  gcc_unreachable ();
1278 	}
1279     }
1280 
1281   return hash ? hash : 1 + (unsigned int) GET_CODE (x);
1282 }
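/* The commutativity property documented before cselib_hash_rtx follows
   from the 'e' case above: operand hashes are combined with +, so e.g.
   (plus:SI (reg:SI 1) (reg:SI 2)) and (plus:SI (reg:SI 2) (reg:SI 1))
   hash identically; the swapped-operand check in rtx_equal_for_cselib_1
   then lets such pairs compare equal as well.  */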
1283 
1284 /* Create a new value structure for VALUE and initialize it.  The mode of the
1285    value is MODE.  */
1286 
1287 static inline cselib_val *
1288 new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
1289 {
1290   cselib_val *e = cselib_val_pool.allocate ();
1291 
1292   gcc_assert (hash);
1293   gcc_assert (next_uid);
1294 
1295   e->hash = hash;
1296   e->uid = next_uid++;
1297   /* We use an alloc pool to allocate this RTL construct because it
1298      accounts for about 8% of the overall memory usage.  We know
1299      precisely when we can have VALUE RTXen (when cselib is active)
1300      so we don't need to put them in garbage collected memory.
1301      ??? Why should a VALUE be an RTX in the first place?  */
1302   e->val_rtx = (rtx_def*) value_pool.allocate ();
1303   memset (e->val_rtx, 0, RTX_HDR_SIZE);
1304   PUT_CODE (e->val_rtx, VALUE);
1305   PUT_MODE (e->val_rtx, mode);
1306   CSELIB_VAL_PTR (e->val_rtx) = e;
1307   e->addr_list = 0;
1308   e->locs = 0;
1309   e->next_containing_mem = 0;
1310 
1311   if (dump_file && (dump_flags & TDF_CSELIB))
1312     {
1313       fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
1314       if (flag_dump_noaddr || flag_dump_unnumbered)
1315 	fputs ("# ", dump_file);
1316       else
1317 	fprintf (dump_file, "%p ", (void*)e);
1318       print_rtl_single (dump_file, x);
1319       fputc ('\n', dump_file);
1320     }
1321 
1322   return e;
1323 }
1324 
1325 /* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
1326    contains the data at this address.  X is a MEM that represents the
1327    value.  Update the two value structures to represent this situation.  */
1328 
1329 static void
1330 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
1331 {
1332   addr_elt = canonical_cselib_val (addr_elt);
1333   mem_elt = canonical_cselib_val (mem_elt);
1334 
1335   /* Avoid duplicates.  */
1336   addr_space_t as = MEM_ADDR_SPACE (x);
1337   for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
1338     if (MEM_P (l->loc)
1339 	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
1340         && MEM_ADDR_SPACE (l->loc) == as)
1341       {
1342 	promote_debug_loc (l);
1343 	return;
1344       }
1345 
1346   addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
1347   new_elt_loc_list (mem_elt,
1348 		    replace_equiv_address_nv (x, addr_elt->val_rtx));
1349   if (mem_elt->next_containing_mem == NULL)
1350     {
1351       mem_elt->next_containing_mem = first_containing_mem;
1352       first_containing_mem = mem_elt;
1353     }
1354 }
1355 
1356 /* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
1357    If CREATE, make a new one if we haven't seen it before.  */
1358 
1359 static cselib_val *
1360 cselib_lookup_mem (rtx x, int create)
1361 {
1362   machine_mode mode = GET_MODE (x);
1363   machine_mode addr_mode;
1364   cselib_val **slot;
1365   cselib_val *addr;
1366   cselib_val *mem_elt;
1367 
1368   if (MEM_VOLATILE_P (x) || mode == BLKmode
1369       || !cselib_record_memory
1370       || (FLOAT_MODE_P (mode) && flag_float_store))
1371     return 0;
1372 
1373   addr_mode = GET_MODE (XEXP (x, 0));
1374   if (addr_mode == VOIDmode)
1375     addr_mode = Pmode;
1376 
1377   /* Look up the value for the address.  */
1378   addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
1379   if (! addr)
1380     return 0;
1381   addr = canonical_cselib_val (addr);
1382 
1383   /* Find a value that describes a value of our mode at that address.  */
1384   addr_space_t as = MEM_ADDR_SPACE (x);
1385   for (elt_list *l = addr->addr_list; l; l = l->next)
1386     if (GET_MODE (l->elt->val_rtx) == mode)
1387       {
1388 	for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
1389 	  if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
1390 	    {
1391 	      promote_debug_loc (l->elt->locs);
1392 	      return l->elt;
1393 	    }
1394       }
1395 
1396   if (! create)
1397     return 0;
1398 
1399   mem_elt = new_cselib_val (next_uid, mode, x);
1400   add_mem_for_addr (addr, mem_elt, x);
1401   slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
1402   *slot = mem_elt;
1403   return mem_elt;
1404 }
1405 
1406 /* Search through the possible substitutions in P.  We prefer a non-reg
1407    substitution because this allows us to expand the tree further.  If
1408    we find just a reg, take the lowest regno.  There may be several
1409    non-reg results; we just take the first one because they will all
1410    expand to the same place.  */
1411 
1412 static rtx
1413 expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
1414 	    int max_depth)
1415 {
1416   rtx reg_result = NULL;
1417   unsigned int regno = UINT_MAX;
1418   struct elt_loc_list *p_in = p;
1419 
1420   for (; p; p = p->next)
1421     {
1422       /* Return these right away to avoid returning stack pointer based
1423 	 expressions for frame pointer and vice versa, which is something
1424 	 that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
1425 	 for more details.  */
1426       if (REG_P (p->loc)
1427 	  && (REGNO (p->loc) == STACK_POINTER_REGNUM
1428 	      || REGNO (p->loc) == FRAME_POINTER_REGNUM
1429 	      || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
1430 	      || REGNO (p->loc) == cfa_base_preserved_regno))
1431 	return p->loc;
1432       /* Avoid infinite recursion trying to expand a reg into the
1433 	 same reg.  */
1434       if ((REG_P (p->loc))
1435 	  && (REGNO (p->loc) < regno)
1436 	  && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
1437 	{
1438 	  reg_result = p->loc;
1439 	  regno = REGNO (p->loc);
1440 	}
1441       /* Avoid infinite recursion and do not try to expand the
1442 	 value.  */
1443       else if (GET_CODE (p->loc) == VALUE
1444 	       && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1445 	continue;
1446       else if (!REG_P (p->loc))
1447 	{
1448 	  rtx result, note;
1449 	  if (dump_file && (dump_flags & TDF_CSELIB))
1450 	    {
1451 	      print_inline_rtx (dump_file, p->loc, 0);
1452 	      fprintf (dump_file, "\n");
1453 	    }
1454 	  if (GET_CODE (p->loc) == LO_SUM
1455 	      && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1456 	      && p->setting_insn
1457 	      && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1458 	      && XEXP (note, 0) == XEXP (p->loc, 1))
1459 	    return XEXP (p->loc, 1);
1460 	  result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
1461 	  if (result)
1462 	    return result;
1463 	}
1464 
1465     }
1466 
1467   if (regno != UINT_MAX)
1468     {
1469       rtx result;
1470       if (dump_file && (dump_flags & TDF_CSELIB))
1471 	fprintf (dump_file, "r%d\n", regno);
1472 
1473       result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
1474       if (result)
1475 	return result;
1476     }
1477 
1478   if (dump_file && (dump_flags & TDF_CSELIB))
1479     {
1480       if (reg_result)
1481 	{
1482 	  print_inline_rtx (dump_file, reg_result, 0);
1483 	  fprintf (dump_file, "\n");
1484 	}
1485       else
1486 	fprintf (dump_file, "NULL\n");
1487     }
1488   return reg_result;
1489 }
1490 
1491 
1492 /* Forward substitute and expand an expression out to its roots.
1493    This is the opposite of common subexpression elimination.  Because
1494    local value numbering is such a weak optimization, the expanded
1495    expression is pretty much unique (not from a pointer-equality point
1496    of view but from a tree-shape point of view).
1497 
1498    This function returns NULL if the expansion fails.  The expansion
1499    will fail if there is no value number for one of the operands or if
1500    one of the operands has been overwritten between the current insn
1501    and the beginning of the basic block.  For instance x has no
1502    expansion in:
1503 
1504    r1 <- r1 + 3
1505    x <- r1 + 8
1506 
1507    REGS_ACTIVE is a scratch bitmap that should be clear when passed in.
1508    It is clear on return.  */
1509 
1510 rtx
1511 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1512 {
1513   struct expand_value_data evd;
1514 
1515   evd.regs_active = regs_active;
1516   evd.callback = NULL;
1517   evd.callback_arg = NULL;
1518   evd.dummy = false;
1519 
1520   return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1521 }
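/* A minimal usage sketch, modeled on the way dse.c expands a MEM
   address (MEM and the depth limit of 5 are illustrative assumptions):

     bitmap regs_active = BITMAP_ALLOC (&reg_obstack);
     rtx address = cselib_expand_value_rtx (XEXP (mem, 0), regs_active, 5);
     if (address)
       ... ADDRESS is the MEM address rewritten in terms of values live
	   at the start of the basic block ...
     BITMAP_FREE (regs_active);  */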
1522 
1523 /* Same as cselib_expand_value_rtx, but using a callback to try to
1524    resolve some expressions.  The CB function should return ORIG if it
1525    can't or does not want to deal with a certain RTX.  Any other
1526    return value, including NULL, will be used as the expansion for
1527    VALUE, without any further changes.  */
1528 
1529 rtx
1530 cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1531 			    cselib_expand_callback cb, void *data)
1532 {
1533   struct expand_value_data evd;
1534 
1535   evd.regs_active = regs_active;
1536   evd.callback = cb;
1537   evd.callback_arg = data;
1538   evd.dummy = false;
1539 
1540   return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1541 }
1542 
1543 /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1544    or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
1545    would return NULL or non-NULL, without allocating new rtx.  */
1546 
1547 bool
1548 cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1549 				  cselib_expand_callback cb, void *data)
1550 {
1551   struct expand_value_data evd;
1552 
1553   evd.regs_active = regs_active;
1554   evd.callback = cb;
1555   evd.callback_arg = data;
1556   evd.dummy = true;
1557 
1558   return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1559 }
1560 
1561 /* Internal implementation of cselib_expand_value_rtx and
1562    cselib_expand_value_rtx_cb.  */
1563 
1564 static rtx
1565 cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1566 			   int max_depth)
1567 {
1568   rtx copy, scopy;
1569   int i, j;
1570   RTX_CODE code;
1571   const char *format_ptr;
1572   machine_mode mode;
1573 
1574   code = GET_CODE (orig);
1575 
1576   /* In the context of dse, if we end up expanding into a huge tree, we
1577      will not have a useful address, so we might as well just give up
1578      quickly.  */
1579   if (max_depth <= 0)
1580     return NULL;
1581 
1582   switch (code)
1583     {
1584     case REG:
1585       {
1586 	struct elt_list *l = REG_VALUES (REGNO (orig));
1587 
1588 	if (l && l->elt == NULL)
1589 	  l = l->next;
1590 	for (; l; l = l->next)
1591 	  if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1592 	    {
1593 	      rtx result;
1594 	      unsigned regno = REGNO (orig);
1595 
1596 	      /* The only thing that we are not willing to do (this
1597 		 is a requirement of dse, and if other potential uses
1598 		 need this function we should add a parm to control
1599 		 it) is that we will not substitute the
1600 		 STACK_POINTER_REGNUM, FRAME_POINTER or the
1601 		 HARD_FRAME_POINTER.
1602 
1603 		 These expansions confuse the code that notices that
1604 		 stores into the frame go dead at the end of the
1605 		 function and that the frame is not affected by calls
1606 		 to subroutines.  If you allow the
1607 		 STACK_POINTER_REGNUM substitution, then dse will
1608 		 think that parameter pushing also goes dead, which is
1609 		 wrong.  If you allow the FRAME_POINTER or the
1610 		 HARD_FRAME_POINTER then you lose the opportunity to
1611 		 make the frame assumptions.  */
1612 	      if (regno == STACK_POINTER_REGNUM
1613 		  || regno == FRAME_POINTER_REGNUM
1614 		  || regno == HARD_FRAME_POINTER_REGNUM
1615 		  || regno == cfa_base_preserved_regno)
1616 		return orig;
1617 
1618 	      bitmap_set_bit (evd->regs_active, regno);
1619 
1620 	      if (dump_file && (dump_flags & TDF_CSELIB))
1621 		fprintf (dump_file, "expanding: r%d into: ", regno);
1622 
1623 	      result = expand_loc (l->elt->locs, evd, max_depth);
1624 	      bitmap_clear_bit (evd->regs_active, regno);
1625 
1626 	      if (result)
1627 		return result;
1628 	      else
1629 		return orig;
1630 	    }
1631 	return orig;
1632       }
1633 
1634     CASE_CONST_ANY:
1635     case SYMBOL_REF:
1636     case CODE_LABEL:
1637     case PC:
1638     case CC0:
1639     case SCRATCH:
1640       /* SCRATCH rtxes must be shared because they represent distinct values.  */
1641       return orig;
1642     case CLOBBER:
1643       if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1644 	return orig;
1645       break;
1646 
1647     case CONST:
1648       if (shared_const_p (orig))
1649 	return orig;
1650       break;
1651 
1652     case SUBREG:
1653       {
1654 	rtx subreg;
1655 
1656 	if (evd->callback)
1657 	  {
1658 	    subreg = evd->callback (orig, evd->regs_active, max_depth,
1659 				    evd->callback_arg);
1660 	    if (subreg != orig)
1661 	      return subreg;
1662 	  }
1663 
1664 	subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1665 					    max_depth - 1);
1666 	if (!subreg)
1667 	  return NULL;
1668 	scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1669 				     GET_MODE (SUBREG_REG (orig)),
1670 				     SUBREG_BYTE (orig));
1671 	if (scopy == NULL
1672 	    || (GET_CODE (scopy) == SUBREG
1673 		&& !REG_P (SUBREG_REG (scopy))
1674 		&& !MEM_P (SUBREG_REG (scopy))))
1675 	  return NULL;
1676 
1677 	return scopy;
1678       }
1679 
1680     case VALUE:
1681       {
1682 	rtx result;
1683 
1684 	if (dump_file && (dump_flags & TDF_CSELIB))
1685 	  {
1686 	    fputs ("\nexpanding ", dump_file);
1687 	    print_rtl_single (dump_file, orig);
1688 	    fputs (" into...", dump_file);
1689 	  }
1690 
1691 	if (evd->callback)
1692 	  {
1693 	    result = evd->callback (orig, evd->regs_active, max_depth,
1694 				    evd->callback_arg);
1695 
1696 	    if (result != orig)
1697 	      return result;
1698 	  }
1699 
1700 	result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
1701 	return result;
1702       }
1703 
1704     case DEBUG_EXPR:
1705       if (evd->callback)
1706 	return evd->callback (orig, evd->regs_active, max_depth,
1707 			      evd->callback_arg);
1708       return orig;
1709 
1710     default:
1711       break;
1712     }
1713 
1714   /* Copy the various flags, fields, and other information.  We assume
1715      that all fields need copying, and then clear the fields that should
1716      not be copied.  That is the sensible default behavior, and forces
1717      us to explicitly document why we are *not* copying a flag.  */
1718   if (evd->dummy)
1719     copy = NULL;
1720   else
1721     copy = shallow_copy_rtx (orig);
1722 
1723   format_ptr = GET_RTX_FORMAT (code);
1724 
1725   for (i = 0; i < GET_RTX_LENGTH (code); i++)
1726     switch (*format_ptr++)
1727       {
1728       case 'e':
1729 	if (XEXP (orig, i) != NULL)
1730 	  {
1731 	    rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
1732 						    max_depth - 1);
1733 	    if (!result)
1734 	      return NULL;
1735 	    if (copy)
1736 	      XEXP (copy, i) = result;
1737 	  }
1738 	break;
1739 
1740       case 'E':
1741       case 'V':
1742 	if (XVEC (orig, i) != NULL)
1743 	  {
1744 	    if (copy)
1745 	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
1746 	    for (j = 0; j < XVECLEN (orig, i); j++)
1747 	      {
1748 		rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
1749 							evd, max_depth - 1);
1750 		if (!result)
1751 		  return NULL;
1752 		if (copy)
1753 		  XVECEXP (copy, i, j) = result;
1754 	      }
1755 	  }
1756 	break;
1757 
1758       case 't':
1759       case 'w':
1760       case 'i':
1761       case 's':
1762       case 'S':
1763       case 'T':
1764       case 'u':
1765       case 'B':
1766       case '0':
1767 	/* These are left unchanged.  */
1768 	break;
1769 
1770       default:
1771 	gcc_unreachable ();
1772       }
1773 
1774   if (evd->dummy)
1775     return orig;
1776 
1777   mode = GET_MODE (copy);
1778   /* If an operand has been simplified into a CONST_INT, which has no
1779      mode, and the operand's mode isn't derivable from the whole rtx's
1780      mode, try simplify_*_operation first with the mode taken from the
1781      original operand; as a fallback, wrap the CONST_INT in gen_rtx_CONST.  */
1782   scopy = copy;
1783   switch (GET_RTX_CLASS (code))
1784     {
1785     case RTX_UNARY:
1786       if (CONST_INT_P (XEXP (copy, 0))
1787 	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1788 	{
1789 	  scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
1790 					    GET_MODE (XEXP (orig, 0)));
1791 	  if (scopy)
1792 	    return scopy;
1793 	}
1794       break;
1795     case RTX_COMM_ARITH:
1796     case RTX_BIN_ARITH:
1797       /* These expressions can derive operand modes from the whole rtx's mode.  */
1798       break;
1799     case RTX_TERNARY:
1800     case RTX_BITFIELD_OPS:
1801       if (CONST_INT_P (XEXP (copy, 0))
1802 	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1803 	{
1804 	  scopy = simplify_ternary_operation (code, mode,
1805 					      GET_MODE (XEXP (orig, 0)),
1806 					      XEXP (copy, 0), XEXP (copy, 1),
1807 					      XEXP (copy, 2));
1808 	  if (scopy)
1809 	    return scopy;
1810 	}
1811       break;
1812     case RTX_COMPARE:
1813     case RTX_COMM_COMPARE:
1814       if (CONST_INT_P (XEXP (copy, 0))
1815 	  && GET_MODE (XEXP (copy, 1)) == VOIDmode
1816 	  && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1817 	      || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1818 	{
1819 	  scopy = simplify_relational_operation (code, mode,
1820 						 (GET_MODE (XEXP (orig, 0))
1821 						  != VOIDmode)
1822 						 ? GET_MODE (XEXP (orig, 0))
1823 						 : GET_MODE (XEXP (orig, 1)),
1824 						 XEXP (copy, 0),
1825 						 XEXP (copy, 1));
1826 	  if (scopy)
1827 	    return scopy;
1828 	}
1829       break;
1830     default:
1831       break;
1832     }
1833   scopy = simplify_rtx (copy);
1834   if (scopy)
1835     return scopy;
1836   return copy;
1837 }
1838 
1839 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1840    with VALUE expressions.  This way, it becomes independent of changes
1841    to registers and memory.
1842    X isn't actually modified; if modifications are needed, new rtl is
1843    allocated.  However, the return value can share rtl with X.
1844    If X is within a MEM, MEMMODE must be the mode of the MEM.  */
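/* For example, (plus:SI (reg:SI 1) (const_int 16)) would typically be
   returned as (plus:SI V (const_int 16)), where V is the VALUE rtx
   recorded for register 1; the exact uid of V depends on the pass.  */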
1845 
1846 rtx
1847 cselib_subst_to_values (rtx x, machine_mode memmode)
1848 {
1849   enum rtx_code code = GET_CODE (x);
1850   const char *fmt = GET_RTX_FORMAT (code);
1851   cselib_val *e;
1852   struct elt_list *l;
1853   rtx copy = x;
1854   int i;
1855 
1856   switch (code)
1857     {
1858     case REG:
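      /* The first entry of REG_VALUES, if present, is reserved for the
	 value used to set the register and may be NULL; skip it.  */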
1859       l = REG_VALUES (REGNO (x));
1860       if (l && l->elt == NULL)
1861 	l = l->next;
1862       for (; l; l = l->next)
1863 	if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
1864 	  return l->elt->val_rtx;
1865 
1866       gcc_unreachable ();
1867 
1868     case MEM:
1869       e = cselib_lookup_mem (x, 0);
1870       /* This used to happen for autoincrements, but we deal with them
1871 	 properly now.  Remove the if stmt for the next release.  */
1872       if (! e)
1873 	{
1874 	  /* Assign a value that doesn't match any other.  */
1875 	  e = new_cselib_val (next_uid, GET_MODE (x), x);
1876 	}
1877       return e->val_rtx;
1878 
1879     case ENTRY_VALUE:
1880       e = cselib_lookup (x, GET_MODE (x), 0, memmode);
1881       if (! e)
1882 	break;
1883       return e->val_rtx;
1884 
1885     CASE_CONST_ANY:
1886       return x;
1887 
1888     case PRE_DEC:
1889     case PRE_INC:
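      /* A pre-increment or pre-decrement address is equivalent to the
	 base address plus or minus the size of the access made by the
	 enclosing MEM.  */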
1890       gcc_assert (memmode != VOIDmode);
1891       i = GET_MODE_SIZE (memmode);
1892       if (code == PRE_DEC)
1893 	i = -i;
1894       return cselib_subst_to_values (plus_constant (GET_MODE (x),
1895 						    XEXP (x, 0), i),
1896 				     memmode);
1897 
1898     case PRE_MODIFY:
1899       gcc_assert (memmode != VOIDmode);
1900       return cselib_subst_to_values (XEXP (x, 1), memmode);
1901 
1902     case POST_DEC:
1903     case POST_INC:
1904     case POST_MODIFY:
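      /* For post-modified addresses, the address the enclosing MEM
	 actually uses is the unmodified base, XEXP (x, 0).  */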
1905       gcc_assert (memmode != VOIDmode);
1906       return cselib_subst_to_values (XEXP (x, 0), memmode);
1907 
1908     default:
1909       break;
1910     }
1911 
1912   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1913     {
1914       if (fmt[i] == 'e')
1915 	{
1916 	  rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
1917 
1918 	  if (t != XEXP (x, i))
1919 	    {
1920 	      if (x == copy)
1921 		copy = shallow_copy_rtx (x);
1922 	      XEXP (copy, i) = t;
1923 	    }
1924 	}
1925       else if (fmt[i] == 'E')
1926 	{
1927 	  int j;
1928 
1929 	  for (j = 0; j < XVECLEN (x, i); j++)
1930 	    {
1931 	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
1932 
1933 	      if (t != XVECEXP (x, i, j))
1934 		{
1935 		  if (XVEC (x, i) == XVEC (copy, i))
1936 		    {
1937 		      if (x == copy)
1938 			copy = shallow_copy_rtx (x);
1939 		      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
1940 		    }
1941 		  XVECEXP (copy, i, j) = t;
1942 		}
1943 	    }
1944 	}
1945     }
1946 
1947   return copy;
1948 }
1949 
1950 /* Wrapper for cselib_subst_to_values that indicates X is in INSN.  */
1951 
1952 rtx
1953 cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
1954 {
1955   rtx ret;
1956   gcc_assert (!cselib_current_insn);
1957   cselib_current_insn = insn;
1958   ret = cselib_subst_to_values (x, memmode);
1959   cselib_current_insn = NULL;
1960   return ret;
1961 }
1962 
1963 /* Look up the rtl expression X in our tables and return the value it
1964    has.  If CREATE is zero, we return NULL if we don't know the value.
1965    Otherwise, we create a new one if possible, using mode MODE if X
1966    doesn't have a mode (i.e. because it's a constant).  When X is part
1967    of an address, MEMMODE should be the mode of the enclosing MEM if
1968    we're tracking autoinc expressions.  */
1969 
1970 static cselib_val *
1971 cselib_lookup_1 (rtx x, machine_mode mode,
1972 		 int create, machine_mode memmode)
1973 {
1974   cselib_val **slot;
1975   cselib_val *e;
1976   unsigned int hashval;
1977 
1978   if (GET_MODE (x) != VOIDmode)
1979     mode = GET_MODE (x);
1980 
1981   if (GET_CODE (x) == VALUE)
1982     return CSELIB_VAL_PTR (x);
1983 
1984   if (REG_P (x))
1985     {
1986       struct elt_list *l;
1987       unsigned int i = REGNO (x);
1988 
1989       l = REG_VALUES (i);
1990       if (l && l->elt == NULL)
1991 	l = l->next;
1992       for (; l; l = l->next)
1993 	if (mode == GET_MODE (l->elt->val_rtx))
1994 	  {
1995 	    promote_debug_loc (l->elt->locs);
1996 	    return l->elt;
1997 	  }
1998 
1999       if (! create)
2000 	return 0;
2001 
2002       if (i < FIRST_PSEUDO_REGISTER)
2003 	{
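	  /* Track the widest value seen in hard registers, so that
	     cselib_invalidate_regno knows how far below a given regno
	     it must scan for overlapping values.  */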
2004 	  unsigned int n = hard_regno_nregs[i][mode];
2005 
2006 	  if (n > max_value_regs)
2007 	    max_value_regs = n;
2008 	}
2009 
2010       e = new_cselib_val (next_uid, GET_MODE (x), x);
2011       new_elt_loc_list (e, x);
2012       if (REG_VALUES (i) == 0)
2013 	{
2014 	  /* Maintain the invariant that the first entry of
2015 	     REG_VALUES, if present, must be the value used to set the
2016 	     register, or NULL.  */
2017 	  used_regs[n_used_regs++] = i;
2018 	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
2019 	}
2020       else if (cselib_preserve_constants
2021 	       && GET_MODE_CLASS (mode) == MODE_INT)
2022 	{
2023 	  /* During var-tracking, try harder to find equivalences
2024 	     for SUBREGs.  If a setter sets, say, a DImode register
2025 	     and that register is used only in SImode, add a lowpart
2026 	     subreg location.  */
2027 	  struct elt_list *lwider = NULL;
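	  /* LWIDER will be the narrowest integer-mode value recorded
	     for this register that is still wider than MODE and has at
	     least one non-register location.  */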
2028 	  l = REG_VALUES (i);
2029 	  if (l && l->elt == NULL)
2030 	    l = l->next;
2031 	  for (; l; l = l->next)
2032 	    if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
2033 		&& GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
2034 		   > GET_MODE_SIZE (mode)
2035 		&& (lwider == NULL
2036 		    || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
2037 		       < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
2038 	      {
2039 		struct elt_loc_list *el;
2040 		if (i < FIRST_PSEUDO_REGISTER
2041 		    && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
2042 		  continue;
2043 		for (el = l->elt->locs; el; el = el->next)
2044 		  if (!REG_P (el->loc))
2045 		    break;
2046 		if (el)
2047 		  lwider = l;
2048 	      }
2049 	  if (lwider)
2050 	    {
2051 	      rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
2052 					GET_MODE (lwider->elt->val_rtx));
2053 	      if (sub)
2054 		new_elt_loc_list (e, sub);
2055 	    }
2056 	}
2057       REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
2058       slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
2059       *slot = e;
2060       return e;
2061     }
2062 
2063   if (MEM_P (x))
2064     return cselib_lookup_mem (x, create);
2065 
2066   hashval = cselib_hash_rtx (x, create, memmode);
2067   /* Can't even create if hashing is not possible.  */
2068   if (! hashval)
2069     return 0;
2070 
2071   slot = cselib_find_slot (mode, x, hashval,
2072 			   create ? INSERT : NO_INSERT, memmode);
2073   if (slot == 0)
2074     return 0;
2075 
2076   e = (cselib_val *) *slot;
2077   if (e)
2078     return e;
2079 
2080   e = new_cselib_val (hashval, mode, x);
2081 
2082   /* We have to fill the slot before calling cselib_subst_to_values:
2083      the hash table is inconsistent until we do so, and
2084      cselib_subst_to_values will need to do lookups.  */
2085   *slot = e;
2086   new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
2087   return e;
2088 }
2089 
2090 /* Wrapper for cselib_lookup that indicates X is in INSN.  */
2091 
2092 cselib_val *
2093 cselib_lookup_from_insn (rtx x, machine_mode mode,
2094 			 int create, machine_mode memmode, rtx_insn *insn)
2095 {
2096   cselib_val *ret;
2097 
2098   gcc_assert (!cselib_current_insn);
2099   cselib_current_insn = insn;
2100 
2101   ret = cselib_lookup (x, mode, create, memmode);
2102 
2103   cselib_current_insn = NULL;
2104 
2105   return ret;
2106 }
2107 
2108 /* Wrapper for cselib_lookup_1 that logs the lookup result and
2109    maintains invariants related to debug insns.  */
2110 
2111 cselib_val *
2112 cselib_lookup (rtx x, machine_mode mode,
2113 	       int create, machine_mode memmode)
2114 {
2115   cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2116 
2117   /* ??? Should we return NULL if we're not to create an entry, the
2118      found loc is a debug loc and cselib_current_insn is not DEBUG?
2119      If so, we should also avoid converting val to non-DEBUG; probably
2120      easiest done by setting cselib_current_insn to NULL before the call
2121      above.  */
2122 
2123   if (dump_file && (dump_flags & TDF_CSELIB))
2124     {
2125       fputs ("cselib lookup ", dump_file);
2126       print_inline_rtx (dump_file, x, 2);
2127       fprintf (dump_file, " => %u:%u\n",
2128 	       ret ? ret->uid : 0,
2129 	       ret ? ret->hash : 0);
2130     }
2131 
2132   return ret;
2133 }
2134 
2135 /* Invalidate any entries in reg_values that overlap REGNO.  This is called
2136    if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
2137    is used to determine how many hard registers are being changed.  If MODE
2138    is VOIDmode, then only REGNO is being changed; this is used when
2139    invalidating call clobbered registers across a call.  */
2140 
2141 static void
2142 cselib_invalidate_regno (unsigned int regno, machine_mode mode)
2143 {
2144   unsigned int endregno;
2145   unsigned int i;
2146 
2147   /* If we see pseudos after reload, something is _wrong_.  */
2148   gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2149 	      || reg_renumber[regno] < 0);
2150 
2151   /* Determine the range of registers that must be invalidated.  For
2152      pseudos, only REGNO is affected.  For hard regs, we must take MODE
2153      into account, and we must also invalidate lower register numbers
2154      if they contain values that overlap REGNO.  */
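  /* For example, a DImode value recorded for hard regs 10 and 11 must
     also be invalidated by a store to reg 11 alone; scanning back by
     max_value_regs catches such overlaps.  */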
2155   if (regno < FIRST_PSEUDO_REGISTER)
2156     {
2157       gcc_assert (mode != VOIDmode);
2158 
2159       if (regno < max_value_regs)
2160 	i = 0;
2161       else
2162 	i = regno - max_value_regs;
2163 
2164       endregno = end_hard_regno (mode, regno);
2165     }
2166   else
2167     {
2168       i = regno;
2169       endregno = regno + 1;
2170     }
2171 
2172   for (; i < endregno; i++)
2173     {
2174       struct elt_list **l = &REG_VALUES (i);
2175 
2176       /* Go through all known values for this reg; if it overlaps the range
2177 	 we're invalidating, remove the value.  */
2178       while (*l)
2179 	{
2180 	  cselib_val *v = (*l)->elt;
2181 	  bool had_locs;
2182 	  rtx_insn *setting_insn;
2183 	  struct elt_loc_list **p;
2184 	  unsigned int this_last = i;
2185 
2186 	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
2187 	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
2188 
2189 	  if (this_last < regno || v == NULL
2190 	      || (v == cfa_base_preserved_val
2191 		  && i == cfa_base_preserved_regno))
2192 	    {
2193 	      l = &(*l)->next;
2194 	      continue;
2195 	    }
2196 
2197 	  /* We have an overlap.  */
2198 	  if (*l == REG_VALUES (i))
2199 	    {
2200 	      /* Maintain the invariant that the first entry of
2201 		 REG_VALUES, if present, must be the value used to set
2202 		 the register, or NULL.  This is also nice because
2203 		 then we won't push the same regno onto used_regs
2204 		 multiple times.  */
2205 	      (*l)->elt = NULL;
2206 	      l = &(*l)->next;
2207 	    }
2208 	  else
2209 	    unchain_one_elt_list (l);
2210 
2211 	  v = canonical_cselib_val (v);
2212 
2213 	  had_locs = v->locs != NULL;
2214 	  setting_insn = v->locs ? v->locs->setting_insn : NULL;
2215 
2216 	  /* Now, we clear the mapping from value to reg.  It must exist, so
2217 	     this code will crash intentionally if it doesn't.  */
2218 	  for (p = &v->locs; ; p = &(*p)->next)
2219 	    {
2220 	      rtx x = (*p)->loc;
2221 
2222 	      if (REG_P (x) && REGNO (x) == i)
2223 		{
2224 		  unchain_one_elt_loc_list (p);
2225 		  break;
2226 		}
2227 	    }
2228 
2229 	  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2230 	    {
2231 	      if (setting_insn && DEBUG_INSN_P (setting_insn))
2232 		n_useless_debug_values++;
2233 	      else
2234 		n_useless_values++;
2235 	    }
2236 	}
2237     }
2238 }
2239 
2240 /* Invalidate any locations in the table which are changed because of a
2241    store to MEM_RTX.  If this is called because of a non-const call
2242    instruction, MEM_RTX is callmem, i.e. (mem:BLK (scratch)).  */
2243 
2244 static void
2245 cselib_invalidate_mem (rtx mem_rtx)
2246 {
2247   cselib_val **vp, *v, *next;
2248   int num_mems = 0;
2249   rtx mem_addr;
2250 
2251   mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2252   mem_rtx = canon_rtx (mem_rtx);
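  /* Walk every value that has a MEM location, dropping each location
     that may alias MEM_RTX (or that exceeds the memory-location limit);
     values that keep at least one MEM location stay on the
     next_containing_mem chain, all others are unlinked from it.  */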
2253 
2254   vp = &first_containing_mem;
2255   for (v = *vp; v != &dummy_val; v = next)
2256     {
2257       bool has_mem = false;
2258       struct elt_loc_list **p = &v->locs;
2259       bool had_locs = v->locs != NULL;
2260       rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
2261 
2262       while (*p)
2263 	{
2264 	  rtx x = (*p)->loc;
2265 	  cselib_val *addr;
2266 	  struct elt_list **mem_chain;
2267 
2268 	  /* MEMs may occur in locations only at the top level; below
2269 	     that every MEM or REG is substituted by its VALUE.  */
2270 	  if (!MEM_P (x))
2271 	    {
2272 	      p = &(*p)->next;
2273 	      continue;
2274 	    }
2275 	  if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
2276 	      && ! canon_anti_dependence (x, false, mem_rtx,
2277 					  GET_MODE (mem_rtx), mem_addr))
2278 	    {
2279 	      has_mem = true;
2280 	      num_mems++;
2281 	      p = &(*p)->next;
2282 	      continue;
2283 	    }
2284 
2285 	  /* This one overlaps.  */
2286 	  /* We must have a mapping from this MEM's address to the
2287 	     value (E).  Remove that, too.  */
2288 	  addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2289 	  addr = canonical_cselib_val (addr);
2290 	  gcc_checking_assert (v == canonical_cselib_val (v));
2291 	  mem_chain = &addr->addr_list;
2292 	  for (;;)
2293 	    {
2294 	      cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2295 
2296 	      if (canon == v)
2297 		{
2298 		  unchain_one_elt_list (mem_chain);
2299 		  break;
2300 		}
2301 
2302 	      /* Record canonicalized elt.  */
2303 	      (*mem_chain)->elt = canon;
2304 
2305 	      mem_chain = &(*mem_chain)->next;
2306 	    }
2307 
2308 	  unchain_one_elt_loc_list (p);
2309 	}
2310 
2311       if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2312 	{
2313 	  if (setting_insn && DEBUG_INSN_P (setting_insn))
2314 	    n_useless_debug_values++;
2315 	  else
2316 	    n_useless_values++;
2317 	}
2318 
2319       next = v->next_containing_mem;
2320       if (has_mem)
2321 	{
2322 	  *vp = v;
2323 	  vp = &(*vp)->next_containing_mem;
2324 	}
2325       else
2326 	v->next_containing_mem = NULL;
2327     }
2328   *vp = &dummy_val;
2329 }
2330 
2331 /* Invalidate DEST, which is being assigned to or clobbered.  */
2332 
2333 void
2334 cselib_invalidate_rtx (rtx dest)
2335 {
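  /* Look through wrappers that modify only part of DEST; the whole
     underlying register or memory location is what gets invalidated.  */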
2336   while (GET_CODE (dest) == SUBREG
2337 	 || GET_CODE (dest) == ZERO_EXTRACT
2338 	 || GET_CODE (dest) == STRICT_LOW_PART)
2339     dest = XEXP (dest, 0);
2340 
2341   if (REG_P (dest))
2342     cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
2343   else if (MEM_P (dest))
2344     cselib_invalidate_mem (dest);
2345 }
2346 
2347 /* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */
2348 
2349 static void
2350 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
2351 				   void *data ATTRIBUTE_UNUSED)
2352 {
2353   cselib_invalidate_rtx (dest);
2354 }
2355 
2356 /* Record the result of a SET instruction.  DEST is being set; the source
2357    contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
2358    describes its address.  */
2359 
2360 static void
2361 cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
2362 {
2363   if (src_elt == 0 || side_effects_p (dest))
2364     return;
2365 
2366   if (REG_P (dest))
2367     {
2368       unsigned int dreg = REGNO (dest);
2369       if (dreg < FIRST_PSEUDO_REGISTER)
2370 	{
2371 	  unsigned int n = REG_NREGS (dest);
2372 
2373 	  if (n > max_value_regs)
2374 	    max_value_regs = n;
2375 	}
2376 
2377       if (REG_VALUES (dreg) == 0)
2378 	{
2379 	  used_regs[n_used_regs++] = dreg;
2380 	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2381 	}
2382       else
2383 	{
2384 	  /* The register should have been invalidated.  */
2385 	  gcc_assert (REG_VALUES (dreg)->elt == 0);
2386 	  REG_VALUES (dreg)->elt = src_elt;
2387 	}
2388 
2389       if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2390 	n_useless_values--;
2391       new_elt_loc_list (src_elt, dest);
2392     }
2393   else if (MEM_P (dest) && dest_addr_elt != 0
2394 	   && cselib_record_memory)
2395     {
2396       if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2397 	n_useless_values--;
2398       add_mem_for_addr (dest_addr_elt, src_elt, dest);
2399     }
2400 }
2401 
2402 /* Make ELT and X's VALUE equivalent to each other at INSN.  */
2403 
2404 void
2405 cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
2406 {
2407   cselib_val *nelt;
2408   rtx_insn *save_cselib_current_insn = cselib_current_insn;
2409 
2410   gcc_checking_assert (elt);
2411   gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2412   gcc_checking_assert (!side_effects_p (x));
2413 
2414   cselib_current_insn = insn;
2415 
2416   nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2417 
2418   if (nelt != elt)
2419     {
2420       cselib_any_perm_equivs = true;
2421 
2422       if (!PRESERVED_VALUE_P (nelt->val_rtx))
2423 	cselib_preserve_value (nelt);
2424 
2425       new_elt_loc_list (nelt, elt->val_rtx);
2426     }
2427 
2428   cselib_current_insn = save_cselib_current_insn;
2429 }
2430 
2431 /* Return TRUE if any permanent equivalences have been recorded since
2432    the table was last initialized.  */
2433 bool
2434 cselib_have_permanent_equivalences (void)
2435 {
2436   return cselib_any_perm_equivs;
2437 }
2438 
2439 /* There is no good way to determine how many elements there can be
2440    in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
2441 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2442 
2443 struct cselib_record_autoinc_data
2444 {
2445   struct cselib_set *sets;
2446   int n_sets;
2447 };
2448 
2449 /* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
2450    autoinc RTXs: the value stored in DEST is SRC, plus SRCOFF if non-NULL.  */
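/* E.g. for (mem:SI (post_inc:SI (reg:SI 1))) with a 4-byte access,
   DEST and SRC are both (reg:SI 1) and SRCOFF is (const_int 4), so the
   recorded set is the equivalent of reg 1 = reg 1 + 4.  */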
2451 
2452 static int
2453 cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
2454 			  rtx dest, rtx src, rtx srcoff, void *arg)
2455 {
2456   struct cselib_record_autoinc_data *data;
2457   data = (struct cselib_record_autoinc_data *)arg;
2458 
2459   data->sets[data->n_sets].dest = dest;
2460 
2461   if (srcoff)
2462     data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
2463   else
2464     data->sets[data->n_sets].src = src;
2465 
2466   data->n_sets++;
2467 
2468   return 0;
2469 }
2470 
2471 /* Record the effects of any sets and autoincs in INSN.  */
2472 static void
2473 cselib_record_sets (rtx_insn *insn)
2474 {
2475   int n_sets = 0;
2476   int i;
2477   struct cselib_set sets[MAX_SETS];
2478   rtx body = PATTERN (insn);
2479   rtx cond = 0;
2480   int n_sets_before_autoinc;
2481   struct cselib_record_autoinc_data data;
2482 
2484   if (GET_CODE (body) == COND_EXEC)
2485     {
2486       cond = COND_EXEC_TEST (body);
2487       body = COND_EXEC_CODE (body);
2488     }
2489 
2490   /* Find all sets.  */
2491   if (GET_CODE (body) == SET)
2492     {
2493       sets[0].src = SET_SRC (body);
2494       sets[0].dest = SET_DEST (body);
2495       n_sets = 1;
2496     }
2497   else if (GET_CODE (body) == PARALLEL)
2498     {
2499       /* Look through the PARALLEL and record the values being
2500 	 set, if possible.  Also handle any CLOBBERs.  */
2501       for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
2502 	{
2503 	  rtx x = XVECEXP (body, 0, i);
2504 
2505 	  if (GET_CODE (x) == SET)
2506 	    {
2507 	      sets[n_sets].src = SET_SRC (x);
2508 	      sets[n_sets].dest = SET_DEST (x);
2509 	      n_sets++;
2510 	    }
2511 	}
2512     }
2513 
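  /* A single load from read-only memory can be treated as a load of
     the constant in its REG_EQUAL/REG_EQUIV note, which gives better
     equivalences when memory isn't being recorded.  */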
2514   if (n_sets == 1
2515       && MEM_P (sets[0].src)
2516       && !cselib_record_memory
2517       && MEM_READONLY_P (sets[0].src))
2518     {
2519       rtx note = find_reg_equal_equiv_note (insn);
2520 
2521       if (note && CONSTANT_P (XEXP (note, 0)))
2522 	sets[0].src = XEXP (note, 0);
2523     }
2524 
2525   data.sets = sets;
2526   data.n_sets = n_sets_before_autoinc = n_sets;
2527   for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
2528   n_sets = data.n_sets;
2529 
2530   /* Look up the values that are read.  Do this before invalidating the
2531      locations that are written.  */
2532   for (i = 0; i < n_sets; i++)
2533     {
2534       rtx dest = sets[i].dest;
2535 
2536       /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2537          the low part after invalidating any knowledge about larger modes.  */
2538       if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2539 	sets[i].dest = dest = XEXP (dest, 0);
2540 
2541       /* We don't know how to record anything but REG or MEM.  */
2542       if (REG_P (dest)
2543 	  || (MEM_P (dest) && cselib_record_memory))
2544         {
2545 	  rtx src = sets[i].src;
2546 	  if (cond)
2547 	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
2548 	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
2549 	  if (MEM_P (dest))
2550 	    {
2551 	      machine_mode address_mode = get_address_mode (dest);
2552 
2553 	      sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
2554 						     address_mode, 1,
2555 						     GET_MODE (dest));
2556 	    }
2557 	  else
2558 	    sets[i].dest_addr_elt = 0;
2559 	}
2560     }
2561 
2562   if (cselib_record_sets_hook)
2563     cselib_record_sets_hook (insn, sets, n_sets);
2564 
2565   /* Invalidate all locations written by this insn.  Note that the elts we
2566      looked up in the previous loop aren't affected, just some of their
2567      locations may go away.  */
2568   note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
2569 
2570   for (i = n_sets_before_autoinc; i < n_sets; i++)
2571     cselib_invalidate_rtx (sets[i].dest);
2572 
2573   /* If this is an asm, look for duplicate sets.  This can happen when the
2574      user uses the same value as an output multiple times.  This is valid
2575      if the outputs are not actually used thereafter.  Treat this case as
2576      if the value isn't actually set.  We do this by smashing the destination
2577      to pc_rtx, so that we won't record the value later.  */
2578   if (n_sets >= 2 && asm_noperands (body) >= 0)
2579     {
2580       for (i = 0; i < n_sets; i++)
2581 	{
2582 	  rtx dest = sets[i].dest;
2583 	  if (REG_P (dest) || MEM_P (dest))
2584 	    {
2585 	      int j;
2586 	      for (j = i + 1; j < n_sets; j++)
2587 		if (rtx_equal_p (dest, sets[j].dest))
2588 		  {
2589 		    sets[i].dest = pc_rtx;
2590 		    sets[j].dest = pc_rtx;
2591 		  }
2592 	    }
2593 	}
2594     }
2595 
2596   /* Now enter the equivalences in our tables.  */
2597   for (i = 0; i < n_sets; i++)
2598     {
2599       rtx dest = sets[i].dest;
2600       if (REG_P (dest)
2601 	  || (MEM_P (dest) && cselib_record_memory))
2602 	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
2603     }
2604 }
2605 
2606 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */
2607 
2608 bool
2609 fp_setter_insn (rtx_insn *insn)
2610 {
2611   rtx expr, pat = NULL_RTX;
2612 
2613   if (!RTX_FRAME_RELATED_P (insn))
2614     return false;
2615 
2616   expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
2617   if (expr)
2618     pat = XEXP (expr, 0);
2619   if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
2620     return false;
2621 
2622   /* Don't return true for frame pointer restores in the epilogue.  */
2623   if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
2624     return false;
2625   return true;
2626 }
2627 
2628 /* Record the effects of INSN.  */
2629 
2630 void
2631 cselib_process_insn (rtx_insn *insn)
2632 {
2633   int i;
2634   rtx x;
2635 
2636   cselib_current_insn = insn;
2637 
2638   /* Forget everything at a CODE_LABEL or a setjmp.  */
2639   if ((LABEL_P (insn)
2640        || (CALL_P (insn)
2641 	   && find_reg_note (insn, REG_SETJMP, NULL)))
2642       && !cselib_preserve_constants)
2643     {
2644       cselib_reset_table (next_uid);
2645       cselib_current_insn = NULL;
2646       return;
2647     }
2648 
2649   if (! INSN_P (insn))
2650     {
2651       cselib_current_insn = NULL;
2652       return;
2653     }
2654 
2655   /* If this is a call instruction, forget anything stored in a
2656      call clobbered register, or, if this is not a const call, in
2657      memory.  */
2658   if (CALL_P (insn))
2659     {
2660       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2661 	if (call_used_regs[i]
2662 	    || (REG_VALUES (i) && REG_VALUES (i)->elt
2663 		&& HARD_REGNO_CALL_PART_CLOBBERED (i,
2664 		      GET_MODE (REG_VALUES (i)->elt->val_rtx))))
2665 	  cselib_invalidate_regno (i, reg_raw_mode[i]);
2666 
2667       /* Since it is not clear how cselib is going to be used, be
2668 	 conservative here and treat looping pure or const functions
2669 	 as if they were regular functions.  */
2670       if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
2671 	  || !(RTL_CONST_OR_PURE_CALL_P (insn)))
2672 	cselib_invalidate_mem (callmem);
2673       else
2674 	/* For const/pure calls, invalidate any argument slots because
2675 	   they are owned by the callee.  */
2676 	for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2677 	  if (GET_CODE (XEXP (x, 0)) == USE
2678 	      && MEM_P (XEXP (XEXP (x, 0), 0)))
2679 	    cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
2680     }
2681 
2682   cselib_record_sets (insn);
2683 
2684   /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
2685      after we have processed the insn.  */
2686   if (CALL_P (insn))
2687     {
2688       for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2689 	if (GET_CODE (XEXP (x, 0)) == CLOBBER)
2690 	  cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
2691       /* Flush everything on setjmp.  */
2692       if (cselib_preserve_constants
2693 	  && find_reg_note (insn, REG_SETJMP, NULL))
2694 	{
2695 	  cselib_preserve_only_values ();
2696 	  cselib_reset_table (next_uid);
2697 	}
2698     }
2699 
2700   /* If this insn sets the hard frame pointer and frame_pointer_needed
2701      is true, invalidate stack_pointer_rtx so that sp-based and
2702      {,h}fp-based VALUEs remain distinct.  */
2703   if (reload_completed
2704       && frame_pointer_needed
2705       && fp_setter_insn (insn))
2706     cselib_invalidate_rtx (stack_pointer_rtx);
2707 
2708   cselib_current_insn = NULL;
2709 
2710   if (n_useless_values > MAX_USELESS_VALUES
2711       /* remove_useless_values is linear in the hash table size.  Avoid
2712          quadratic behavior for very large hashtables with very few
2713 	 useless elements.  */
2714       && ((unsigned int)n_useless_values
2715 	  > (cselib_hash_table->elements () - n_debug_values) / 4))
2716     remove_useless_values ();
2717 }
2718 
2719 /* Initialize cselib for one pass.  The caller must also call
2720    init_alias_analysis.  */
2721 
2722 void
2723 cselib_init (int record_what)
2724 {
2725   cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
2726   cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
2727   cselib_any_perm_equivs = false;
2728 
2729   /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
2730      see canon_true_dependence.  This is only created once.  */
2731   if (! callmem)
2732     callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2733 
2734   cselib_nregs = max_reg_num ();
2735 
2736   /* We preserve reg_values across calls, to avoid expensively clearing
2737      the whole thing.  Reallocate it, however, if it happens to be too large.  */
2738   if (!reg_values || reg_values_size < cselib_nregs
2739       || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
2740     {
2741       free (reg_values);
2742       /* Leave some space for newly emitted instructions, so we don't
2743 	 end up reallocating between passes.  */
2744       reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
2745       reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
2746     }
2747   used_regs = XNEWVEC (unsigned int, cselib_nregs);
2748   n_used_regs = 0;
2749   cselib_hash_table = new hash_table<cselib_hasher> (31);
2750   if (cselib_preserve_constants)
2751     cselib_preserved_hash_table = new hash_table<cselib_hasher> (31);
2752   next_uid = 1;
2753 }
2754 
2755 /* Called when the current user is done with cselib.  */
2756 
2757 void
2758 cselib_finish (void)
2759 {
2760   bool preserved = cselib_preserve_constants;
2761   cselib_discard_hook = NULL;
2762   cselib_preserve_constants = false;
2763   cselib_any_perm_equivs = false;
2764   cfa_base_preserved_val = NULL;
2765   cfa_base_preserved_regno = INVALID_REGNUM;
2766   elt_list_pool.release ();
2767   elt_loc_list_pool.release ();
2768   cselib_val_pool.release ();
2769   value_pool.release ();
2770   cselib_clear_table ();
2771   delete cselib_hash_table;
2772   cselib_hash_table = NULL;
2773   if (preserved)
2774     delete cselib_preserved_hash_table;
2775   cselib_preserved_hash_table = NULL;
2776   free (used_regs);
2777   used_regs = 0;
2778   n_useless_values = 0;
2779   n_useless_debug_values = 0;
2780   n_debug_values = 0;
2781   next_uid = 0;
2782 }
2783 
2784 /* Dump the cselib_val pointed to by X to FILE *OUT; returns 1 to continue hash table traversal.  */
2785 
2786 int
2787 dump_cselib_val (cselib_val **x, FILE *out)
2788 {
2789   cselib_val *v = *x;
2790   bool need_lf = true;
2791 
2792   print_inline_rtx (out, v->val_rtx, 0);
2793 
2794   if (v->locs)
2795     {
2796       struct elt_loc_list *l = v->locs;
2797       if (need_lf)
2798 	{
2799 	  fputc ('\n', out);
2800 	  need_lf = false;
2801 	}
2802       fputs (" locs:", out);
2803       do
2804 	{
2805 	  if (l->setting_insn)
2806 	    fprintf (out, "\n  from insn %i ",
2807 		     INSN_UID (l->setting_insn));
2808 	  else
2809 	    fprintf (out, "\n   ");
2810 	  print_inline_rtx (out, l->loc, 4);
2811 	}
2812       while ((l = l->next));
2813       fputc ('\n', out);
2814     }
2815   else
2816     {
2817       fputs (" no locs", out);
2818       need_lf = true;
2819     }
2820 
2821   if (v->addr_list)
2822     {
2823       struct elt_list *e = v->addr_list;
2824       if (need_lf)
2825 	{
2826 	  fputc ('\n', out);
2827 	  need_lf = false;
2828 	}
2829       fputs (" addr list:", out);
2830       do
2831 	{
2832 	  fputs ("\n  ", out);
2833 	  print_inline_rtx (out, e->elt->val_rtx, 2);
2834 	}
2835       while ((e = e->next));
2836       fputc ('\n', out);
2837     }
2838   else
2839     {
2840       fputs (" no addrs", out);
2841       need_lf = true;
2842     }
2843 
2844   if (v->next_containing_mem == &dummy_val)
2845     fputs (" last mem\n", out);
2846   else if (v->next_containing_mem)
2847     {
2848       fputs (" next mem ", out);
2849       print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
2850       fputc ('\n', out);
2851     }
2852   else if (need_lf)
2853     fputc ('\n', out);
2854 
2855   return 1;
2856 }
2857 
2858 /* Dump to OUT everything in the CSELIB table.  */
2859 
2860 void
2861 dump_cselib_table (FILE *out)
2862 {
2863   fprintf (out, "cselib hash table:\n");
2864   cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
2865   fprintf (out, "cselib preserved hash table:\n");
2866   cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
2867   if (first_containing_mem != &dummy_val)
2868     {
2869       fputs ("first mem ", out);
2870       print_inline_rtx (out, first_containing_mem->val_rtx, 2);
2871       fputc ('\n', out);
2872     }
2873   fprintf (out, "next uid %i\n", next_uid);
2874 }
2875 
2876 #include "gt-cselib.h"
2877