xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/reload1.c (revision 82d56013d7b633d116a93943de88e08335357a7c)
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2    Copyright (C) 1987-2019 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "predict.h"
28 #include "df.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "optabs.h"
32 #include "regs.h"
33 #include "ira.h"
34 #include "recog.h"
35 
36 #include "rtl-error.h"
37 #include "expr.h"
38 #include "addresses.h"
39 #include "cfgrtl.h"
40 #include "cfgbuild.h"
41 #include "reload.h"
42 #include "except.h"
43 #include "dumpfile.h"
44 #include "rtl-iter.h"
45 
46 /* This file contains the reload pass of the compiler, which is
47    run after register allocation has been done.  It checks that
48    each insn is valid (operands required to be in registers really
49    are in registers of the proper class) and fixes up invalid ones
50    by copying values temporarily into registers for the insns
51    that need them.
52 
53    The results of register allocation are described by the vector
54    reg_renumber; the insns still contain pseudo regs, but reg_renumber
55    can be used to find which hard reg, if any, a pseudo reg is in.
56 
57    The technique we always use is to free up a few hard regs that are
58    called ``reload regs'', and for each place where a pseudo reg
59    must be in a hard reg, copy it temporarily into one of the reload regs.
60 
61    Reload regs are allocated locally for every instruction that needs
62    reloads.  When there are pseudos which are allocated to a register that
63    has been chosen as a reload reg, such pseudos must be ``spilled''.
64    This means that they go to other hard regs, or to stack slots if no other
65    available hard regs can be found.  Spilling can invalidate more
66    insns, requiring additional need for reloads, so we must keep checking
67    until the process stabilizes.
68 
69    For machines with different classes of registers, we must keep track
70    of the register class needed for each reload, and make sure that
71    we allocate enough reload registers of each class.
72 
73    The file reload.c contains the code that checks one insn for
74    validity and reports the reloads that it needs.  This file
75    is in charge of scanning the entire rtl code, accumulating the
76    reload needs, spilling, assigning reload registers to use for
77    fixing up each insn, and generating the new insns to copy values
78    into the reload registers.  */
79 
80 struct target_reload default_target_reload;
81 #if SWITCHABLE_TARGET
82 struct target_reload *this_target_reload = &default_target_reload;
83 #endif
84 
85 #define spill_indirect_levels			\
86   (this_target_reload->x_spill_indirect_levels)
87 
88 /* During reload_as_needed, element N contains a REG rtx for the hard reg
89    into which reg N has been reloaded (perhaps for a previous insn).  */
90 static rtx *reg_last_reload_reg;
91 
92 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
93    for an output reload that stores into reg N.  */
94 static regset_head reg_has_output_reload;
95 
96 /* Indicates which hard regs are reload-registers for an output reload
97    in the current insn.  */
98 static HARD_REG_SET reg_is_output_reload;
99 
100 /* Widest mode in which each pseudo reg is referred to (via subreg).  */
101 static machine_mode *reg_max_ref_mode;
102 
103 /* Vector to remember old contents of reg_renumber before spilling.  */
104 static short *reg_old_renumber;
105 
106 /* During reload_as_needed, element N contains the last pseudo regno reloaded
107    into hard register N.  If that pseudo reg occupied more than one register,
108    reg_reloaded_contents points to that pseudo for each spill register in
109    use; all of these must remain set for an inheritance to occur.  */
110 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
111 
112 /* During reload_as_needed, element N contains the insn for which
113    hard register N was last used.   Its contents are significant only
114    when reg_reloaded_valid is set for this register.  */
115 static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
116 
117 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
118 static HARD_REG_SET reg_reloaded_valid;
119 /* Indicate if the register was dead at the end of the reload.
120    This is only valid if reg_reloaded_contents is set and valid.  */
121 static HARD_REG_SET reg_reloaded_dead;
122 
123 /* Indicate whether the register's current value is one that is not
124    safe to retain across a call, even for registers that are normally
125    call-saved.  This is only meaningful for members of reg_reloaded_valid.  */
126 static HARD_REG_SET reg_reloaded_call_part_clobbered;
127 
128 /* Number of spill-regs so far; number of valid elements of spill_regs.  */
129 static int n_spills;
130 
131 /* In parallel with spill_regs, contains REG rtx's for those regs.
132    Holds the last rtx used for any given reg, or 0 if it has never
133    been used for spilling yet.  This rtx is reused, provided it has
134    the proper mode.  */
135 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
136 
137 /* In parallel with spill_regs, contains nonzero for a spill reg
138    that was stored after the last time it was used.
139    The precise value is the insn generated to do the store.  */
140 static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
141 
142 /* This is the register that was stored with spill_reg_store.  This is a
143    copy of reload_out / reload_out_reg when the value was stored; if
144    reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
145 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
146 
147 /* This table is the inverse mapping of spill_regs:
148    indexed by hard reg number,
149    it contains the position of that reg in spill_regs,
150    or -1 for something that is not in spill_regs.
151 
152    ?!?  This is no longer accurate.  */
153 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
154 
155 /* This reg set indicates registers that can't be used as spill registers for
156    the currently processed insn.  These are the hard registers which are live
157    during the insn, but not allocated to pseudos, as well as fixed
158    registers.  */
159 static HARD_REG_SET bad_spill_regs;
160 
161 /* These are the hard registers that can't be used as spill register for any
162    insn.  This includes registers used for user variables and registers that
163    we can't eliminate.  A register that appears in this set also can't be used
164    to retry register allocation.  */
165 static HARD_REG_SET bad_spill_regs_global;
166 
167 /* Describes order of use of registers for reloading
168    of spilled pseudo-registers.  `n_spills' is the number of
169    elements that are actually valid; new ones are added at the end.
170 
171    Both spill_regs and spill_reg_order are used on two occasions:
172    once during find_reload_regs, where they keep track of the spill registers
173    for a single insn, but also during reload_as_needed where they show all
174    the registers ever used by reload.  For the latter case, the information
175    is calculated during finish_spills.  */
176 static short spill_regs[FIRST_PSEUDO_REGISTER];
177 
178 /* This vector of reg sets indicates, for each pseudo, which hard registers
179    may not be used for retrying global allocation because the register was
180    formerly spilled from one of them.  If we allowed reallocating a pseudo to
181    a register that it was already allocated to, reload might not
182    terminate.  */
183 static HARD_REG_SET *pseudo_previous_regs;
184 
185 /* This vector of reg sets indicates, for each pseudo, which hard
186    registers may not be used for retrying global allocation because they
187    are used as spill registers during one of the insns in which the
188    pseudo is live.  */
189 static HARD_REG_SET *pseudo_forbidden_regs;
190 
191 /* All hard regs that have been used as spill registers for any insn are
192    marked in this set.  */
193 static HARD_REG_SET used_spill_regs;
194 
195 /* Index of last register assigned as a spill register.  We allocate in
196    a round-robin fashion.  */
197 static int last_spill_reg;
198 
199 /* Record the stack slot for each spilled hard register.  */
200 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
201 
202 /* Width allocated so far for that stack slot.  */
203 static poly_uint64_pod spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
204 
205 /* Record which pseudos needed to be spilled.  */
206 static regset_head spilled_pseudos;
207 
208 /* Record which pseudos changed their allocation in finish_spills.  */
209 static regset_head changed_allocation_pseudos;
210 
211 /* Used for communication between order_regs_for_reload and count_pseudo.
212    Used to avoid counting one pseudo twice.  */
213 static regset_head pseudos_counted;
214 
215 /* First uid used by insns created by reload in this function.
216    Used in find_equiv_reg.  */
217 int reload_first_uid;
218 
219 /* Flag set by local-alloc or global-alloc if anything is live in
220    a call-clobbered reg across calls.  */
221 int caller_save_needed;
222 
223 /* Set to 1 while reload_as_needed is operating.
224    Required by some machines to handle any generated moves differently.  */
225 int reload_in_progress = 0;
226 
227 /* This obstack is used for allocation of rtl during register elimination.
228    The allocated storage can be freed once find_reloads has processed the
229    insn.  */
230 static struct obstack reload_obstack;
231 
232 /* Points to the beginning of the reload_obstack.  All insn_chain structures
233    are allocated first.  */
234 static char *reload_startobj;
235 
236 /* The point after all insn_chain structures.  Used to quickly deallocate
237    memory allocated in copy_reloads during calculate_needs_all_insns.  */
238 static char *reload_firstobj;
239 
240 /* This points before all local rtl generated by register elimination.
241    Used to quickly free all memory after processing one insn.  */
242 static char *reload_insn_firstobj;
243 
244 /* List of insn_chain instructions, one for every insn that reload needs to
245    examine.  */
246 struct insn_chain *reload_insn_chain;
247 
248 /* TRUE if we potentially left dead insns in the insn stream and want to
249    run DCE immediately after reload, FALSE otherwise.  */
250 static bool need_dce;
251 
252 /* List of all insns needing reloads.  */
253 static struct insn_chain *insns_need_reload;
254 
255 /* This structure is used to record information about register eliminations.
256    Each array entry describes one possible way of eliminating a register
257    in favor of another.   If there is more than one way of eliminating a
258    particular register, the most preferred should be specified first.  */
259 
260 struct elim_table
261 {
262   int from;			/* Register number to be eliminated.  */
263   int to;			/* Register number used as replacement.  */
264   poly_int64_pod initial_offset; /* Initial difference between values.  */
265   int can_eliminate;		/* Nonzero if this elimination can be done.  */
266   int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
267 				   target hook in previous scan over insns
268 				   made by reload.  */
269   poly_int64_pod offset;	/* Current offset between the two regs.  */
270   poly_int64_pod previous_offset; /* Offset at end of previous insn.  */
271   int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
272   rtx from_rtx;			/* REG rtx for the register to be eliminated.
273 				   We cannot simply compare the number since
274 				   we might then spuriously replace a hard
275 				   register corresponding to a pseudo
276 				   assigned to the reg to be eliminated.  */
277   rtx to_rtx;			/* REG rtx for the replacement.  */
278 };
279 
280 static struct elim_table *reg_eliminate = 0;
281 
282 /* This is an intermediate structure to initialize the table.  It has
283    exactly the members provided by ELIMINABLE_REGS.  */
284 static const struct elim_table_1
285 {
286   const int from;
287   const int to;
288 } reg_eliminate_1[] =
289 
290   ELIMINABLE_REGS;
291 
292 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
293 
294 /* Record the number of pending eliminations that have an offset not equal
295    to their initial offset.  If nonzero, we use a new copy of each
296    replacement result in any insns encountered.  */
297 int num_not_at_initial_offset;
298 
299 /* Count the number of registers that we may be able to eliminate.  */
300 static int num_eliminable;
301 /* And the number of registers that are equivalent to a constant that
302    can be eliminated to frame_pointer / arg_pointer + constant.  */
303 static int num_eliminable_invariants;
304 
305 /* For each label, we record the offset of each elimination.  If we reach
306    a label by more than one path and an offset differs, we cannot do the
307    elimination.  This information is indexed by the difference of the
308    number of the label and the first label number.  We can't offset the
309    pointer itself as this can cause problems on machines with segmented
310    memory.  The first table is an array of flags that records whether we
311    have yet encountered a label and the second table is an array of arrays,
312    one entry in the latter array for each elimination.  */
313 
314 static int first_label_num;
315 static char *offsets_known_at;
316 static poly_int64_pod (*offsets_at)[NUM_ELIMINABLE_REGS];
317 
318 vec<reg_equivs_t, va_gc> *reg_equivs;
319 
320 /* Stack of addresses where an rtx has been changed.  We can undo the
321    changes by popping items off the stack and restoring the original
322    value at each location.
323 
324    We use this simplistic undo capability rather than copy_rtx as copy_rtx
325    will not make a deep copy of a normally sharable rtx, such as
326    (const (plus (symbol_ref) (const_int))).  If such an expression appears
327    as R1 in gen_reload_chain_without_interm_reg_p, then a shared
328    rtx expression would be changed.  See PR 42431.  */
329 
330 typedef rtx *rtx_p;
331 static vec<rtx_p> substitute_stack;
332 
333 /* Number of labels in the current function.  */
334 
335 static int num_labels;
336 
337 static void replace_pseudos_in (rtx *, machine_mode, rtx);
338 static void maybe_fix_stack_asms (void);
339 static void copy_reloads (struct insn_chain *);
340 static void calculate_needs_all_insns (int);
341 static int find_reg (struct insn_chain *, int);
342 static void find_reload_regs (struct insn_chain *);
343 static void select_reload_regs (void);
344 static void delete_caller_save_insns (void);
345 
346 static void spill_failure (rtx_insn *, enum reg_class);
347 static void count_spilled_pseudo (int, int, int);
348 static void delete_dead_insn (rtx_insn *);
349 static void alter_reg (int, int, bool);
350 static void set_label_offsets (rtx, rtx_insn *, int);
351 static void check_eliminable_occurrences (rtx);
352 static void elimination_effects (rtx, machine_mode);
353 static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
354 static int eliminate_regs_in_insn (rtx_insn *, int);
355 static void update_eliminable_offsets (void);
356 static void mark_not_eliminable (rtx, const_rtx, void *);
357 static void set_initial_elim_offsets (void);
358 static bool verify_initial_elim_offsets (void);
359 static void set_initial_label_offsets (void);
360 static void set_offsets_for_label (rtx_insn *);
361 static void init_eliminable_invariants (rtx_insn *, bool);
362 static void init_elim_table (void);
363 static void free_reg_equiv (void);
364 static void update_eliminables (HARD_REG_SET *);
365 static bool update_eliminables_and_spill (void);
366 static void elimination_costs_in_insn (rtx_insn *);
367 static void spill_hard_reg (unsigned int, int);
368 static int finish_spills (int);
369 static void scan_paradoxical_subregs (rtx);
370 static void count_pseudo (int);
371 static void order_regs_for_reload (struct insn_chain *);
372 static void reload_as_needed (int);
373 static void forget_old_reloads_1 (rtx, const_rtx, void *);
374 static void forget_marked_reloads (regset);
375 static int reload_reg_class_lower (const void *, const void *);
376 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
377 				    machine_mode);
378 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
379 				     machine_mode);
380 static int reload_reg_free_p (unsigned int, int, enum reload_type);
381 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
382 					rtx, rtx, int, int);
383 static int free_for_value_p (int, machine_mode, int, enum reload_type,
384 			     rtx, rtx, int, int);
385 static int allocate_reload_reg (struct insn_chain *, int, int);
386 static int conflicts_with_override (rtx);
387 static void failed_reload (rtx_insn *, int);
388 static int set_reload_reg (int, int);
389 static void choose_reload_regs_init (struct insn_chain *, rtx *);
390 static void choose_reload_regs (struct insn_chain *);
391 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
392 				     rtx, int);
393 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
394 				      int);
395 static void do_input_reload (struct insn_chain *, struct reload *, int);
396 static void do_output_reload (struct insn_chain *, struct reload *, int);
397 static void emit_reload_insns (struct insn_chain *);
398 static void delete_output_reload (rtx_insn *, int, int, rtx);
399 static void delete_address_reloads (rtx_insn *, rtx_insn *);
400 static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
401 static void inc_for_reload (rtx, rtx, rtx, poly_int64);
402 static void add_auto_inc_notes (rtx_insn *, rtx);
403 static void substitute (rtx *, const_rtx, rtx);
404 static bool gen_reload_chain_without_interm_reg_p (int, int);
405 static int reloads_conflict (int, int);
406 static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
407 static rtx_insn *emit_insn_if_valid_for_reload (rtx);
408 
409 /* Initialize the reload pass.  This is called at the beginning of compilation
410    and may be called again if the target is reinitialized.  */
411 
412 void
413 init_reload (void)
414 {
415   int i;
416 
417   /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
418      Set spill_indirect_levels to the number of levels such addressing is
419      permitted, zero if it is not permitted at all.  */
420 
421   rtx tem
422     = gen_rtx_MEM (Pmode,
423 		   gen_rtx_PLUS (Pmode,
424 				 gen_rtx_REG (Pmode,
425 					      LAST_VIRTUAL_REGISTER + 1),
426 				 gen_int_mode (4, Pmode)));
427   spill_indirect_levels = 0;
428 
429   while (memory_address_p (QImode, tem))
430     {
431       spill_indirect_levels++;
432       tem = gen_rtx_MEM (Pmode, tem);
433     }
434 
435   /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */
436 
437   tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
438   indirect_symref_ok = memory_address_p (QImode, tem);
439 
440   /* See if reg+reg is a valid (and offsettable) address.  */
441 
442   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
443     {
444       tem = gen_rtx_PLUS (Pmode,
445 			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
446 			  gen_rtx_REG (Pmode, i));
447 
448       /* This way, we make sure that reg+reg is an offsettable address.  */
449       tem = plus_constant (Pmode, tem, 4);
450 
451       for (int mode = 0; mode < MAX_MACHINE_MODE; mode++)
452 	if (!double_reg_address_ok[mode]
453 	    && memory_address_p ((enum machine_mode)mode, tem))
454 	  double_reg_address_ok[mode] = 1;
455     }
456 
457   /* Initialize obstack for our rtl allocation.  */
458   if (reload_startobj == NULL)
459     {
460       gcc_obstack_init (&reload_obstack);
461       reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
462     }
463 
464   INIT_REG_SET (&spilled_pseudos);
465   INIT_REG_SET (&changed_allocation_pseudos);
466   INIT_REG_SET (&pseudos_counted);
467 }
468 
469 /* List of insn chains that are currently unused.  */
470 static struct insn_chain *unused_insn_chains = 0;
471 
472 /* Allocate an empty insn_chain structure.  */
473 struct insn_chain *
474 new_insn_chain (void)
475 {
476   struct insn_chain *c;
477 
478   if (unused_insn_chains == 0)
479     {
480       c = XOBNEW (&reload_obstack, struct insn_chain);
481       INIT_REG_SET (&c->live_throughout);
482       INIT_REG_SET (&c->dead_or_set);
483     }
484   else
485     {
486       c = unused_insn_chains;
487       unused_insn_chains = c->next;
488     }
489   c->is_caller_save_insn = 0;
490   c->need_operand_change = 0;
491   c->need_reload = 0;
492   c->need_elim = 0;
493   return c;
494 }
495 
496 /* Small utility function to set all regs in hard reg set TO which are
497    allocated to pseudos in regset FROM.  */
498 
499 void
500 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
501 {
502   unsigned int regno;
503   reg_set_iterator rsi;
504 
505   EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
506     {
507       int r = reg_renumber[regno];
508 
509       if (r < 0)
510 	{
511 	  /* reload_combine uses the information from DF_LIVE_IN,
512 	     which might still contain registers that have not
513 	     actually been allocated since they have an
514 	     equivalence.  */
515 	  gcc_assert (ira_conflicts_p || reload_completed);
516 	}
517       else
518 	add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
519     }
520 }
521 
522 /* Replace all pseudos found in LOC with their corresponding
523    equivalences.  */
524 
525 static void
526 replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
527 {
528   rtx x = *loc;
529   enum rtx_code code;
530   const char *fmt;
531   int i, j;
532 
533   if (! x)
534     return;
535 
536   code = GET_CODE (x);
537   if (code == REG)
538     {
539       unsigned int regno = REGNO (x);
540 
541       if (regno < FIRST_PSEUDO_REGISTER)
542 	return;
543 
544       x = eliminate_regs_1 (x, mem_mode, usage, true, false);
545       if (x != *loc)
546 	{
547 	  *loc = x;
548 	  replace_pseudos_in (loc, mem_mode, usage);
549 	  return;
550 	}
551 
552       if (reg_equiv_constant (regno))
553 	*loc = reg_equiv_constant (regno);
554       else if (reg_equiv_invariant (regno))
555 	*loc = reg_equiv_invariant (regno);
556       else if (reg_equiv_mem (regno))
557 	*loc = reg_equiv_mem (regno);
558       else if (reg_equiv_address (regno))
559 	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
560       else
561 	{
562 	  gcc_assert (!REG_P (regno_reg_rtx[regno])
563 		      || REGNO (regno_reg_rtx[regno]) != regno);
564 	  *loc = regno_reg_rtx[regno];
565 	}
566 
567       return;
568     }
569   else if (code == MEM)
570     {
571       replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
572       return;
573     }
574 
575   /* Process each of our operands recursively.  */
576   fmt = GET_RTX_FORMAT (code);
577   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
578     if (*fmt == 'e')
579       replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
580     else if (*fmt == 'E')
581       for (j = 0; j < XVECLEN (x, i); j++)
582 	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
583 }
584 
585 /* Determine if the current function has an exception receiver block
586    that reaches the exit block via non-exceptional edges  */
587 
588 static bool
589 has_nonexceptional_receiver (void)
590 {
591   edge e;
592   edge_iterator ei;
593   basic_block *tos, *worklist, bb;
594 
595   /* If we're not optimizing, then just err on the safe side.  */
596   if (!optimize)
597     return true;
598 
599   /* First determine which blocks can reach exit via normal paths.  */
600   tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
601 
602   FOR_EACH_BB_FN (bb, cfun)
603     bb->flags &= ~BB_REACHABLE;
604 
605   /* Place the exit block on our worklist.  */
606   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
607   *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
608 
609   /* Iterate: find everything reachable from what we've already seen.  */
610   while (tos != worklist)
611     {
612       bb = *--tos;
613 
614       FOR_EACH_EDGE (e, ei, bb->preds)
615 	if (!(e->flags & EDGE_ABNORMAL))
616 	  {
617 	    basic_block src = e->src;
618 
619 	    if (!(src->flags & BB_REACHABLE))
620 	      {
621 		src->flags |= BB_REACHABLE;
622 		*tos++ = src;
623 	      }
624 	  }
625     }
626   free (worklist);
627 
628   /* Now see if there's a reachable block with an exceptional incoming
629      edge.  */
630   FOR_EACH_BB_FN (bb, cfun)
631     if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
632       return true;
633 
634   /* No exceptional block reached exit unexceptionally.  */
635   return false;
636 }
637 
638 /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
639    zero elements) to MAX_REG_NUM elements.
640 
641    Initialize all new fields to NULL and update REG_EQUIVS_SIZE.  */
642 void
643 grow_reg_equivs (void)
644 {
645   int old_size = vec_safe_length (reg_equivs);
646   int max_regno = max_reg_num ();
647   int i;
648   reg_equivs_t ze;
649 
650   memset (&ze, 0, sizeof (reg_equivs_t));
651   vec_safe_reserve (reg_equivs, max_regno);
652   for (i = old_size; i < max_regno; i++)
653     reg_equivs->quick_insert (i, ze);
654 }
655 
656 
657 /* Global variables used by reload and its subroutines.  */
658 
659 /* The current basic block while in calculate_elim_costs_all_insns.  */
660 static basic_block elim_bb;
661 
662 /* Set during calculate_needs if an insn needs register elimination.  */
663 static int something_needs_elimination;
664 /* Set during calculate_needs if an insn needs an operand changed.  */
665 static int something_needs_operands_changed;
666 /* Set by alter_regs if we spilled a register to the stack.  */
667 static bool something_was_spilled;
668 
669 /* Nonzero means we couldn't get enough spill regs.  */
670 static int failure;
671 
672 /* Temporary array of pseudo-register number.  */
673 static int *temp_pseudo_reg_arr;
674 
675 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
676    If that insn didn't set the register (i.e., it copied the register to
677    memory), just delete that insn instead of the equivalencing insn plus
678    anything now dead.  If we call delete_dead_insn on that insn, we may
679    delete the insn that actually sets the register if the register dies
680    there and that is incorrect.  */
681 static void
682 remove_init_insns ()
683 {
684   for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
685     {
686       if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
687 	{
688 	  rtx list;
689 	  for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
690 	    {
691 	      rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
692 
693 	      /* If we already deleted the insn or if it may trap, we can't
694 		 delete it.  The latter case shouldn't happen, but can
695 		 if an insn has a variable address, gets a REG_EH_REGION
696 		 note added to it, and then gets converted into a load
697 		 from a constant address.  */
698 	      if (NOTE_P (equiv_insn)
699 		  || can_throw_internal (equiv_insn))
700 		;
701 	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
702 		delete_dead_insn (equiv_insn);
703 	      else
704 		SET_INSN_DELETED (equiv_insn);
705 	    }
706 	}
707     }
708 }
709 
710 /* Return true if remove_init_insns will delete INSN.  */
711 static bool
712 will_delete_init_insn_p (rtx_insn *insn)
713 {
714   rtx set = single_set (insn);
715   if (!set || !REG_P (SET_DEST (set)))
716     return false;
717   unsigned regno = REGNO (SET_DEST (set));
718 
719   if (can_throw_internal (insn))
720     return false;
721 
722   if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
723     return false;
724 
725   for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
726     {
727       rtx equiv_insn = XEXP (list, 0);
728       if (equiv_insn == insn)
729 	return true;
730     }
731   return false;
732 }
733 
734 /* Main entry point for the reload pass.
735 
736    FIRST is the first insn of the function being compiled.
737 
738    GLOBAL nonzero means we were called from global_alloc
739    and should attempt to reallocate any pseudoregs that we
740    displace from hard regs we will use for reloads.
741    If GLOBAL is zero, we do not have enough information to do that,
742    so any pseudo reg that is spilled must go to the stack.
743 
744    Return value is TRUE if reload likely left dead insns in the
745    stream and a DCE pass should be run to eliminate them.  Else the
746    return value is FALSE.  */
747 
748 bool
749 reload (rtx_insn *first, int global)
750 {
751   int i, n;
752   rtx_insn *insn;
753   struct elim_table *ep;
754   basic_block bb;
755   bool inserted;
756 
757   /* Make sure even insns with volatile mem refs are recognizable.  */
758   init_recog ();
759 
760   failure = 0;
761 
762   reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
763 
764   /* Make sure that the last insn in the chain
765      is not something that needs reloading.  */
766   emit_note (NOTE_INSN_DELETED);
767 
768   /* Enable find_equiv_reg to distinguish insns made by reload.  */
769   reload_first_uid = get_max_uid ();
770 
771   /* Initialize the secondary memory table.  */
772   clear_secondary_mem ();
773 
774   /* We don't have a stack slot for any spill reg yet.  */
775   memset (spill_stack_slot, 0, sizeof spill_stack_slot);
776   memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
777 
778   /* Initialize the save area information for caller-save, in case some
779      are needed.  */
780   init_save_areas ();
781 
782   /* Compute which hard registers are now in use
783      as homes for pseudo registers.
784      This is done here rather than (eg) in global_alloc
785      because this point is reached even if not optimizing.  */
786   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
787     mark_home_live (i);
788 
789   /* A function that has a nonlocal label that can reach the exit
790      block via non-exceptional paths must save all call-saved
791      registers.  */
792   if (cfun->has_nonlocal_label
793       && has_nonexceptional_receiver ())
794     crtl->saves_all_registers = 1;
795 
796   if (crtl->saves_all_registers)
797     for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
798       if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
799 	df_set_regs_ever_live (i, true);
800 
801   /* Find all the pseudo registers that didn't get hard regs
802      but do have known equivalent constants or memory slots.
803      These include parameters (known equivalent to parameter slots)
804      and cse'd or loop-moved constant memory addresses.
805 
806      Record constant equivalents in reg_equiv_constant
807      so they will be substituted by find_reloads.
808      Record memory equivalents in reg_mem_equiv so they can
809      be substituted eventually by altering the REG-rtx's.  */
810 
811   grow_reg_equivs ();
812   reg_old_renumber = XCNEWVEC (short, max_regno);
813   memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
814   pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
815   pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
816 
817   CLEAR_HARD_REG_SET (bad_spill_regs_global);
818 
819   init_eliminable_invariants (first, true);
820   init_elim_table ();
821 
822   /* Alter each pseudo-reg rtx to contain its hard reg number.  Assign
823      stack slots to the pseudos that lack hard regs or equivalents.
824      Do not touch virtual registers.  */
825 
826   temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
827   for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
828     temp_pseudo_reg_arr[n++] = i;
829 
830   if (ira_conflicts_p)
831     /* Ask IRA to order pseudo-registers for better stack slot
832        sharing.  */
833     ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_mode);
834 
835   for (i = 0; i < n; i++)
836     alter_reg (temp_pseudo_reg_arr[i], -1, false);
837 
838   /* If we have some registers we think can be eliminated, scan all insns to
839      see if there is an insn that sets one of these registers to something
840      other than itself plus a constant.  If so, the register cannot be
841      eliminated.  Doing this scan here eliminates an extra pass through the
842      main reload loop in the most common case where register elimination
843      cannot be done.  */
844   for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
845     if (INSN_P (insn))
846       note_stores (PATTERN (insn), mark_not_eliminable, NULL);
847 
848   maybe_fix_stack_asms ();
849 
850   insns_need_reload = 0;
851   something_needs_elimination = 0;
852 
853   /* Initialize to -1, which means take the first spill register.  */
854   last_spill_reg = -1;
855 
856   /* Spill any hard regs that we know we can't eliminate.  */
857   CLEAR_HARD_REG_SET (used_spill_regs);
858   /* There can be multiple ways to eliminate a register;
859      they should be listed adjacently.
860      Elimination for any register fails only if all possible ways fail.  */
861   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
862     {
863       int from = ep->from;
864       int can_eliminate = 0;
865       do
866 	{
867           can_eliminate |= ep->can_eliminate;
868           ep++;
869 	}
870       while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
871       if (! can_eliminate)
872 	spill_hard_reg (from, 1);
873     }
874 
875   if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
876     spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
877 
878   finish_spills (global);
879 
880   /* From now on, we may need to generate moves differently.  We may also
881      allow modifications of insns which cause them to not be recognized.
882      Any such modifications will be cleaned up during reload itself.  */
883   reload_in_progress = 1;
884 
885   /* This loop scans the entire function each go-round
886      and repeats until one repetition spills no additional hard regs.  */
887   for (;;)
888     {
889       int something_changed;
890       poly_int64 starting_frame_size;
891 
892       starting_frame_size = get_frame_size ();
893       something_was_spilled = false;
894 
895       set_initial_elim_offsets ();
896       set_initial_label_offsets ();
897 
898       /* For each pseudo register that has an equivalent location defined,
899 	 try to eliminate any eliminable registers (such as the frame pointer)
900 	 assuming initial offsets for the replacement register, which
901 	 is the normal case.
902 
903 	 If the resulting location is directly addressable, substitute
904 	 the MEM we just got directly for the old REG.
905 
906 	 If it is not addressable but is a constant or the sum of a hard reg
907 	 and constant, it is probably not addressable because the constant is
908 	 out of range, in that case record the address; we will generate
909 	 hairy code to compute the address in a register each time it is
910 	 needed.  Similarly if it is a hard register, but one that is not
911 	 valid as an address register.
912 
913 	 If the location is not addressable, but does not have one of the
914 	 above forms, assign a stack slot.  We have to do this to avoid the
915 	 potential of producing lots of reloads if, e.g., a location involves
916 	 a pseudo that didn't get a hard register and has an equivalent memory
917 	 location that also involves a pseudo that didn't get a hard register.
918 
919 	 Perhaps at some point we will improve reload_when_needed handling
920 	 so this problem goes away.  But that's very hairy.  */
921 
922       for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
923 	if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
924 	  {
925 	    rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
926 				    NULL_RTX);
927 
928 	    if (strict_memory_address_addr_space_p
929 		  (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
930 		   MEM_ADDR_SPACE (x)))
931 	      reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
932 	    else if (CONSTANT_P (XEXP (x, 0))
933 		     || (REG_P (XEXP (x, 0))
934 			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
935 		     || (GET_CODE (XEXP (x, 0)) == PLUS
936 			 && REG_P (XEXP (XEXP (x, 0), 0))
937 			 && (REGNO (XEXP (XEXP (x, 0), 0))
938 			     < FIRST_PSEUDO_REGISTER)
939 			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
940 	      reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
941 	    else
942 	      {
943 		/* Make a new stack slot.  Then indicate that something
944 		   changed so we go back and recompute offsets for
945 		   eliminable registers because the allocation of memory
946 		   below might change some offset.  reg_equiv_{mem,address}
947 		   will be set up for this pseudo on the next pass around
948 		   the loop.  */
949 		reg_equiv_memory_loc (i) = 0;
950 		reg_equiv_init (i) = 0;
951 		alter_reg (i, -1, true);
952 	      }
953 	  }
954 
955       if (caller_save_needed)
956 	setup_save_areas ();
957 
958       if (maybe_ne (starting_frame_size, 0) && crtl->stack_alignment_needed)
959 	{
960 	  /* If we have a stack frame, we must align it now.  The
961 	     stack size may be a part of the offset computation for
962 	     register elimination.  So if this changes the stack size,
963 	     then repeat the elimination bookkeeping.  We don't
964 	     realign when there is no stack, as that will cause a
965 	     stack frame when none is needed should
966 	     TARGET_STARTING_FRAME_OFFSET not be already aligned to
967 	     STACK_BOUNDARY.  */
968 	  assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
969 	}
970       /* If we allocated another stack slot, redo elimination bookkeeping.  */
971       if (something_was_spilled
972 	  || maybe_ne (starting_frame_size, get_frame_size ()))
973 	{
974 	  if (update_eliminables_and_spill ())
975 	    finish_spills (0);
976 	  continue;
977 	}
978 
979       if (caller_save_needed)
980 	{
981 	  save_call_clobbered_regs ();
982 	  /* That might have allocated new insn_chain structures.  */
983 	  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
984 	}
985 
986       calculate_needs_all_insns (global);
987 
988       if (! ira_conflicts_p)
989 	/* Don't do it for IRA.  We need this info because we don't
990 	   change live_throughout and dead_or_set for chains when IRA
991 	   is used.  */
992 	CLEAR_REG_SET (&spilled_pseudos);
993 
994       something_changed = 0;
995 
996       /* If we allocated any new memory locations, make another pass
997 	 since it might have changed elimination offsets.  */
998       if (something_was_spilled
999 	  || maybe_ne (starting_frame_size, get_frame_size ()))
1000 	something_changed = 1;
1001 
1002       /* Even if the frame size remained the same, we might still have
1003 	 changed elimination offsets, e.g. if find_reloads called
1004 	 force_const_mem requiring the back end to allocate a constant
1005 	 pool base register that needs to be saved on the stack.  */
1006       else if (!verify_initial_elim_offsets ())
1007 	something_changed = 1;
1008 
1009       if (update_eliminables_and_spill ())
1010 	{
1011 	  finish_spills (0);
1012 	  something_changed = 1;
1013 	}
1014       else
1015 	{
1016 	  select_reload_regs ();
1017 	  if (failure)
1018 	    goto failed;
1019 	  if (insns_need_reload)
1020 	    something_changed |= finish_spills (global);
1021 	}
1022 
1023       if (! something_changed)
1024 	break;
1025 
1026       if (caller_save_needed)
1027 	delete_caller_save_insns ();
1028 
1029       obstack_free (&reload_obstack, reload_firstobj);
1030     }
1031 
1032   /* If global-alloc was run, notify it of any register eliminations we have
1033      done.  */
1034   if (global)
1035     for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1036       if (ep->can_eliminate)
1037 	mark_elimination (ep->from, ep->to);
1038 
1039   remove_init_insns ();
1040 
1041   /* Use the reload registers where necessary
1042      by generating move instructions to move the must-be-register
1043      values into or out of the reload registers.  */
1044 
1045   if (insns_need_reload != 0 || something_needs_elimination
1046       || something_needs_operands_changed)
1047     {
1048       poly_int64 old_frame_size = get_frame_size ();
1049 
1050       reload_as_needed (global);
1051 
1052       gcc_assert (known_eq (old_frame_size, get_frame_size ()));
1053 
1054       gcc_assert (verify_initial_elim_offsets ());
1055     }
1056 
1057   /* If we were able to eliminate the frame pointer, show that it is no
1058      longer live at the start of any basic block.  If it ls live by
1059      virtue of being in a pseudo, that pseudo will be marked live
1060      and hence the frame pointer will be known to be live via that
1061      pseudo.  */
1062 
1063   if (! frame_pointer_needed)
1064     FOR_EACH_BB_FN (bb, cfun)
1065       bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1066 
1067   /* Come here (with failure set nonzero) if we can't get enough spill
1068      regs.  */
1069  failed:
1070 
1071   CLEAR_REG_SET (&changed_allocation_pseudos);
1072   CLEAR_REG_SET (&spilled_pseudos);
1073   reload_in_progress = 0;
1074 
1075   /* Now eliminate all pseudo regs by modifying them into
1076      their equivalent memory references.
1077      The REG-rtx's for the pseudos are modified in place,
1078      so all insns that used to refer to them now refer to memory.
1079 
1080      For a reg that has a reg_equiv_address, all those insns
1081      were changed by reloading so that no insns refer to it any longer;
1082      but the DECL_RTL of a variable decl may refer to it,
1083      and if so this causes the debugging info to mention the variable.  */
1084 
1085   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1086     {
1087       rtx addr = 0;
1088 
1089       if (reg_equiv_mem (i))
1090 	addr = XEXP (reg_equiv_mem (i), 0);
1091 
1092       if (reg_equiv_address (i))
1093 	addr = reg_equiv_address (i);
1094 
1095       if (addr)
1096 	{
1097 	  if (reg_renumber[i] < 0)
1098 	    {
1099 	      rtx reg = regno_reg_rtx[i];
1100 
1101 	      REG_USERVAR_P (reg) = 0;
1102 	      PUT_CODE (reg, MEM);
1103 	      XEXP (reg, 0) = addr;
1104 	      if (reg_equiv_memory_loc (i))
1105 		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1106 	      else
1107 		MEM_ATTRS (reg) = 0;
1108 	      MEM_NOTRAP_P (reg) = 1;
1109 	    }
1110 	  else if (reg_equiv_mem (i))
1111 	    XEXP (reg_equiv_mem (i), 0) = addr;
1112 	}
1113 
1114       /* We don't want complex addressing modes in debug insns
1115 	 if simpler ones will do, so delegitimize equivalences
1116 	 in debug insns.  */
1117       if (MAY_HAVE_DEBUG_BIND_INSNS && reg_renumber[i] < 0)
1118 	{
1119 	  rtx reg = regno_reg_rtx[i];
1120 	  rtx equiv = 0;
1121 	  df_ref use, next;
1122 
1123 	  if (reg_equiv_constant (i))
1124 	    equiv = reg_equiv_constant (i);
1125 	  else if (reg_equiv_invariant (i))
1126 	    equiv = reg_equiv_invariant (i);
1127 	  else if (reg && MEM_P (reg))
1128 	    equiv = targetm.delegitimize_address (reg);
1129 	  else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1130 	    equiv = reg;
1131 
1132 	  if (equiv == reg)
1133 	    continue;
1134 
1135 	  for (use = DF_REG_USE_CHAIN (i); use; use = next)
1136 	    {
1137 	      insn = DF_REF_INSN (use);
1138 
1139 	      /* Make sure the next ref is for a different instruction,
1140 		 so that we're not affected by the rescan.  */
1141 	      next = DF_REF_NEXT_REG (use);
1142 	      while (next && DF_REF_INSN (next) == insn)
1143 		next = DF_REF_NEXT_REG (next);
1144 
1145 	      if (DEBUG_BIND_INSN_P (insn))
1146 		{
1147 		  if (!equiv)
1148 		    {
1149 		      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1150 		      df_insn_rescan_debug_internal (insn);
1151 		    }
1152 		  else
1153 		    INSN_VAR_LOCATION_LOC (insn)
1154 		      = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1155 					      reg, equiv);
1156 		}
1157 	    }
1158 	}
1159     }
1160 
1161   /* We must set reload_completed now since the cleanup_subreg_operands call
1162      below will re-recognize each insn and reload may have generated insns
1163      which are only valid during and after reload.  */
1164   reload_completed = 1;
1165 
1166   /* Make a pass over all the insns and delete all USEs which we inserted
1167      only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1168      notes.  Delete all CLOBBER insns, except those that refer to the return
1169      value and the special mem:BLK CLOBBERs added to prevent the scheduler
1170      from misarranging variable-array code, and simplify (subreg (reg))
1171      operands.  Strip and regenerate REG_INC notes that may have been moved
1172      around.  */
1173 
1174   for (insn = first; insn; insn = NEXT_INSN (insn))
1175     if (INSN_P (insn))
1176       {
1177 	rtx *pnote;
1178 
1179 	if (CALL_P (insn))
1180 	  replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1181 			      VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1182 
1183 	if ((GET_CODE (PATTERN (insn)) == USE
1184 	     /* We mark with QImode USEs introduced by reload itself.  */
1185 	     && (GET_MODE (insn) == QImode
1186 		 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1187 	    || (GET_CODE (PATTERN (insn)) == CLOBBER
1188 		&& (!MEM_P (XEXP (PATTERN (insn), 0))
1189 		    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1190 		    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1191 			&& XEXP (XEXP (PATTERN (insn), 0), 0)
1192 				!= stack_pointer_rtx))
1193 		&& (!REG_P (XEXP (PATTERN (insn), 0))
1194 		    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1195 	  {
1196 	    delete_insn (insn);
1197 	    continue;
1198 	  }
1199 
1200 	/* Some CLOBBERs may survive until here and still reference unassigned
1201 	   pseudos with const equivalent, which may in turn cause ICE in later
1202 	   passes if the reference remains in place.  */
1203 	if (GET_CODE (PATTERN (insn)) == CLOBBER)
1204 	  replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1205 			      VOIDmode, PATTERN (insn));
1206 
1207 	/* Discard obvious no-ops, even without -O.  This optimization
1208 	   is fast and doesn't interfere with debugging.  */
1209 	if (NONJUMP_INSN_P (insn)
1210 	    && GET_CODE (PATTERN (insn)) == SET
1211 	    && REG_P (SET_SRC (PATTERN (insn)))
1212 	    && REG_P (SET_DEST (PATTERN (insn)))
1213 	    && (REGNO (SET_SRC (PATTERN (insn)))
1214 		== REGNO (SET_DEST (PATTERN (insn)))))
1215 	  {
1216 	    delete_insn (insn);
1217 	    continue;
1218 	  }
1219 
1220 	pnote = &REG_NOTES (insn);
1221 	while (*pnote != 0)
1222 	  {
1223 	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
1224 		|| REG_NOTE_KIND (*pnote) == REG_UNUSED
1225 		|| REG_NOTE_KIND (*pnote) == REG_INC)
1226 	      *pnote = XEXP (*pnote, 1);
1227 	    else
1228 	      pnote = &XEXP (*pnote, 1);
1229 	  }
1230 
1231 	if (AUTO_INC_DEC)
1232 	  add_auto_inc_notes (insn, PATTERN (insn));
1233 
1234 	/* Simplify (subreg (reg)) if it appears as an operand.  */
1235 	cleanup_subreg_operands (insn);
1236 
1237 	/* Clean up invalid ASMs so that they don't confuse later passes.
1238 	   See PR 21299.  */
1239 	if (asm_noperands (PATTERN (insn)) >= 0)
1240 	  {
1241 	    extract_insn (insn);
1242 	    if (!constrain_operands (1, get_enabled_alternatives (insn)))
1243 	      {
1244 		error_for_asm (insn,
1245 			       "%<asm%> operand has impossible constraints");
1246 		delete_insn (insn);
1247 		continue;
1248 	      }
1249 	  }
1250       }
1251 
1252   free (temp_pseudo_reg_arr);
1253 
1254   /* Indicate that we no longer have known memory locations or constants.  */
1255   free_reg_equiv ();
1256 
1257   free (reg_max_ref_mode);
1258   free (reg_old_renumber);
1259   free (pseudo_previous_regs);
1260   free (pseudo_forbidden_regs);
1261 
1262   CLEAR_HARD_REG_SET (used_spill_regs);
1263   for (i = 0; i < n_spills; i++)
1264     SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1265 
1266   /* Free all the insn_chain structures at once.  */
1267   obstack_free (&reload_obstack, reload_startobj);
1268   unused_insn_chains = 0;
1269 
1270   inserted = fixup_abnormal_edges ();
1271 
1272   /* We've possibly turned single trapping insn into multiple ones.  */
1273   if (cfun->can_throw_non_call_exceptions)
1274     {
1275       auto_sbitmap blocks (last_basic_block_for_fn (cfun));
1276       bitmap_ones (blocks);
1277       find_many_sub_basic_blocks (blocks);
1278     }
1279 
1280   if (inserted)
1281     commit_edge_insertions ();
1282 
1283   /* Replacing pseudos with their memory equivalents might have
1284      created shared rtx.  Subsequent passes would get confused
1285      by this, so unshare everything here.  */
1286   unshare_all_rtl_again (first);
1287 
1288 #ifdef STACK_BOUNDARY
1289   /* init_emit has set the alignment of the hard frame pointer
1290      to STACK_BOUNDARY.  It is very likely no longer valid if
1291      the hard frame pointer was used for register allocation.  */
1292   if (!frame_pointer_needed)
1293     REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1294 #endif
1295 
1296   substitute_stack.release ();
1297 
1298   gcc_assert (bitmap_empty_p (&spilled_pseudos));
1299 
1300   reload_completed = !failure;
1301 
1302   return need_dce;
1303 }
1304 
/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.  */

static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns are of interest, and an asm with clobbers has a
	 PARALLEL pattern; skip everything else.  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	  /* CLOBBER_HIGH is only supported for LRA.  */
	  gcc_assert (GET_CODE (t) != CLOBBER_HIGH);
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  /* '#' disparages the rest of this alternative; skip
		     ahead to the next alternative (or the end).  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case 'g':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  /* Address constraints allow the base-register class;
		     everything else maps through the generic constraint
		     machinery (non-register constraints yield NO_REGS).  */
		  enum constraint_num cn = lookup_constraint (p);
		  if (insn_extra_address_constraint (cn))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					     ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [reg_class_for_constraint (cn)];
		  break;
		}
	      /* Constraints can be multi-character; advance past the
		 whole constraint, not just one char.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1413 
1414 /* Copy the global variables n_reloads and rld into the corresponding elts
1415    of CHAIN.  */
1416 static void
1417 copy_reloads (struct insn_chain *chain)
1418 {
1419   chain->n_reloads = n_reloads;
1420   chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1421   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1422   reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1423 }
1424 
/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.  */
static void
calculate_needs_all_insns (int global)
{
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  /* Mark the start of per-insn allocations on the reload obstack so
     they can be discarded once an insn has been processed.  */
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx_insn *insn = chain->insn;

      /* Fetch the successor now; CHAIN may be unlinked below.  */
      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  /* Save the original pattern/code/notes so they can be
	     restored if the eliminations done here are discarded.  */
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;

	  /* Skip insns that only set an equivalence.  */
	  if (will_delete_init_insn_p (insn))
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
		       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
		       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
				       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  /* Return CHAIN to the free list for reuse.  */
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  /* Thread insns that need reloads onto the insns_need_reload
	     list via their next_need_reload field.  */
	  if (n_reloads != 0)
	    {
	      copy_reloads (chain);
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  *pprev_reload = 0;
}
1548 
/* This function is called from the register allocator to set up estimates
   for the cost of eliminating pseudos which have REG_EQUIV equivalences to
   an invariant.  The structure is similar to calculate_needs_all_insns.  */

void
calculate_elim_costs_all_insns (void)
{
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  /* Element I accumulates the estimated gain from deleting the insns
     that initialize pseudo I (zero-initialized by XCNEWVEC).  */
  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant (REGNO (SET_DEST (set)))
		      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx_insn_list *init = reg_equiv_init (regno);
		  if (init)
		    {
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      machine_mode mode = GET_MODE (SET_DEST (set));
		      int cost = set_src_cost (t, mode,
					       optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      /* Scale the per-execution cost by the block's
			 frequency to estimate the total gain.  */
		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }
  /* Report the collected costs to IRA.  A pseudo whose equivalence has
     no surviving init insns cannot be eliminated; reset its cost.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant (i))
	{
	  if (reg_equiv_init (i))
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  /* The label-offset tables were only needed for this estimate.  */
  free (reg_equiv_init_cost);
  free (offsets_known_at);
  free (offsets_at);
  offsets_at = NULL;
  offsets_known_at = NULL;
}
1646 
1647 /* Comparison function for qsort to decide which of two reloads
1648    should be handled first.  *P1 and *P2 are the reload numbers.  */
1649 
1650 static int
1651 reload_reg_class_lower (const void *r1p, const void *r2p)
1652 {
1653   int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1654   int t;
1655 
1656   /* Consider required reloads before optional ones.  */
1657   t = rld[r1].optional - rld[r2].optional;
1658   if (t != 0)
1659     return t;
1660 
1661   /* Count all solitary classes before non-solitary ones.  */
1662   t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1663        - (reg_class_size[(int) rld[r1].rclass] == 1));
1664   if (t != 0)
1665     return t;
1666 
1667   /* Aside from solitaires, consider all multi-reg groups first.  */
1668   t = rld[r2].nregs - rld[r1].nregs;
1669   if (t != 0)
1670     return t;
1671 
1672   /* Consider reloads in order of increasing reg-class number.  */
1673   t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1674   if (t != 0)
1675     return t;
1676 
1677   /* If reloads are equally urgent, sort by reload number,
1678      so that the results of qsort leave nothing to chance.  */
1679   return r1 - r2;
1680 }
1681 
/* The cost of spilling each hard reg.  */
static int spill_cost[FIRST_PSEUDO_REGISTER];

/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   is accumulated only against the first hard reg of a multi-reg
   pseudo (see count_pseudo).  */
static int spill_add_cost[FIRST_PSEUDO_REGISTER];

/* Map of hard regno to pseudo regno currently occupying the hard
   reg.  */
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1693 
1694 /* Update the spill cost arrays, considering that pseudo REG is live.  */
1695 
1696 static void
1697 count_pseudo (int reg)
1698 {
1699   int freq = REG_FREQ (reg);
1700   int r = reg_renumber[reg];
1701   int nregs;
1702 
1703   /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
1704   if (ira_conflicts_p && r < 0)
1705     return;
1706 
1707   if (REGNO_REG_SET_P (&pseudos_counted, reg)
1708       || REGNO_REG_SET_P (&spilled_pseudos, reg))
1709     return;
1710 
1711   SET_REGNO_REG_SET (&pseudos_counted, reg);
1712 
1713   gcc_assert (r >= 0);
1714 
1715   spill_add_cost[r] += freq;
1716   nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
1717   while (nregs-- > 0)
1718     {
1719       hard_regno_to_pseudo_regno[r + nregs] = reg;
1720       spill_cost[r + nregs] += freq;
1721     }
1722 }
1723 
1724 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1725    contents of BAD_SPILL_REGS for the insn described by CHAIN.  */
1726 
static void
order_regs_for_reload (struct insn_chain *chain)
{
  unsigned i;
  HARD_REG_SET used_by_pseudos;
  HARD_REG_SET used_by_pseudos2;
  reg_set_iterator rsi;

  /* Start with the registers that can never be spilled at all.  */
  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);

  /* Reset the per-insn cost data that count_pseudo will fill in.  */
  memset (spill_cost, 0, sizeof spill_cost);
  memset (spill_add_cost, 0, sizeof spill_add_cost);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    hard_regno_to_pseudo_regno[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  First exclude hard registers
     that are live in or across this insn.  */

  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);

  /* Now accumulate the spill costs for all pseudos that are live in or
     across this insn; count_pseudo uses pseudos_counted to make sure
     each pseudo is counted at most once.  */
  CLEAR_REG_SET (&pseudos_counted);

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  /* pseudos_counted is only meaningful within this function; clear it
     so the next insn starts from scratch.  */
  CLEAR_REG_SET (&pseudos_counted);
}
1767 
1768 /* Vector of reload-numbers showing the order in which the reloads should
1769    be processed.  */
1770 static short reload_order[MAX_RELOADS];
1771 
1772 /* This is used to keep track of the spill regs used in one insn.  */
1773 static HARD_REG_SET used_spill_regs_local;
1774 
1775 /* We decided to spill hard register SPILLED, which has a size of
1776    SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
1777    is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1778    update SPILL_COST/SPILL_ADD_COST.  */
1779 
1780 static void
1781 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1782 {
1783   int freq = REG_FREQ (reg);
1784   int r = reg_renumber[reg];
1785   int nregs;
1786 
1787   /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
1788   if (ira_conflicts_p && r < 0)
1789     return;
1790 
1791   gcc_assert (r >= 0);
1792 
1793   nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
1794 
1795   if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1796       || spilled + spilled_nregs <= r || r + nregs <= spilled)
1797     return;
1798 
1799   SET_REGNO_REG_SET (&spilled_pseudos, reg);
1800 
1801   spill_add_cost[r] -= freq;
1802   while (nregs-- > 0)
1803     {
1804       hard_regno_to_pseudo_regno[r + nregs] = -1;
1805       spill_cost[r + nregs] -= freq;
1806     }
1807 }
1808 
1809 /* Find reload register to use for reload number ORDER.  */
1810 
static int
find_reg (struct insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  HARD_REG_SET not_usable;
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  /* A hard register is not usable if it is bad for this insn, globally
     bad, or outside the reload's register class.  */
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);

  /* Collect the hard registers already claimed by earlier reloads of
     this insn that conflict with reload RNUM.  */
  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
	for (j = 0; j < rld[other].nregs; j++)
	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  /* Scan all hard registers (in allocation order if the target defines
     one) and pick the usable candidate with the lowest spill cost.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
	  && targetm.hard_regno_mode_ok (regno, rl->mode))
	{
	  int this_cost = spill_cost[regno];
	  int ok = 1;
	  unsigned int this_nregs = hard_regno_nregs (regno, rl->mode);

	  /* For a multi-register value, every hard reg in the group must
	     be available; add in the cost of the extra registers.  */
	  for (j = 1; j < this_nregs; j++)
	    {
	      this_cost += spill_add_cost[regno + j];
	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
		ok = 0;
	    }
	  if (! ok)
	    continue;

	  if (ira_conflicts_p)
	    {
	      /* Ask IRA to find a better pseudo-register for
		 spilling.  */
	      for (n = j = 0; j < this_nregs; j++)
		{
		  int r = hard_regno_to_pseudo_regno[regno + j];

		  if (r < 0)
		    continue;
		  /* Skip adjacent duplicates of the same pseudo.  */
		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
		    regno_pseudo_regs[n++] = r;
		}
	      /* The list is terminated by -1.  */
	      regno_pseudo_regs[n++] = -1;
	      if (best_reg < 0
		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
						      best_regno_pseudo_regs,
						      rl->in, rl->out,
						      chain->insn))
		{
		  best_reg = regno;
		  for (j = 0;; j++)
		    {
		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
		      if (regno_pseudo_regs[j] < 0)
			break;
		    }
		}
	      continue;
	    }

	  /* Slightly prefer a register that already holds the value
	     being reloaded in or out.  */
	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
	    this_cost--;
	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
	    this_cost--;
	  if (this_cost < best_cost
	      /* Among registers with equal cost, prefer caller-saved ones, or
		 use REG_ALLOC_ORDER if it is defined.  */
	      || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
		  && (inv_reg_alloc_order[regno]
		      < inv_reg_alloc_order[best_reg])
#else
		  && call_used_regs[regno]
		  && ! call_used_regs[best_reg]
#endif
		  ))
	    {
	      best_reg = regno;
	      best_cost = this_cost;
	    }
	}
    }
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  rl->nregs = hard_regno_nregs (best_reg, rl->mode);
  rl->regno = best_reg;

  /* Account for the pseudos displaced by taking BEST_REG: they are added
     to spilled_pseudos and their costs removed from the cost arrays.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  /* All costs for BEST_REG's group should now be zero; record the group
     in the set of spill registers used for this insn.  */
  for (i = 0; i < rl->nregs; i++)
    {
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }
  return 1;
}
1949 
1950 /* Find more reload regs to satisfy the remaining need of an insn, which
1951    is given by CHAIN.
1952    Do it by ascending class number, since otherwise a reg
1953    might be spilled for a big class and might fail to count
1954    for a smaller class even though it belongs to that class.  */
1955 
static void
find_reload_regs (struct insn_chain *chain)
{
  int i;

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.  */
  for (i = 0; i < chain->n_reloads; i++)
    {
      /* Show whether this reload already has a hard reg.  */
      if (chain->rld[i].reg_rtx)
	{
	  chain->rld[i].regno = REGNO (chain->rld[i].reg_rtx);
	  chain->rld[i].nregs = REG_NREGS (chain->rld[i].reg_rtx);
	}
      else
	chain->rld[i].regno = -1;
      reload_order[i] = i;
    }

  /* Work on a copy of the chain's reloads in the global rld array,
     which is what find_reg and reload_reg_class_lower operate on.  */
  n_reloads = chain->n_reloads;
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));

  CLEAR_HARD_REG_SET (used_spill_regs_local);

  if (dump_file)
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Compute the order of preference for hard registers to spill.  */

  order_regs_for_reload (chain);

  for (i = 0; i < n_reloads; i++)
    {
      int r = reload_order[i];

      /* Ignore reloads that got marked inoperative.  */
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
	  && ! rld[r].optional
	  && rld[r].regno == -1)
	if (! find_reg (chain, i))
	  {
	    if (dump_file)
	      fprintf (dump_file, "reload failure for reload %d\n", r);
	    spill_failure (chain->insn, rld[r].rclass);
	    failure = 1;
	    return;
	  }
    }

  /* Record the spill registers used for this insn, both in the chain
     and in the function-wide set.  */
  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);

  /* Copy the (possibly updated) reloads back into the chain.  */
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
}
2015 
2016 static void
2017 select_reload_regs (void)
2018 {
2019   struct insn_chain *chain;
2020 
2021   /* Try to satisfy the needs for each insn.  */
2022   for (chain = insns_need_reload; chain != 0;
2023        chain = chain->next_need_reload)
2024     find_reload_regs (chain);
2025 }
2026 
2027 /* Delete all insns that were inserted by emit_caller_save_insns during
2028    this iteration.  */
static void
delete_caller_save_insns (void)
{
  struct insn_chain *c = reload_insn_chain;

  while (c != 0)
    {
      /* Delete each run of consecutive caller-save insns.  */
      while (c != 0 && c->is_caller_save_insn)
	{
	  struct insn_chain *next = c->next;
	  rtx_insn *insn = c->insn;

	  /* Keep the head of the chain list up to date.  */
	  if (c == reload_insn_chain)
	    reload_insn_chain = next;
	  delete_insn (insn);

	  /* Unlink C from the doubly-linked chain list...  */
	  if (next)
	    next->prev = c->prev;
	  if (c->prev)
	    c->prev->next = next;
	  /* ...and put it on the free list for reuse.  */
	  c->next = unused_insn_chains;
	  unused_insn_chains = c;
	  c = next;
	}
      if (c != 0)
	c = c->next;
    }
}
2057 
2058 /* Handle the failure to find a register to spill.
2059    INSN should be one of the insns which needed this particular spill reg.  */
2060 
2061 static void
2062 spill_failure (rtx_insn *insn, enum reg_class rclass)
2063 {
2064   if (asm_noperands (PATTERN (insn)) >= 0)
2065     error_for_asm (insn, "can%'t find a register in class %qs while "
2066 		   "reloading %<asm%>",
2067 		   reg_class_names[rclass]);
2068   else
2069     {
2070       error ("unable to find a register to spill in class %qs",
2071 	     reg_class_names[rclass]);
2072 
2073       if (dump_file)
2074 	{
2075 	  fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2076 	  debug_reload_to_stream (dump_file);
2077 	}
2078       fatal_insn ("this is the insn:", insn);
2079     }
2080 }
2081 
/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
   data that is dead in INSN.  */
2084 
static void
delete_dead_insn (rtx_insn *insn)
{
  rtx_insn *prev = prev_active_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn make
     a note that we want to run DCE immediately after reload.

     We used to delete the previous insn & recurse, but that's wrong for
     block local equivalences.  Instead of trying to figure out the exact
     circumstances where we can delete the potentially dead insns, just
     let DCE do the job.  */
  if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
      && GET_CODE (PATTERN (prev)) == SET
      /* The comma expression binds PREV_DEST before testing it.  */
      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
    need_dce = 1;

  /* INSN itself is always marked deleted.  */
  SET_INSN_DELETED (insn);
}
2108 
2109 /* Modify the home of pseudo-reg I.
2110    The new home is present in reg_renumber[I].
2111 
2112    FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2113    or it may be -1, meaning there is none or it is not relevant.
2114    This is used so that all pseudos spilled from a given hard reg
2115    can share one stack slot.  */
2116 
static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant (i) == 0
      && (reg_equiv_invariant (i) == 0
	  || reg_equiv_init (i) == 0)
      && reg_equiv_memory_loc (i) == 0)
    {
      rtx x = NULL_RTX;
      machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      poly_uint64 inherent_size = GET_MODE_SIZE (mode);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]);
      poly_uint64 total_size = GET_MODE_SIZE (wider_mode);
      /* ??? Seems strange to derive the minimum alignment from the size,
	 but that's the traditional behavior.  For polynomial-size modes,
	 the natural extension is to use the minimum possible size.  */
      unsigned int min_align
	= constant_lower_bound (GET_MODE_BITSIZE (reg_max_ref_mode[i]));
      poly_int64 adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      /* If IRA gave us a slot to reuse, we are done choosing one.  */
      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* The sizes are taken from a subreg operation, which guarantees
	     that they're ordered.  */
	  gcc_checking_assert (ordered_p (total_size, inherent_size));

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || maybe_gt (total_size, inherent_size)
				  ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (maybe_ne (adjust, 0))
		{
		  poly_uint64 total_bits = total_size * BITS_PER_UNIT;
		  machine_mode mem_mode
		    = int_mode_for_size (total_bits, 1).else_blk ();
		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
		}
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about the allocation of a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && known_ge (spill_stack_slot_width[from_reg], total_size)
	       && known_ge (GET_MODE_SIZE
			    (GET_MODE (spill_stack_slot[from_reg])),
			    inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  rtx stack_slot;

	  /* Grow the requested size and alignment to cover the slot
	     previously shared by pseudos spilled from FROM_REG.  */
	  if (spill_stack_slot[from_reg])
	    {
	      if (partial_subreg_p (mode,
				    GET_MODE (spill_stack_slot[from_reg])))
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      total_size = ordered_max (total_size,
					spill_stack_slot_width[from_reg]);
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* The sizes are taken from a subreg operation, which guarantees
	     that they're ordered.  */
	  gcc_checking_assert (ordered_p (total_size, inherent_size));

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || maybe_gt (total_size, inherent_size)
				  ? -1 : 0);
	  stack_slot = x;

	  /* Cancel the  big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (maybe_ne (adjust, 0))
		{
		  poly_uint64 total_bits = total_size * BITS_PER_UNIT;
		  machine_mode mem_mode
		    = int_mode_for_size (total_bits, 1).else_blk ();
		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
		}
	    }

	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      adjust += subreg_size_lowpart_offset (inherent_size, total_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc (i) = x;
    }
}
2287 
2288 /* Mark the slots in regs_ever_live for the hard regs used by
2289    pseudo-reg number REGNO, accessed in MODE.  */
2290 
2291 static void
2292 mark_home_live_1 (int regno, machine_mode mode)
2293 {
2294   int i, lim;
2295 
2296   i = reg_renumber[regno];
2297   if (i < 0)
2298     return;
2299   lim = end_hard_regno (mode, i);
2300   while (i < lim)
2301     df_set_regs_ever_live (i++, true);
2302 }
2303 
2304 /* Mark the slots in regs_ever_live for the hard regs
2305    used by pseudo-reg number REGNO.  */
2306 
2307 void
2308 mark_home_live (int regno)
2309 {
2310   if (reg_renumber[regno] >= 0)
2311     mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2312 }
2313 
2314 /* This function handles the tracking of elimination offsets around branches.
2315 
2316    X is a piece of RTL being scanned.
2317 
2318    INSN is the insn that it came from, if any.
2319 
2320    INITIAL_P is nonzero if we are to set the offset to be the initial
2321    offset and zero if we are setting the offset of the label to be the
2322    current offset.  */
2323 
static void
set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are handled elsewhere; skip them here.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = label_ref_label (x);

      /* fall through */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && BARRIER_P (tem))
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (maybe_ne (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i],
			(initial_p ? reg_eliminate[i].initial_offset
			 : reg_eliminate[i].offset)))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_TABLE_DATA:
      /* Process the table's label vector.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);
      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* fall through */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
	 to indirectly and hence must have all eliminations at their
	 initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
	 at their initial offsets.  We want the first field for PARALLEL
	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (SET_SRC (x), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Handle both arms; an arm that is neither a label nor
	     PC/RETURN falls out to the conservative code below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (label_ref_label (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (label_ref_label (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (maybe_ne (p->offset, p->initial_offset))
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
2466 
2467 /* This function examines every reg that occurs in X and adjusts the
2468    costs for its elimination which are gathered by IRA.  INSN is the
2469    insn in which X occurs.  We do not recurse into MEM expressions.  */
2470 
static void
note_reg_elim_costly (const_rtx x, rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      /* Do not recurse into MEM expressions.  */
      if (MEM_P (x))
	iter.skip_subrtxes ();
      else if (REG_P (x)
	       && REGNO (x) >= FIRST_PSEUDO_REGISTER
	       && reg_equiv_init (REGNO (x))
	       && reg_equiv_invariant (REGNO (x)))
	{
	  /* Estimate the cost of using the pseudo's invariant equivalence
	     after elimination, scaled by the block's execution frequency,
	     and feed the (negated) result back to IRA.  */
	  rtx t = reg_equiv_invariant (REGNO (x));
	  rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
	  int cost = set_src_cost (new_rtx, Pmode,
				   optimize_bb_for_speed_p (elim_bb));
	  int freq = REG_FREQ_FROM_BB (elim_bb);

	  if (cost != 0)
	    ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
	}
    }
}
2496 
2497 /* Scan X and replace any eliminable registers (such as fp) with a
2498    replacement (such as sp), plus an offset.
2499 
2500    MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2501    much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2502    MEM, we are allowed to replace a sum of a register and the constant zero
2503    with the register, which we cannot do outside a MEM.  In addition, we need
2504    to record the fact that a register is referenced outside a MEM.
2505 
2506    If INSN is an insn, it is the insn containing X.  If we replace a REG
2507    in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2508    CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2509    the REG is being modified.
2510 
2511    Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2512    That's used when we eliminate in expressions stored in notes.
2513    This means, do not set ref_outside_mem even if the reference
2514    is outside of MEMs.
2515 
2516    If FOR_COSTS is true, we are being called before reload in order to
2517    estimate the costs of keeping registers with an equivalence unallocated.
2518 
2519    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2520    replacements done assuming all offsets are at their initial values.  If
2521    they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2522    encounter, return the actual location so that find_reloads will do
2523    the proper thing.  */
2524 
2525 static rtx
2526 eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
2527 		  bool may_use_invariant, bool for_costs)
2528 {
2529   enum rtx_code code = GET_CODE (x);
2530   struct elim_table *ep;
2531   int regno;
2532   rtx new_rtx;
2533   int i, j;
2534   const char *fmt;
2535   int copied = 0;
2536 
2537   if (! current_function_decl)
2538     return x;
2539 
2540   switch (code)
2541     {
2542     CASE_CONST_ANY:
2543     case CONST:
2544     case SYMBOL_REF:
2545     case CODE_LABEL:
2546     case PC:
2547     case CC0:
2548     case ASM_INPUT:
2549     case ADDR_VEC:
2550     case ADDR_DIFF_VEC:
2551     case RETURN:
2552       return x;
2553 
2554     case REG:
2555       regno = REGNO (x);
2556 
2557       /* First handle the case where we encounter a bare register that
2558 	 is eliminable.  Replace it with a PLUS.  */
2559       if (regno < FIRST_PSEUDO_REGISTER)
2560 	{
2561 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2562 	       ep++)
2563 	    if (ep->from_rtx == x && ep->can_eliminate)
2564 	      return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);
2565 
2566 	}
2567       else if (reg_renumber && reg_renumber[regno] < 0
2568 	       && reg_equivs
2569 	       && reg_equiv_invariant (regno))
2570 	{
2571 	  if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2572 	    return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2573 			             mem_mode, insn, true, for_costs);
2574 	  /* There exists at least one use of REGNO that cannot be
2575 	     eliminated.  Prevent the defining insn from being deleted.  */
2576 	  reg_equiv_init (regno) = NULL;
2577 	  if (!for_costs)
2578 	    alter_reg (regno, -1, true);
2579 	}
2580       return x;
2581 
2582     /* You might think handling MINUS in a manner similar to PLUS is a
2583        good idea.  It is not.  It has been tried multiple times and every
2584        time the change has had to have been reverted.
2585 
2586        Other parts of reload know a PLUS is special (gen_reload for example)
2587        and require special code to handle code a reloaded PLUS operand.
2588 
2589        Also consider backends where the flags register is clobbered by a
2590        MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2591        lea instruction comes to mind).  If we try to reload a MINUS, we
2592        may kill the flags register that was holding a useful value.
2593 
2594        So, please before trying to handle MINUS, consider reload as a
2595        whole instead of this little section as well as the backend issues.  */
2596     case PLUS:
2597       /* If this is the sum of an eliminable register and a constant, rework
2598 	 the sum.  */
2599       if (REG_P (XEXP (x, 0))
2600 	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2601 	  && CONSTANT_P (XEXP (x, 1)))
2602 	{
2603 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2604 	       ep++)
2605 	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2606 	      {
2607 		/* The only time we want to replace a PLUS with a REG (this
2608 		   occurs when the constant operand of the PLUS is the negative
2609 		   of the offset) is when we are inside a MEM.  We won't want
2610 		   to do so at other times because that would change the
2611 		   structure of the insn in a way that reload can't handle.
2612 		   We special-case the commonest situation in
2613 		   eliminate_regs_in_insn, so just replace a PLUS with a
2614 		   PLUS here, unless inside a MEM.  */
2615 		if (mem_mode != 0
2616 		    && CONST_INT_P (XEXP (x, 1))
2617 		    && known_eq (INTVAL (XEXP (x, 1)), -ep->previous_offset))
2618 		  return ep->to_rtx;
2619 		else
2620 		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
2621 				       plus_constant (Pmode, XEXP (x, 1),
2622 						      ep->previous_offset));
2623 	      }
2624 
2625 	  /* If the register is not eliminable, we are done since the other
2626 	     operand is a constant.  */
2627 	  return x;
2628 	}
2629 
2630       /* If this is part of an address, we want to bring any constant to the
2631 	 outermost PLUS.  We will do this by doing register replacement in
2632 	 our operands and seeing if a constant shows up in one of them.
2633 
2634 	 Note that there is no risk of modifying the structure of the insn,
2635 	 since we only get called for its operands, thus we are either
2636 	 modifying the address inside a MEM, or something like an address
2637 	 operand of a load-address insn.  */
2638 
2639       {
2640 	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2641 				     for_costs);
2642 	rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2643 				     for_costs);
2644 
2645 	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2646 	  {
2647 	    /* If one side is a PLUS and the other side is a pseudo that
2648 	       didn't get a hard register but has a reg_equiv_constant,
2649 	       we must replace the constant here since it may no longer
2650 	       be in the position of any operand.  */
2651 	    if (GET_CODE (new0) == PLUS && REG_P (new1)
2652 		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
2653 		&& reg_renumber[REGNO (new1)] < 0
2654 		&& reg_equivs
2655 		&& reg_equiv_constant (REGNO (new1)) != 0)
2656 	      new1 = reg_equiv_constant (REGNO (new1));
2657 	    else if (GET_CODE (new1) == PLUS && REG_P (new0)
2658 		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2659 		     && reg_renumber[REGNO (new0)] < 0
2660 		     && reg_equiv_constant (REGNO (new0)) != 0)
2661 	      new0 = reg_equiv_constant (REGNO (new0));
2662 
2663 	    new_rtx = form_sum (GET_MODE (x), new0, new1);
2664 
2665 	    /* As above, if we are not inside a MEM we do not want to
2666 	       turn a PLUS into something else.  We might try to do so here
2667 	       for an addition of 0 if we aren't optimizing.  */
2668 	    if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2669 	      return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2670 	    else
2671 	      return new_rtx;
2672 	  }
2673       }
2674       return x;
2675 
2676     case MULT:
2677       /* If this is the product of an eliminable register and a
2678 	 constant, apply the distribute law and move the constant out
2679 	 so that we have (plus (mult ..) ..).  This is needed in order
2680 	 to keep load-address insns valid.   This case is pathological.
2681 	 We ignore the possibility of overflow here.  */
2682       if (REG_P (XEXP (x, 0))
2683 	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2684 	  && CONST_INT_P (XEXP (x, 1)))
2685 	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2686 	     ep++)
2687 	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2688 	    {
2689 	      if (! mem_mode
2690 		  /* Refs inside notes or in DEBUG_INSNs don't count for
2691 		     this purpose.  */
2692 		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2693 				      || GET_CODE (insn) == INSN_LIST
2694 				      || DEBUG_INSN_P (insn))))
2695 		ep->ref_outside_mem = 1;
2696 
2697 	      return
2698 		plus_constant (Pmode,
2699 			       gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2700 			       ep->previous_offset * INTVAL (XEXP (x, 1)));
2701 	    }
2702 
2703       /* fall through */
2704 
2705     case CALL:
2706     case COMPARE:
2707     /* See comments before PLUS about handling MINUS.  */
2708     case MINUS:
2709     case DIV:      case UDIV:
2710     case MOD:      case UMOD:
2711     case AND:      case IOR:      case XOR:
2712     case ROTATERT: case ROTATE:
2713     case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2714     case NE:       case EQ:
2715     case GE:       case GT:       case GEU:    case GTU:
2716     case LE:       case LT:       case LEU:    case LTU:
2717       {
2718 	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2719 				     for_costs);
2720 	rtx new1 = XEXP (x, 1)
2721 	  ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2722 			      for_costs) : 0;
2723 
2724 	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2725 	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2726       }
2727       return x;
2728 
2729     case EXPR_LIST:
2730       /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
2731       if (XEXP (x, 0))
2732 	{
2733 	  new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2734 				      for_costs);
2735 	  if (new_rtx != XEXP (x, 0))
2736 	    {
2737 	      /* If this is a REG_DEAD note, it is not valid anymore.
2738 		 Using the eliminated version could result in creating a
2739 		 REG_DEAD note for the stack or frame pointer.  */
2740 	      if (REG_NOTE_KIND (x) == REG_DEAD)
2741 		return (XEXP (x, 1)
2742 			? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2743 					    for_costs)
2744 			: NULL_RTX);
2745 
2746 	      x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2747 	    }
2748 	}
2749 
2750       /* fall through */
2751 
2752     case INSN_LIST:
2753     case INT_LIST:
2754       /* Now do eliminations in the rest of the chain.  If this was
2755 	 an EXPR_LIST, this might result in allocating more memory than is
2756 	 strictly needed, but it simplifies the code.  */
2757       if (XEXP (x, 1))
2758 	{
2759 	  new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2760 				      for_costs);
2761 	  if (new_rtx != XEXP (x, 1))
2762 	    return
2763 	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2764 	}
2765       return x;
2766 
2767     case PRE_INC:
2768     case POST_INC:
2769     case PRE_DEC:
2770     case POST_DEC:
2771       /* We do not support elimination of a register that is modified.
2772 	 elimination_effects has already make sure that this does not
2773 	 happen.  */
2774       return x;
2775 
2776     case PRE_MODIFY:
2777     case POST_MODIFY:
2778       /* We do not support elimination of a register that is modified.
2779 	 elimination_effects has already make sure that this does not
2780 	 happen.  The only remaining case we need to consider here is
2781 	 that the increment value may be an eliminable register.  */
2782       if (GET_CODE (XEXP (x, 1)) == PLUS
2783 	  && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2784 	{
2785 	  rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2786 					  insn, true, for_costs);
2787 
2788 	  if (new_rtx != XEXP (XEXP (x, 1), 1))
2789 	    return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2790 				   gen_rtx_PLUS (GET_MODE (x),
2791 						 XEXP (x, 0), new_rtx));
2792 	}
2793       return x;
2794 
2795     case STRICT_LOW_PART:
2796     case NEG:          case NOT:
2797     case SIGN_EXTEND:  case ZERO_EXTEND:
2798     case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2799     case FLOAT:        case FIX:
2800     case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2801     case ABS:
2802     case SQRT:
2803     case FFS:
2804     case CLZ:
2805     case CTZ:
2806     case POPCOUNT:
2807     case PARITY:
2808     case BSWAP:
2809       new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2810 				  for_costs);
2811       if (new_rtx != XEXP (x, 0))
2812 	return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2813       return x;
2814 
2815     case SUBREG:
2816       /* Similar to above processing, but preserve SUBREG_BYTE.
2817 	 Convert (subreg (mem)) to (mem) if not paradoxical.
2818 	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2819 	 pseudo didn't get a hard reg, we must replace this with the
2820 	 eliminated version of the memory location because push_reload
2821 	 may do the replacement in certain circumstances.  */
2822       if (REG_P (SUBREG_REG (x))
2823 	  && !paradoxical_subreg_p (x)
2824 	  && reg_equivs
2825 	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2826 	{
2827 	  new_rtx = SUBREG_REG (x);
2828 	}
2829       else
2830 	new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2831 
2832       if (new_rtx != SUBREG_REG (x))
2833 	{
2834 	  poly_int64 x_size = GET_MODE_SIZE (GET_MODE (x));
2835 	  poly_int64 new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2836 
2837 	  if (MEM_P (new_rtx)
2838 	      && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx))
2839 		   /* On RISC machines, combine can create rtl of the form
2840 		      (set (subreg:m1 (reg:m2 R) 0) ...)
2841 		      where m1 < m2, and expects something interesting to
2842 		      happen to the entire word.  Moreover, it will use the
2843 		      (reg:m2 R) later, expecting all bits to be preserved.
2844 		      So if the number of words is the same, preserve the
2845 		      subreg so that push_reload can see it.  */
2846 		   && !(WORD_REGISTER_OPERATIONS
2847 			&& known_equal_after_align_down (x_size - 1,
2848 							 new_size - 1,
2849 							 UNITS_PER_WORD)))
2850 		  || known_eq (x_size, new_size))
2851 	      )
2852 	    return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2853 	  else if (insn && GET_CODE (insn) == DEBUG_INSN)
2854 	    return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2855 	  else
2856 	    return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2857 	}
2858 
2859       return x;
2860 
2861     case MEM:
2862       /* Our only special processing is to pass the mode of the MEM to our
2863 	 recursive call and copy the flags.  While we are here, handle this
2864 	 case more efficiently.  */
2865 
2866       new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2867 				  for_costs);
2868       if (for_costs
2869 	  && memory_address_p (GET_MODE (x), XEXP (x, 0))
2870 	  && !memory_address_p (GET_MODE (x), new_rtx))
2871 	note_reg_elim_costly (XEXP (x, 0), insn);
2872 
2873       return replace_equiv_address_nv (x, new_rtx);
2874 
2875     case USE:
2876       /* Handle insn_list USE that a call to a pure function may generate.  */
2877       new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2878 				  for_costs);
2879       if (new_rtx != XEXP (x, 0))
2880 	return gen_rtx_USE (GET_MODE (x), new_rtx);
2881       return x;
2882 
2883     case CLOBBER:
2884     case CLOBBER_HIGH:
2885     case ASM_OPERANDS:
2886       gcc_assert (insn && DEBUG_INSN_P (insn));
2887       break;
2888 
2889     case SET:
2890       gcc_unreachable ();
2891 
2892     default:
2893       break;
2894     }
2895 
2896   /* Process each of our operands recursively.  If any have changed, make a
2897      copy of the rtx.  */
2898   fmt = GET_RTX_FORMAT (code);
2899   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2900     {
2901       if (*fmt == 'e')
2902 	{
2903 	  new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2904 				      for_costs);
2905 	  if (new_rtx != XEXP (x, i) && ! copied)
2906 	    {
2907 	      x = shallow_copy_rtx (x);
2908 	      copied = 1;
2909 	    }
2910 	  XEXP (x, i) = new_rtx;
2911 	}
2912       else if (*fmt == 'E')
2913 	{
2914 	  int copied_vec = 0;
2915 	  for (j = 0; j < XVECLEN (x, i); j++)
2916 	    {
2917 	      new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2918 					  for_costs);
2919 	      if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2920 		{
2921 		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2922 					     XVEC (x, i)->elem);
2923 		  if (! copied)
2924 		    {
2925 		      x = shallow_copy_rtx (x);
2926 		      copied = 1;
2927 		    }
2928 		  XVEC (x, i) = new_v;
2929 		  copied_vec = 1;
2930 		}
2931 	      XVECEXP (x, i, j) = new_rtx;
2932 	    }
2933 	}
2934     }
2935 
2936   return x;
2937 }
2938 
2939 rtx
2940 eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2941 {
2942   if (reg_eliminate == NULL)
2943     {
2944       gcc_assert (targetm.no_register_allocation);
2945       return x;
2946     }
2947   return eliminate_regs_1 (x, mem_mode, insn, false, false);
2948 }
2949 
2950 /* Scan rtx X for modifications of elimination target registers.  Update
2951    the table of eliminables to reflect the changed state.  MEM_MODE is
2952    the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
2953 
static void
elimination_effects (rtx x, machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* Constants and labels can neither modify nor reference an
	 eliminable register; nothing to record.  */
      return;

    case REG:
      regno = REGNO (x);

      /* A bare eliminable hard register.  Nothing is replaced here
	 (that is eliminate_regs_1's job); we only record, via
	 ref_outside_mem, that the register was referenced outside
	 a MEM.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }

	}
      /* A pseudo that didn't get a hard register but is equivalent to a
	 non-invariant constant expression: that expression may itself
	 reference eliminable registers, so scan it as well.  */
      else if (reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_constant (regno)
	       && ! function_invariant_p (reg_equiv_constant (regno)))
	elimination_effects (reg_equiv_constant (regno), mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    poly_int64 size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    /* The offset moves opposite to TO's change: compare the SET
	       case below, where "to = to + c" subtracts C from the
	       offset.  */
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		/* Only "to = to + const" is trackable; any other modify
		   form invalidates the rule.  */
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
      gcc_fallthrough ();
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      /* Unary operations: only the single operand can carry effects.  */
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* A non-paradoxical subreg of a pseudo with a known memory location
	 is substituted wholesale by eliminate_regs_1's SUBREG handling,
	 so it contributes no elimination effects of its own.  */
      if (REG_P (SUBREG_REG (x))
	  && !paradoxical_subreg_p (x)
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER_HIGH:
      /* CLOBBER_HIGH is only supported for LRA.  */
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      /* Both sides of a SET are scanned outside any MEM context.  */
      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  /* Generic case: recurse into every rtx ('e') or rtvec ('E') operand.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
3156 
3157 /* Descend through rtx X and verify that no references to eliminable registers
3158    remain.  If any do remain, mark the involved register as not
3159    eliminable.  */
3160 
3161 static void
3162 check_eliminable_occurrences (rtx x)
3163 {
3164   const char *fmt;
3165   int i;
3166   enum rtx_code code;
3167 
3168   if (x == 0)
3169     return;
3170 
3171   code = GET_CODE (x);
3172 
3173   if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3174     {
3175       struct elim_table *ep;
3176 
3177       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3178 	if (ep->from_rtx == x)
3179 	  ep->can_eliminate = 0;
3180       return;
3181     }
3182 
3183   fmt = GET_RTX_FORMAT (code);
3184   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3185     {
3186       if (*fmt == 'e')
3187 	check_eliminable_occurrences (XEXP (x, i));
3188       else if (*fmt == 'E')
3189 	{
3190 	  int j;
3191 	  for (j = 0; j < XVECLEN (x, i); j++)
3192 	    check_eliminable_occurrences (XVECEXP (x, i, j));
3193 	}
3194     }
3195 }
3196 
3197 /* Scan INSN and eliminate all eliminable registers in it.
3198 
3199    If REPLACE is nonzero, do the replacement destructively.  Also
3200    delete the insn as dead it if it is setting an eliminable register.
3201 
3202    If REPLACE is zero, do all our allocations in reload_obstack.
3203 
3204    If no eliminations were done and this insn doesn't require any elimination
3205    processing (these are not identical conditions: it might be updating sp,
3206    but not referencing fp; this needs to be seen during reload_as_needed so
3207    that the offset between fp and sp can be taken into consideration), zero
3208    is returned.  Otherwise, 1 is returned.  */
3209 
3210 static int
3211 eliminate_regs_in_insn (rtx_insn *insn, int replace)
3212 {
3213   int icode = recog_memoized (insn);
3214   rtx old_body = PATTERN (insn);
3215   int insn_is_asm = asm_noperands (old_body) >= 0;
3216   rtx old_set = single_set (insn);
3217   rtx new_body;
3218   int val = 0;
3219   int i;
3220   rtx substed_operand[MAX_RECOG_OPERANDS];
3221   rtx orig_operand[MAX_RECOG_OPERANDS];
3222   struct elim_table *ep;
3223   rtx plus_src, plus_cst_src;
3224 
3225   if (! insn_is_asm && icode < 0)
3226     {
3227       gcc_assert (DEBUG_INSN_P (insn)
3228 		  || GET_CODE (PATTERN (insn)) == USE
3229 		  || GET_CODE (PATTERN (insn)) == CLOBBER
3230 		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3231       if (DEBUG_BIND_INSN_P (insn))
3232 	INSN_VAR_LOCATION_LOC (insn)
3233 	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3234       return 0;
3235     }
3236 
3237   if (old_set != 0 && REG_P (SET_DEST (old_set))
3238       && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3239     {
3240       /* Check for setting an eliminable register.  */
3241       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3242 	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3243 	  {
3244 	    /* If this is setting the frame pointer register to the
3245 	       hardware frame pointer register and this is an elimination
3246 	       that will be done (tested above), this insn is really
3247 	       adjusting the frame pointer downward to compensate for
3248 	       the adjustment done before a nonlocal goto.  */
3249 	    if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
3250 		&& ep->from == FRAME_POINTER_REGNUM
3251 		&& ep->to == HARD_FRAME_POINTER_REGNUM)
3252 	      {
3253 		rtx base = SET_SRC (old_set);
3254 		rtx_insn *base_insn = insn;
3255 		HOST_WIDE_INT offset = 0;
3256 
3257 		while (base != ep->to_rtx)
3258 		  {
3259 		    rtx_insn *prev_insn;
3260 		    rtx prev_set;
3261 
3262 		    if (GET_CODE (base) == PLUS
3263 		        && CONST_INT_P (XEXP (base, 1)))
3264 		      {
3265 		        offset += INTVAL (XEXP (base, 1));
3266 		        base = XEXP (base, 0);
3267 		      }
3268 		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3269 			     && (prev_set = single_set (prev_insn)) != 0
3270 			     && rtx_equal_p (SET_DEST (prev_set), base))
3271 		      {
3272 		        base = SET_SRC (prev_set);
3273 		        base_insn = prev_insn;
3274 		      }
3275 		    else
3276 		      break;
3277 		  }
3278 
3279 		if (base == ep->to_rtx)
3280 		  {
3281 		    rtx src = plus_constant (Pmode, ep->to_rtx,
3282 					     offset - ep->offset);
3283 
3284 		    new_body = old_body;
3285 		    if (! replace)
3286 		      {
3287 			new_body = copy_insn (old_body);
3288 			if (REG_NOTES (insn))
3289 			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3290 		      }
3291 		    PATTERN (insn) = new_body;
3292 		    old_set = single_set (insn);
3293 
3294 		    /* First see if this insn remains valid when we
3295 		       make the change.  If not, keep the INSN_CODE
3296 		       the same and let reload fit it up.  */
3297 		    validate_change (insn, &SET_SRC (old_set), src, 1);
3298 		    validate_change (insn, &SET_DEST (old_set),
3299 				     ep->to_rtx, 1);
3300 		    if (! apply_change_group ())
3301 		      {
3302 			SET_SRC (old_set) = src;
3303 			SET_DEST (old_set) = ep->to_rtx;
3304 		      }
3305 
3306 		    val = 1;
3307 		    goto done;
3308 		  }
3309 	      }
3310 
3311 	    /* In this case this insn isn't serving a useful purpose.  We
3312 	       will delete it in reload_as_needed once we know that this
3313 	       elimination is, in fact, being done.
3314 
3315 	       If REPLACE isn't set, we can't delete this insn, but needn't
3316 	       process it since it won't be used unless something changes.  */
3317 	    if (replace)
3318 	      {
3319 		delete_dead_insn (insn);
3320 		return 1;
3321 	      }
3322 	    val = 1;
3323 	    goto done;
3324 	  }
3325     }
3326 
3327   /* We allow one special case which happens to work on all machines we
3328      currently support: a single set with the source or a REG_EQUAL
3329      note being a PLUS of an eliminable register and a constant.  */
3330   plus_src = plus_cst_src = 0;
3331   if (old_set && REG_P (SET_DEST (old_set)))
3332     {
3333       if (GET_CODE (SET_SRC (old_set)) == PLUS)
3334 	plus_src = SET_SRC (old_set);
3335       /* First see if the source is of the form (plus (...) CST).  */
3336       if (plus_src
3337 	  && CONST_INT_P (XEXP (plus_src, 1)))
3338 	plus_cst_src = plus_src;
3339       else if (REG_P (SET_SRC (old_set))
3340 	       || plus_src)
3341 	{
3342 	  /* Otherwise, see if we have a REG_EQUAL note of the form
3343 	     (plus (...) CST).  */
3344 	  rtx links;
3345 	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3346 	    {
3347 	      if ((REG_NOTE_KIND (links) == REG_EQUAL
3348 		   || REG_NOTE_KIND (links) == REG_EQUIV)
3349 		  && GET_CODE (XEXP (links, 0)) == PLUS
3350 		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3351 		{
3352 		  plus_cst_src = XEXP (links, 0);
3353 		  break;
3354 		}
3355 	    }
3356 	}
3357 
3358       /* Check that the first operand of the PLUS is a hard reg or
3359 	 the lowpart subreg of one.  */
3360       if (plus_cst_src)
3361 	{
3362 	  rtx reg = XEXP (plus_cst_src, 0);
3363 	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3364 	    reg = SUBREG_REG (reg);
3365 
3366 	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3367 	    plus_cst_src = 0;
3368 	}
3369     }
3370   if (plus_cst_src)
3371     {
3372       rtx reg = XEXP (plus_cst_src, 0);
3373       poly_int64 offset = INTVAL (XEXP (plus_cst_src, 1));
3374 
3375       if (GET_CODE (reg) == SUBREG)
3376 	reg = SUBREG_REG (reg);
3377 
3378       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3379 	if (ep->from_rtx == reg && ep->can_eliminate)
3380 	  {
3381 	    rtx to_rtx = ep->to_rtx;
3382 	    offset += ep->offset;
3383 	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3384 
3385 	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3386 	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3387 				    to_rtx);
3388 	    /* If we have a nonzero offset, and the source is already
3389 	       a simple REG, the following transformation would
3390 	       increase the cost of the insn by replacing a simple REG
3391 	       with (plus (reg sp) CST).  So try only when we already
3392 	       had a PLUS before.  */
3393 	    if (known_eq (offset, 0) || plus_src)
3394 	      {
3395 		rtx new_src = plus_constant (GET_MODE (to_rtx),
3396 					     to_rtx, offset);
3397 
3398 		new_body = old_body;
3399 		if (! replace)
3400 		  {
3401 		    new_body = copy_insn (old_body);
3402 		    if (REG_NOTES (insn))
3403 		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3404 		  }
3405 		PATTERN (insn) = new_body;
3406 		old_set = single_set (insn);
3407 
3408 		/* First see if this insn remains valid when we make the
3409 		   change.  If not, try to replace the whole pattern with
3410 		   a simple set (this may help if the original insn was a
3411 		   PARALLEL that was only recognized as single_set due to
3412 		   REG_UNUSED notes).  If this isn't valid either, keep
3413 		   the INSN_CODE the same and let reload fix it up.  */
3414 		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3415 		  {
3416 		    rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);
3417 
3418 		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3419 		      SET_SRC (old_set) = new_src;
3420 		  }
3421 	      }
3422 	    else
3423 	      break;
3424 
3425 	    val = 1;
3426 	    /* This can't have an effect on elimination offsets, so skip right
3427 	       to the end.  */
3428 	    goto done;
3429 	  }
3430     }
3431 
3432   /* Determine the effects of this insn on elimination offsets.  */
3433   elimination_effects (old_body, VOIDmode);
3434 
3435   /* Eliminate all eliminable registers occurring in operands that
3436      can be handled by reload.  */
3437   extract_insn (insn);
3438   for (i = 0; i < recog_data.n_operands; i++)
3439     {
3440       orig_operand[i] = recog_data.operand[i];
3441       substed_operand[i] = recog_data.operand[i];
3442 
3443       /* For an asm statement, every operand is eliminable.  */
3444       if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3445 	{
3446 	  bool is_set_src, in_plus;
3447 
3448 	  /* Check for setting a register that we know about.  */
3449 	  if (recog_data.operand_type[i] != OP_IN
3450 	      && REG_P (orig_operand[i]))
3451 	    {
3452 	      /* If we are assigning to a register that can be eliminated, it
3453 		 must be as part of a PARALLEL, since the code above handles
3454 		 single SETs.  We must indicate that we can no longer
3455 		 eliminate this reg.  */
3456 	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3457 		   ep++)
3458 		if (ep->from_rtx == orig_operand[i])
3459 		  ep->can_eliminate = 0;
3460 	    }
3461 
3462 	  /* Companion to the above plus substitution, we can allow
3463 	     invariants as the source of a plain move.  */
3464 	  is_set_src = false;
3465 	  if (old_set
3466 	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
3467 	    is_set_src = true;
3468 	  in_plus = false;
3469 	  if (plus_src
3470 	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3471 		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3472 	    in_plus = true;
3473 
3474 	  substed_operand[i]
3475 	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3476 			        replace ? insn : NULL_RTX,
3477 				is_set_src || in_plus, false);
3478 	  if (substed_operand[i] != orig_operand[i])
3479 	    val = 1;
3480 	  /* Terminate the search in check_eliminable_occurrences at
3481 	     this point.  */
3482 	  *recog_data.operand_loc[i] = 0;
3483 
3484 	  /* If an output operand changed from a REG to a MEM and INSN is an
3485 	     insn, write a CLOBBER insn.  */
3486 	  if (recog_data.operand_type[i] != OP_IN
3487 	      && REG_P (orig_operand[i])
3488 	      && MEM_P (substed_operand[i])
3489 	      && replace)
3490 	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
3491 	}
3492     }
3493 
3494   for (i = 0; i < recog_data.n_dups; i++)
3495     *recog_data.dup_loc[i]
3496       = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3497 
3498   /* If any eliminable remain, they aren't eliminable anymore.  */
3499   check_eliminable_occurrences (old_body);
3500 
3501   /* Substitute the operands; the new values are in the substed_operand
3502      array.  */
3503   for (i = 0; i < recog_data.n_operands; i++)
3504     *recog_data.operand_loc[i] = substed_operand[i];
3505   for (i = 0; i < recog_data.n_dups; i++)
3506     *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3507 
3508   /* If we are replacing a body that was a (set X (plus Y Z)), try to
3509      re-recognize the insn.  We do this in case we had a simple addition
3510      but now can do this as a load-address.  This saves an insn in this
3511      common case.
3512      If re-recognition fails, the old insn code number will still be used,
3513      and some register operands may have changed into PLUS expressions.
3514      These will be handled by find_reloads by loading them into a register
3515      again.  */
3516 
3517   if (val)
3518     {
3519       /* If we aren't replacing things permanently and we changed something,
3520 	 make another copy to ensure that all the RTL is new.  Otherwise
3521 	 things can go wrong if find_reload swaps commutative operands
3522 	 and one is inside RTL that has been copied while the other is not.  */
3523       new_body = old_body;
3524       if (! replace)
3525 	{
3526 	  new_body = copy_insn (old_body);
3527 	  if (REG_NOTES (insn))
3528 	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3529 	}
3530       PATTERN (insn) = new_body;
3531 
3532       /* If we had a move insn but now we don't, rerecognize it.  This will
3533 	 cause spurious re-recognition if the old move had a PARALLEL since
3534 	 the new one still will, but we can't call single_set without
3535 	 having put NEW_BODY into the insn and the re-recognition won't
3536 	 hurt in this rare case.  */
3537       /* ??? Why this huge if statement - why don't we just rerecognize the
3538 	 thing always?  */
3539       if (! insn_is_asm
3540 	  && old_set != 0
3541 	  && ((REG_P (SET_SRC (old_set))
3542 	       && (GET_CODE (new_body) != SET
3543 		   || !REG_P (SET_SRC (new_body))))
3544 	      /* If this was a load from or store to memory, compare
3545 		 the MEM in recog_data.operand to the one in the insn.
3546 		 If they are not equal, then rerecognize the insn.  */
3547 	      || (old_set != 0
3548 		  && ((MEM_P (SET_SRC (old_set))
3549 		       && SET_SRC (old_set) != recog_data.operand[1])
3550 		      || (MEM_P (SET_DEST (old_set))
3551 			  && SET_DEST (old_set) != recog_data.operand[0])))
3552 	      /* If this was an add insn before, rerecognize.  */
3553 	      || GET_CODE (SET_SRC (old_set)) == PLUS))
3554 	{
3555 	  int new_icode = recog (PATTERN (insn), insn, 0);
3556 	  if (new_icode >= 0)
3557 	    INSN_CODE (insn) = new_icode;
3558 	}
3559     }
3560 
3561   /* Restore the old body.  If there were any changes to it, we made a copy
3562      of it while the changes were still in place, so we'll correctly return
3563      a modified insn below.  */
3564   if (! replace)
3565     {
3566       /* Restore the old body.  */
3567       for (i = 0; i < recog_data.n_operands; i++)
3568 	/* Restoring a top-level match_parallel would clobber the new_body
3569 	   we installed in the insn.  */
3570 	if (recog_data.operand_loc[i] != &PATTERN (insn))
3571 	  *recog_data.operand_loc[i] = orig_operand[i];
3572       for (i = 0; i < recog_data.n_dups; i++)
3573 	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3574     }
3575 
3576   /* Update all elimination pairs to reflect the status after the current
3577      insn.  The changes we make were determined by the earlier call to
3578      elimination_effects.
3579 
3580      We also detect cases where register elimination cannot be done,
3581      namely, if a register would be both changed and referenced outside a MEM
3582      in the resulting insn since such an insn is often undefined and, even if
3583      not, we cannot know what meaning will be given to it.  Note that it is
3584      valid to have a register used in an address in an insn that changes it
3585      (presumably with a pre- or post-increment or decrement).
3586 
3587      If anything changes, return nonzero.  */
3588 
3589   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3590     {
3591       if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem)
3592 	ep->can_eliminate = 0;
3593 
3594       ep->ref_outside_mem = 0;
3595 
3596       if (maybe_ne (ep->previous_offset, ep->offset))
3597 	val = 1;
3598     }
3599 
3600  done:
3601   /* If we changed something, perform elimination in REG_NOTES.  This is
3602      needed even when REPLACE is zero because a REG_DEAD note might refer
3603      to a register that we eliminate and could cause a different number
3604      of spill registers to be needed in the final reload pass than in
3605      the pre-passes.  */
3606   if (val && REG_NOTES (insn) != 0)
3607     REG_NOTES (insn)
3608       = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3609 			  false);
3610 
3611   return val;
3612 }
3613 
3614 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3615    register allocator.  INSN is the instruction we need to examine, we perform
3616    eliminations in its operands and record cases where eliminating a reg with
3617    an invariant equivalence would add extra cost.  */
3618 
3619 #pragma GCC diagnostic push
3620 #pragma GCC diagnostic warning "-Wmaybe-uninitialized"
static void
elimination_costs_in_insn (rtx_insn *insn)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  int i;
  rtx orig_operand[MAX_RECOG_OPERANDS];
  rtx orig_dup[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;
  bool sets_reg_p;

  /* An insn that fails to be recognized must be one of the harmless
     kinds asserted below; it has no operands to cost.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      return;
    }

  /* A single set of an eliminable hard register is handled by the
     elimination machinery itself; nothing to cost here.  */
  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  return;
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  sets_reg_p = false;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      sets_reg_p = true;
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  /* Save the duplicate-operand locations so the original body can be
     restored verbatim below.  */
  int n_dups = recog_data.n_dups;
  for (i = 0; i < n_dups; i++)
    orig_dup[i] = *recog_data.dup_loc[i];

  int n_operands = recog_data.n_operands;
  for (i = 0; i < n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  /* A set source that is not itself setting a register is the
	     costly case this function exists to record.  */
	  if (is_set_src && !sets_reg_p)
	    note_reg_elim_costly (SET_SRC (old_set), insn);
	  in_plus = false;
	  if (plus_src && sets_reg_p
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  /* Run the substitution for its cost-recording side effects only
	     (last argument true); the result is discarded.  */
	  eliminate_regs_1 (recog_data.operand[i], VOIDmode,
			    NULL_RTX,
			    is_set_src || in_plus, true);
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;
	}
    }

  for (i = 0; i < n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Restore the old body.  */
  for (i = 0; i < n_operands; i++)
    *recog_data.operand_loc[i] = orig_operand[i];
  for (i = 0; i < n_dups; i++)
    *recog_data.dup_loc[i] = orig_dup[i];

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;
    }

  return;
}
3770 #pragma GCC diagnostic pop
3771 
3772 /* Loop through all elimination pairs.
3773    Recalculate the number not at initial offset.
3774 
3775    Compute the maximum offset (minimum offset if the stack does not
3776    grow downward) for each elimination pair.  */
3777 
3778 static void
3779 update_eliminable_offsets (void)
3780 {
3781   struct elim_table *ep;
3782 
3783   num_not_at_initial_offset = 0;
3784   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3785     {
3786       ep->previous_offset = ep->offset;
3787       if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset))
3788 	num_not_at_initial_offset++;
3789     }
3790 }
3791 
3792 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3793    replacement we currently believe is valid, mark it as not eliminable if X
3794    modifies DEST in any way other than by adding a constant integer to it.
3795 
3796    If DEST is the frame pointer, we do nothing because we assume that
3797    all assignments to the hard frame pointer are nonlocal gotos and are being
3798    done at a time when they are valid and do not disturb anything else.
3799    Some machines want to eliminate a fake argument pointer with either the
3800    frame or stack pointer.  Assignments to the hard frame pointer must not
3801    prevent this elimination.
3802 
3803    Called via note_stores from reload before starting its passes to scan
3804    the insns of the function.  */
3805 
3806 static void
3807 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3808 {
3809   unsigned int i;
3810 
3811   /* A SUBREG of a hard register here is just changing its mode.  We should
3812      not see a SUBREG of an eliminable hard register, but check just in
3813      case.  */
3814   if (GET_CODE (dest) == SUBREG)
3815     dest = SUBREG_REG (dest);
3816 
3817   if (dest == hard_frame_pointer_rtx)
3818     return;
3819 
3820   /* CLOBBER_HIGH is only supported for LRA.  */
3821   gcc_assert (GET_CODE (x) != CLOBBER_HIGH);
3822 
3823   for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3824     if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3825 	&& (GET_CODE (x) != SET
3826 	    || GET_CODE (SET_SRC (x)) != PLUS
3827 	    || XEXP (SET_SRC (x), 0) != dest
3828 	    || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3829       {
3830 	reg_eliminate[i].can_eliminate_previous
3831 	  = reg_eliminate[i].can_eliminate = 0;
3832 	num_eliminable--;
3833       }
3834 }
3835 
3836 /* Verify that the initial elimination offsets did not change since the
3837    last call to set_initial_elim_offsets.  This is used to catch cases
3838    where something illegal happened during reload_as_needed that could
3839    cause incorrect code to be generated if we did not check for it.  */
3840 
3841 static bool
3842 verify_initial_elim_offsets (void)
3843 {
3844   poly_int64 t;
3845   struct elim_table *ep;
3846 
3847   if (!num_eliminable)
3848     return true;
3849 
3850   targetm.compute_frame_layout ();
3851   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3852     {
3853       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3854       if (maybe_ne (t, ep->initial_offset))
3855 	return false;
3856     }
3857 
3858   return true;
3859 }
3860 
3861 /* Reset all offsets on eliminable registers to their initial values.  */
3862 
3863 static void
3864 set_initial_elim_offsets (void)
3865 {
3866   struct elim_table *ep = reg_eliminate;
3867 
3868   targetm.compute_frame_layout ();
3869   for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3870     {
3871       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3872       ep->previous_offset = ep->offset = ep->initial_offset;
3873     }
3874 
3875   num_not_at_initial_offset = 0;
3876 }
3877 
3878 /* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3879 
static void
set_initial_eh_label_offset (rtx label)
{
  /* Adapter for for_each_eh_label: record offsets for the EH label
     exactly as for other known labels (same call as for forced labels
     in set_initial_label_offsets).  */
  set_label_offsets (label, NULL, 1);
}
3885 
3886 /* Initialize the known label offsets.
3887    Set a known offset for each forced label to be at the initial offset
3888    of each elimination.  We do this because we assume that all
3889    computed jumps occur from a location where each elimination is
3890    at its initial offset.
3891    For all other labels, show that we don't know the offsets.  */
3892 
3893 static void
3894 set_initial_label_offsets (void)
3895 {
3896   memset (offsets_known_at, 0, num_labels);
3897 
3898   unsigned int i;
3899   rtx_insn *insn;
3900   FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
3901     set_label_offsets (insn, NULL, 1);
3902 
3903   for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
3904     if (x->insn ())
3905       set_label_offsets (x->insn (), NULL, 1);
3906 
3907   for_each_eh_label (set_initial_eh_label_offset);
3908 }
3909 
3910 /* Set all elimination offsets to the known values for the code label given
3911    by INSN.  */
3912 
3913 static void
3914 set_offsets_for_label (rtx_insn *insn)
3915 {
3916   unsigned int i;
3917   int label_nr = CODE_LABEL_NUMBER (insn);
3918   struct elim_table *ep;
3919 
3920   num_not_at_initial_offset = 0;
3921   for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3922     {
3923       ep->offset = ep->previous_offset
3924 		 = offsets_at[label_nr - first_label_num][i];
3925       if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset))
3926 	num_not_at_initial_offset++;
3927     }
3928 }
3929 
3930 /* See if anything that happened changes which eliminations are valid.
3931    For example, on the SPARC, whether or not the frame pointer can
3932    be eliminated can depend on what registers have been used.  We need
3933    not check some conditions again (such as flag_omit_frame_pointer)
3934    since they can't have changed.  */
3935 
static void
update_eliminables (HARD_REG_SET *pset)
{
  int previous_frame_pointer_needed = frame_pointer_needed;
  struct elim_table *ep;

  /* Re-query the target: an elimination becomes invalid if the frame
     pointer is now required or the target vetoes the pair.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
         && targetm.frame_pointer_required ())
	|| ! targetm.can_eliminate (ep->from, ep->to)
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      int new_to = -1;

      /* Only pairs that were just invalidated in this round can force a
	 transitive invalidation.  */
      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* One surviving frame-pointer elimination (not into the hard frame
	 pointer, and not blocked by stack realignment) means the frame
	 pointer is not needed after all.  */
      if (ep->can_eliminate
	  && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM
	  && (! SUPPORTS_STACK_ALIGNMENT
	      || ! crtl->stack_realign_needed))
	frame_pointer_needed = 0;

      /* A pair that just became non-eliminable: tell the caller to spill
	 its source register, and note the new state for next time.  */
      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
4011 
/* Call update_eliminables and spill any registers we can't eliminate anymore.
4013    Return true iff a register was spilled.  */
4014 
4015 static bool
4016 update_eliminables_and_spill (void)
4017 {
4018   int i;
4019   bool did_spill = false;
4020   HARD_REG_SET to_spill;
4021   CLEAR_HARD_REG_SET (to_spill);
4022   update_eliminables (&to_spill);
4023   AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
4024 
4025   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4026     if (TEST_HARD_REG_BIT (to_spill, i))
4027       {
4028 	spill_hard_reg (i, 1);
4029 	did_spill = true;
4030 
4031 	/* Regardless of the state of spills, if we previously had
4032 	   a register that we thought we could eliminate, but now
4033 	   cannot eliminate, we must run another pass.
4034 
4035 	   Consider pseudos which have an entry in reg_equiv_* which
4036 	   reference an eliminable register.  We must make another pass
4037 	   to update reg_equiv_* so that we do not substitute in the
4038 	   old value from when we thought the elimination could be
4039 	   performed.  */
4040       }
4041   return did_spill;
4042 }
4043 
4044 /* Return true if X is used as the target register of an elimination.  */
4045 
4046 bool
4047 elimination_target_reg_p (rtx x)
4048 {
4049   struct elim_table *ep;
4050 
4051   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4052     if (ep->to_rtx == x && ep->can_eliminate)
4053       return true;
4054 
4055   return false;
4056 }
4057 
4058 /* Initialize the table of registers to eliminate.
4059    Pre-condition: global flag frame_pointer_needed has been set before
4060    calling this function.  */
4061 
4062 static void
4063 init_elim_table (void)
4064 {
4065   struct elim_table *ep;
4066   const struct elim_table_1 *ep1;
4067 
4068   if (!reg_eliminate)
4069     reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
4070 
4071   num_eliminable = 0;
4072 
4073   for (ep = reg_eliminate, ep1 = reg_eliminate_1;
4074        ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
4075     {
4076       ep->from = ep1->from;
4077       ep->to = ep1->to;
4078       ep->can_eliminate = ep->can_eliminate_previous
4079 	= (targetm.can_eliminate (ep->from, ep->to)
4080 	   && ! (ep->to == STACK_POINTER_REGNUM
4081 		 && frame_pointer_needed
4082 		 && (! SUPPORTS_STACK_ALIGNMENT
4083 		     || ! stack_realign_fp)));
4084     }
4085 
4086   /* Count the number of eliminable registers and build the FROM and TO
4087      REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
4088      gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4089      We depend on this.  */
4090   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4091     {
4092       num_eliminable += ep->can_eliminate;
4093       ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
4094       ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
4095     }
4096 }
4097 
4098 /* Find all the pseudo registers that didn't get hard regs
4099    but do have known equivalent constants or memory slots.
4100    These include parameters (known equivalent to parameter slots)
4101    and cse'd or loop-moved constant memory addresses.
4102 
4103    Record constant equivalents in reg_equiv_constant
4104    so they will be substituted by find_reloads.
4105    Record memory equivalents in reg_mem_equiv so they can
4106    be substituted eventually by altering the REG-rtx's.  */
4107 
static void
init_eliminable_invariants (rtx_insn *first, bool do_subregs)
{
  int i;
  rtx_insn *insn;

  grow_reg_equivs ();
  if (do_subregs)
    reg_max_ref_mode = XCNEWVEC (machine_mode, max_regno);
  else
    reg_max_ref_mode = NULL;

  num_eliminable_invariants = 0;

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (poly_int64_pod (*)[NUM_ELIMINABLE_REGS])
    xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (poly_int64));

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent
     to.  If DO_SUBREGS is true, also find all paradoxical subregs and
     find largest such for each pseudo.  FIRST is the head of the insn
     list.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
	 we'll mark them with QImode.  Make sure there are no
	 previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && GET_MODE (insn) != VOIDmode)
	PUT_MODE (insn, VOIDmode);

      if (do_subregs && NONDEBUG_INSN_P (insn))
	scan_paradoxical_subregs (PATTERN (insn));

      if (set != 0 && REG_P (SET_DEST (set)))
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  rtx x;

	  if (! note)
	    continue;

	  i = REGNO (SET_DEST (set));
	  x = XEXP (note, 0);

	  /* Only real pseudos (beyond the virtual registers) can carry
	     an equivalence we track here.  */
	  if (i <= LAST_VIRTUAL_REGISTER)
	    continue;

	  /* If flag_pic and we have constant, verify it's legitimate.  */
	  if (!CONSTANT_P (x)
	      || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
	    {
	      /* It can happen that a REG_EQUIV note contains a MEM
		 that is not a legitimate memory operand.  As later
		 stages of reload assume that all addresses found
		 in the reg_equiv_* arrays were originally legitimate,
		 we ignore such REG_EQUIV notes.  */
	      if (memory_operand (x, VOIDmode))
		{
		  /* Always unshare the equivalence, so we can
		     substitute into this insn without touching the
		     equivalence.  */
		  reg_equiv_memory_loc (i) = copy_rtx (x);
		}
	      else if (function_invariant_p (x))
		{
		  machine_mode mode;

		  mode = GET_MODE (SET_DEST (set));
		  if (GET_CODE (x) == PLUS)
		    {
		      /* This is PLUS of frame pointer and a constant,
			 and might be shared.  Unshare it.  */
		      reg_equiv_invariant (i) = copy_rtx (x);
		      num_eliminable_invariants++;
		    }
		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
		    {
		      reg_equiv_invariant (i) = x;
		      num_eliminable_invariants++;
		    }
		  else if (targetm.legitimate_constant_p (mode, x))
		    reg_equiv_constant (i) = x;
		  else
		    {
		      /* The constant can't be used directly as an
			 operand; put it in the constant pool and record
			 its memory location instead.  */
		      reg_equiv_memory_loc (i) = force_const_mem (mode, x);
		      if (! reg_equiv_memory_loc (i))
			reg_equiv_init (i) = NULL;
		    }
		}
	      else
		{
		  reg_equiv_init (i) = NULL;
		  continue;
		}
	    }
	  else
	    reg_equiv_init (i) = NULL;
	}
    }

  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init (i))
	{
	  fprintf (dump_file, "init_insns for %u: ", i);
	  print_inline_rtx (dump_file, reg_equiv_init (i), 20);
	  fprintf (dump_file, "\n");
	}
}
4225 
4226 /* Indicate that we no longer have known memory locations or constants.
4227    Free all data involved in tracking these.  */
4228 
4229 static void
4230 free_reg_equiv (void)
4231 {
4232   int i;
4233 
4234   free (offsets_known_at);
4235   free (offsets_at);
4236   offsets_at = 0;
4237   offsets_known_at = 0;
4238 
4239   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4240     if (reg_equiv_alt_mem_list (i))
4241       free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4242   vec_free (reg_equivs);
4243 }
4244 
4245 /* Kick all pseudos out of hard register REGNO.
4246 
4247    If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no pseudos
4249    are allowed to be in the register, even if they are only in a block that
4250    doesn't require spill registers, unlike the case when we are spilling this
4251    hard reg to produce another spill register.
4252 
   Any pseudos that are kicked out are recorded in the spilled_pseudos set.  */
4254 
4255 static void
4256 spill_hard_reg (unsigned int regno, int cant_eliminate)
4257 {
4258   int i;
4259 
4260   if (cant_eliminate)
4261     {
4262       SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4263       df_set_regs_ever_live (regno, true);
4264     }
4265 
4266   /* Spill every pseudo reg that was allocated to this reg
4267      or to something that overlaps this reg.  */
4268 
4269   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4270     if (reg_renumber[i] >= 0
4271 	&& (unsigned int) reg_renumber[i] <= regno
4272 	&& end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4273       SET_REGNO_REG_SET (&spilled_pseudos, i);
4274 }
4275 
4276 /* After spill_hard_reg was called and/or find_reload_regs was run for all
4277    insns that need reloads, this function is used to actually spill pseudo
4278    registers and try to reallocate them.  It also sets up the spill_regs
4279    array for use by choose_reload_regs.
4280 
4281    GLOBAL nonzero means we should attempt to reallocate any pseudo registers
4282    that we displace from hard registers.  */
4283 
static int
finish_spills (int global)
{
  struct insn_chain *chain;
  /* Nonzero if anything we do here (new spill regs, reallocations,
     possible elimination-offset changes) requires another pass.  */
  int something_changed = 0;
  unsigned i;
  reg_set_iterator rsi;

  /* Build the spill_regs array for the function.  */
  /* If there are some registers still to eliminate and one of the spill regs
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */

  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
      {
	spill_reg_order[i] = n_spills;
	spill_regs[n_spills++] = i;
	if (num_eliminable && ! df_regs_ever_live_p (i))
	  something_changed = 1;
	df_set_regs_ever_live (i, true);
      }
    else
      spill_reg_order[i] = -1;

  /* Throw each spilled pseudo out of its hard register, unless IRA has
     already cleared its renumbering.  */
  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
    if (! ira_conflicts_p || reg_renumber[i] >= 0)
      {
	/* Record the current hard register the pseudo is allocated to
	   in pseudo_previous_regs so we avoid reallocating it to the
	   same hard reg in a later pass.  */
	gcc_assert (reg_renumber[i] >= 0);

	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	if (ira_conflicts_p)
	  /* Inform IRA about the change.  */
	  ira_mark_allocation_change (i);
	/* We will need to scan everything again.  */
	something_changed = 1;
      }

  /* Retry global register allocation if possible.  */
  if (global && ira_conflicts_p)
    {
      unsigned int n;

      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
      /* For every insn that needs reloads, set the registers used as spill
	 regs in pseudo_forbidden_regs for every pseudo live across the
	 insn.  */
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
	{
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	}

      /* Retry allocating the pseudos spilled in IRA and the
	 reload.  For each reg, merge the various reg sets that
	 indicate which hard regs can't be used, and call
	 ira_reassign_pseudos.  */
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
	if (reg_old_renumber[i] != reg_renumber[i])
	  {
	    if (reg_renumber[i] < 0)
	      temp_pseudo_reg_arr[n++] = i;
	    else
	      /* The pseudo already got a new home; it is no longer
		 spilled.  */
	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
	  }
      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
				bad_spill_regs_global,
				pseudo_forbidden_regs, pseudo_previous_regs,
				&spilled_pseudos))
	something_changed = 1;
    }
  /* Fix up the register information in the insn chain.
     This involves deleting those of the spilled pseudos which did not get
     a new hard register home from the live_{before,after} sets.  */
  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      HARD_REG_SET used_by_pseudos;
      HARD_REG_SET used_by_pseudos2;

      if (! ira_conflicts_p)
	{
	  /* Don't do it for IRA because IRA and the reload still can
	     assign hard registers to the spilled pseudos on next
	     reload iterations.  */
	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
	}
      /* Mark any unallocated hard regs as available for spills.  That
	 makes inheritance work somewhat better.  */
      if (chain->need_reload)
	{
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);

	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
	  /* Value of chain->used_spill_regs from previous iteration
	     may be not included in the value calculated here because
	     of possible removing caller-saves insns (see function
	     delete_caller_save_insns).  */
	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
	}
    }

  CLEAR_REG_SET (&changed_allocation_pseudos);
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (reg_old_renumber[i] == regno)
	continue;

      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);

      alter_reg (i, reg_old_renumber[i], false);
      reg_old_renumber[i] = regno;
      if (dump_file)
	{
	  if (regno == -1)
	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
	  else
	    fprintf (dump_file, " Register %d now in %d.\n\n",
		     i, reg_renumber[i]);
	}
    }

  return something_changed;
}
4437 
4438 /* Find all paradoxical subregs within X and update reg_max_ref_mode.  */
4439 
4440 static void
4441 scan_paradoxical_subregs (rtx x)
4442 {
4443   int i;
4444   const char *fmt;
4445   enum rtx_code code = GET_CODE (x);
4446 
4447   switch (code)
4448     {
4449     case REG:
4450     case CONST:
4451     case SYMBOL_REF:
4452     case LABEL_REF:
4453     CASE_CONST_ANY:
4454     case CC0:
4455     case PC:
4456     case USE:
4457     case CLOBBER:
4458     case CLOBBER_HIGH:
4459       return;
4460 
4461     case SUBREG:
4462       if (REG_P (SUBREG_REG (x)))
4463 	{
4464 	  unsigned int regno = REGNO (SUBREG_REG (x));
4465 	  if (partial_subreg_p (reg_max_ref_mode[regno], GET_MODE (x)))
4466 	    {
4467 	      reg_max_ref_mode[regno] = GET_MODE (x);
4468 	      mark_home_live_1 (regno, GET_MODE (x));
4469 	    }
4470 	}
4471       return;
4472 
4473     default:
4474       break;
4475     }
4476 
4477   fmt = GET_RTX_FORMAT (code);
4478   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4479     {
4480       if (fmt[i] == 'e')
4481 	scan_paradoxical_subregs (XEXP (x, i));
4482       else if (fmt[i] == 'E')
4483 	{
4484 	  int j;
4485 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4486 	    scan_paradoxical_subregs (XVECEXP (x, i, j));
4487 	}
4488     }
4489 }
4490 
4491 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4492    If *OP_PTR is a paradoxical subreg, try to remove that subreg
4493    and apply the corresponding narrowing subreg to *OTHER_PTR.
4494    Return true if the operands were changed, false otherwise.  */
4495 
4496 static bool
4497 strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4498 {
4499   rtx op, inner, other, tem;
4500 
4501   op = *op_ptr;
4502   if (!paradoxical_subreg_p (op))
4503     return false;
4504   inner = SUBREG_REG (op);
4505 
4506   other = *other_ptr;
4507   tem = gen_lowpart_common (GET_MODE (inner), other);
4508   if (!tem)
4509     return false;
4510 
4511   /* If the lowpart operation turned a hard register into a subreg,
4512      rather than simplifying it to another hard register, then the
4513      mode change cannot be properly represented.  For example, OTHER
4514      might be valid in its current mode, but not in the new one.  */
4515   if (GET_CODE (tem) == SUBREG
4516       && REG_P (other)
4517       && HARD_REGISTER_P (other))
4518     return false;
4519 
4520   *op_ptr = inner;
4521   *other_ptr = tem;
4522   return true;
4523 }
4524 
4525 /* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
4526    examine all of the reload insns between PREV and NEXT exclusive, and
4527    annotate all that may trap.  */
4528 
4529 static void
4530 fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4531 {
4532   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4533   if (note == NULL)
4534     return;
4535   if (!insn_could_throw_p (insn))
4536     remove_note (insn, note);
4537   copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4538 }
4539 
4540 /* Reload pseudo-registers into hard regs around each insn as needed.
4541    Additional register load insns are output before the insn that needs it
4542    and perhaps store insns after insns that modify the reloaded pseudo reg.
4543 
4544    reg_last_reload_reg and reg_reloaded_contents keep track of
4545    which registers are already available in reload registers.
4546    We update these for the reloads that we perform,
4547    as the insns are scanned.  */
4548 
static void
reload_as_needed (int live_known)
{
  struct insn_chain *chain;
#if AUTO_INC_DEC
  int i;
#endif
  rtx_note *marker;

  /* Reset all per-function tracking of reload register contents.  */
  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
  memset (spill_reg_store, 0, sizeof spill_reg_store);
  reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
  INIT_REG_SET (&reg_has_output_reload);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);
  CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);

  set_initial_elim_offsets ();

  /* Generate a marker insn that we will move around.  */
  marker = emit_note (NOTE_INSN_DELETED);
  unlink_insn_chain (marker, marker);

  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      rtx_insn *prev = 0;
      rtx_insn *insn = chain->insn;
      rtx_insn *old_next = NEXT_INSN (insn);
#if AUTO_INC_DEC
      rtx_insn *old_prev = PREV_INSN (insn);
#endif

      if (will_delete_init_insn_p (insn))
	continue;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (LABEL_P (insn))
	set_offsets_for_label (insn);

      else if (INSN_P (insn))
	{
	  regset_head regs_to_forget;
	  INIT_REG_SET (&regs_to_forget);
	  note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && MEM_P (XEXP (PATTERN (insn), 0)))
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)),
				NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.  */
	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (NOTE_P (insn))
		{
		  update_eliminable_offsets ();
		  CLEAR_REG_SET (&regs_to_forget);
		  continue;
		}
	    }

	  /* If need_elim is nonzero but need_reload is zero, one might think
	     that we could simply set n_reloads to 0.  However, find_reloads
	     could have done some manipulation of the insn (such as swapping
	     commutative operands), and these manipulations are lost during
	     the first pass for every insn that needs register elimination.
	     So the actions of find_reloads must be redone here.  */

	  if (! chain->need_elim && ! chain->need_reload
	      && ! chain->need_operand_change)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      CLEAR_REG_SET (&reg_has_output_reload);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      rtx_insn *next = NEXT_INSN (insn);

	      /* ??? PREV can get deleted by reload inheritance.
		 Work around this by emitting a marker note.  */
	      prev = PREV_INSN (insn);
	      reorder_insns_nobb (marker, marker, prev);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (chain);

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (chain);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads (insn);

	      /* Recover a safe PREV from the marker, in case the original
		 PREV was deleted by reload inheritance.  */
	      prev = PREV_INSN (marker);
	      unlink_insn_chain (marker, marker);

	      /* Adjust the exception region notes for loads and stores.  */
	      if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
		fixup_eh_region_note (insn, prev, next);

	      /* Adjust the location of REG_ARGS_SIZE.  */
	      rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
	      if (p)
		{
		  remove_note (insn, p);
		  fixup_args_size_notes (prev, PREV_INSN (next),
					 get_args_size (p));
		}

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */
	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (rtx_insn *p = NEXT_INSN (prev);
		     p != next;
		     p = NEXT_INSN (p))
		  if (p != insn && INSN_P (p)
		      && GET_CODE (PATTERN (p)) != USE
		      && (recog_memoized (p) < 0
			  || (extract_insn (p),
			      !(constrain_operands (1,
				  get_enabled_alternatives (p))))))
		    {
		      error_for_asm (insn,
				     "%<asm%> operand requires "
				     "impossible reload");
		      delete_insn (p);
		    }
	    }

	  if (num_eliminable && chain->need_elim)
	    update_eliminable_offsets ();

	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  forget_marked_reloads (&regs_to_forget);
	  CLEAR_REG_SET (&regs_to_forget);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
	    if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1, NULL);

#if AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     REG_INC notes have been changed by reloading:
	     find_reloads_address_1 records substitutions for them,
	     which have been performed by subst_reloads above.  */
	  for (i = n_reloads - 1; i >= 0; i--)
	    {
	      rtx in_reg = rld[i].in_reg;
	      if (in_reg)
		{
		  enum rtx_code code = GET_CODE (in_reg);
		  /* PRE_INC / PRE_DEC will have the reload register ending up
		     with the same value as the stack slot, but that doesn't
		     hold true for POST_INC / POST_DEC.  Either we have to
		     convert the memory access to a true POST_INC / POST_DEC,
		     or we can't use the reload register for inheritance.  */
		  if ((code == POST_INC || code == POST_DEC)
		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
					    REGNO (rld[i].reg_rtx))
		      /* Make sure it is the inc/dec pseudo, and not
			 some other (e.g. output operand) pseudo.  */
		      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			  == REGNO (XEXP (in_reg, 0))))

		    {
		      rtx reload_reg = rld[i].reg_rtx;
		      machine_mode mode = GET_MODE (reload_reg);
		      int n = 0;
		      rtx_insn *p;

		      /* Look backwards through the reload insns we emitted
			 for a single use of RELOAD_REG that can be turned
			 into a true POST_INC / POST_DEC access.  */
		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
			{
			  /* We really want to ignore REG_INC notes here, so
			     use PATTERN (p) as argument to reg_set_p .  */
			  if (reg_set_p (reload_reg, PATTERN (p)))
			    break;
			  n = count_occurrences (PATTERN (p), reload_reg, 0);
			  if (! n)
			    continue;
			  if (n == 1)
			    {
			      rtx replace_reg
				= gen_rtx_fmt_e (code, mode, reload_reg);

			      validate_replace_rtx_group (reload_reg,
							  replace_reg, p);
			      n = verify_changes (0);

			      /* We must also verify that the constraints
				 are met after the replacement.  Make sure
				 extract_insn is only called for an insn
				 where the replacements were found to be
				 valid so far. */
			      if (n)
				{
				  extract_insn (p);
				  n = constrain_operands (1,
				    get_enabled_alternatives (p));
				}

			      /* If the constraints were not met, then
				 undo the replacement, else confirm it.  */
			      if (!n)
				cancel_changes (0);
			      else
				confirm_change_group ();
			    }
			  break;
			}
		      if (n == 1)
			{
			  add_reg_note (p, REG_INC, reload_reg);
			  /* Mark this as having an output reload so that the
			     REG_INC processing code below won't invalidate
			     the reload for inheritance.  */
			  SET_HARD_REG_BIT (reg_is_output_reload,
					    REGNO (reload_reg));
			  SET_REGNO_REG_SET (&reg_has_output_reload,
					     REGNO (XEXP (in_reg, 0)));
			}
		      else
			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
					      NULL);
		    }
		  else if ((code == PRE_INC || code == PRE_DEC)
			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
						 REGNO (rld[i].reg_rtx))
			   /* Make sure it is the inc/dec pseudo, and not
			      some other (e.g. output operand) pseudo.  */
			   && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			       == REGNO (XEXP (in_reg, 0))))
		    {
		      SET_HARD_REG_BIT (reg_is_output_reload,
					REGNO (rld[i].reg_rtx));
		      SET_REGNO_REG_SET (&reg_has_output_reload,
					 REGNO (XEXP (in_reg, 0)));
		    }
		  else if (code == PRE_INC || code == PRE_DEC
			   || code == POST_INC || code == POST_DEC)
		    {
		      int in_regno = REGNO (XEXP (in_reg, 0));

		      if (reg_last_reload_reg[in_regno] != NULL_RTX)
			{
			  int in_hard_regno;
			  bool forget_p = true;

			  in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
			  if (TEST_HARD_REG_BIT (reg_reloaded_valid,
						 in_hard_regno))
			    {
			      for (rtx_insn *x = (old_prev ?
						  NEXT_INSN (old_prev) : insn);
				   x != old_next;
				   x = NEXT_INSN (x))
				if (x == reg_reloaded_insn[in_hard_regno])
				  {
				    forget_p = false;
				    break;
				  }
			    }
			  /* If for some reasons, we didn't set up
			     reg_last_reload_reg in this insn,
			     invalidate inheritance from previous
			     insns for the incremented/decremented
			     register.  Such registers will be not in
			     reg_has_output_reload.  Invalidate it
			     also if the corresponding element in
			     reg_reloaded_insn is also
			     invalidated.  */
			  if (forget_p)
			    forget_old_reloads_1 (XEXP (in_reg, 0),
						  NULL_RTX, NULL);
			}
		    }
		}
	    }
	  /* If a pseudo that got a hard register is auto-incremented,
	     we must purge records of copying it into pseudos without
	     hard registers.  */
	  for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (rld[i].out == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (LABEL_P (insn))
	CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg, or if it contains a value that will
         be partially clobbered by the call.  */
      else if (CALL_P (insn))
	{
	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);

	  /* If this is a call to a setjmp-type function, we must not
	     reuse any reload reg contents across the call; that will
	     just be clobbered by other uses of the register in later
	     code, before the longjmp.  */
	  if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
	    CLEAR_HARD_REG_SET (reg_reloaded_valid);
	}
    }

  /* Clean up.  */
  free (reg_last_reload_reg);
  CLEAR_REG_SET (&reg_has_output_reload);
}
4901 
4902 /* Discard all record of any value reloaded from X,
4903    or reloaded in X from someplace else;
4904    unless X is an output reload reg of the current insn.
4905 
4906    X may be a hard reg (the reload reg)
4907    or it may be a pseudo reg that was reloaded from.
4908 
4909    When DATA is non-NULL just mark the registers in regset
4910    to be forgotten later.  */
4911 
static void
forget_old_reloads_1 (rtx x, const_rtx setter,
		      void *data)
{
  unsigned int regno;
  unsigned int nr;
  /* When non-null, DATA is a regset in which registers are merely
     marked for later processing by forget_marked_reloads.  */
  regset regs = (regset) data;

  /* note_stores does give us subregs of hard regs,
     subreg_regno_offset requires a hard reg.  */
  while (GET_CODE (x) == SUBREG)
    {
      /* We ignore the subreg offset when calculating the regno,
	 because we are using the entire underlying hard register
	 below.  */
      x = SUBREG_REG (x);
    }

  if (!REG_P (x))
    return;

  /* CLOBBER_HIGH is only supported for LRA.  */
  gcc_assert (setter == NULL_RTX || GET_CODE (setter) != CLOBBER_HIGH);

  regno = REGNO (x);

  if (regno >= FIRST_PSEUDO_REGISTER)
    nr = 1;
  else
    {
      unsigned int i;

      nr = REG_NREGS (x);
      /* Storing into a spilled-reg invalidates its contents.
	 This can happen if a block-local pseudo is allocated to that reg
	 and it wasn't spilled because this block's total need is 0.
	 Then some insn might have an optional reload and use this reg.  */
      if (!regs)
	for (i = 0; i < nr; i++)
	  /* But don't do this if the reg actually serves as an output
	     reload reg in the current instruction.  */
	  if (n_reloads == 0
	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
	    {
	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
	      spill_reg_store[regno + i] = 0;
	    }
    }

  if (regs)
    while (nr-- > 0)
      SET_REGNO_REG_SET (regs, regno + nr);
  else
    {
      /* Since value of X has changed,
	 forget any value previously copied from it.  */

      while (nr-- > 0)
	/* But don't forget a copy if this is the output reload
	   that establishes the copy's validity.  */
	if (n_reloads == 0
	    || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
	  reg_last_reload_reg[regno + nr] = 0;
     }
}
4977 
4978 /* Forget the reloads marked in regset by previous function.  */
4979 static void
4980 forget_marked_reloads (regset regs)
4981 {
4982   unsigned int reg;
4983   reg_set_iterator rsi;
4984   EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4985     {
4986       if (reg < FIRST_PSEUDO_REGISTER
4987 	  /* But don't do this if the reg actually serves as an output
4988 	     reload reg in the current instruction.  */
4989 	  && (n_reloads == 0
4990 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4991 	  {
4992 	    CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4993 	    spill_reg_store[reg] = 0;
4994 	  }
4995       if (n_reloads == 0
4996 	  || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4997 	reg_last_reload_reg[reg] = 0;
4998     }
4999 }
5000 
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
5039 
5040 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5041    TYPE. MODE is used to indicate how many consecutive regs are
5042    actually used.  */
5043 
static void
mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
			machine_mode mode)
{
  /* Record REGNO (and, via MODE, any consecutive hard regs it spans)
     in the tracking set specific to this reload TYPE.  */
  switch (type)
    {
    case RELOAD_OTHER:
      add_to_hard_reg_set (&reload_reg_used, mode, regno);
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
      break;

    case RELOAD_FOR_INPUT:
      add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
      break;

    case RELOAD_FOR_OUTPUT:
      add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
      break;

    case RELOAD_FOR_INSN:
      add_to_hard_reg_set (&reload_reg_used_in_insn,  mode, regno);
      break;
    }

  /* NOTE(review): unlike clear_reload_reg_in_use, there is no default
     case above, so an unhandled reload type would fall through silently;
     every type is also recorded in the catch-all set here.  */
  add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
}
5097 
5098 /* Similarly, but show REGNO is no longer in use for a reload.  */
5099 
static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
			 enum reload_type type, machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs (regno, mode);
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  /* Select the tracking set to clear, mirroring mark_reload_reg_in_use.  */
  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      gcc_unreachable ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (rld[i].when_needed == type
	      && (check_any || rld[i].opnum == opnum)
	      && rld[i].reg_rtx)
	    {
	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
	      unsigned int conflict_end
		= end_hard_regno (rld[i].mode, conflict_start);

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }

  /* Free whatever remains of the interval [start_regno, end_regno).  */
  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
5204 
5205 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
5206    specified by OPNUM and TYPE.  */
5207 
static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  /* Each reload type conflicts with a different subset of the usage
     sets; the per-case loops below encode exactly which operands'
     reloads overlap in time with a reload of TYPE for operand OPNUM.  */
  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      /* Likewise any use for an input, output, or address reload of any
	 operand disqualifies the register.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  Note
	 that multiple output operands are emitted in reverse order, so
	 the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  Note that multiple output operands are
	 emitted in reverse order, so the conflicting ones are
	 those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with every input and with insn/operand-address uses.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with every input and output reload, plus any register
	 already used across the insn or for an operand address.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
5365 
5366 /* Return 1 if the value in reload reg REGNO, as used by the reload with
5367    the number RELOADNUM, is still available in REGNO at the end of the insn.
5368 
5369    We can assume that the reload reg was already tested for availability
5370    at the time it is needed, and we should not check this again,
5371    in case the reg has already been marked in use.  */
5372 
static int
reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
{
  int opnum = rld[reloadnum].opnum;
  enum reload_type type = rld[reloadnum].when_needed;
  int i;

  /* See if there is a reload with the same type for this operand, using
     the same register. This case is not handled by the code below.  */
  for (i = reloadnum + 1; i < n_reloads; i++)
    {
      rtx reg;

      if (rld[i].opnum != opnum || rld[i].when_needed != type)
	continue;
      reg = rld[i].reg_rtx;
      if (reg == NULL_RTX)
	continue;
      /* A later reload of the same type/operand reuses this hard reg,
	 so the value cannot survive past it.  */
      if (regno >= REGNO (reg) && regno < END_REGNO (reg))
	return 0;
    }

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_INPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_INPADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      /* Setting OPNUM to reload_n_operands makes the shared loop below
	 scan the output-address sets of every operand.  */
      opnum = reload_n_operands;

      /* fall through */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_OUTPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_OUTADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5530 
5531 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5532    every register in REG.  */
5533 
5534 static bool
5535 reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5536 {
5537   unsigned int i;
5538 
5539   for (i = REGNO (reg); i < END_REGNO (reg); i++)
5540     if (!reload_reg_reaches_end_p (i, reloadnum))
5541       return false;
5542   return true;
5543 }
5544 
5545 
5546 /*  Returns whether R1 and R2 are uniquely chained: the value of one
5547     is used by the other, and that value is not used by any other
5548     reload for this insn.  This is used to partially undo the decision
5549     made in find_reloads when in the case of multiple
5550     RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5551     RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5552     reloads.  This code tries to avoid the conflict created by that
5553     change.  It might be cleaner to explicitly keep track of which
5554     RELOAD_FOR_OPADDR_ADDR reload is associated with which
5555     RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5556     this after the fact. */
5557 static bool
5558 reloads_unique_chain_p (int r1, int r2)
5559 {
5560   int i;
5561 
5562   /* We only check input reloads.  */
5563   if (! rld[r1].in || ! rld[r2].in)
5564     return false;
5565 
5566   /* Avoid anything with output reloads.  */
5567   if (rld[r1].out || rld[r2].out)
5568     return false;
5569 
5570   /* "chained" means one reload is a component of the other reload,
5571      not the same as the other reload.  */
5572   if (rld[r1].opnum != rld[r2].opnum
5573       || rtx_equal_p (rld[r1].in, rld[r2].in)
5574       || rld[r1].optional || rld[r2].optional
5575       || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5576 	    || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5577     return false;
5578 
5579   /* The following loop assumes that r1 is the reload that feeds r2.  */
5580   if (r1 > r2)
5581     std::swap (r1, r2);
5582 
5583   for (i = 0; i < n_reloads; i ++)
5584     /* Look for input reloads that aren't our two */
5585     if (i != r1 && i != r2 && rld[i].in)
5586       {
5587 	/* If our reload is mentioned at all, it isn't a simple chain.  */
5588 	if (reg_mentioned_p (rld[r1].in, rld[i].in))
5589 	  return false;
5590       }
5591   return true;
5592 }
5593 
5594 /* The recursive function change all occurrences of WHAT in *WHERE
5595    to REPL.  */
5596 static void
5597 substitute (rtx *where, const_rtx what, rtx repl)
5598 {
5599   const char *fmt;
5600   int i;
5601   enum rtx_code code;
5602 
5603   if (*where == 0)
5604     return;
5605 
5606   if (*where == what || rtx_equal_p (*where, what))
5607     {
5608       /* Record the location of the changed rtx.  */
5609       substitute_stack.safe_push (where);
5610       *where = repl;
5611       return;
5612     }
5613 
5614   code = GET_CODE (*where);
5615   fmt = GET_RTX_FORMAT (code);
5616   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5617     {
5618       if (fmt[i] == 'E')
5619 	{
5620 	  int j;
5621 
5622 	  for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5623 	    substitute (&XVECEXP (*where, i, j), what, repl);
5624 	}
5625       else if (fmt[i] == 'e')
5626 	substitute (&XEXP (*where, i), what, repl);
5627     }
5628 }
5629 
5630 /* The function returns TRUE if chain of reload R1 and R2 (in any
5631    order) can be evaluated without usage of intermediate register for
5632    the reload containing another reload.  It is important to see
5633    gen_reload to understand what the function is trying to do.  As an
5634    example, let us have reload chain
5635 
5636       r2: const
5637       r1: <something> + const
5638 
5639    and reload R2 got reload reg HR.  The function returns true if
5640    there is a correct insn HR = HR + <something>.  Otherwise,
5641    gen_reload will use intermediate register (and this is the reload
5642    reg for R1) to reload <something>.
5643 
5644    We need this function to find a conflict for chain reloads.  In our
5645    example, if HR = HR + <something> is incorrect insn, then we cannot
5646    use HR as a reload register for R2.  If we do use it then we get a
5647    wrong code:
5648 
5649       HR = const
5650       HR = <something>
5651       HR = HR + HR
5652 
5653 */
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard registers.  */
  bool result = true;
  int regno, code;
  rtx out, in;
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();

  /* Make r2 a component of r1.  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    std::swap (r1, r2);

  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  /* Use whichever of the two reloads already has a hard register.  */
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Build the candidate source by replacing R2's value inside R1's
     value with the shared reload register (HR in the comment above).  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  strip_paradoxical_subreg (&in, &out);

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Tentatively emit HR = HR + <something> and ask recog whether
	 the target accepts it under strict (post-reload) constraints.  */
      insn = emit_insn (gen_rtx_SET (out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1, get_enabled_alternatives (insn));
	}

      /* The tentative insn was only a probe; remove it again.  */
      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!substitute_stack.is_empty ())
    {
      rtx *where = substitute_stack.pop ();
      *where = rld[r2].in;
    }

  return result;
}
5714 
5715 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5716    Return 0 otherwise.
5717 
5718    This function uses the same algorithm as reload_reg_free_p above.  */
5719 
5720 static int
5721 reloads_conflict (int r1, int r2)
5722 {
5723   enum reload_type r1_type = rld[r1].when_needed;
5724   enum reload_type r2_type = rld[r2].when_needed;
5725   int r1_opnum = rld[r1].opnum;
5726   int r2_opnum = rld[r2].opnum;
5727 
5728   /* RELOAD_OTHER conflicts with everything.  */
5729   if (r2_type == RELOAD_OTHER)
5730     return 1;
5731 
5732   /* Otherwise, check conflicts differently for each type.  */
5733 
5734   switch (r1_type)
5735     {
5736     case RELOAD_FOR_INPUT:
5737       return (r2_type == RELOAD_FOR_INSN
5738 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5739 	      || r2_type == RELOAD_FOR_OPADDR_ADDR
5740 	      || r2_type == RELOAD_FOR_INPUT
5741 	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5742 		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5743 		  && r2_opnum > r1_opnum));
5744 
5745     case RELOAD_FOR_INPUT_ADDRESS:
5746       return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5747 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5748 
5749     case RELOAD_FOR_INPADDR_ADDRESS:
5750       return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5751 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5752 
5753     case RELOAD_FOR_OUTPUT_ADDRESS:
5754       return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5755 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5756 
5757     case RELOAD_FOR_OUTADDR_ADDRESS:
5758       return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5759 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5760 
5761     case RELOAD_FOR_OPERAND_ADDRESS:
5762       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5763 	      || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5764 		  && (!reloads_unique_chain_p (r1, r2)
5765 		      || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5766 
5767     case RELOAD_FOR_OPADDR_ADDR:
5768       return (r2_type == RELOAD_FOR_INPUT
5769 	      || r2_type == RELOAD_FOR_OPADDR_ADDR);
5770 
5771     case RELOAD_FOR_OUTPUT:
5772       return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5773 	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5774 		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5775 		  && r2_opnum >= r1_opnum));
5776 
5777     case RELOAD_FOR_INSN:
5778       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5779 	      || r2_type == RELOAD_FOR_INSN
5780 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5781 
5782     case RELOAD_FOR_OTHER_ADDRESS:
5783       return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5784 
5785     case RELOAD_OTHER:
5786       return 1;
5787 
5788     default:
5789       gcc_unreachable ();
5790     }
5791 }
5792 
/* Indexed by reload number, 1 if the incoming value was inherited from
   a previous insn rather than loaded fresh.  */
static char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
static rtx_insn *reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
static rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
static int reload_spill_index[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
static rtx reload_reg_rtx_for_input[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5814 
5815 /* Subroutine of free_for_value_p, used to check a single register.
5816    START_REGNO is the starting regno of the full reload register
5817    (possibly comprising multiple hard registers) that we are considering.  */
5818 
static int
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
			     enum reload_type type, rtx value, rtx out,
			     int reloadnum, int ignore_address_reloads)
{
  int time1;
  /* Set if we see an input reload that must not share its reload register
     with any new earlyclobber, but might otherwise share the reload
     register with an output or input-output reload.  */
  int check_earlyclobber = 0;
  int i;
  int copy = 0;

  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  /* OUT == const0_rtx is a caller convention meaning "this reload only
     copies VALUE"; treat it as having no output part.  */
  if (out == const0_rtx)
    {
      copy = 1;
      out = NULL_RTX;
    }

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
      time1 = copy ? 0 : 1;
      break;
    case RELOAD_OTHER:
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
      break;
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
	 respectively, to the time values for these, we get distinct time
	 values.  To get distinct time values for each operand, we have to
	 multiply opnum by at least three.  We round that up to four because
	 multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 3;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
	 executes (inclusive).  */
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 4
	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
	 is executed.  */
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
    }

  /* Compare against every other reload whose reload register overlaps
     REGNO, rejecting any whose lifetime would overlap ours.  */
  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = rld[i].reg_rtx;
      if (reg && REG_P (reg)
	  && (unsigned) regno - true_regnum (reg) < REG_NREGS (reg)
	  && i != reloadnum)
	{
	  rtx other_input = rld[i].in;

	  /* If the other reload loads the same input value, that
	     will not cause a conflict only if it's loading it into
	     the same register.  */
	  if (true_regnum (reg) != start_regno)
	    other_input = NULL_RTX;
	  if (! other_input || ! rtx_equal_p (other_input, value)
	      || rld[i].out || out)
	    {
	      int time2;
	      switch (rld[i].when_needed)
		{
		case RELOAD_FOR_OTHER_ADDRESS:
		  time2 = 0;
		  break;
		case RELOAD_FOR_INPADDR_ADDRESS:
		  /* find_reloads makes sure that a
		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
		     by at most one - the first -
		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
		     address reload is inherited, the address address reload
		     goes away, so we can ignore this conflict.  */
		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
			 Then the address address is still needed to store
			 back the new address.  */
		      && ! rld[reloadnum].out)
		    continue;
		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
		     reloads go away.  */
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      /* Unless we are reloading an auto_inc expression.  */
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 2;
		  break;
		case RELOAD_FOR_INPUT_ADDRESS:
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 3;
		  break;
		case RELOAD_FOR_INPUT:
		  time2 = rld[i].opnum * 4 + 4;
		  check_earlyclobber = 1;
		  break;
		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
		     == MAX_RECOG_OPERAND * 4  */
		case RELOAD_FOR_OPADDR_ADDR:
		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
		  break;
		case RELOAD_FOR_OPERAND_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
		  check_earlyclobber = 1;
		  break;
		case RELOAD_FOR_INSN:
		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
		  break;
		case RELOAD_FOR_OUTPUT:
		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
		     instruction is executed.  */
		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
		  break;
		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
		     value.  */
		case RELOAD_FOR_OUTADDR_ADDRESS:
		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
		  break;
		case RELOAD_FOR_OUTPUT_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
		  break;
		case RELOAD_OTHER:
		  /* If there is no conflict in the input part, handle this
		     like an output reload.  */
		  if (! rld[i].in || rtx_equal_p (other_input, value))
		    {
		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
		      /* Earlyclobbered outputs must conflict with inputs.  */
		      if (earlyclobber_operand_p (rld[i].out))
			time2 = MAX_RECOG_OPERANDS * 4 + 3;

		      break;
		    }
		  time2 = 1;
		  /* RELOAD_OTHER might be live beyond instruction execution,
		     but this is not obvious when we set time2 = 1.  So check
		     here if there might be a problem with the new reload
		     clobbering the register used by the RELOAD_OTHER.  */
		  if (out)
		    return 0;
		  break;
		default:
		  return 0;
		}
	      /* Reject if the lifetimes overlap: our reg dies at TIME1 and
		 the existing reload's reg is born at TIME2.  */
	      if ((time1 >= time2
		   && (! rld[i].in || rld[i].out
		       || ! rtx_equal_p (other_input, value)))
		  || (out && rld[reloadnum].out_reg
		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
		return 0;
	    }
	}
    }

  /* Earlyclobbered outputs must conflict with inputs.  */
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
    return 0;

  return 1;
}
6033 
6034 /* Return 1 if the value in reload reg REGNO, as used by a reload
6035    needed for the part of the insn specified by OPNUM and TYPE,
6036    may be used to load VALUE into it.
6037 
6038    MODE is the mode in which the register is used, this is needed to
6039    determine how many hard regs to test.
6040 
6041    Other read-only reloads with the same value do not conflict
6042    unless OUT is nonzero and these other reloads have to live while
6043    output reloads live.
6044    If OUT is CONST0_RTX, this is a special case: it means that the
6045    test should not be for using register REGNO as reload register, but
6046    for copying from register REGNO into the reload register.
6047 
6048    RELOADNUM is the number of the reload we want to load this value for;
6049    a reload does not conflict with itself.
6050 
6051    When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
6052    reloads that load an address for the very reload we are considering.
6053 
6054    The caller has to make sure that there is no conflict with the return
6055    register.  */
6056 
6057 static int
6058 free_for_value_p (int regno, machine_mode mode, int opnum,
6059 		  enum reload_type type, rtx value, rtx out, int reloadnum,
6060 		  int ignore_address_reloads)
6061 {
6062   int nregs = hard_regno_nregs (regno, mode);
6063   while (nregs-- > 0)
6064     if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
6065 				       value, out, reloadnum,
6066 				       ignore_address_reloads))
6067       return 0;
6068   return 1;
6069 }
6070 
6071 /* Return nonzero if the rtx X is invariant over the current function.  */
6072 /* ??? Actually, the places where we use this expect exactly what is
6073    tested here, and not everything that is function invariant.  In
6074    particular, the frame pointer and arg pointer are special cased;
6075    pic_offset_table_rtx is not, and we must not spill these things to
6076    memory.  */
6077 
6078 int
6079 function_invariant_p (const_rtx x)
6080 {
6081   if (CONSTANT_P (x))
6082     return 1;
6083   if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6084     return 1;
6085   if (GET_CODE (x) == PLUS
6086       && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6087       && GET_CODE (XEXP (x, 1)) == CONST_INT)
6088     return 1;
6089   return 0;
6090 }
6091 
6092 /* Determine whether the reload reg X overlaps any rtx'es used for
6093    overriding inheritance.  Return nonzero if so.  */
6094 
6095 static int
6096 conflicts_with_override (rtx x)
6097 {
6098   int i;
6099   for (i = 0; i < n_reloads; i++)
6100     if (reload_override_in[i]
6101 	&& reg_overlap_mentioned_p (x, reload_override_in[i]))
6102       return 1;
6103   return 0;
6104 }
6105 
6106 /* Give an error message saying we failed to find a reload for INSN,
6107    and clear out reload R.  */
6108 static void
6109 failed_reload (rtx_insn *insn, int r)
6110 {
6111   if (asm_noperands (PATTERN (insn)) < 0)
6112     /* It's the compiler's fault.  */
6113     fatal_insn ("could not find a spill register", insn);
6114 
6115   /* It's the user's fault; the operand's mode and constraint
6116      don't match.  Disable this reload so we don't crash in final.  */
6117   error_for_asm (insn,
6118 		 "%<asm%> operand constraint incompatible with operand size");
6119   rld[r].in = 0;
6120   rld[r].out = 0;
6121   rld[r].reg_rtx = 0;
6122   rld[r].optional = 1;
6123   rld[r].secondary_p = 1;
6124 }
6125 
6126 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6127    for reload R.  If it's valid, get an rtx for it.  Return nonzero if
6128    successful.  */
6129 static int
6130 set_reload_reg (int i, int r)
6131 {
6132   int regno;
6133   rtx reg = spill_reg_rtx[i];
6134 
6135   if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6136     spill_reg_rtx[i] = reg
6137       = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6138 
6139   regno = true_regnum (reg);
6140 
6141   /* Detect when the reload reg can't hold the reload mode.
6142      This used to be one `if', but Sequent compiler can't handle that.  */
6143   if (targetm.hard_regno_mode_ok (regno, rld[r].mode))
6144     {
6145       machine_mode test_mode = VOIDmode;
6146       if (rld[r].in)
6147 	test_mode = GET_MODE (rld[r].in);
6148       /* If rld[r].in has VOIDmode, it means we will load it
6149 	 in whatever mode the reload reg has: to wit, rld[r].mode.
6150 	 We have already tested that for validity.  */
6151       /* Aside from that, we need to test that the expressions
6152 	 to reload from or into have modes which are valid for this
6153 	 reload register.  Otherwise the reload insns would be invalid.  */
6154       if (! (rld[r].in != 0 && test_mode != VOIDmode
6155 	     && !targetm.hard_regno_mode_ok (regno, test_mode)))
6156 	if (! (rld[r].out != 0
6157 	       && !targetm.hard_regno_mode_ok (regno, GET_MODE (rld[r].out))))
6158 	  {
6159 	    /* The reg is OK.  */
6160 	    last_spill_reg = i;
6161 
6162 	    /* Mark as in use for this insn the reload regs we use
6163 	       for this.  */
6164 	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6165 				    rld[r].when_needed, rld[r].mode);
6166 
6167 	    rld[r].reg_rtx = reg;
6168 	    reload_spill_index[r] = spill_regs[i];
6169 	    return 1;
6170 	  }
6171     }
6172   return 0;
6173 }
6174 
6175 /* Find a spill register to use as a reload register for reload R.
6176    LAST_RELOAD is nonzero if this is the last reload for the insn being
6177    processed.
6178 
6179    Set rld[R].reg_rtx to the register allocated.
6180 
6181    We return 1 if successful, or 0 if we couldn't find a spill reg and
6182    we didn't change anything.  */
6183 
static int
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
		     int last_reload)
{
  int i, pass, count;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = rld[r].nregs > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use three passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers which are not "bad", then
     finally any register.

     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 3; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      i = last_spill_reg;

      for (count = 0; count < n_spills; count++)
	{
	  int rclass = (int) rld[r].rclass;
	  int regnum;

	  /* Advance the round-robin index, wrapping at n_spills.  */
	  i++;
	  if (i >= n_spills)
	    i -= n_spills;
	  regnum = spill_regs[i];

	  /* A candidate must be free (or hold a reusable value), be in
	     the reload's class, support the reload mode, and on pass 0
	     must already be a shareable reload reg of this insn.  */
	  if ((reload_reg_free_p (regnum, rld[r].opnum,
				  rld[r].when_needed)
	       || (rld[r].in
		   /* We check reload_reg_used to make sure we
		      don't clobber the return register.  */
		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
					rld[r].when_needed, rld[r].in,
					rld[r].out, r, 1)))
	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
	      && targetm.hard_regno_mode_ok (regnum, rld[r].mode)
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 regnum)
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      regnum))))
	    {
	      int nr = hard_regno_nregs (regnum, rld[r].mode);

	      /* During the second pass we want to avoid reload registers
		 which are "bad" for this reload.  */
	      if (pass == 1
		  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
		continue;

	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = rld[r].nregs;
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.  */
	      while (nr > 1)
		{
		  int regno = regnum + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, rld[r].opnum,
					      rld[r].when_needed)))
		    break;
		  nr--;
		}
	      /* NR dropped to 1 only if every register in the group
		 checked out; otherwise keep scanning.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on the current pass, omit later passes.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count >= n_spills)
    return 0;

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  return set_reload_reg (i, r);
}
6310 
6311 /* Initialize all the tables needed to allocate reload registers.
6312    CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6313    is the array we use to restore the reg_rtx field for every reload.  */
6314 
static void
choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
{
  int i;

  /* Restore each reload's reg_rtx from the saved copy; an earlier
     inheritance attempt may have changed it.  */
  for (i = 0; i < n_reloads; i++)
    rld[i].reg_rtx = save_reload_reg_rtx[i];

  /* Forget all per-insn inheritance decisions.  */
  memset (reload_inherited, 0, MAX_RELOADS);
  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));

  /* No reload registers are in use yet for any part of the insn.  */
  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  /* Compute the hard registers referenced by this insn: those live
     through it, those set or dead in it, plus the hard regs backing
     any such pseudos.  */
  CLEAR_HARD_REG_SET (reg_used_in_insn);
  {
    HARD_REG_SET tmp;
    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
  }

  /* Clear the per-operand usage sets as well.  */
  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
    }

  /* Any register outside this chain's usable spill set is unavailable.  */
  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);

  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

  for (i = 0; i < n_reloads; i++)
    /* If we have already decided to use a certain register,
       don't use it in another way.  */
    if (rld[i].reg_rtx)
      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
			      rld[i].when_needed, rld[i].mode);
}
6366 
6367 /* If X is not a subreg, return it unmodified.  If it is a subreg,
6368    look up whether we made a replacement for the SUBREG_REG.  Return
6369    either the replacement or the SUBREG_REG.  */
6370 
6371 static rtx
6372 replaced_subreg (rtx x)
6373 {
6374   if (GET_CODE (x) == SUBREG)
6375     return find_replacement (&SUBREG_REG (x));
6376   return x;
6377 }
6378 
6379 /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6380    mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6381    SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6382    otherwise it is NULL.  */
6383 
static poly_int64
compute_reload_subreg_offset (machine_mode outermode,
			      rtx subreg,
			      machine_mode innermode)
{
  poly_int64 outer_offset;
  machine_mode middlemode;

  /* With no SUBREG involved, the pseudo itself is being reloaded:
     use the plain lowpart offset.  */
  if (!subreg)
    return subreg_lowpart_offset (outermode, innermode);

  outer_offset = SUBREG_BYTE (subreg);
  middlemode = GET_MODE (SUBREG_REG (subreg));

  /* If SUBREG is paradoxical then return the normal lowpart offset
     for OUTERMODE and INNERMODE.  Our caller has already checked
     that OUTERMODE fits in INNERMODE.  */
  if (paradoxical_subreg_p (outermode, middlemode))
    return subreg_lowpart_offset (outermode, innermode);

  /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
     plus the normal lowpart offset for MIDDLEMODE and INNERMODE.  */
  return outer_offset + subreg_lowpart_offset (middlemode, innermode);
}
6408 
6409 /* Assign hard reg targets for the pseudo-registers we must reload
6410    into hard regs for this insn.
6411    Also output the instructions to copy them in and out of the hard regs.
6412 
6413    For machines with register classes, we are responsible for
6414    finding a reload reg in the proper class.  */
6415 
6416 static void
6417 choose_reload_regs (struct insn_chain *chain)
6418 {
6419   rtx_insn *insn = chain->insn;
6420   int i, j;
6421   unsigned int max_group_size = 1;
6422   enum reg_class group_class = NO_REGS;
6423   int pass, win, inheritance;
6424 
6425   rtx save_reload_reg_rtx[MAX_RELOADS];
6426 
6427   /* In order to be certain of getting the registers we need,
6428      we must sort the reloads into order of increasing register class.
6429      Then our grabbing of reload registers will parallel the process
6430      that provided the reload registers.
6431 
6432      Also note whether any of the reloads wants a consecutive group of regs.
6433      If so, record the maximum size of the group desired and what
6434      register class contains all the groups needed by this insn.  */
6435 
6436   for (j = 0; j < n_reloads; j++)
6437     {
6438       reload_order[j] = j;
6439       if (rld[j].reg_rtx != NULL_RTX)
6440 	{
6441 	  gcc_assert (REG_P (rld[j].reg_rtx)
6442 		      && HARD_REGISTER_P (rld[j].reg_rtx));
6443 	  reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6444 	}
6445       else
6446 	reload_spill_index[j] = -1;
6447 
6448       if (rld[j].nregs > 1)
6449 	{
6450 	  max_group_size = MAX (rld[j].nregs, max_group_size);
6451 	  group_class
6452 	    = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6453 	}
6454 
6455       save_reload_reg_rtx[j] = rld[j].reg_rtx;
6456     }
6457 
6458   if (n_reloads > 1)
6459     qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6460 
6461   /* If -O, try first with inheritance, then turning it off.
6462      If not -O, don't do inheritance.
6463      Using inheritance when not optimizing leads to paradoxes
6464      with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6465      because one side of the comparison might be inherited.  */
6466   win = 0;
6467   for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6468     {
6469       choose_reload_regs_init (chain, save_reload_reg_rtx);
6470 
6471       /* Process the reloads in order of preference just found.
6472 	 Beyond this point, subregs can be found in reload_reg_rtx.
6473 
6474 	 This used to look for an existing reloaded home for all of the
6475 	 reloads, and only then perform any new reloads.  But that could lose
6476 	 if the reloads were done out of reg-class order because a later
6477 	 reload with a looser constraint might have an old home in a register
6478 	 needed by an earlier reload with a tighter constraint.
6479 
6480 	 To solve this, we make two passes over the reloads, in the order
6481 	 described above.  In the first pass we try to inherit a reload
6482 	 from a previous insn.  If there is a later reload that needs a
6483 	 class that is a proper subset of the class being processed, we must
6484 	 also allocate a spill register during the first pass.
6485 
6486 	 Then make a second pass over the reloads to allocate any reloads
6487 	 that haven't been given registers yet.  */
6488 
6489       for (j = 0; j < n_reloads; j++)
6490 	{
6491 	  int r = reload_order[j];
6492 	  rtx search_equiv = NULL_RTX;
6493 
6494 	  /* Ignore reloads that got marked inoperative.  */
6495 	  if (rld[r].out == 0 && rld[r].in == 0
6496 	      && ! rld[r].secondary_p)
6497 	    continue;
6498 
6499 	  /* If find_reloads chose to use reload_in or reload_out as a reload
6500 	     register, we don't need to chose one.  Otherwise, try even if it
6501 	     found one since we might save an insn if we find the value lying
6502 	     around.
6503 	     Try also when reload_in is a pseudo without a hard reg.  */
6504 	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
6505 	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6506 		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6507 		      && !MEM_P (rld[r].in)
6508 		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6509 	    continue;
6510 
6511 #if 0 /* No longer needed for correct operation.
6512 	 It might give better code, or might not; worth an experiment?  */
6513 	  /* If this is an optional reload, we can't inherit from earlier insns
6514 	     until we are sure that any non-optional reloads have been allocated.
6515 	     The following code takes advantage of the fact that optional reloads
6516 	     are at the end of reload_order.  */
6517 	  if (rld[r].optional != 0)
6518 	    for (i = 0; i < j; i++)
6519 	      if ((rld[reload_order[i]].out != 0
6520 		   || rld[reload_order[i]].in != 0
6521 		   || rld[reload_order[i]].secondary_p)
6522 		  && ! rld[reload_order[i]].optional
6523 		  && rld[reload_order[i]].reg_rtx == 0)
6524 		allocate_reload_reg (chain, reload_order[i], 0);
6525 #endif
6526 
6527 	  /* First see if this pseudo is already available as reloaded
6528 	     for a previous insn.  We cannot try to inherit for reloads
6529 	     that are smaller than the maximum number of registers needed
6530 	     for groups unless the register we would allocate cannot be used
6531 	     for the groups.
6532 
6533 	     We could check here to see if this is a secondary reload for
6534 	     an object that is already in a register of the desired class.
6535 	     This would avoid the need for the secondary reload register.
6536 	     But this is complex because we can't easily determine what
6537 	     objects might want to be loaded via this reload.  So let a
6538 	     register be allocated here.  In `emit_reload_insns' we suppress
6539 	     one of the loads in the case described above.  */
6540 
6541 	  if (inheritance)
6542 	    {
6543 	      poly_int64 byte = 0;
6544 	      int regno = -1;
6545 	      machine_mode mode = VOIDmode;
6546 	      rtx subreg = NULL_RTX;
6547 
6548 	      if (rld[r].in == 0)
6549 		;
6550 	      else if (REG_P (rld[r].in))
6551 		{
6552 		  regno = REGNO (rld[r].in);
6553 		  mode = GET_MODE (rld[r].in);
6554 		}
6555 	      else if (REG_P (rld[r].in_reg))
6556 		{
6557 		  regno = REGNO (rld[r].in_reg);
6558 		  mode = GET_MODE (rld[r].in_reg);
6559 		}
6560 	      else if (GET_CODE (rld[r].in_reg) == SUBREG
6561 		       && REG_P (SUBREG_REG (rld[r].in_reg)))
6562 		{
6563 		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
6564 		  if (regno < FIRST_PSEUDO_REGISTER)
6565 		    regno = subreg_regno (rld[r].in_reg);
6566 		  else
6567 		    {
6568 		      subreg = rld[r].in_reg;
6569 		      byte = SUBREG_BYTE (subreg);
6570 		    }
6571 		  mode = GET_MODE (rld[r].in_reg);
6572 		}
6573 #if AUTO_INC_DEC
6574 	      else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6575 		       && REG_P (XEXP (rld[r].in_reg, 0)))
6576 		{
6577 		  regno = REGNO (XEXP (rld[r].in_reg, 0));
6578 		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6579 		  rld[r].out = rld[r].in;
6580 		}
6581 #endif
6582 #if 0
6583 	      /* This won't work, since REGNO can be a pseudo reg number.
6584 		 Also, it takes much more hair to keep track of all the things
6585 		 that can invalidate an inherited reload of part of a pseudoreg.  */
6586 	      else if (GET_CODE (rld[r].in) == SUBREG
6587 		       && REG_P (SUBREG_REG (rld[r].in)))
6588 		regno = subreg_regno (rld[r].in);
6589 #endif
6590 
6591 	      if (regno >= 0
6592 		  && reg_last_reload_reg[regno] != 0
6593 		  && (known_ge
6594 		      (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno])),
6595 		       GET_MODE_SIZE (mode) + byte))
6596 		  /* Verify that the register it's in can be used in
6597 		     mode MODE.  */
6598 		  && (REG_CAN_CHANGE_MODE_P
6599 		      (REGNO (reg_last_reload_reg[regno]),
6600 		       GET_MODE (reg_last_reload_reg[regno]),
6601 		       mode)))
6602 		{
6603 		  enum reg_class rclass = rld[r].rclass, last_class;
6604 		  rtx last_reg = reg_last_reload_reg[regno];
6605 
6606 		  i = REGNO (last_reg);
6607 		  byte = compute_reload_subreg_offset (mode,
6608 						       subreg,
6609 						       GET_MODE (last_reg));
6610 		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6611 		  last_class = REGNO_REG_CLASS (i);
6612 
6613 		  if (reg_reloaded_contents[i] == regno
6614 		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6615 		      && targetm.hard_regno_mode_ok (i, rld[r].mode)
6616 		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6617 			  /* Even if we can't use this register as a reload
6618 			     register, we might use it for reload_override_in,
6619 			     if copying it to the desired class is cheap
6620 			     enough.  */
6621 			  || ((register_move_cost (mode, last_class, rclass)
6622 			       < memory_move_cost (mode, rclass, true))
6623 			      && (secondary_reload_class (1, rclass, mode,
6624 							  last_reg)
6625 				  == NO_REGS)
6626 			      && !(targetm.secondary_memory_needed
6627 				   (mode, last_class, rclass))))
6628 		      && (rld[r].nregs == max_group_size
6629 			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6630 						  i))
6631 		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6632 					   rld[r].when_needed, rld[r].in,
6633 					   const0_rtx, r, 1))
6634 		    {
6635 		      /* If a group is needed, verify that all the subsequent
6636 			 registers still have their values intact.  */
6637 		      int nr = hard_regno_nregs (i, rld[r].mode);
6638 		      int k;
6639 
6640 		      for (k = 1; k < nr; k++)
6641 			if (reg_reloaded_contents[i + k] != regno
6642 			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6643 			  break;
6644 
6645 		      if (k == nr)
6646 			{
6647 			  int i1;
6648 			  int bad_for_class;
6649 
6650 			  last_reg = (GET_MODE (last_reg) == mode
6651 				      ? last_reg : gen_rtx_REG (mode, i));
6652 
6653 			  bad_for_class = 0;
6654 			  for (k = 0; k < nr; k++)
6655 			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6656 								  i+k);
6657 
6658 			  /* We found a register that contains the
6659 			     value we need.  If this register is the
6660 			     same as an `earlyclobber' operand of the
6661 			     current insn, just mark it as a place to
6662 			     reload from since we can't use it as the
6663 			     reload register itself.  */
6664 
6665 			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
6666 			    if (reg_overlap_mentioned_for_reload_p
6667 				(reg_last_reload_reg[regno],
6668 				 reload_earlyclobbers[i1]))
6669 			      break;
6670 
6671 			  if (i1 != n_earlyclobbers
6672 			      || ! (free_for_value_p (i, rld[r].mode,
6673 						      rld[r].opnum,
6674 						      rld[r].when_needed, rld[r].in,
6675 						      rld[r].out, r, 1))
6676 			      /* Don't use it if we'd clobber a pseudo reg.  */
6677 			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6678 				  && rld[r].out
6679 				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6680 			      /* Don't clobber the frame pointer.  */
6681 			      || (i == HARD_FRAME_POINTER_REGNUM
6682 				  && frame_pointer_needed
6683 				  && rld[r].out)
6684 			      /* Don't really use the inherited spill reg
6685 				 if we need it wider than we've got it.  */
6686 			      || paradoxical_subreg_p (rld[r].mode, mode)
6687 			      || bad_for_class
6688 
6689 			      /* If find_reloads chose reload_out as reload
6690 				 register, stay with it - that leaves the
6691 				 inherited register for subsequent reloads.  */
6692 			      || (rld[r].out && rld[r].reg_rtx
6693 				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6694 			    {
6695 			      if (! rld[r].optional)
6696 				{
6697 				  reload_override_in[r] = last_reg;
6698 				  reload_inheritance_insn[r]
6699 				    = reg_reloaded_insn[i];
6700 				}
6701 			    }
6702 			  else
6703 			    {
6704 			      int k;
6705 			      /* We can use this as a reload reg.  */
6706 			      /* Mark the register as in use for this part of
6707 				 the insn.  */
6708 			      mark_reload_reg_in_use (i,
6709 						      rld[r].opnum,
6710 						      rld[r].when_needed,
6711 						      rld[r].mode);
6712 			      rld[r].reg_rtx = last_reg;
6713 			      reload_inherited[r] = 1;
6714 			      reload_inheritance_insn[r]
6715 				= reg_reloaded_insn[i];
6716 			      reload_spill_index[r] = i;
6717 			      for (k = 0; k < nr; k++)
6718 				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6719 						  i + k);
6720 			    }
6721 			}
6722 		    }
6723 		}
6724 	    }
6725 
6726 	  /* Here's another way to see if the value is already lying around.  */
6727 	  if (inheritance
6728 	      && rld[r].in != 0
6729 	      && ! reload_inherited[r]
6730 	      && rld[r].out == 0
6731 	      && (CONSTANT_P (rld[r].in)
6732 		  || GET_CODE (rld[r].in) == PLUS
6733 		  || REG_P (rld[r].in)
6734 		  || MEM_P (rld[r].in))
6735 	      && (rld[r].nregs == max_group_size
6736 		  || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6737 	    search_equiv = rld[r].in;
6738 
6739 	  if (search_equiv)
6740 	    {
6741 	      rtx equiv
6742 		= find_equiv_reg (search_equiv, insn, rld[r].rclass,
6743 				  -1, NULL, 0, rld[r].mode);
6744 	      int regno = 0;
6745 
6746 	      if (equiv != 0)
6747 		{
6748 		  if (REG_P (equiv))
6749 		    regno = REGNO (equiv);
6750 		  else
6751 		    {
6752 		      /* This must be a SUBREG of a hard register.
6753 			 Make a new REG since this might be used in an
6754 			 address and not all machines support SUBREGs
6755 			 there.  */
6756 		      gcc_assert (GET_CODE (equiv) == SUBREG);
6757 		      regno = subreg_regno (equiv);
6758 		      equiv = gen_rtx_REG (rld[r].mode, regno);
6759 		      /* If we choose EQUIV as the reload register, but the
6760 			 loop below decides to cancel the inheritance, we'll
6761 			 end up reloading EQUIV in rld[r].mode, not the mode
6762 			 it had originally.  That isn't safe when EQUIV isn't
6763 			 available as a spill register since its value might
6764 			 still be live at this point.  */
6765 		      for (i = regno; i < regno + (int) rld[r].nregs; i++)
6766 			if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6767 			  equiv = 0;
6768 		    }
6769 		}
6770 
6771 	      /* If we found a spill reg, reject it unless it is free
6772 		 and of the desired class.  */
6773 	      if (equiv != 0)
6774 		{
6775 		  int regs_used = 0;
6776 		  int bad_for_class = 0;
6777 		  int max_regno = regno + rld[r].nregs;
6778 
6779 		  for (i = regno; i < max_regno; i++)
6780 		    {
6781 		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6782 						      i);
6783 		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6784 							   i);
6785 		    }
6786 
6787 		  if ((regs_used
6788 		       && ! free_for_value_p (regno, rld[r].mode,
6789 					      rld[r].opnum, rld[r].when_needed,
6790 					      rld[r].in, rld[r].out, r, 1))
6791 		      || bad_for_class)
6792 		    equiv = 0;
6793 		}
6794 
6795 	      if (equiv != 0
6796 		  && !targetm.hard_regno_mode_ok (regno, rld[r].mode))
6797 		equiv = 0;
6798 
6799 	      /* We found a register that contains the value we need.
6800 		 If this register is the same as an `earlyclobber' operand
6801 		 of the current insn, just mark it as a place to reload from
6802 		 since we can't use it as the reload register itself.  */
6803 
6804 	      if (equiv != 0)
6805 		for (i = 0; i < n_earlyclobbers; i++)
6806 		  if (reg_overlap_mentioned_for_reload_p (equiv,
6807 							  reload_earlyclobbers[i]))
6808 		    {
6809 		      if (! rld[r].optional)
6810 			reload_override_in[r] = equiv;
6811 		      equiv = 0;
6812 		      break;
6813 		    }
6814 
6815 	      /* If the equiv register we have found is explicitly clobbered
6816 		 in the current insn, it depends on the reload type if we
6817 		 can use it, use it for reload_override_in, or not at all.
6818 		 In particular, we then can't use EQUIV for a
6819 		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
6820 
6821 	      if (equiv != 0)
6822 		{
6823 		  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6824 		    switch (rld[r].when_needed)
6825 		      {
6826 		      case RELOAD_FOR_OTHER_ADDRESS:
6827 		      case RELOAD_FOR_INPADDR_ADDRESS:
6828 		      case RELOAD_FOR_INPUT_ADDRESS:
6829 		      case RELOAD_FOR_OPADDR_ADDR:
6830 			break;
6831 		      case RELOAD_OTHER:
6832 		      case RELOAD_FOR_INPUT:
6833 		      case RELOAD_FOR_OPERAND_ADDRESS:
6834 			if (! rld[r].optional)
6835 			  reload_override_in[r] = equiv;
6836 			/* Fall through.  */
6837 		      default:
6838 			equiv = 0;
6839 			break;
6840 		      }
6841 		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6842 		    switch (rld[r].when_needed)
6843 		      {
6844 		      case RELOAD_FOR_OTHER_ADDRESS:
6845 		      case RELOAD_FOR_INPADDR_ADDRESS:
6846 		      case RELOAD_FOR_INPUT_ADDRESS:
6847 		      case RELOAD_FOR_OPADDR_ADDR:
6848 		      case RELOAD_FOR_OPERAND_ADDRESS:
6849 		      case RELOAD_FOR_INPUT:
6850 			break;
6851 		      case RELOAD_OTHER:
6852 			if (! rld[r].optional)
6853 			  reload_override_in[r] = equiv;
6854 			/* Fall through.  */
6855 		      default:
6856 			equiv = 0;
6857 			break;
6858 		      }
6859 		}
6860 
6861 	      /* If we found an equivalent reg, say no code need be generated
6862 		 to load it, and use it as our reload reg.  */
6863 	      if (equiv != 0
6864 		  && (regno != HARD_FRAME_POINTER_REGNUM
6865 		      || !frame_pointer_needed))
6866 		{
6867 		  int nr = hard_regno_nregs (regno, rld[r].mode);
6868 		  int k;
6869 		  rld[r].reg_rtx = equiv;
6870 		  reload_spill_index[r] = regno;
6871 		  reload_inherited[r] = 1;
6872 
6873 		  /* If reg_reloaded_valid is not set for this register,
6874 		     there might be a stale spill_reg_store lying around.
6875 		     We must clear it, since otherwise emit_reload_insns
6876 		     might delete the store.  */
6877 		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6878 		    spill_reg_store[regno] = NULL;
6879 		  /* If any of the hard registers in EQUIV are spill
6880 		     registers, mark them as in use for this insn.  */
6881 		  for (k = 0; k < nr; k++)
6882 		    {
6883 		      i = spill_reg_order[regno + k];
6884 		      if (i >= 0)
6885 			{
6886 			  mark_reload_reg_in_use (regno, rld[r].opnum,
6887 						  rld[r].when_needed,
6888 						  rld[r].mode);
6889 			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6890 					    regno + k);
6891 			}
6892 		    }
6893 		}
6894 	    }
6895 
6896 	  /* If we found a register to use already, or if this is an optional
6897 	     reload, we are done.  */
6898 	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6899 	    continue;
6900 
6901 #if 0
6902 	  /* No longer needed for correct operation.  Might or might
6903 	     not give better code on the average.  Want to experiment?  */
6904 
6905 	  /* See if there is a later reload that has a class different from our
6906 	     class that intersects our class or that requires less register
6907 	     than our reload.  If so, we must allocate a register to this
6908 	     reload now, since that reload might inherit a previous reload
6909 	     and take the only available register in our class.  Don't do this
6910 	     for optional reloads since they will force all previous reloads
6911 	     to be allocated.  Also don't do this for reloads that have been
6912 	     turned off.  */
6913 
6914 	  for (i = j + 1; i < n_reloads; i++)
6915 	    {
6916 	      int s = reload_order[i];
6917 
6918 	      if ((rld[s].in == 0 && rld[s].out == 0
6919 		   && ! rld[s].secondary_p)
6920 		  || rld[s].optional)
6921 		continue;
6922 
6923 	      if ((rld[s].rclass != rld[r].rclass
6924 		   && reg_classes_intersect_p (rld[r].rclass,
6925 					       rld[s].rclass))
6926 		  || rld[s].nregs < rld[r].nregs)
6927 		break;
6928 	    }
6929 
6930 	  if (i == n_reloads)
6931 	    continue;
6932 
6933 	  allocate_reload_reg (chain, r, j == n_reloads - 1);
6934 #endif
6935 	}
6936 
6937       /* Now allocate reload registers for anything non-optional that
6938 	 didn't get one yet.  */
6939       for (j = 0; j < n_reloads; j++)
6940 	{
6941 	  int r = reload_order[j];
6942 
6943 	  /* Ignore reloads that got marked inoperative.  */
6944 	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6945 	    continue;
6946 
6947 	  /* Skip reloads that already have a register allocated or are
6948 	     optional.  */
6949 	  if (rld[r].reg_rtx != 0 || rld[r].optional)
6950 	    continue;
6951 
6952 	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6953 	    break;
6954 	}
6955 
6956       /* If that loop got all the way, we have won.  */
6957       if (j == n_reloads)
6958 	{
6959 	  win = 1;
6960 	  break;
6961 	}
6962 
6963       /* Loop around and try without any inheritance.  */
6964     }
6965 
6966   if (! win)
6967     {
6968       /* First undo everything done by the failed attempt
6969 	 to allocate with inheritance.  */
6970       choose_reload_regs_init (chain, save_reload_reg_rtx);
6971 
6972       /* Some sanity tests to verify that the reloads found in the first
6973 	 pass are identical to the ones we have now.  */
6974       gcc_assert (chain->n_reloads == n_reloads);
6975 
6976       for (i = 0; i < n_reloads; i++)
6977 	{
6978 	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6979 	    continue;
6980 	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6981 	  for (j = 0; j < n_spills; j++)
6982 	    if (spill_regs[j] == chain->rld[i].regno)
6983 	      if (! set_reload_reg (j, i))
6984 		failed_reload (chain->insn, i);
6985 	}
6986     }
6987 
6988   /* If we thought we could inherit a reload, because it seemed that
6989      nothing else wanted the same reload register earlier in the insn,
6990      verify that assumption, now that all reloads have been assigned.
6991      Likewise for reloads where reload_override_in has been set.  */
6992 
6993   /* If doing expensive optimizations, do one preliminary pass that doesn't
6994      cancel any inheritance, but removes reloads that have been needed only
6995      for reloads that we know can be inherited.  */
6996   for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6997     {
6998       for (j = 0; j < n_reloads; j++)
6999 	{
7000 	  int r = reload_order[j];
7001 	  rtx check_reg;
7002 	  rtx tem;
7003 	  if (reload_inherited[r] && rld[r].reg_rtx)
7004 	    check_reg = rld[r].reg_rtx;
7005 	  else if (reload_override_in[r]
7006 		   && (REG_P (reload_override_in[r])
7007 		       || GET_CODE (reload_override_in[r]) == SUBREG))
7008 	    check_reg = reload_override_in[r];
7009 	  else
7010 	    continue;
7011 	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
7012 				  rld[r].opnum, rld[r].when_needed, rld[r].in,
7013 				  (reload_inherited[r]
7014 				   ? rld[r].out : const0_rtx),
7015 				  r, 1))
7016 	    {
7017 	      if (pass)
7018 		continue;
7019 	      reload_inherited[r] = 0;
7020 	      reload_override_in[r] = 0;
7021 	    }
7022 	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
7023 	     reload_override_in, then we do not need its related
7024 	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
7025 	     likewise for other reload types.
7026 	     We handle this by removing a reload when its only replacement
7027 	     is mentioned in reload_in of the reload we are going to inherit.
7028 	     A special case are auto_inc expressions; even if the input is
7029 	     inherited, we still need the address for the output.  We can
7030 	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
7031 	     If we succeeded removing some reload and we are doing a preliminary
7032 	     pass just to remove such reloads, make another pass, since the
7033 	     removal of one reload might allow us to inherit another one.  */
7034 	  else if (rld[r].in
7035 		   && rld[r].out != rld[r].in
7036 		   && remove_address_replacements (rld[r].in))
7037 	    {
7038 	      if (pass)
7039 	        pass = 2;
7040 	    }
7041 	  /* If we needed a memory location for the reload, we also have to
7042 	     remove its related reloads.  */
7043 	  else if (rld[r].in
7044 		   && rld[r].out != rld[r].in
7045 		   && (tem = replaced_subreg (rld[r].in), REG_P (tem))
7046 		   && REGNO (tem) < FIRST_PSEUDO_REGISTER
7047 		   && (targetm.secondary_memory_needed
7048 		       (rld[r].inmode, REGNO_REG_CLASS (REGNO (tem)),
7049 			rld[r].rclass))
7050 		   && remove_address_replacements
7051 		      (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
7052 					  rld[r].when_needed)))
7053 	    {
7054 	      if (pass)
7055 	        pass = 2;
7056 	    }
7057 	}
7058     }
7059 
7060   /* Now that reload_override_in is known valid,
7061      actually override reload_in.  */
7062   for (j = 0; j < n_reloads; j++)
7063     if (reload_override_in[j])
7064       rld[j].in = reload_override_in[j];
7065 
7066   /* If this reload won't be done because it has been canceled or is
7067      optional and not inherited, clear reload_reg_rtx so other
7068      routines (such as subst_reloads) don't get confused.  */
7069   for (j = 0; j < n_reloads; j++)
7070     if (rld[j].reg_rtx != 0
7071 	&& ((rld[j].optional && ! reload_inherited[j])
7072 	    || (rld[j].in == 0 && rld[j].out == 0
7073 		&& ! rld[j].secondary_p)))
7074       {
7075 	int regno = true_regnum (rld[j].reg_rtx);
7076 
7077 	if (spill_reg_order[regno] >= 0)
7078 	  clear_reload_reg_in_use (regno, rld[j].opnum,
7079 				   rld[j].when_needed, rld[j].mode);
7080 	rld[j].reg_rtx = 0;
7081 	reload_spill_index[j] = -1;
7082       }
7083 
7084   /* Record which pseudos and which spill regs have output reloads.  */
7085   for (j = 0; j < n_reloads; j++)
7086     {
7087       int r = reload_order[j];
7088 
7089       i = reload_spill_index[r];
7090 
7091       /* I is nonneg if this reload uses a register.
7092 	 If rld[r].reg_rtx is 0, this is an optional reload
7093 	 that we opted to ignore.  */
7094       if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
7095 	  && rld[r].reg_rtx != 0)
7096 	{
7097 	  int nregno = REGNO (rld[r].out_reg);
7098 	  int nr = 1;
7099 
7100 	  if (nregno < FIRST_PSEUDO_REGISTER)
7101 	    nr = hard_regno_nregs (nregno, rld[r].mode);
7102 
7103 	  while (--nr >= 0)
7104 	    SET_REGNO_REG_SET (&reg_has_output_reload,
7105 			       nregno + nr);
7106 
7107 	  if (i >= 0)
7108 	    add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
7109 
7110 	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
7111 		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
7112 		      || rld[r].when_needed == RELOAD_FOR_INSN);
7113 	}
7114     }
7115 }
7116 
7117 /* Deallocate the reload register for reload R.  This is called from
7118    remove_address_replacements.  */
7119 
7120 void
7121 deallocate_reload_reg (int r)
7122 {
7123   int regno;
7124 
7125   if (! rld[r].reg_rtx)
7126     return;
7127   regno = true_regnum (rld[r].reg_rtx);
7128   rld[r].reg_rtx = 0;
7129   if (spill_reg_order[regno] >= 0)
7130     clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7131 			     rld[r].mode);
7132   reload_spill_index[r] = -1;
7133 }
7134 
/* These arrays are filled by emit_reload_insns and its subroutines.
   Each holds a pending insn sequence for one reload type (and, for the
   per-operand arrays, one operand number); see the switch over
   rl->when_needed in emit_input_reload_insns for the exact mapping.  */
/* RELOAD_FOR_INPUT reloads of operand N.  */
static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OTHER_ADDRESS reloads.  */
static rtx_insn *other_input_address_reload_insns = 0;
/* RELOAD_OTHER input reloads.  */
static rtx_insn *other_input_reload_insns = 0;
/* RELOAD_FOR_INPUT_ADDRESS reloads of operand N.  */
static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_INPADDR_ADDRESS reloads of operand N.  */
static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
/* Output reloads of operand N (see emit_output_reload_insns).  */
static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OUTPUT_ADDRESS reloads of operand N.  */
static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OUTADDR_ADDRESS reloads of operand N.  */
static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OPERAND_ADDRESS reloads.  */
static rtx_insn *operand_reload_insns = 0;
/* RELOAD_FOR_OPADDR_ADDR reloads.  */
static rtx_insn *other_operand_reload_insns = 0;
/* Presumably RELOAD_OTHER output reloads keyed by operand; the code
   that fills this is outside this chunk -- confirm in emit_reload_insns.  */
static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];

/* Values to be put in spill_reg_store are put here first.  Instructions
   must only be placed here if the associated reload register reaches
   the end of the instruction's reload sequence.  */
static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
/* NOTE(review): maintained by emit_reload_insns; its exact semantics are
   not visible in this chunk of the file.  */
static HARD_REG_SET reg_reloaded_died;
7153 
7154 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7155    of class NEW_CLASS with mode NEW_MODE.  Or alternatively, if alt_reload_reg
7156    is nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7157    adjusted register, and return true.  Otherwise, return false.  */
7158 static bool
7159 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7160 			    enum reg_class new_class,
7161 			    machine_mode new_mode)
7162 
7163 {
7164   rtx reg;
7165 
7166   for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7167     {
7168       unsigned regno = REGNO (reg);
7169 
7170       if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7171 	continue;
7172       if (GET_MODE (reg) != new_mode)
7173 	{
7174 	  if (!targetm.hard_regno_mode_ok (regno, new_mode))
7175 	    continue;
7176 	  if (hard_regno_nregs (regno, new_mode) > REG_NREGS (reg))
7177 	    continue;
7178 	  reg = reload_adjust_reg_for_mode (reg, new_mode);
7179 	}
7180       *reload_reg = reg;
7181       return true;
7182     }
7183   return false;
7184 }
7185 
7186 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7187    pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7188    nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7189    adjusted register, and return true.  Otherwise, return false.  */
7190 static bool
7191 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7192 			     enum insn_code icode)
7193 
7194 {
7195   enum reg_class new_class = scratch_reload_class (icode);
7196   machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7197 
7198   return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7199 				     new_class, new_mode);
7200 }
7201 
/* Generate insns to perform reload RL, which is for the insn in CHAIN and
   has the number J.  OLD contains the value to be used as input.  */

static void
emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
			 rtx old, int j)
{
  rtx_insn *insn = chain->insn;
  rtx reloadreg;
  rtx oldequiv_reg = 0;
  rtx oldequiv = 0;
  /* Nonzero once one of the special-case paths below has fully handled
     the reload, suppressing the default gen_reload at the end.  */
  int special = 0;
  machine_mode mode;
  /* Points at the file-static sequence chain (input_reload_insns etc.)
     that collects the insns emitted here; chosen by rl->when_needed.  */
  rtx_insn **where;

  /* delete_output_reload is only invoked properly if old contains
     the original pseudo register.  Since this is replaced with a
     hard reg when RELOAD_OVERRIDE_IN is set, see if we can
     find the pseudo in RELOAD_IN_REG.  This is also used to
     determine whether a secondary reload is needed.  */
  if (reload_override_in[j]
      && (REG_P (rl->in_reg)
	  || (GET_CODE (rl->in_reg) == SUBREG
	      && REG_P (SUBREG_REG (rl->in_reg)))))
    {
      oldequiv = old;
      old = rl->in_reg;
    }
  if (oldequiv == 0)
    oldequiv = old;
  else if (REG_P (oldequiv))
    oldequiv_reg = oldequiv;
  else if (GET_CODE (oldequiv) == SUBREG)
    oldequiv_reg = SUBREG_REG (oldequiv);

  reloadreg = reload_reg_rtx_for_input[j];
  mode = GET_MODE (reloadreg);

  /* If we are reloading from a register that was recently stored in
     with an output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize && REG_P (oldequiv)
      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
      && spill_reg_store[REGNO (oldequiv)]
      && REG_P (old)
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
			  rl->out_reg)))
    delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);

  /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
     OLDEQUIV.  */

  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
    oldequiv = SUBREG_REG (oldequiv);
  if (GET_MODE (oldequiv) != VOIDmode
      && mode != GET_MODE (oldequiv))
    oldequiv = gen_lowpart_SUBREG (mode, oldequiv);

  /* Switch to the right place to emit the reload insns.  */
  switch (rl->when_needed)
    {
    case RELOAD_OTHER:
      where = &other_input_reload_insns;
      break;
    case RELOAD_FOR_INPUT:
      where = &input_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      where = &input_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPADDR_ADDRESS:
      where = &inpaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      where = &output_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      where = &outaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      where = &operand_reload_insns;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      where = &other_operand_reload_insns;
      break;
    case RELOAD_FOR_OTHER_ADDRESS:
      where = &other_input_address_reload_insns;
      break;
    default:
      gcc_unreachable ();
    }

  push_to_sequence (*where);

  /* Auto-increment addresses must be reloaded in a special way.  */
  if (rl->out && ! rl->out_reg)
    {
      /* We are not going to bother supporting the case where a
	 incremented register can't be copied directly from
	 OLDEQUIV since this seems highly unlikely.  */
      gcc_assert (rl->secondary_in_reload < 0);

      if (reload_inherited[j])
	oldequiv = reloadreg;

      old = XEXP (rl->in_reg, 0);

      /* Prevent normal processing of this reload.  */
      special = 1;
      /* Output a special code sequence for this case.  */
      inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
    }

  /* If we are reloading a pseudo-register that was set by the previous
     insn, see if we can get rid of that pseudo-register entirely
     by redirecting the previous insn into our reload register.  */

  else if (optimize && REG_P (old)
	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
	   && dead_or_set_p (insn, old)
	   /* This is unsafe if some other reload
	      uses the same reg first.  */
	   && ! conflicts_with_override (reloadreg)
	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
				rl->when_needed, old, rl->out, j, 0))
    {
      rtx_insn *temp = PREV_INSN (insn);
      while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
	temp = PREV_INSN (temp);
      if (temp
	  && NONJUMP_INSN_P (temp)
	  && GET_CODE (PATTERN (temp)) == SET
	  && SET_DEST (PATTERN (temp)) == old
	  /* Make sure we can access insn_operand_constraint.  */
	  && asm_noperands (PATTERN (temp)) < 0
	  /* This is unsafe if operand occurs more than once in current
	     insn.  Perhaps some occurrences aren't reloaded.  */
	  && count_occurrences (PATTERN (insn), old, 0) == 1)
	{
	  /* N.B. this inner OLD deliberately shadows the function-level
	     OLD; it keeps the original destination so it can be restored
	     below if the rewritten insn fails to validate.  */
	  rtx old = SET_DEST (PATTERN (temp));
	  /* Store into the reload register instead of the pseudo.  */
	  SET_DEST (PATTERN (temp)) = reloadreg;

	  /* Verify that resulting insn is valid.

	     Note that we have replaced the destination of TEMP with
	     RELOADREG.  If TEMP references RELOADREG within an
	     autoincrement addressing mode, then the resulting insn
	     is ill-formed and we must reject this optimization.  */
	  extract_insn (temp);
	  if (constrain_operands (1, get_enabled_alternatives (temp))
	      && (!AUTO_INC_DEC || ! find_reg_note (temp, REG_INC, reloadreg)))
	    {
	      /* If the previous insn is an output reload, the source is
		 a reload register, and its spill_reg_store entry will
		 contain the previous destination.  This is now
		 invalid.  */
	      if (REG_P (SET_SRC (PATTERN (temp)))
		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
		{
		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
		}

	      /* If these are the only uses of the pseudo reg,
		 pretend for GDB it lives in the reload reg we used.  */
	      if (REG_N_DEATHS (REGNO (old)) == 1
		  && REG_N_SETS (REGNO (old)) == 1)
		{
		  reg_renumber[REGNO (old)] = REGNO (reloadreg);
		  if (ira_conflicts_p)
		    /* Inform IRA about the change.  */
		    ira_mark_allocation_change (REGNO (old));
		  alter_reg (REGNO (old), -1, false);
		}
	      special = 1;

	      /* Adjust any debug insns between temp and insn.  */
	      while ((temp = NEXT_INSN (temp)) != insn)
		if (DEBUG_BIND_INSN_P (temp))
		  INSN_VAR_LOCATION_LOC (temp)
		    = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (temp),
					    old, reloadreg);
		else
		  gcc_assert (DEBUG_INSN_P (temp) || NOTE_P (temp));
	    }
	  else
	    {
	      /* The rewritten insn did not validate; restore the
		 original destination.  */
	      SET_DEST (PATTERN (temp)) = old;
	    }
	}
    }

  /* We can't do that, so output an insn to load RELOADREG.  */

  /* If we have a secondary reload, pick up the secondary register
     and icode, if any.  If OLDEQUIV and OLD are different or
     if this is an in-out reload, recompute whether or not we
     still need a secondary register and what the icode should
     be.  If we still need a secondary register and the class or
     icode is different, go back to reloading from OLD if using
     OLDEQUIV means that we got the wrong type of register.  We
     cannot have different class or icode due to an in-out reload
     because we don't make such reloads when both the input and
     output need secondary reload registers.  */

  if (! special && rl->secondary_in_reload >= 0)
    {
      rtx second_reload_reg = 0;
      rtx third_reload_reg = 0;
      int secondary_reload = rl->secondary_in_reload;
      rtx real_oldequiv = oldequiv;
      rtx real_old = old;
      rtx tmp;
      enum insn_code icode;
      enum insn_code tertiary_icode = CODE_FOR_nothing;

      /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
	 and similarly for OLD.
	 See comments in get_secondary_reload in reload.c.  */
      /* If it is a pseudo that cannot be replaced with its
	 equivalent MEM, we must fall back to reload_in, which
	 will have all the necessary substitutions registered.
	 Likewise for a pseudo that can't be replaced with its
	 equivalent constant.

	 Take extra care for subregs of such pseudos.  Note that
	 we cannot use reg_equiv_mem in this case because it is
	 not in the right mode.  */

      tmp = oldequiv;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
	      || reg_equiv_constant (REGNO (tmp)) != 0))
	{
	  if (! reg_equiv_mem (REGNO (tmp))
	      || num_not_at_initial_offset
	      || GET_CODE (oldequiv) == SUBREG)
	    real_oldequiv = rl->in;
	  else
	    real_oldequiv = reg_equiv_mem (REGNO (tmp));
	}

      tmp = old;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
	      || reg_equiv_constant (REGNO (tmp)) != 0))
	{
	  if (! reg_equiv_mem (REGNO (tmp))
	      || num_not_at_initial_offset
	      || GET_CODE (old) == SUBREG)
	    real_old = rl->in;
	  else
	    real_old = reg_equiv_mem (REGNO (tmp));
	}

      second_reload_reg = rld[secondary_reload].reg_rtx;
      if (rld[secondary_reload].secondary_in_reload >= 0)
	{
	  int tertiary_reload = rld[secondary_reload].secondary_in_reload;

	  third_reload_reg = rld[tertiary_reload].reg_rtx;
	  tertiary_icode = rld[secondary_reload].secondary_in_icode;
	  /* We'd have to add more code for quaternary reloads.  */
	  gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
	}
      icode = rl->secondary_in_icode;

      if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
	  || (rl->in != 0 && rl->out != 0))
	{
	  secondary_reload_info sri, sri2;
	  enum reg_class new_class, new_t_class;

	  sri.icode = CODE_FOR_nothing;
	  sri.prev_sri = NULL;
	  new_class
	    = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
							 rl->rclass, mode,
							 &sri);

	  if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	    second_reload_reg = 0;
	  else if (new_class == NO_REGS)
	    {
	      if (reload_adjust_reg_for_icode (&second_reload_reg,
					       third_reload_reg,
					       (enum insn_code) sri.icode))
		{
		  icode = (enum insn_code) sri.icode;
		  third_reload_reg = 0;
		}
	      else
		{
		  oldequiv = old;
		  real_oldequiv = real_old;
		}
	    }
	  else if (sri.icode != CODE_FOR_nothing)
	    /* We currently lack a way to express this in reloads.  */
	    gcc_unreachable ();
	  else
	    {
	      sri2.icode = CODE_FOR_nothing;
	      sri2.prev_sri = &sri;
	      /* NOTE(review): the call below passes &sri, yet the result
		 is read from sri2.icode in the branches that follow, so
		 sri2.icode can apparently never become anything other
		 than CODE_FOR_nothing here.  Looks suspicious -- confirm
		 against upstream history before changing.  */
	      new_t_class
		= (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
							     new_class, mode,
							     &sri);
	      if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
		{
		  if (reload_adjust_reg_for_temp (&second_reload_reg,
						  third_reload_reg,
						  new_class, mode))
		    {
		      third_reload_reg = 0;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
		{
		  rtx intermediate = second_reload_reg;

		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
						  new_class, mode)
		      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
						      ((enum insn_code)
						       sri2.icode)))
		    {
		      second_reload_reg = intermediate;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
		{
		  rtx intermediate = second_reload_reg;

		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
						  new_class, mode)
		      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
						      new_t_class, mode))
		    {
		      second_reload_reg = intermediate;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else
		{
		  /* This could be handled more intelligently too.  */
		  oldequiv = old;
		  real_oldequiv = real_old;
		}
	    }
	}

      /* If we still need a secondary reload register, check
	 to see if it is being used as a scratch or intermediate
	 register and generate code appropriately.  If we need
	 a scratch register, use REAL_OLDEQUIV since the form of
	 the insn may depend on the actual address if it is
	 a MEM.  */

      if (second_reload_reg)
	{
	  if (icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (!third_reload_reg);

	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
					  second_reload_reg));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need a scratch register to load the
		 intermediate register (a tertiary reload).  */
	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (second_reload_reg, real_oldequiv,
			       third_reload_reg)));
		}
	      else if (third_reload_reg)
		{
		  gen_reload (third_reload_reg, real_oldequiv,
			      rl->opnum,
			      rl->when_needed);
		  gen_reload (second_reload_reg, third_reload_reg,
			      rl->opnum,
			      rl->when_needed);
		}
	      else
		gen_reload (second_reload_reg, real_oldequiv,
			    rl->opnum,
			    rl->when_needed);

	      oldequiv = second_reload_reg;
	    }
	}
    }

  /* Default case: emit the plain copy into RELOADREG, unless a special
     path above already emitted everything or no copy is needed.  */
  if (! special && ! rtx_equal_p (reloadreg, oldequiv))
    {
      rtx real_oldequiv = oldequiv;

      /* As above, fall back to rl->in when OLDEQUIV is a pseudo (or
	 subreg of one) whose equivalence cannot be used directly, or a
	 constant the target refuses to reload into this class.  */
      if ((REG_P (oldequiv)
	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
	   && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
	       || reg_equiv_constant (REGNO (oldequiv)) != 0))
	  || (GET_CODE (oldequiv) == SUBREG
	      && REG_P (SUBREG_REG (oldequiv))
	      && (REGNO (SUBREG_REG (oldequiv))
		  >= FIRST_PSEUDO_REGISTER)
	      && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
		  || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
	  || (CONSTANT_P (oldequiv)
	      && (targetm.preferred_reload_class (oldequiv,
						  REGNO_REG_CLASS (REGNO (reloadreg)))
		  == NO_REGS)))
	real_oldequiv = rl->in;
      gen_reload (reloadreg, real_oldequiv, rl->opnum,
		  rl->when_needed);
    }

  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  /* End this sequence.  */
  *where = get_insns ();
  end_sequence ();

  /* Update reload_override_in so that delete_address_reloads_1
     can see the actual register usage.  */
  if (oldequiv_reg)
    reload_override_in[j] = oldequiv;
}
7662 
/* Generate insns for the output reload RL, which is for the insn described
   by CHAIN and has the number J.  The generated insns are collected in a
   sequence and stored into output_reload_insns[] (or, for RELOAD_OTHER
   reloads, other_output_reload_insns[]) so that emit_reload_insns can
   place them after the insn later.  */
static void
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
			  int j)
{
  rtx reloadreg;
  rtx_insn *insn = chain->insn;
  /* Nonzero once a secondary-reload insn pattern has already performed
     the actual store, so the generic copy at the end must be skipped.  */
  int special = 0;
  rtx old = rl->out;
  machine_mode mode;
  rtx_insn *p;
  rtx rl_reg_rtx;

  /* RELOAD_OTHER output reloads accumulate in a fresh sequence of their
     own; every other kind appends to the per-operand output sequence.  */
  if (rl->when_needed == RELOAD_OTHER)
    start_sequence ();
  else
    push_to_sequence (output_reload_insns[rl->opnum]);

  rl_reg_rtx = reload_reg_rtx_for_output[j];
  mode = GET_MODE (rl_reg_rtx);

  reloadreg = rl_reg_rtx;

  /* If we need two reload regs, set RELOADREG to the intermediate
     one, since it will be stored into OLD.  We might need a secondary
     register only for an input reload, so check again here.  */

  if (rl->secondary_out_reload >= 0)
    {
      rtx real_old = old;
      int secondary_reload = rl->secondary_out_reload;
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;

      /* If OLD is a spilled pseudo with a memory equivalent, the store
	 really targets that memory location.  */
      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_mem (REGNO (old)) != 0)
	real_old = reg_equiv_mem (REGNO (old));

      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
	{
	  rtx second_reloadreg = reloadreg;
	  reloadreg = rld[secondary_reload].reg_rtx;

	  /* See if RELOADREG is to be used as a scratch register
	     or as an intermediate register.  */
	  if (rl->secondary_out_icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (tertiary_reload < 0);

	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
			  (real_old, second_reloadreg, reloadreg)));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need both a scratch and intermediate reload
		 register.  */

	      enum insn_code tertiary_icode
		= rld[secondary_reload].secondary_out_icode;

	      /* We'd have to add more code for quaternary reloads.  */
	      gcc_assert (tertiary_reload < 0
			  || rld[tertiary_reload].secondary_out_reload < 0);

	      if (GET_MODE (reloadreg) != mode)
		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);

	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		  /* Copy primary reload reg to secondary reload reg.
		     (Note that these have been swapped above, then
		     secondary reload reg to OLD using our insn.)  */

		  /* If REAL_OLD is a paradoxical SUBREG, remove it
		     and try to put the opposite SUBREG on
		     RELOADREG.  */
		  strip_paradoxical_subreg (&real_old, &reloadreg);

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (real_old, reloadreg, third_reloadreg)));
		  special = 1;
		}

	      else
		{
		  /* Copy between the reload regs here and then to
		     OUT later.  */

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  if (tertiary_reload >= 0)
		    {
		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		      gen_reload (third_reloadreg, reloadreg,
				  rl->opnum, rl->when_needed);
		      reloadreg = third_reloadreg;
		    }
		}
	    }
	}
    }

  /* Output the last reload insn.  */
  if (! special)
    {
      rtx set;

      /* Don't output the last reload if OLD is not the dest of
	 INSN and is in the src and is clobbered by INSN.  */
      if (! flag_expensive_optimizations
	  || !REG_P (old)
	  || !(set = single_set (insn))
	  || rtx_equal_p (old, SET_DEST (set))
	  || !reg_mentioned_p (old, SET_SRC (set))
	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
	gen_reload (old, reloadreg, rl->opnum,
		    rl->when_needed);
    }

  /* Look at all insns we emitted, just to be safe.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);

	/* If this output reload doesn't come from a spill reg,
	   clear any memory of reloaded copies of the pseudo reg.
	   If this output reload comes from a spill reg,
	   reg_has_output_reload will make this do nothing.  */
	note_stores (pat, forget_old_reloads_1, NULL);

	if (reg_mentioned_p (rl_reg_rtx, pat))
	  {
	    rtx set = single_set (insn);
	    /* If the reload register was not a spill reg but INSN copies
	       it directly from a hard register, record that hard reg as
	       the effective spill index for inheritance.  */
	    if (reload_spill_index[j] < 0
		&& set
		&& SET_SRC (set) == rl_reg_rtx)
	      {
		int src = REGNO (SET_SRC (set));

		reload_spill_index[j] = src;
		SET_HARD_REG_BIT (reg_is_output_reload, src);
		if (find_regno_note (insn, REG_DEAD, src))
		  SET_HARD_REG_BIT (reg_reloaded_died, src);
	      }
	    if (HARD_REGISTER_P (rl_reg_rtx))
	      {
		int s = rl->secondary_out_reload;
		set = single_set (p);
		/* If this reload copies only to the secondary reload
		   register, the secondary reload does the actual
		   store.  */
		if (s >= 0 && set == NULL_RTX)
		  /* We can't tell what function the secondary reload
		     has and where the actual store to the pseudo is
		     made; leave new_spill_reg_store alone.  */
		  ;
		else if (s >= 0
			 && SET_SRC (set) == rl_reg_rtx
			 && SET_DEST (set) == rld[s].reg_rtx)
		  {
		    /* Usually the next instruction will be the
		       secondary reload insn;  if we can confirm
		       that it is, setting new_spill_reg_store to
		       that insn will allow an extra optimization.  */
		    rtx s_reg = rld[s].reg_rtx;
		    rtx_insn *next = NEXT_INSN (p);
		    rld[s].out = rl->out;
		    rld[s].out_reg = rl->out_reg;
		    set = single_set (next);
		    if (set && SET_SRC (set) == s_reg
			&& reload_reg_rtx_reaches_end_p (s_reg, s))
		      {
			SET_HARD_REG_BIT (reg_is_output_reload,
					  REGNO (s_reg));
			new_spill_reg_store[REGNO (s_reg)] = next;
		      }
		  }
		else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
	      }
	  }
      }

  /* Save the finished sequence where emit_reload_insns will pick it up.
     For RELOAD_OTHER, earlier-collected insns are appended after this
     reload's insns, which yields descending order by reload number.  */
  if (rl->when_needed == RELOAD_OTHER)
    {
      emit_insn (other_output_reload_insns[rl->opnum]);
      other_output_reload_insns[rl->opnum] = get_insns ();
    }
  else
    output_reload_insns[rl->opnum] = get_insns ();

  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  end_sequence ();
}
7868 
/* Do input reloading for reload RL, which is for the insn described by CHAIN
   and has the number J.  Decides the mode to reload in, records the reload
   register in reload_reg_rtx_for_input, emits the input reload insns when
   the value was not inherited, and may delete a now-redundant earlier
   output reload.  */
static void
do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx_insn *insn = chain->insn;
  /* When rl->in is a MEM (a spilled pseudo's stack slot), reload from
     the original register recorded in rl->in_reg instead.  */
  rtx old = (rl->in && MEM_P (rl->in)
	     ? rl->in_reg : rl->in);
  rtx reg_rtx = rl->reg_rtx;

  if (old && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 This is very tricky because we have three to choose from.
	 There is the mode the insn operand wants (rl->inmode).
	 There is the mode of the reload register RELOADREG.
	 There is the intrinsic mode of the operand, which we could find
	 by stripping some SUBREGs.
	 It turns out that RELOADREG's mode is irrelevant:
	 we can change that arbitrarily.

	 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
	 then the reload reg may not support QImode moves, so use SImode.
	 If foo is in memory due to spilling a pseudo reg, this is safe,
	 because the QImode value is in the least significant part of a
	 slot big enough for a SImode.  If foo is some other sort of
	 memory reference, then it is impossible to reload this case,
	 so previous passes had better make sure this never happens.

	 Then consider a one-word union which has SImode and one of its
	 members is a float, being fetched as (SUBREG:SF union:SI).
	 We must fetch that as SFmode because we could be loading into
	 a float-only register.  In this case OLD's mode is correct.

	 Consider an immediate integer: it has VOIDmode.  Here we need
	 to get a mode from something else.

	 In some cases, there is a fourth mode, the operand's
	 containing mode.  If the insn specifies a containing mode for
	 this operand, it overrides all others.

	 I am not sure whether the algorithm here is always right,
	 but it does the right things in those cases.  */

      mode = GET_MODE (old);
      if (mode == VOIDmode)
	mode = rl->inmode;

      /* We cannot use gen_lowpart_common since it can do the wrong thing
	 when REG_RTX has a multi-word mode.  Note that REG_RTX must
	 always be a REG here.  */
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  /* Record the (possibly mode-adjusted) reload register; later passes
     over the reloads consult this rather than rl->reg_rtx.  */
  reload_reg_rtx_for_input[j] = reg_rtx;

  if (old != 0
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
      && ! rtx_equal_p (reg_rtx, old)
      && reg_rtx != 0)
    emit_input_reload_insns (chain, rld + j, old, j);

  /* When inheriting a wider reload, we have a MEM in rl->in,
     e.g. inheriting a SImode output reload for
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
  if (optimize && reload_inherited[j] && rl->in
      && MEM_P (rl->in)
      && MEM_P (rl->in_reg)
      && reload_spill_index[j] >= 0
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];

  /* If we are reloading a register that was recently stored in with an
     output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize
      && (reload_inherited[j] || reload_override_in[j])
      && reg_rtx
      && REG_P (reg_rtx)
      && spill_reg_store[REGNO (reg_rtx)] != 0
#if 0
      /* There doesn't seem to be any reason to restrict this to pseudos
	 and doing so loses in the case where we are copying from a
	 register of the wrong class.  */
      && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
#endif
      /* The insn might have already some references to stackslots
	 replaced by MEMs, while reload_out_reg still names the
	 original pseudo.  */
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
    delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
}
7967 
/* Do output reloading for reload RL, which is for the insn described by
   CHAIN and has the number J.  Decides the reload mode, records the reload
   register in reload_reg_rtx_for_output, tries to delete a previous store
   made redundant by this reload, and emits the output reload insns unless
   the destination turns out to be unused.
   ??? At some point we need to support handling output reloads of
   JUMP_INSNs or insns that set cc0.  */
static void
do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx note, old;
  rtx_insn *insn = chain->insn;
  /* If this is an output reload that stores something that is
     not loaded in this same reload, see if we can eliminate a previous
     store.  */
  rtx pseudo = rl->out_reg;
  rtx reg_rtx = rl->reg_rtx;

  if (rl->out && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 See comments above (for input reloading).  */
      mode = GET_MODE (rl->out);
      if (mode == VOIDmode)
	{
	  /* VOIDmode should never happen for an output.  */
	  if (asm_noperands (PATTERN (insn)) < 0)
	    /* It's the compiler's fault.  */
	    fatal_insn ("VOIDmode on an output", insn);
	  error_for_asm (insn, "output operand is constant in %<asm%>");
	  /* Prevent crash--use something we know is valid.  */
	  mode = word_mode;
	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
	}
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  /* Record the (possibly mode-adjusted) reload register for later passes.  */
  reload_reg_rtx_for_output[j] = reg_rtx;

  /* If the pseudo being stored was last reloaded from a spill reg whose
     contents still match it, the earlier store may be deletable.  */
  if (pseudo
      && optimize
      && REG_P (pseudo)
      && ! rtx_equal_p (rl->in_reg, pseudo)
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
      && reg_last_reload_reg[REGNO (pseudo)])
    {
      int pseudo_no = REGNO (pseudo);
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);

      /* We don't need to test full validity of last_regno for
	 inherit here; we only want to know if the store actually
	 matches the pseudo.  */
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
	  && reg_reloaded_contents[last_regno] == pseudo_no
	  && spill_reg_store[last_regno]
	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
	delete_output_reload (insn, j, last_regno, reg_rtx);
    }

  old = rl->out_reg;
  if (old == 0
      || reg_rtx == 0
      || rtx_equal_p (old, reg_rtx))
    return;

  /* An output operand that dies right away does need a reload,
     but need not be copied from it.  Show the new location in the
     REG_UNUSED note.  */
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
    {
      XEXP (note, 0) = reg_rtx;
      return;
    }
  /* Likewise for a SUBREG of an operand that dies.  */
  else if (GET_CODE (old) == SUBREG
	   && REG_P (SUBREG_REG (old))
	   && (note = find_reg_note (insn, REG_UNUSED,
				     SUBREG_REG (old))) != 0)
    {
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
      return;
    }
  else if (GET_CODE (old) == SCRATCH)
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
       but we don't want to make an output reload.  */
    return;

  /* If INSN is a JUMP_INSN, we can't support output reloads yet.  */
  gcc_assert (NONJUMP_INSN_P (insn));

  emit_output_reload_insns (chain, rld + j, j);
}
8060 
8061 /* A reload copies values of MODE from register SRC to register DEST.
8062    Return true if it can be treated for inheritance purposes like a
8063    group of reloads, each one reloading a single hard register.  The
8064    caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8065    occupy the same number of hard registers.  */
8066 
8067 static bool
8068 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8069 		     int src ATTRIBUTE_UNUSED,
8070 		     machine_mode mode ATTRIBUTE_UNUSED)
8071 {
8072   return (REG_CAN_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8073 	  && REG_CAN_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8074 }
8075 
/* Output insns to reload values in and out of the chosen reload regs
   for the insn described by CHAIN.  This is the driver: it generates the
   per-reload insn sequences via do_input_reload/do_output_reload, splices
   them around the insn in the required order, and then updates the
   inheritance tracking arrays (reg_last_reload_reg, spill_reg_store,
   reg_reloaded_*) so later insns can reuse the reloaded values.  */

static void
emit_reload_insns (struct insn_chain *chain)
{
  rtx_insn *insn = chain->insn;

  int j;

  /* Hard regs seen to die (via REG_DEAD notes) during this insn's
     reloads; merged into reg_reloaded_dead at the end.  */
  CLEAR_HARD_REG_SET (reg_reloaded_died);

  /* Reset the per-operand insn sequences collected by the do_*_reload
     helpers.  */
  for (j = 0; j < reload_n_operands; j++)
    input_reload_insns[j] = input_address_reload_insns[j]
      = inpaddr_address_reload_insns[j]
      = output_reload_insns[j] = output_address_reload_insns[j]
      = outaddr_address_reload_insns[j]
      = other_output_reload_insns[j] = 0;
  other_input_address_reload_insns = 0;
  other_input_reload_insns = 0;
  operand_reload_insns = 0;
  other_operand_reload_insns = 0;

  /* Dump reloads into the dump file.  */
  if (dump_file)
    {
      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
      debug_reload_to_stream (dump_file);
    }

  /* Clear the stale new_spill_reg_store entries for every hard reg used
     by this insn's reloads.  */
  for (j = 0; j < n_reloads; j++)
    if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
      {
	unsigned int i;

	for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
	  new_spill_reg_store[i] = 0;
      }

  /* Now output the instructions to copy the data into and out of the
     reload registers.  Do these in the order that the reloads were reported,
     since reloads of base and index registers precede reloads of operands
     and the operands may need the base and index registers reloaded.  */

  for (j = 0; j < n_reloads; j++)
    {
      do_input_reload (chain, rld + j, j);
      do_output_reload (chain, rld + j, j);
    }

  /* Now write all the insns we made for reloads in the order expected by
     the allocation functions.  Prior to the insn being reloaded, we write
     the following reloads:

     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.

     RELOAD_OTHER reloads.

     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
     RELOAD_FOR_INPUT reload for the operand.

     RELOAD_FOR_OPADDR_ADDRS reloads.

     RELOAD_FOR_OPERAND_ADDRESS reloads.

     After the insn being reloaded, we write the following:

     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
     reloads for the operand.  The RELOAD_OTHER output reloads are
     output in descending order by reload number.  */

  emit_insn_before (other_input_address_reload_insns, insn);
  emit_insn_before (other_input_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insn_before (inpaddr_address_reload_insns[j], insn);
      emit_insn_before (input_address_reload_insns[j], insn);
      emit_insn_before (input_reload_insns[j], insn);
    }

  emit_insn_before (other_operand_reload_insns, insn);
  emit_insn_before (operand_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      rtx_insn *x = emit_insn_after (outaddr_address_reload_insns[j], insn);
      x = emit_insn_after (output_address_reload_insns[j], x);
      x = emit_insn_after (output_reload_insns[j], x);
      emit_insn_after (other_output_reload_insns[j], x);
    }

  /* For all the spill regs newly reloaded in this instruction,
     record what they were reloaded from, so subsequent instructions
     can inherit the reloads.

     Update spill_reg_store for the reloads of this insn.
     Copy the elements that were updated in the loop above.  */

  for (j = 0; j < n_reloads; j++)
    {
      int r = reload_order[j];
      int i = reload_spill_index[r];

      /* If this is a non-inherited input reload from a pseudo, we must
	 clear any memory of a previous store to the same pseudo.  Only do
	 something if there will not be an output reload for the pseudo
	 being reloaded.  */
      if (rld[r].in_reg != 0
	  && ! (reload_inherited[r] || reload_override_in[r]))
	{
	  rtx reg = rld[r].in_reg;

	  if (GET_CODE (reg) == SUBREG)
	    reg = SUBREG_REG (reg);

	  if (REG_P (reg)
	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	      && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
	    {
	      int nregno = REGNO (reg);

	      if (reg_last_reload_reg[nregno])
		{
		  int last_regno = REGNO (reg_last_reload_reg[nregno]);

		  if (reg_reloaded_contents[last_regno] == nregno)
		    spill_reg_store[last_regno] = 0;
		}
	    }
	}

      /* I is nonneg if this reload used a register.
	 If rld[r].reg_rtx is 0, this is an optional reload
	 that we opted to ignore.  */

      if (i >= 0 && rld[r].reg_rtx != 0)
	{
	  int nr = hard_regno_nregs (i, GET_MODE (rld[r].reg_rtx));
	  int k;

	  /* For a multi register reload, we need to check if all or part
	     of the value lives to the end.  */
	  for (k = 0; k < nr; k++)
	    if (reload_reg_reaches_end_p (i + k, r))
	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);

	  /* Maybe the spill reg contains a copy of reload_out.  */
	  if (rld[r].out != 0
	      && (REG_P (rld[r].out)
		  || (rld[r].out_reg
		      ? REG_P (rld[r].out_reg)
		      /* The reload value is an auto-modification of
			 some kind.  For PRE_INC, POST_INC, PRE_DEC
			 and POST_DEC, we record an equivalence
			 between the reload register and the operand
			 on the optimistic assumption that we can make
			 the equivalence hold.  reload_as_needed must
			 then either make it hold or invalidate the
			 equivalence.

			 PRE_MODIFY and POST_MODIFY addresses are reloaded
			 somewhat differently, and allowing them here leads
			 to problems.  */
		      : (GET_CODE (rld[r].out) != POST_MODIFY
			 && GET_CODE (rld[r].out) != PRE_MODIFY))))
	    {
	      rtx reg;

	      reg = reload_reg_rtx_for_output[r];
	      if (reload_reg_rtx_reaches_end_p (reg, r))
		{
		  machine_mode mode = GET_MODE (reg);
		  int regno = REGNO (reg);
		  int nregs = REG_NREGS (reg);
		  rtx out = (REG_P (rld[r].out)
			     ? rld[r].out
			     : rld[r].out_reg
			     ? rld[r].out_reg
/* AUTO_INC */		     : XEXP (rld[r].in_reg, 0));
		  int out_regno = REGNO (out);
		  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
				   : hard_regno_nregs (out_regno, mode));
		  bool piecemeal;

		  spill_reg_store[regno] = new_spill_reg_store[regno];
		  spill_reg_stored_to[regno] = out;
		  reg_last_reload_reg[out_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
			       && nregs == out_nregs
			       && inherit_piecemeal_p (out_regno, regno, mode));

		  /* If OUT_REGNO is a hard register, it may occupy more than
		     one register.  If it does, say what is in the
		     rest of the registers assuming that both registers
		     agree on how many words the object takes.  If not,
		     invalidate the subsequent registers.  */

		  if (HARD_REGISTER_NUM_P (out_regno))
		    for (k = 1; k < out_nregs; k++)
		      reg_last_reload_reg[out_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Now do the inverse operation.  */
		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
			   ? out_regno
			   : out_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		      if (targetm.hard_regno_call_part_clobbered (NULL,
								  regno + k,
								  mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    regno + k);
		    }
		}
	    }
	  /* Maybe the spill reg contains a copy of reload_in.  Only do
	     something if there will not be an output reload for
	     the register being reloaded.  */
	  else if (rld[r].out_reg == 0
		   && rld[r].in != 0
		   && ((REG_P (rld[r].in)
			&& !HARD_REGISTER_P (rld[r].in)
			&& !REGNO_REG_SET_P (&reg_has_output_reload,
					     REGNO (rld[r].in)))
		       || (REG_P (rld[r].in_reg)
			   && !REGNO_REG_SET_P (&reg_has_output_reload,
						REGNO (rld[r].in_reg))))
		   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
	    {
	      rtx reg;

	      reg = reload_reg_rtx_for_input[r];
	      if (reload_reg_rtx_reaches_end_p (reg, r))
		{
		  machine_mode mode;
		  int regno;
		  int nregs;
		  int in_regno;
		  int in_nregs;
		  rtx in;
		  bool piecemeal;

		  mode = GET_MODE (reg);
		  regno = REGNO (reg);
		  nregs = REG_NREGS (reg);
		  /* Pick the rtx whose register number identifies the
		     reloaded value: a pseudo in rld[r].in, else in_reg,
		     else the register inside an auto-inc expression.  */
		  if (REG_P (rld[r].in)
		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
		    in = rld[r].in;
		  else if (REG_P (rld[r].in_reg))
		    in = rld[r].in_reg;
		  else
		    in = XEXP (rld[r].in_reg, 0);
		  in_regno = REGNO (in);

		  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
			      : hard_regno_nregs (in_regno, mode));

		  reg_last_reload_reg[in_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
			       && nregs == in_nregs
			       && inherit_piecemeal_p (regno, in_regno, mode));

		  if (HARD_REGISTER_NUM_P (in_regno))
		    for (k = 1; k < in_nregs; k++)
		      reg_last_reload_reg[in_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Unless we inherited this reload, show we haven't
		     recently done a store.
		     Previous stores of inherited auto_inc expressions
		     also have to be discarded.  */
		  if (! reload_inherited[r]
		      || (rld[r].out && ! rld[r].out_reg))
		    spill_reg_store[regno] = 0;

		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
			   ? in_regno
			   : in_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		      if (targetm.hard_regno_call_part_clobbered (NULL,
								  regno + k,
								  mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    regno + k);
		    }
		}
	    }
	}

      /* The following if-statement was #if 0'd in 1.34 (or before...).
	 It's reenabled in 1.35 because supposedly nothing else
	 deals with this problem.  */

      /* If a register gets output-reloaded from a non-spill register,
	 that invalidates any previous reloaded copy of it.
	 But forget_old_reloads_1 won't get to see it, because
	 it thinks only about the original insn.  So invalidate it here.
	 Also do the same thing for RELOAD_OTHER constraints where the
	 output is discarded.  */
      if (i < 0
	  && ((rld[r].out != 0
	       && (REG_P (rld[r].out)
		   || (MEM_P (rld[r].out)
		       && REG_P (rld[r].out_reg))))
	      || (rld[r].out == 0 && rld[r].out_reg
		  && REG_P (rld[r].out_reg))))
	{
	  rtx out = ((rld[r].out && REG_P (rld[r].out))
		     ? rld[r].out : rld[r].out_reg);
	  int out_regno = REGNO (out);
	  machine_mode mode = GET_MODE (out);

	  /* REG_RTX is now set or clobbered by the main instruction.
	     As the comment above explains, forget_old_reloads_1 only
	     sees the original instruction, and there is no guarantee
	     that the original instruction also clobbered REG_RTX.
	     For example, if find_reloads sees that the input side of
	     a matched operand pair dies in this instruction, it may
	     use the input register as the reload register.

	     Calling forget_old_reloads_1 is a waste of effort if
	     REG_RTX is also the output register.

	     If we know that REG_RTX holds the value of a pseudo
	     register, the code after the call will record that fact.  */
	  if (rld[r].reg_rtx && rld[r].reg_rtx != out)
	    forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);

	  if (!HARD_REGISTER_NUM_P (out_regno))
	    {
	      rtx src_reg;
	      rtx_insn *store_insn = NULL;

	      reg_last_reload_reg[out_regno] = 0;

	      /* If we can find a hard register that is stored, record
		 the storing insn so that we may delete this insn with
		 delete_output_reload.  */
	      src_reg = reload_reg_rtx_for_output[r];

	      if (src_reg)
		{
		  if (reload_reg_rtx_reaches_end_p (src_reg, r))
		    store_insn = new_spill_reg_store[REGNO (src_reg)];
		  else
		    src_reg = NULL_RTX;
		}
	      else
		{
		  /* If this is an optional reload, try to find the
		     source reg from an input reload.  */
		  rtx set = single_set (insn);
		  if (set && SET_DEST (set) == rld[r].out)
		    {
		      int k;

		      src_reg = SET_SRC (set);
		      store_insn = insn;
		      for (k = 0; k < n_reloads; k++)
			{
			  if (rld[k].in == src_reg)
			    {
			      src_reg = reload_reg_rtx_for_input[k];
			      break;
			    }
			}
		    }
		}
	      if (src_reg && REG_P (src_reg)
		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
		{
		  int src_regno, src_nregs, k;
		  rtx note;

		  gcc_assert (GET_MODE (src_reg) == mode);
		  src_regno = REGNO (src_reg);
		  src_nregs = hard_regno_nregs (src_regno, mode);
		  /* The place where to find a death note varies with
		     PRESERVE_DEATH_INFO_REGNO_P .  The condition is not
		     necessarily checked exactly in the code that moves
		     notes, so just check both locations.  */
		  note = find_regno_note (insn, REG_DEAD, src_regno);
		  if (! note && store_insn)
		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
		  for (k = 0; k < src_nregs; k++)
		    {
		      spill_reg_store[src_regno + k] = store_insn;
		      spill_reg_stored_to[src_regno + k] = out;
		      reg_reloaded_contents[src_regno + k] = out_regno;
		      reg_reloaded_insn[src_regno + k] = store_insn;
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
		      if (targetm.hard_regno_call_part_clobbered
			  (NULL, src_regno + k, mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  src_regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    src_regno + k);
		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
		      /* NOTE(review): the two statements below use
			 SRC_REGNO, not SRC_REGNO + K, so only the first
			 hard reg of a multi-reg value is marked died —
			 confirm whether that is intentional.  */
		      if (note)
			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
		    }
		  reg_last_reload_reg[out_regno] = src_reg;
		  /* We have to set reg_has_output_reload here, or else
		     forget_old_reloads_1 will clear reg_last_reload_reg
		     right away.  */
		  SET_REGNO_REG_SET (&reg_has_output_reload,
				     out_regno);
		}
	    }
	  else
	    {
	      /* OUT is a hard register: forget any inherited copies of
		 every hard reg it occupies.  */
	      int k, out_nregs = hard_regno_nregs (out_regno, mode);

	      for (k = 0; k < out_nregs; k++)
		reg_last_reload_reg[out_regno + k] = 0;
	    }
	}
    }
  IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
}
8521 
8522 /* Go through the motions to emit INSN and test if it is strictly valid.
8523    Return the emitted insn if valid, else return NULL.  */
8524 
8525 static rtx_insn *
8526 emit_insn_if_valid_for_reload (rtx pat)
8527 {
8528   rtx_insn *last = get_last_insn ();
8529   int code;
8530 
8531   rtx_insn *insn = emit_insn (pat);
8532   code = recog_memoized (insn);
8533 
8534   if (code >= 0)
8535     {
8536       extract_insn (insn);
8537       /* We want constrain operands to treat this insn strictly in its
8538 	 validity determination, i.e., the way it would after reload has
8539 	 completed.  */
8540       if (constrain_operands (1, get_enabled_alternatives (insn)))
8541 	return insn;
8542     }
8543 
8544   delete_insns_since (last);
8545   return NULL;
8546 }
8547 
8548 /* Emit code to perform a reload from IN (which may be a reload register) to
8549    OUT (which may also be a reload register).  IN or OUT is from operand
8550    OPNUM with reload type TYPE.
8551 
8552    Returns first insn emitted.  */
8553 
static rtx_insn *
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
{
  /* Insn-stream position before we emit anything; used at the end to
     return the first insn this call emitted.  */
  rtx_insn *last = get_last_insn ();
  rtx_insn *tem;
  /* Scratch rtxes for the secondary-memory test below.  */
  rtx tem1, tem2;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (!strip_paradoxical_subreg (&in, &out))
    strip_paradoxical_subreg (&out, &in);

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Or we can be asked to reload an unary operand that was a fragment of
     an addressing mode, into a register.  If it isn't recognized as-is,
     we try making the unop operand and the reload-register the same:
     (set reg:X (unop:X expr:Y))
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem;
      rtx_insn *insn;
      enum insn_code code;

      /* Substitute any pending reload replacements into both addends.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      /* Rebuild the PLUS only if a replacement or the swap above changed
	 an operand, so unchanged rtl stays shared.  */
      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      /* First attempt: a single three-operand add, OUT = OP0 + OP1.  */
      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
      if (insn)
	return insn;

      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload OP1.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      code = optab_handler (add_optab, GET_MODE (out));

      /* Decide which operand to move into OUT first; swap so that OP0 is
	 the operand to be copied and OP1 the one added afterwards.  */
      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && !insn_operand_matches (code, 2, op1)))
	tem = op0, op0 = op1, op1 = tem;

      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
      if (insn)
	{
	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	  set_dst_reg_note (insn, REG_EQUIV, in, out);
	  return insn;
	}

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      /* The reversed order is only safe if OUT does not appear in OP0;
	 otherwise the first gen_reload would clobber an input.  */
      gcc_assert (!reg_overlap_mentioned_p (out, op0));
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      set_dst_reg_note (insn, REG_EQUIV, in, out);
    }

  /* If we need a memory location to do the move, do it that way.  */
  else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
	    (REG_P (tem1) && REG_P (tem2)))
	   && REGNO (tem1) < FIRST_PSEUDO_REGISTER
	   && REGNO (tem2) < FIRST_PSEUDO_REGISTER
	   && targetm.secondary_memory_needed (GET_MODE (out),
					       REGNO_REG_CLASS (REGNO (tem1)),
					       REGNO_REG_CLASS (REGNO (tem2))))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));

      /* Move through the secondary memory location: IN -> LOC -> OUT.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
  else if (REG_P (out) && UNARY_P (in))
    {
      rtx op1;
      rtx out_moded;
      rtx_insn *set;

      op1 = find_replacement (&XEXP (in, 0));
      if (op1 != XEXP (in, 0))
	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);

      /* First, try a plain SET.  */
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
      if (set)
	return set;

      /* If that failed, move the inner operand to the reload
	 register, and try the same unop with the inner expression
	 replaced with the reload register.  */

      if (GET_MODE (op1) != GET_MODE (out))
	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
      else
	out_moded = out;

      gen_reload (out_moded, op1, opnum, type);

      rtx temp = gen_rtx_SET (out, gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
						  out_moded));
      rtx_insn *insn = emit_insn_if_valid_for_reload (temp);
      if (insn)
	{
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return insn;
	}

      /* No way to express this reload; this is a fatal internal error.  */
      fatal_insn ("failure trying to reload:", set);
    }
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    {
      tem = emit_insn (gen_move_insn (out, in));
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
      mark_jump_label (in, tem, 0);
    }

  else if (targetm.have_reload_load_address ())
    emit_insn (targetm.gen_reload_load_address (out, in));

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (out, in));

  /* Return the first insn emitted.
     We cannot just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we cannot assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
8776 
8777 /* Delete a previously made output-reload whose result we now believe
8778    is not needed.  First we double-check.
8779 
8780    INSN is the insn now being processed.
8781    LAST_RELOAD_REG is the hard register number for which we want to delete
8782    the last output reload.
8783    J is the reload-number that originally used REG.  The caller has made
8784    certain that reload J doesn't use REG any longer for input.
8785    NEW_RELOAD_REG is reload register that reload J is using for REG.  */
8786 
static void
delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
		      rtx new_reload_reg)
{
  rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  /* Number of times REG occurs in INSN's pattern (and equivalents).  */
  int n_occurrences;
  /* Number of those occurrences accounted for by inheritance.  */
  int n_inherited = 0;
  rtx substed;
  unsigned regno;
  int nregs;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (output_reload_insn->deleted ())
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  /* SUBSTED is the memory location equivalent to the pseudo, if any;
     occurrences of it in INSN also count as uses of REG below.  */
  substed = reg_equiv_memory_loc (REGNO (reg));

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;

      if (AUTO_INC_DEC && rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);

      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    /* REG feeds a reload that is neither inherited nor the one we
	       are deleting for, so the stored value is still needed.  */
	    return;
	}
    }
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  regno = REGNO (reg);
  nregs = REG_NREGS (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs (last_reload_reg, GET_MODE (reg)); k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx_insn *i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      /* The pseudo is still live elsewhere; delete only the output reload
	 itself (plus any address reloads that fed it).  */
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
8955 
8956 /* We are going to delete DEAD_INSN.  Recursively delete loads of
8957    reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8958    CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
8959 static void
8960 delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
8961 {
8962   rtx set = single_set (dead_insn);
8963   rtx set2, dst;
8964   rtx_insn *prev, *next;
8965   if (set)
8966     {
8967       rtx dst = SET_DEST (set);
8968       if (MEM_P (dst))
8969 	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8970     }
8971   /* If we deleted the store from a reloaded post_{in,de}c expression,
8972      we can delete the matching adds.  */
8973   prev = PREV_INSN (dead_insn);
8974   next = NEXT_INSN (dead_insn);
8975   if (! prev || ! next)
8976     return;
8977   set = single_set (next);
8978   set2 = single_set (prev);
8979   if (! set || ! set2
8980       || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8981       || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8982       || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8983     return;
8984   dst = SET_DEST (set);
8985   if (! rtx_equal_p (dst, SET_DEST (set2))
8986       || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8987       || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8988       || (INTVAL (XEXP (SET_SRC (set), 1))
8989 	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
8990     return;
8991   delete_related_insns (prev);
8992   delete_related_insns (next);
8993 }
8994 
8995 /* Subfunction of delete_address_reloads: process registers found in X.  */
static void
delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
{
  rtx_insn *prev, *i2;
  rtx set, dst;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* For anything but a bare REG, recurse into all sub-expressions and
     handle each register found there.  */
  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only registers currently used as spill regs are of interest.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      /* X is used before it is set; the setter cannot be dead.  */
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* Only consider setters emitted by this reload pass.  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* PREV is dead: recursively delete loads feeding its address, then
     invalidate the cached contents of DST and delete PREV itself.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
9103 
9104 /* Output reload-insns to reload VALUE into RELOADREG.
9105    VALUE is an autoincrement or autodecrement RTX whose operand
9106    is a register or memory location;
9107    so reloading involves incrementing that location.
9108    IN is either identical to VALUE, or some cheaper place to reload from.
9109 
9110    INC_AMOUNT is the number to increment or decrement by (always positive).
9111    This cannot be deduced from VALUE.  */
9112 
static void
inc_for_reload (rtx reloadreg, rtx in, rtx value, poly_int64 inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  /* The rtx amount to add (negative for decrements).  */
  rtx inc;
  rtx_insn *add_insn;
  int code;
  /* The actual place to copy from; IN may be a cheaper equivalent.  */
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* For {PRE,POST}_MODIFY the amount is the second operand of the
	 embedded PLUS, not INC_AMOUNT.  */
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = gen_int_mode (inc_amount, Pmode);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  if (constrain_operands (1, get_enabled_alternatives (add_insn)))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));
	      return;
	    }
	}
      /* The direct add was not valid; undo it and fall through to the
	 RELOADREG-based sequences below.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      if (CONST_INT_P (inc))
	emit_insn (gen_add2_insn (reloadreg,
				  gen_int_mode (-INTVAL (inc),
						GET_MODE (reloadreg))));
      else
	emit_insn (gen_sub2_insn (reloadreg, inc));
    }
}
9212 
9213 static void
9214 add_auto_inc_notes (rtx_insn *insn, rtx x)
9215 {
9216   enum rtx_code code = GET_CODE (x);
9217   const char *fmt;
9218   int i, j;
9219 
9220   if (code == MEM && auto_inc_p (XEXP (x, 0)))
9221     {
9222       add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9223       return;
9224     }
9225 
9226   /* Scan all the operand sub-expressions.  */
9227   fmt = GET_RTX_FORMAT (code);
9228   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9229     {
9230       if (fmt[i] == 'e')
9231 	add_auto_inc_notes (insn, XEXP (x, i));
9232       else if (fmt[i] == 'E')
9233 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9234 	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
9235     }
9236 }
9237