xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/reload1.c (revision cef8759bd76c1b621f8eab8faa6f208faabc2e15)
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2    Copyright (C) 1987-2017 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "predict.h"
28 #include "df.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "optabs.h"
32 #include "regs.h"
33 #include "ira.h"
34 #include "recog.h"
35 
36 #include "rtl-error.h"
37 #include "expr.h"
38 #include "addresses.h"
39 #include "cfgrtl.h"
40 #include "cfgbuild.h"
41 #include "reload.h"
42 #include "except.h"
43 #include "dumpfile.h"
44 #include "rtl-iter.h"
45 
46 /* This file contains the reload pass of the compiler, which is
47    run after register allocation has been done.  It checks that
48    each insn is valid (operands required to be in registers really
49    are in registers of the proper class) and fixes up invalid ones
50    by copying values temporarily into registers for the insns
51    that need them.
52 
53    The results of register allocation are described by the vector
54    reg_renumber; the insns still contain pseudo regs, but reg_renumber
55    can be used to find which hard reg, if any, a pseudo reg is in.
56 
57    The technique we always use is to free up a few hard regs that are
58    called ``reload regs'', and for each place where a pseudo reg
59    must be in a hard reg, copy it temporarily into one of the reload regs.
60 
61    Reload regs are allocated locally for every instruction that needs
62    reloads.  When there are pseudos which are allocated to a register that
63    has been chosen as a reload reg, such pseudos must be ``spilled''.
64    This means that they go to other hard regs, or to stack slots if no other
65    available hard regs can be found.  Spilling can invalidate more
66    insns, requiring additional need for reloads, so we must keep checking
67    until the process stabilizes.
68 
69    For machines with different classes of registers, we must keep track
70    of the register class needed for each reload, and make sure that
71    we allocate enough reload registers of each class.
72 
73    The file reload.c contains the code that checks one insn for
74    validity and reports the reloads that it needs.  This file
75    is in charge of scanning the entire rtl code, accumulating the
76    reload needs, spilling, assigning reload registers to use for
77    fixing up each insn, and generating the new insns to copy values
78    into the reload registers.  */
79 
80 struct target_reload default_target_reload;
81 #if SWITCHABLE_TARGET
82 struct target_reload *this_target_reload = &default_target_reload;
83 #endif
84 
85 #define spill_indirect_levels			\
86   (this_target_reload->x_spill_indirect_levels)
87 
88 /* During reload_as_needed, element N contains a REG rtx for the hard reg
89    into which reg N has been reloaded (perhaps for a previous insn).  */
90 static rtx *reg_last_reload_reg;
91 
92 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
93    for an output reload that stores into reg N.  */
94 static regset_head reg_has_output_reload;
95 
96 /* Indicates which hard regs are reload-registers for an output reload
97    in the current insn.  */
98 static HARD_REG_SET reg_is_output_reload;
99 
100 /* Widest width in which each pseudo reg is referred to (via subreg).  */
101 static unsigned int *reg_max_ref_width;
102 
103 /* Vector to remember old contents of reg_renumber before spilling.  */
104 static short *reg_old_renumber;
105 
106 /* During reload_as_needed, element N contains the last pseudo regno reloaded
107    into hard register N.  If that pseudo reg occupied more than one register,
108    reg_reloaded_contents points to that pseudo for each spill register in
109    use; all of these must remain set for an inheritance to occur.  */
110 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
111 
112 /* During reload_as_needed, element N contains the insn for which
113    hard register N was last used.   Its contents are significant only
114    when reg_reloaded_valid is set for this register.  */
115 static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
116 
117 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
118 static HARD_REG_SET reg_reloaded_valid;
119 /* Indicate if the register was dead at the end of the reload.
120    This is only valid if reg_reloaded_contents is set and valid.  */
121 static HARD_REG_SET reg_reloaded_dead;
122 
123 /* Indicate whether the register's current value is one that is not
124    safe to retain across a call, even for registers that are normally
125    call-saved.  This is only meaningful for members of reg_reloaded_valid.  */
126 static HARD_REG_SET reg_reloaded_call_part_clobbered;
127 
128 /* Number of spill-regs so far; number of valid elements of spill_regs.  */
129 static int n_spills;
130 
131 /* In parallel with spill_regs, contains REG rtx's for those regs.
132    Holds the last rtx used for any given reg, or 0 if it has never
133    been used for spilling yet.  This rtx is reused, provided it has
134    the proper mode.  */
135 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
136 
137 /* In parallel with spill_regs, contains nonzero for a spill reg
138    that was stored after the last time it was used.
139    The precise value is the insn generated to do the store.  */
140 static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
141 
142 /* This is the register that was stored with spill_reg_store.  This is a
143    copy of reload_out / reload_out_reg when the value was stored; if
144    reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
145 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
146 
147 /* This table is the inverse mapping of spill_regs:
148    indexed by hard reg number,
149    it contains the position of that reg in spill_regs,
150    or -1 for something that is not in spill_regs.
151 
152    ?!?  This is no longer accurate.  */
153 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
154 
155 /* This reg set indicates registers that can't be used as spill registers for
156    the currently processed insn.  These are the hard registers which are live
157    during the insn, but not allocated to pseudos, as well as fixed
158    registers.  */
159 static HARD_REG_SET bad_spill_regs;
160 
161 /* These are the hard registers that can't be used as spill register for any
162    insn.  This includes registers used for user variables and registers that
163    we can't eliminate.  A register that appears in this set also can't be used
164    to retry register allocation.  */
165 static HARD_REG_SET bad_spill_regs_global;
166 
167 /* Describes order of use of registers for reloading
168    of spilled pseudo-registers.  `n_spills' is the number of
169    elements that are actually valid; new ones are added at the end.
170 
171    Both spill_regs and spill_reg_order are used on two occasions:
172    once during find_reload_regs, where they keep track of the spill registers
173    for a single insn, but also during reload_as_needed where they show all
174    the registers ever used by reload.  For the latter case, the information
175    is calculated during finish_spills.  */
176 static short spill_regs[FIRST_PSEUDO_REGISTER];
177 
178 /* This vector of reg sets indicates, for each pseudo, which hard registers
179    may not be used for retrying global allocation because the register was
180    formerly spilled from one of them.  If we allowed reallocating a pseudo to
181    a register that it was already allocated to, reload might not
182    terminate.  */
183 static HARD_REG_SET *pseudo_previous_regs;
184 
185 /* This vector of reg sets indicates, for each pseudo, which hard
186    registers may not be used for retrying global allocation because they
187    are used as spill registers during one of the insns in which the
188    pseudo is live.  */
189 static HARD_REG_SET *pseudo_forbidden_regs;
190 
191 /* All hard regs that have been used as spill registers for any insn are
192    marked in this set.  */
193 static HARD_REG_SET used_spill_regs;
194 
195 /* Index of last register assigned as a spill register.  We allocate in
196    a round-robin fashion.  */
197 static int last_spill_reg;
198 
199 /* Record the stack slot for each spilled hard register.  */
200 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
201 
202 /* Width allocated so far for that stack slot.  */
203 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
204 
205 /* Record which pseudos needed to be spilled.  */
206 static regset_head spilled_pseudos;
207 
208 /* Record which pseudos changed their allocation in finish_spills.  */
209 static regset_head changed_allocation_pseudos;
210 
211 /* Used for communication between order_regs_for_reload and count_pseudo.
212    Used to avoid counting one pseudo twice.  */
213 static regset_head pseudos_counted;
214 
215 /* First uid used by insns created by reload in this function.
216    Used in find_equiv_reg.  */
217 int reload_first_uid;
218 
219 /* Flag set by local-alloc or global-alloc if anything is live in
220    a call-clobbered reg across calls.  */
221 int caller_save_needed;
222 
223 /* Set to 1 while reload_as_needed is operating.
224    Required by some machines to handle any generated moves differently.  */
225 int reload_in_progress = 0;
226 
227 /* This obstack is used for allocation of rtl during register elimination.
228    The allocated storage can be freed once find_reloads has processed the
229    insn.  */
230 static struct obstack reload_obstack;
231 
232 /* Points to the beginning of the reload_obstack.  All insn_chain structures
233    are allocated first.  */
234 static char *reload_startobj;
235 
236 /* The point after all insn_chain structures.  Used to quickly deallocate
237    memory allocated in copy_reloads during calculate_needs_all_insns.  */
238 static char *reload_firstobj;
239 
240 /* This points before all local rtl generated by register elimination.
241    Used to quickly free all memory after processing one insn.  */
242 static char *reload_insn_firstobj;
243 
244 /* List of insn_chain instructions, one for every insn that reload needs to
245    examine.  */
246 struct insn_chain *reload_insn_chain;
247 
248 /* TRUE if we potentially left dead insns in the insn stream and want to
249    run DCE immediately after reload, FALSE otherwise.  */
250 static bool need_dce;
251 
252 /* List of all insns needing reloads.  */
253 static struct insn_chain *insns_need_reload;
254 
255 /* This structure is used to record information about register eliminations.
256    Each array entry describes one possible way of eliminating a register
257    in favor of another.   If there is more than one way of eliminating a
258    particular register, the most preferred should be specified first.  */
259 
struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  HOST_WIDE_INT initial_offset;	/* Initial difference between values.  */
  int can_eliminate;		/* Nonzero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
				   target hook in previous scan over insns
				   made by reload.  Used to detect when an
				   elimination stops being possible.  */
  HOST_WIDE_INT offset;		/* Current offset between the two regs.  */
  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
};
279 
280 static struct elim_table *reg_eliminate = 0;
281 
282 /* This is an intermediate structure to initialize the table.  It has
283    exactly the members provided by ELIMINABLE_REGS.  */
284 static const struct elim_table_1
285 {
286   const int from;
287   const int to;
288 } reg_eliminate_1[] =
289 
290   ELIMINABLE_REGS;
291 
292 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
293 
294 /* Record the number of pending eliminations that have an offset not equal
295    to their initial offset.  If nonzero, we use a new copy of each
296    replacement result in any insns encountered.  */
297 int num_not_at_initial_offset;
298 
299 /* Count the number of registers that we may be able to eliminate.  */
300 static int num_eliminable;
301 /* And the number of registers that are equivalent to a constant that
302    can be eliminated to frame_pointer / arg_pointer + constant.  */
303 static int num_eliminable_invariants;
304 
305 /* For each label, we record the offset of each elimination.  If we reach
306    a label by more than one path and an offset differs, we cannot do the
307    elimination.  This information is indexed by the difference of the
308    number of the label and the first label number.  We can't offset the
309    pointer itself as this can cause problems on machines with segmented
310    memory.  The first table is an array of flags that records whether we
311    have yet encountered a label and the second table is an array of arrays,
312    one entry in the latter array for each elimination.  */
313 
314 static int first_label_num;
315 static char *offsets_known_at;
316 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
317 
318 vec<reg_equivs_t, va_gc> *reg_equivs;
319 
320 /* Stack of addresses where an rtx has been changed.  We can undo the
321    changes by popping items off the stack and restoring the original
322    value at each location.
323 
324    We use this simplistic undo capability rather than copy_rtx as copy_rtx
325    will not make a deep copy of a normally sharable rtx, such as
326    (const (plus (symbol_ref) (const_int))).  If such an expression appears
327    as R1 in gen_reload_chain_without_interm_reg_p, then a shared
328    rtx expression would be changed.  See PR 42431.  */
329 
330 typedef rtx *rtx_p;
331 static vec<rtx_p> substitute_stack;
332 
333 /* Number of labels in the current function.  */
334 
335 static int num_labels;
336 
337 static void replace_pseudos_in (rtx *, machine_mode, rtx);
338 static void maybe_fix_stack_asms (void);
339 static void copy_reloads (struct insn_chain *);
340 static void calculate_needs_all_insns (int);
341 static int find_reg (struct insn_chain *, int);
342 static void find_reload_regs (struct insn_chain *);
343 static void select_reload_regs (void);
344 static void delete_caller_save_insns (void);
345 
346 static void spill_failure (rtx_insn *, enum reg_class);
347 static void count_spilled_pseudo (int, int, int);
348 static void delete_dead_insn (rtx_insn *);
349 static void alter_reg (int, int, bool);
350 static void set_label_offsets (rtx, rtx_insn *, int);
351 static void check_eliminable_occurrences (rtx);
352 static void elimination_effects (rtx, machine_mode);
353 static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
354 static int eliminate_regs_in_insn (rtx_insn *, int);
355 static void update_eliminable_offsets (void);
356 static void mark_not_eliminable (rtx, const_rtx, void *);
357 static void set_initial_elim_offsets (void);
358 static bool verify_initial_elim_offsets (void);
359 static void set_initial_label_offsets (void);
360 static void set_offsets_for_label (rtx_insn *);
361 static void init_eliminable_invariants (rtx_insn *, bool);
362 static void init_elim_table (void);
363 static void free_reg_equiv (void);
364 static void update_eliminables (HARD_REG_SET *);
365 static bool update_eliminables_and_spill (void);
366 static void elimination_costs_in_insn (rtx_insn *);
367 static void spill_hard_reg (unsigned int, int);
368 static int finish_spills (int);
369 static void scan_paradoxical_subregs (rtx);
370 static void count_pseudo (int);
371 static void order_regs_for_reload (struct insn_chain *);
372 static void reload_as_needed (int);
373 static void forget_old_reloads_1 (rtx, const_rtx, void *);
374 static void forget_marked_reloads (regset);
375 static int reload_reg_class_lower (const void *, const void *);
376 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
377 				    machine_mode);
378 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
379 				     machine_mode);
380 static int reload_reg_free_p (unsigned int, int, enum reload_type);
381 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
382 					rtx, rtx, int, int);
383 static int free_for_value_p (int, machine_mode, int, enum reload_type,
384 			     rtx, rtx, int, int);
385 static int allocate_reload_reg (struct insn_chain *, int, int);
386 static int conflicts_with_override (rtx);
387 static void failed_reload (rtx_insn *, int);
388 static int set_reload_reg (int, int);
389 static void choose_reload_regs_init (struct insn_chain *, rtx *);
390 static void choose_reload_regs (struct insn_chain *);
391 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
392 				     rtx, int);
393 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
394 				      int);
395 static void do_input_reload (struct insn_chain *, struct reload *, int);
396 static void do_output_reload (struct insn_chain *, struct reload *, int);
397 static void emit_reload_insns (struct insn_chain *);
398 static void delete_output_reload (rtx_insn *, int, int, rtx);
399 static void delete_address_reloads (rtx_insn *, rtx_insn *);
400 static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
401 static void inc_for_reload (rtx, rtx, rtx, int);
402 static void add_auto_inc_notes (rtx_insn *, rtx);
403 static void substitute (rtx *, const_rtx, rtx);
404 static bool gen_reload_chain_without_interm_reg_p (int, int);
405 static int reloads_conflict (int, int);
406 static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
407 static rtx_insn *emit_insn_if_valid_for_reload (rtx);
408 
409 /* Initialize the reload pass.  This is called at the beginning of compilation
410    and may be called again if the target is reinitialized.  */
411 
void
init_reload (void)
{
  int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  /* Probe with (MEM (PLUS (REG pseudo) (CONST_INT 4))) — a register just
     past the virtual registers stands in for an arbitrary pseudo.  */
  rtx tem
    = gen_rtx_MEM (Pmode,
		   gen_rtx_PLUS (Pmode,
				 gen_rtx_REG (Pmode,
					      LAST_VIRTUAL_REGISTER + 1),
				 gen_int_mode (4, Pmode)));
  spill_indirect_levels = 0;

  /* Keep wrapping the probe in another MEM until the target rejects the
     address; each accepted wrapping is one more indirection level.  */
  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx_MEM (Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx_PLUS (Pmode,
			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
			  gen_rtx_REG (Pmode, i));

      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (Pmode, tem, 4);

      /* Record, per machine mode, whether reg+reg+offset is accepted.  */
      for (int mode = 0; mode < MAX_MACHINE_MODE; mode++)
	if (!double_reg_address_ok[mode]
	    && memory_address_p ((enum machine_mode)mode, tem))
	  double_reg_address_ok[mode] = 1;
    }

  /* Initialize obstack for our rtl allocation.  Only done once even if
     init_reload is called again after target reinitialization.  */
  if (reload_startobj == NULL)
    {
      gcc_obstack_init (&reload_obstack);
      reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
    }

  INIT_REG_SET (&spilled_pseudos);
  INIT_REG_SET (&changed_allocation_pseudos);
  INIT_REG_SET (&pseudos_counted);
}
468 
469 /* List of insn chains that are currently unused.  */
470 static struct insn_chain *unused_insn_chains = 0;
471 
472 /* Allocate an empty insn_chain structure.  */
473 struct insn_chain *
474 new_insn_chain (void)
475 {
476   struct insn_chain *c;
477 
478   if (unused_insn_chains == 0)
479     {
480       c = XOBNEW (&reload_obstack, struct insn_chain);
481       INIT_REG_SET (&c->live_throughout);
482       INIT_REG_SET (&c->dead_or_set);
483     }
484   else
485     {
486       c = unused_insn_chains;
487       unused_insn_chains = c->next;
488     }
489   c->is_caller_save_insn = 0;
490   c->need_operand_change = 0;
491   c->need_reload = 0;
492   c->need_elim = 0;
493   return c;
494 }
495 
496 /* Small utility function to set all regs in hard reg set TO which are
497    allocated to pseudos in regset FROM.  */
498 
void
compute_use_by_pseudos (HARD_REG_SET *to, regset from)
{
  unsigned int regno;
  reg_set_iterator rsi;

  /* Walk every pseudo register in FROM (hard regs are excluded by the
     starting index).  */
  EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
    {
      int r = reg_renumber[regno];

      if (r < 0)
	{
	  /* reload_combine uses the information from DF_LIVE_IN,
	     which might still contain registers that have not
	     actually been allocated since they have an
	     equivalence.  */
	  gcc_assert (ira_conflicts_p || reload_completed);
	}
      else
	/* Mark all hard regs the pseudo occupies, which may be more
	   than one depending on its mode.  */
	add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
    }
}
521 
522 /* Replace all pseudos found in LOC with their corresponding
523    equivalences.  */
524 
static void
replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
{
  rtx x = *loc;
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (! x)
    return;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);

      /* Hard registers are left alone; only pseudos have recorded
	 equivalences.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	return;

      /* First apply register eliminations.  If that changed the rtx,
	 rescan the result: the replacement may itself contain pseudos.  */
      x = eliminate_regs_1 (x, mem_mode, usage, true, false);
      if (x != *loc)
	{
	  *loc = x;
	  replace_pseudos_in (loc, mem_mode, usage);
	  return;
	}

      /* Substitute the first recorded equivalence, in this preference
	 order: constant, invariant, memory, address.  */
      if (reg_equiv_constant (regno))
	*loc = reg_equiv_constant (regno);
      else if (reg_equiv_invariant (regno))
	*loc = reg_equiv_invariant (regno);
      else if (reg_equiv_mem (regno))
	*loc = reg_equiv_mem (regno);
      else if (reg_equiv_address (regno))
	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
      else
	{
	  /* No equivalence: regno_reg_rtx must already map this pseudo
	     to something other than itself, or we would loop.  */
	  gcc_assert (!REG_P (regno_reg_rtx[regno])
		      || REGNO (regno_reg_rtx[regno]) != regno);
	  *loc = regno_reg_rtx[regno];
	}

      return;
    }
  else if (code == MEM)
    {
      /* Recurse into the address, passing down the mode of this MEM.  */
      replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
      return;
    }

  /* Process each of our operands recursively.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
}
584 
585 /* Determine if the current function has an exception receiver block
586    that reaches the exit block via non-exceptional edges  */
587 
588 static bool
589 has_nonexceptional_receiver (void)
590 {
591   edge e;
592   edge_iterator ei;
593   basic_block *tos, *worklist, bb;
594 
595   /* If we're not optimizing, then just err on the safe side.  */
596   if (!optimize)
597     return true;
598 
599   /* First determine which blocks can reach exit via normal paths.  */
600   tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
601 
602   FOR_EACH_BB_FN (bb, cfun)
603     bb->flags &= ~BB_REACHABLE;
604 
605   /* Place the exit block on our worklist.  */
606   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
607   *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
608 
609   /* Iterate: find everything reachable from what we've already seen.  */
610   while (tos != worklist)
611     {
612       bb = *--tos;
613 
614       FOR_EACH_EDGE (e, ei, bb->preds)
615 	if (!(e->flags & EDGE_ABNORMAL))
616 	  {
617 	    basic_block src = e->src;
618 
619 	    if (!(src->flags & BB_REACHABLE))
620 	      {
621 		src->flags |= BB_REACHABLE;
622 		*tos++ = src;
623 	      }
624 	  }
625     }
626   free (worklist);
627 
628   /* Now see if there's a reachable block with an exceptional incoming
629      edge.  */
630   FOR_EACH_BB_FN (bb, cfun)
631     if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
632       return true;
633 
634   /* No exceptional block reached exit unexceptionally.  */
635   return false;
636 }
637 
638 /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
639    zero elements) to MAX_REG_NUM elements.
640 
641    Initialize all new fields to NULL and update REG_EQUIVS_SIZE.  */
642 void
643 grow_reg_equivs (void)
644 {
645   int old_size = vec_safe_length (reg_equivs);
646   int max_regno = max_reg_num ();
647   int i;
648   reg_equivs_t ze;
649 
650   memset (&ze, 0, sizeof (reg_equivs_t));
651   vec_safe_reserve (reg_equivs, max_regno);
652   for (i = old_size; i < max_regno; i++)
653     reg_equivs->quick_insert (i, ze);
654 }
655 
656 
657 /* Global variables used by reload and its subroutines.  */
658 
659 /* The current basic block while in calculate_elim_costs_all_insns.  */
660 static basic_block elim_bb;
661 
662 /* Set during calculate_needs if an insn needs register elimination.  */
663 static int something_needs_elimination;
664 /* Set during calculate_needs if an insn needs an operand changed.  */
665 static int something_needs_operands_changed;
666 /* Set by alter_regs if we spilled a register to the stack.  */
667 static bool something_was_spilled;
668 
669 /* Nonzero means we couldn't get enough spill regs.  */
670 static int failure;
671 
672 /* Temporary array of pseudo-register number.  */
673 static int *temp_pseudo_reg_arr;
674 
675 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
676    If that insn didn't set the register (i.e., it copied the register to
677    memory), just delete that insn instead of the equivalencing insn plus
678    anything now dead.  If we call delete_dead_insn on that insn, we may
679    delete the insn that actually sets the register if the register dies
680    there and that is incorrect.  */
static void
remove_init_insns ()
{
  /* Scan every pseudo that failed to get a hard register but has
     equivalence-establishing insns recorded.  */
  for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
	{
	  rtx list;
	  for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
	    {
	      rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));

	      /* If we already deleted the insn or if it may trap, we can't
		 delete it.  The latter case shouldn't happen, but can
		 if an insn has a variable address, gets a REG_EH_REGION
		 note added to it, and then gets converted into a load
		 from a constant address.  */
	      if (NOTE_P (equiv_insn)
		  || can_throw_internal (equiv_insn))
		;
	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
		/* The insn sets the register: delete it along with any
		   insns that become dead as a result.  */
		delete_dead_insn (equiv_insn);
	      else
		/* The insn only copied the register to memory; delete
		   just this insn, not its feeders (see the comment above
		   this function).  */
		SET_INSN_DELETED (equiv_insn);
	    }
	}
    }
}
709 
710 /* Return true if remove_init_insns will delete INSN.  */
711 static bool
712 will_delete_init_insn_p (rtx_insn *insn)
713 {
714   rtx set = single_set (insn);
715   if (!set || !REG_P (SET_DEST (set)))
716     return false;
717   unsigned regno = REGNO (SET_DEST (set));
718 
719   if (can_throw_internal (insn))
720     return false;
721 
722   if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
723     return false;
724 
725   for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
726     {
727       rtx equiv_insn = XEXP (list, 0);
728       if (equiv_insn == insn)
729 	return true;
730     }
731   return false;
732 }
733 
734 /* Main entry point for the reload pass.
735 
736    FIRST is the first insn of the function being compiled.
737 
738    GLOBAL nonzero means we were called from global_alloc
739    and should attempt to reallocate any pseudoregs that we
740    displace from hard regs we will use for reloads.
741    If GLOBAL is zero, we do not have enough information to do that,
742    so any pseudo reg that is spilled must go to the stack.
743 
744    Return value is TRUE if reload likely left dead insns in the
   stream and a DCE pass should be run to eliminate them.  Else the
746    return value is FALSE.  */
747 
748 bool
749 reload (rtx_insn *first, int global)
750 {
751   int i, n;
752   rtx_insn *insn;
753   struct elim_table *ep;
754   basic_block bb;
755   bool inserted;
756 
757   /* Make sure even insns with volatile mem refs are recognizable.  */
758   init_recog ();
759 
760   failure = 0;
761 
762   reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
763 
764   /* Make sure that the last insn in the chain
765      is not something that needs reloading.  */
766   emit_note (NOTE_INSN_DELETED);
767 
768   /* Enable find_equiv_reg to distinguish insns made by reload.  */
769   reload_first_uid = get_max_uid ();
770 
771 #ifdef SECONDARY_MEMORY_NEEDED
772   /* Initialize the secondary memory table.  */
773   clear_secondary_mem ();
774 #endif
775 
776   /* We don't have a stack slot for any spill reg yet.  */
777   memset (spill_stack_slot, 0, sizeof spill_stack_slot);
778   memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
779 
780   /* Initialize the save area information for caller-save, in case some
781      are needed.  */
782   init_save_areas ();
783 
784   /* Compute which hard registers are now in use
785      as homes for pseudo registers.
786      This is done here rather than (eg) in global_alloc
787      because this point is reached even if not optimizing.  */
788   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
789     mark_home_live (i);
790 
791   /* A function that has a nonlocal label that can reach the exit
792      block via non-exceptional paths must save all call-saved
793      registers.  */
794   if (cfun->has_nonlocal_label
795       && has_nonexceptional_receiver ())
796     crtl->saves_all_registers = 1;
797 
798   if (crtl->saves_all_registers)
799     for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
800       if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
801 	df_set_regs_ever_live (i, true);
802 
803   /* Find all the pseudo registers that didn't get hard regs
804      but do have known equivalent constants or memory slots.
805      These include parameters (known equivalent to parameter slots)
806      and cse'd or loop-moved constant memory addresses.
807 
808      Record constant equivalents in reg_equiv_constant
809      so they will be substituted by find_reloads.
810      Record memory equivalents in reg_mem_equiv so they can
811      be substituted eventually by altering the REG-rtx's.  */
812 
813   grow_reg_equivs ();
814   reg_old_renumber = XCNEWVEC (short, max_regno);
815   memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
816   pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
817   pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
818 
819   CLEAR_HARD_REG_SET (bad_spill_regs_global);
820 
821   init_eliminable_invariants (first, true);
822   init_elim_table ();
823 
824   /* Alter each pseudo-reg rtx to contain its hard reg number.  Assign
825      stack slots to the pseudos that lack hard regs or equivalents.
826      Do not touch virtual registers.  */
827 
828   temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
829   for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
830     temp_pseudo_reg_arr[n++] = i;
831 
832   if (ira_conflicts_p)
833     /* Ask IRA to order pseudo-registers for better stack slot
834        sharing.  */
835     ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
836 
837   for (i = 0; i < n; i++)
838     alter_reg (temp_pseudo_reg_arr[i], -1, false);
839 
840   /* If we have some registers we think can be eliminated, scan all insns to
841      see if there is an insn that sets one of these registers to something
842      other than itself plus a constant.  If so, the register cannot be
843      eliminated.  Doing this scan here eliminates an extra pass through the
844      main reload loop in the most common case where register elimination
845      cannot be done.  */
846   for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
847     if (INSN_P (insn))
848       note_stores (PATTERN (insn), mark_not_eliminable, NULL);
849 
850   maybe_fix_stack_asms ();
851 
852   insns_need_reload = 0;
853   something_needs_elimination = 0;
854 
855   /* Initialize to -1, which means take the first spill register.  */
856   last_spill_reg = -1;
857 
858   /* Spill any hard regs that we know we can't eliminate.  */
859   CLEAR_HARD_REG_SET (used_spill_regs);
860   /* There can be multiple ways to eliminate a register;
861      they should be listed adjacently.
862      Elimination for any register fails only if all possible ways fail.  */
863   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
864     {
865       int from = ep->from;
866       int can_eliminate = 0;
867       do
868 	{
869           can_eliminate |= ep->can_eliminate;
870           ep++;
871 	}
872       while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
873       if (! can_eliminate)
874 	spill_hard_reg (from, 1);
875     }
876 
877   if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
878     spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
879 
880   finish_spills (global);
881 
882   /* From now on, we may need to generate moves differently.  We may also
883      allow modifications of insns which cause them to not be recognized.
884      Any such modifications will be cleaned up during reload itself.  */
885   reload_in_progress = 1;
886 
887   /* This loop scans the entire function each go-round
888      and repeats until one repetition spills no additional hard regs.  */
889   for (;;)
890     {
891       int something_changed;
892       HOST_WIDE_INT starting_frame_size;
893 
894       starting_frame_size = get_frame_size ();
895       something_was_spilled = false;
896 
897       set_initial_elim_offsets ();
898       set_initial_label_offsets ();
899 
900       /* For each pseudo register that has an equivalent location defined,
901 	 try to eliminate any eliminable registers (such as the frame pointer)
902 	 assuming initial offsets for the replacement register, which
903 	 is the normal case.
904 
905 	 If the resulting location is directly addressable, substitute
906 	 the MEM we just got directly for the old REG.
907 
908 	 If it is not addressable but is a constant or the sum of a hard reg
909 	 and constant, it is probably not addressable because the constant is
910 	 out of range, in that case record the address; we will generate
911 	 hairy code to compute the address in a register each time it is
912 	 needed.  Similarly if it is a hard register, but one that is not
913 	 valid as an address register.
914 
915 	 If the location is not addressable, but does not have one of the
916 	 above forms, assign a stack slot.  We have to do this to avoid the
917 	 potential of producing lots of reloads if, e.g., a location involves
918 	 a pseudo that didn't get a hard register and has an equivalent memory
919 	 location that also involves a pseudo that didn't get a hard register.
920 
921 	 Perhaps at some point we will improve reload_when_needed handling
922 	 so this problem goes away.  But that's very hairy.  */
923 
924       for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
925 	if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
926 	  {
927 	    rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
928 				    NULL_RTX);
929 
930 	    if (strict_memory_address_addr_space_p
931 		  (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
932 		   MEM_ADDR_SPACE (x)))
933 	      reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
934 	    else if (CONSTANT_P (XEXP (x, 0))
935 		     || (REG_P (XEXP (x, 0))
936 			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
937 		     || (GET_CODE (XEXP (x, 0)) == PLUS
938 			 && REG_P (XEXP (XEXP (x, 0), 0))
939 			 && (REGNO (XEXP (XEXP (x, 0), 0))
940 			     < FIRST_PSEUDO_REGISTER)
941 			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
942 	      reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
943 	    else
944 	      {
945 		/* Make a new stack slot.  Then indicate that something
946 		   changed so we go back and recompute offsets for
947 		   eliminable registers because the allocation of memory
948 		   below might change some offset.  reg_equiv_{mem,address}
949 		   will be set up for this pseudo on the next pass around
950 		   the loop.  */
951 		reg_equiv_memory_loc (i) = 0;
952 		reg_equiv_init (i) = 0;
953 		alter_reg (i, -1, true);
954 	      }
955 	  }
956 
957       if (caller_save_needed)
958 	setup_save_areas ();
959 
960       if (starting_frame_size && crtl->stack_alignment_needed)
961 	{
962 	  /* If we have a stack frame, we must align it now.  The
963 	     stack size may be a part of the offset computation for
964 	     register elimination.  So if this changes the stack size,
965 	     then repeat the elimination bookkeeping.  We don't
966 	     realign when there is no stack, as that will cause a
967 	     stack frame when none is needed should
968 	     STARTING_FRAME_OFFSET not be already aligned to
969 	     STACK_BOUNDARY.  */
970 	  assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
971 	}
972       /* If we allocated another stack slot, redo elimination bookkeeping.  */
973       if (something_was_spilled || starting_frame_size != get_frame_size ())
974 	{
975 	  if (update_eliminables_and_spill ())
976 	    finish_spills (0);
977 	  continue;
978 	}
979 
980       if (caller_save_needed)
981 	{
982 	  save_call_clobbered_regs ();
983 	  /* That might have allocated new insn_chain structures.  */
984 	  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
985 	}
986 
987       calculate_needs_all_insns (global);
988 
989       if (! ira_conflicts_p)
990 	/* Don't do it for IRA.  We need this info because we don't
991 	   change live_throughout and dead_or_set for chains when IRA
992 	   is used.  */
993 	CLEAR_REG_SET (&spilled_pseudos);
994 
995       something_changed = 0;
996 
997       /* If we allocated any new memory locations, make another pass
998 	 since it might have changed elimination offsets.  */
999       if (something_was_spilled || starting_frame_size != get_frame_size ())
1000 	something_changed = 1;
1001 
1002       /* Even if the frame size remained the same, we might still have
1003 	 changed elimination offsets, e.g. if find_reloads called
1004 	 force_const_mem requiring the back end to allocate a constant
1005 	 pool base register that needs to be saved on the stack.  */
1006       else if (!verify_initial_elim_offsets ())
1007 	something_changed = 1;
1008 
1009       if (update_eliminables_and_spill ())
1010 	{
1011 	  finish_spills (0);
1012 	  something_changed = 1;
1013 	}
1014       else
1015 	{
1016 	  select_reload_regs ();
1017 	  if (failure)
1018 	    goto failed;
1019 	  if (insns_need_reload)
1020 	    something_changed |= finish_spills (global);
1021 	}
1022 
1023       if (! something_changed)
1024 	break;
1025 
1026       if (caller_save_needed)
1027 	delete_caller_save_insns ();
1028 
1029       obstack_free (&reload_obstack, reload_firstobj);
1030     }
1031 
1032   /* If global-alloc was run, notify it of any register eliminations we have
1033      done.  */
1034   if (global)
1035     for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1036       if (ep->can_eliminate)
1037 	mark_elimination (ep->from, ep->to);
1038 
1039   remove_init_insns ();
1040 
1041   /* Use the reload registers where necessary
1042      by generating move instructions to move the must-be-register
1043      values into or out of the reload registers.  */
1044 
1045   if (insns_need_reload != 0 || something_needs_elimination
1046       || something_needs_operands_changed)
1047     {
1048       HOST_WIDE_INT old_frame_size = get_frame_size ();
1049 
1050       reload_as_needed (global);
1051 
1052       gcc_assert (old_frame_size == get_frame_size ());
1053 
1054       gcc_assert (verify_initial_elim_offsets ());
1055     }
1056 
1057   /* If we were able to eliminate the frame pointer, show that it is no
     longer live at the start of any basic block.  If it is live by
1059      virtue of being in a pseudo, that pseudo will be marked live
1060      and hence the frame pointer will be known to be live via that
1061      pseudo.  */
1062 
1063   if (! frame_pointer_needed)
1064     FOR_EACH_BB_FN (bb, cfun)
1065       bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1066 
1067   /* Come here (with failure set nonzero) if we can't get enough spill
1068      regs.  */
1069  failed:
1070 
1071   CLEAR_REG_SET (&changed_allocation_pseudos);
1072   CLEAR_REG_SET (&spilled_pseudos);
1073   reload_in_progress = 0;
1074 
1075   /* Now eliminate all pseudo regs by modifying them into
1076      their equivalent memory references.
1077      The REG-rtx's for the pseudos are modified in place,
1078      so all insns that used to refer to them now refer to memory.
1079 
1080      For a reg that has a reg_equiv_address, all those insns
1081      were changed by reloading so that no insns refer to it any longer;
1082      but the DECL_RTL of a variable decl may refer to it,
1083      and if so this causes the debugging info to mention the variable.  */
1084 
1085   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1086     {
1087       rtx addr = 0;
1088 
1089       if (reg_equiv_mem (i))
1090 	addr = XEXP (reg_equiv_mem (i), 0);
1091 
1092       if (reg_equiv_address (i))
1093 	addr = reg_equiv_address (i);
1094 
1095       if (addr)
1096 	{
1097 	  if (reg_renumber[i] < 0)
1098 	    {
1099 	      rtx reg = regno_reg_rtx[i];
1100 
1101 	      REG_USERVAR_P (reg) = 0;
1102 	      PUT_CODE (reg, MEM);
1103 	      XEXP (reg, 0) = addr;
1104 	      if (reg_equiv_memory_loc (i))
1105 		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1106 	      else
1107 		MEM_ATTRS (reg) = 0;
1108 	      MEM_NOTRAP_P (reg) = 1;
1109 	    }
1110 	  else if (reg_equiv_mem (i))
1111 	    XEXP (reg_equiv_mem (i), 0) = addr;
1112 	}
1113 
1114       /* We don't want complex addressing modes in debug insns
1115 	 if simpler ones will do, so delegitimize equivalences
1116 	 in debug insns.  */
1117       if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1118 	{
1119 	  rtx reg = regno_reg_rtx[i];
1120 	  rtx equiv = 0;
1121 	  df_ref use, next;
1122 
1123 	  if (reg_equiv_constant (i))
1124 	    equiv = reg_equiv_constant (i);
1125 	  else if (reg_equiv_invariant (i))
1126 	    equiv = reg_equiv_invariant (i);
1127 	  else if (reg && MEM_P (reg))
1128 	    equiv = targetm.delegitimize_address (reg);
1129 	  else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1130 	    equiv = reg;
1131 
1132 	  if (equiv == reg)
1133 	    continue;
1134 
1135 	  for (use = DF_REG_USE_CHAIN (i); use; use = next)
1136 	    {
1137 	      insn = DF_REF_INSN (use);
1138 
1139 	      /* Make sure the next ref is for a different instruction,
1140 		 so that we're not affected by the rescan.  */
1141 	      next = DF_REF_NEXT_REG (use);
1142 	      while (next && DF_REF_INSN (next) == insn)
1143 		next = DF_REF_NEXT_REG (next);
1144 
1145 	      if (DEBUG_INSN_P (insn))
1146 		{
1147 		  if (!equiv)
1148 		    {
1149 		      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1150 		      df_insn_rescan_debug_internal (insn);
1151 		    }
1152 		  else
1153 		    INSN_VAR_LOCATION_LOC (insn)
1154 		      = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1155 					      reg, equiv);
1156 		}
1157 	    }
1158 	}
1159     }
1160 
1161   /* We must set reload_completed now since the cleanup_subreg_operands call
1162      below will re-recognize each insn and reload may have generated insns
1163      which are only valid during and after reload.  */
1164   reload_completed = 1;
1165 
1166   /* Make a pass over all the insns and delete all USEs which we inserted
1167      only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1168      notes.  Delete all CLOBBER insns, except those that refer to the return
1169      value and the special mem:BLK CLOBBERs added to prevent the scheduler
1170      from misarranging variable-array code, and simplify (subreg (reg))
1171      operands.  Strip and regenerate REG_INC notes that may have been moved
1172      around.  */
1173 
1174   for (insn = first; insn; insn = NEXT_INSN (insn))
1175     if (INSN_P (insn))
1176       {
1177 	rtx *pnote;
1178 
1179 	if (CALL_P (insn))
1180 	  replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1181 			      VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1182 
1183 	if ((GET_CODE (PATTERN (insn)) == USE
1184 	     /* We mark with QImode USEs introduced by reload itself.  */
1185 	     && (GET_MODE (insn) == QImode
1186 		 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1187 	    || (GET_CODE (PATTERN (insn)) == CLOBBER
1188 		&& (!MEM_P (XEXP (PATTERN (insn), 0))
1189 		    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1190 		    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1191 			&& XEXP (XEXP (PATTERN (insn), 0), 0)
1192 				!= stack_pointer_rtx))
1193 		&& (!REG_P (XEXP (PATTERN (insn), 0))
1194 		    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1195 	  {
1196 	    delete_insn (insn);
1197 	    continue;
1198 	  }
1199 
1200 	/* Some CLOBBERs may survive until here and still reference unassigned
1201 	   pseudos with const equivalent, which may in turn cause ICE in later
1202 	   passes if the reference remains in place.  */
1203 	if (GET_CODE (PATTERN (insn)) == CLOBBER)
1204 	  replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1205 			      VOIDmode, PATTERN (insn));
1206 
1207 	/* Discard obvious no-ops, even without -O.  This optimization
1208 	   is fast and doesn't interfere with debugging.  */
1209 	if (NONJUMP_INSN_P (insn)
1210 	    && GET_CODE (PATTERN (insn)) == SET
1211 	    && REG_P (SET_SRC (PATTERN (insn)))
1212 	    && REG_P (SET_DEST (PATTERN (insn)))
1213 	    && (REGNO (SET_SRC (PATTERN (insn)))
1214 		== REGNO (SET_DEST (PATTERN (insn)))))
1215 	  {
1216 	    delete_insn (insn);
1217 	    continue;
1218 	  }
1219 
1220 	pnote = &REG_NOTES (insn);
1221 	while (*pnote != 0)
1222 	  {
1223 	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
1224 		|| REG_NOTE_KIND (*pnote) == REG_UNUSED
1225 		|| REG_NOTE_KIND (*pnote) == REG_INC)
1226 	      *pnote = XEXP (*pnote, 1);
1227 	    else
1228 	      pnote = &XEXP (*pnote, 1);
1229 	  }
1230 
1231 	if (AUTO_INC_DEC)
1232 	  add_auto_inc_notes (insn, PATTERN (insn));
1233 
1234 	/* Simplify (subreg (reg)) if it appears as an operand.  */
1235 	cleanup_subreg_operands (insn);
1236 
1237 	/* Clean up invalid ASMs so that they don't confuse later passes.
1238 	   See PR 21299.  */
1239 	if (asm_noperands (PATTERN (insn)) >= 0)
1240 	  {
1241 	    extract_insn (insn);
1242 	    if (!constrain_operands (1, get_enabled_alternatives (insn)))
1243 	      {
1244 		error_for_asm (insn,
1245 			       "%<asm%> operand has impossible constraints");
1246 		delete_insn (insn);
1247 		continue;
1248 	      }
1249 	  }
1250       }
1251 
1252   free (temp_pseudo_reg_arr);
1253 
1254   /* Indicate that we no longer have known memory locations or constants.  */
1255   free_reg_equiv ();
1256 
1257   free (reg_max_ref_width);
1258   free (reg_old_renumber);
1259   free (pseudo_previous_regs);
1260   free (pseudo_forbidden_regs);
1261 
1262   CLEAR_HARD_REG_SET (used_spill_regs);
1263   for (i = 0; i < n_spills; i++)
1264     SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1265 
1266   /* Free all the insn_chain structures at once.  */
1267   obstack_free (&reload_obstack, reload_startobj);
1268   unused_insn_chains = 0;
1269 
1270   inserted = fixup_abnormal_edges ();
1271 
1272   /* We've possibly turned single trapping insn into multiple ones.  */
1273   if (cfun->can_throw_non_call_exceptions)
1274     {
1275       auto_sbitmap blocks (last_basic_block_for_fn (cfun));
1276       bitmap_ones (blocks);
1277       find_many_sub_basic_blocks (blocks);
1278     }
1279 
1280   if (inserted)
1281     commit_edge_insertions ();
1282 
1283   /* Replacing pseudos with their memory equivalents might have
1284      created shared rtx.  Subsequent passes would get confused
1285      by this, so unshare everything here.  */
1286   unshare_all_rtl_again (first);
1287 
1288 #ifdef STACK_BOUNDARY
1289   /* init_emit has set the alignment of the hard frame pointer
1290      to STACK_BOUNDARY.  It is very likely no longer valid if
1291      the hard frame pointer was used for register allocation.  */
1292   if (!frame_pointer_needed)
1293     REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1294 #endif
1295 
1296   substitute_stack.release ();
1297 
1298   gcc_assert (bitmap_empty_p (&spilled_pseudos));
1299 
1300   reload_completed = !failure;
1301 
1302   return need_dce;
1303 }
1304 
1305 /* Yet another special case.  Unfortunately, reg-stack forces people to
1306    write incorrect clobbers in asm statements.  These clobbers must not
1307    cause the register to appear in bad_spill_regs, otherwise we'll call
1308    fatal_insn later.  We clear the corresponding regnos in the live
1309    register sets to avoid this.
1310    The whole thing is rather sick, I'm afraid.  */
1311 
static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns (asm_noperands >= 0) are of interest here.  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      /* The clobbers we look for below live in a PARALLEL body; any
	 other form has nothing for us to clean up.  */
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  /* '#' starts text that is ignored up to the end of the
		     current alternative; skip to the next ',' or NUL.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case 'g':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  enum constraint_num cn = lookup_constraint (p);
		  if (insn_extra_address_constraint (cn))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					     ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [reg_class_for_constraint (cn)];
		  break;
		}
	      /* Constraints may be multi-character; advance by the full
		 length of the one just processed.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1411 
1412 /* Copy the global variables n_reloads and rld into the corresponding elts
1413    of CHAIN.  */
1414 static void
1415 copy_reloads (struct insn_chain *chain)
1416 {
1417   chain->n_reloads = n_reloads;
1418   chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1419   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1420   reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1421 }
1422 
/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.  GLOBAL is passed
   through to find_reloads.  */
static void
calculate_needs_all_insns (int global)
{
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  /* Mark the obstack position where per-insn scratch data begins, so it
     can be discarded again below.  */
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx_insn *insn = chain->insn;

      /* Fetch the successor now; CHAIN may be unlinked from the list
	 below when a no-op move is deleted.  */
      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  /* Save the original pattern, code and notes so they can be
	     restored if we decide to discard an elimination below.  */
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;

	  /* Skip insns that only set an equivalence.  */
	  if (will_delete_init_insn_p (insn))
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
		       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
		       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
				       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  /* Return the chain node to the free list.  */
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  /* If the insn needs reloads, save them and append it to the
	     insns_need_reload list.  */
	  if (n_reloads != 0)
	    {
	      copy_reloads (chain);
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  /* Terminate the insns_need_reload list.  */
  *pprev_reload = 0;
}
1546 
/* This function is called from the register allocator to set up estimates
   for the cost of eliminating pseudos which have REG_EQUIV equivalences to
   an invariant.  The structure is similar to calculate_needs_all_insns.
   The resulting costs are reported to IRA via ira_adjust_equiv_reg_cost.  */

void
calculate_elim_costs_all_insns (void)
{
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  /* Per-pseudo accumulated cost of its equivalencing insns, indexed by
     register number; zero-initialized.  */
  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant (REGNO (SET_DEST (set)))
		      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx_insn_list *init = reg_equiv_init (regno);
		  if (init)
		    {
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      machine_mode mode = GET_MODE (SET_DEST (set));
		      int cost = set_src_cost (t, mode,
					       optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      /* Weight the rtx cost of the eliminated source by the
			 execution frequency of this block.  */
		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }
  /* Hand the accumulated costs to IRA.  A pseudo whose equivalence has no
     surviving init insn gets cost 0.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant (i))
	{
	  if (reg_equiv_init (i))
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  /* Release the per-pass tables.  */
  free (reg_equiv_init_cost);
  free (offsets_known_at);
  free (offsets_at);
  offsets_at = NULL;
  offsets_known_at = NULL;
}
1644 
1645 /* Comparison function for qsort to decide which of two reloads
1646    should be handled first.  *P1 and *P2 are the reload numbers.  */
1647 
1648 static int
1649 reload_reg_class_lower (const void *r1p, const void *r2p)
1650 {
1651   int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1652   int t;
1653 
1654   /* Consider required reloads before optional ones.  */
1655   t = rld[r1].optional - rld[r2].optional;
1656   if (t != 0)
1657     return t;
1658 
1659   /* Count all solitary classes before non-solitary ones.  */
1660   t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1661        - (reg_class_size[(int) rld[r1].rclass] == 1));
1662   if (t != 0)
1663     return t;
1664 
1665   /* Aside from solitaires, consider all multi-reg groups first.  */
1666   t = rld[r2].nregs - rld[r1].nregs;
1667   if (t != 0)
1668     return t;
1669 
1670   /* Consider reloads in order of increasing reg-class number.  */
1671   t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1672   if (t != 0)
1673     return t;
1674 
1675   /* If reloads are equally urgent, sort by reload number,
1676      so that the results of qsort leave nothing to chance.  */
1677   return r1 - r2;
1678 }
1679 
/* The cost of spilling each hard reg.  */
static int spill_cost[FIRST_PSEUDO_REGISTER];

/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   is incremented only for the first hard reg of a multi-reg pseudo.  */
static int spill_add_cost[FIRST_PSEUDO_REGISTER];

/* Map of hard regno to pseudo regno currently occupying the hard
   reg, or -1 when the hard reg holds no pseudo.  */
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1691 
1692 /* Update the spill cost arrays, considering that pseudo REG is live.  */
1693 
1694 static void
1695 count_pseudo (int reg)
1696 {
1697   int freq = REG_FREQ (reg);
1698   int r = reg_renumber[reg];
1699   int nregs;
1700 
1701   /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
1702   if (ira_conflicts_p && r < 0)
1703     return;
1704 
1705   if (REGNO_REG_SET_P (&pseudos_counted, reg)
1706       || REGNO_REG_SET_P (&spilled_pseudos, reg))
1707     return;
1708 
1709   SET_REGNO_REG_SET (&pseudos_counted, reg);
1710 
1711   gcc_assert (r >= 0);
1712 
1713   spill_add_cost[r] += freq;
1714   nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1715   while (nregs-- > 0)
1716     {
1717       hard_regno_to_pseudo_regno[r + nregs] = reg;
1718       spill_cost[r + nregs] += freq;
1719     }
1720 }
1721 
/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */

static void
order_regs_for_reload (struct insn_chain *chain)
{
  unsigned i;
  HARD_REG_SET used_by_pseudos;
  HARD_REG_SET used_by_pseudos2;
  reg_set_iterator rsi;

  /* Fixed registers can never be used for spilling.  */
  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);

  /* Reset the per-insn cost data before recomputing it.  */
  memset (spill_cost, 0, sizeof spill_cost);
  memset (spill_add_cost, 0, sizeof spill_add_cost);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    hard_regno_to_pseudo_regno[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  First exclude hard registers
     that are live in or across this insn.  */

  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);

  /* Now find out which pseudos are allocated to it, and update
     hard_reg_n_uses.  */
  CLEAR_REG_SET (&pseudos_counted);

  /* Charge costs for every pseudo that is live through, or set/dead in,
     the insn; PSEUDOS_COUNTED keeps count_pseudo from double counting
     a pseudo present in both sets.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  CLEAR_REG_SET (&pseudos_counted);
}
1765 
1766 /* Vector of reload-numbers showing the order in which the reloads should
1767    be processed.  */
1768 static short reload_order[MAX_RELOADS];
1769 
1770 /* This is used to keep track of the spill regs used in one insn.  */
1771 static HARD_REG_SET used_spill_regs_local;
1772 
1773 /* We decided to spill hard register SPILLED, which has a size of
1774    SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
1775    is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1776    update SPILL_COST/SPILL_ADD_COST.  */
1777 
1778 static void
1779 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1780 {
1781   int freq = REG_FREQ (reg);
1782   int r = reg_renumber[reg];
1783   int nregs;
1784 
1785   /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
1786   if (ira_conflicts_p && r < 0)
1787     return;
1788 
1789   gcc_assert (r >= 0);
1790 
1791   nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1792 
1793   if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1794       || spilled + spilled_nregs <= r || r + nregs <= spilled)
1795     return;
1796 
1797   SET_REGNO_REG_SET (&spilled_pseudos, reg);
1798 
1799   spill_add_cost[r] -= freq;
1800   while (nregs-- > 0)
1801     {
1802       hard_regno_to_pseudo_regno[r + nregs] = -1;
1803       spill_cost[r + nregs] -= freq;
1804     }
1805 }
1806 
/* Find reload register to use for reload number ORDER.  Return nonzero
   on success (recording the choice in rld[] and used_spill_regs_local),
   zero if no suitable hard register was found.  */

static int
find_reg (struct insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  /* Hard registers that must not be used for this reload.  */
  HARD_REG_SET not_usable;
  /* Hard registers already claimed by earlier conflicting reloads.  */
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
  /* Registers outside the reload's class are not usable either.  */
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);

  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      /* Earlier reloads that conflict with RNUM pin down their regs.  */
      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
	for (j = 0; j < rld[other].nregs; j++)
	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
	  && HARD_REGNO_MODE_OK (regno, rl->mode))
	{
	  int this_cost = spill_cost[regno];
	  int ok = 1;
	  unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];

	  /* A multi-register reload needs every one of its hard regs to
	     be usable; accumulate the additional spill cost as we go.  */
	  for (j = 1; j < this_nregs; j++)
	    {
	      this_cost += spill_add_cost[regno + j];
	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
		ok = 0;
	    }
	  if (! ok)
	    continue;

	  if (ira_conflicts_p)
	    {
	      /* Ask IRA to find a better pseudo-register for
		 spilling.  */
	      for (n = j = 0; j < this_nregs; j++)
		{
		  int r = hard_regno_to_pseudo_regno[regno + j];

		  if (r < 0)
		    continue;
		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
		    regno_pseudo_regs[n++] = r;
		}
	      /* The pseudo list is terminated by -1.  */
	      regno_pseudo_regs[n++] = -1;
	      if (best_reg < 0
		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
						      best_regno_pseudo_regs,
						      rl->in, rl->out,
						      chain->insn))
		{
		  best_reg = regno;
		  for (j = 0;; j++)
		    {
		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
		      if (regno_pseudo_regs[j] < 0)
			break;
		    }
		}
	      continue;
	    }

	  /* Slightly prefer a register that already holds the input or
	     will receive the output of this reload.  */
	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
	    this_cost--;
	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
	    this_cost--;
	  if (this_cost < best_cost
	      /* Among registers with equal cost, prefer caller-saved ones, or
		 use REG_ALLOC_ORDER if it is defined.  */
	      || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
		  && (inv_reg_alloc_order[regno]
		      < inv_reg_alloc_order[best_reg])
#else
		  && call_used_regs[regno]
		  && ! call_used_regs[best_reg]
#endif
		  ))
	    {
	      best_reg = regno;
	      best_cost = this_cost;
	    }
	}
    }
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
  rl->regno = best_reg;

  /* Adjust the spill costs for every live pseudo displaced by this
     choice of reload register.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  /* After count_spilled_pseudo, the chosen regs must carry no residual
     cost; claim them for this insn.  */
  for (i = 0; i < rl->nregs; i++)
    {
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }
  return 1;
}
1947 
/* Find more reload regs to satisfy the remaining need of an insn, which
   is given by CHAIN.
   Do it by ascending class number, since otherwise a reg
   might be spilled for a big class and might fail to count
   for a smaller class even though it belongs to that class.

   On failure, sets the global FAILURE flag and returns.  */

static void
find_reload_regs (struct insn_chain *chain)
{
  int i;

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.  */
  for (i = 0; i < chain->n_reloads; i++)
    {
      /* Show whether this reload already has a hard reg.  */
      if (chain->rld[i].reg_rtx)
	{
	  int regno = REGNO (chain->rld[i].reg_rtx);
	  chain->rld[i].regno = regno;
	  chain->rld[i].nregs
	    = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
	}
      else
	chain->rld[i].regno = -1;
      reload_order[i] = i;
    }

  /* Work on a copy of the chain's reloads in the global RLD array.  */
  n_reloads = chain->n_reloads;
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));

  CLEAR_HARD_REG_SET (used_spill_regs_local);

  if (dump_file)
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Compute the order of preference for hard registers to spill.  */

  order_regs_for_reload (chain);

  for (i = 0; i < n_reloads; i++)
    {
      int r = reload_order[i];

      /* Ignore reloads that got marked inoperative.  */
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
	  && ! rld[r].optional
	  && rld[r].regno == -1)
	if (! find_reg (chain, i))
	  {
	    /* No usable register could be found: report and give up
	       on this insn.  */
	    if (dump_file)
	      fprintf (dump_file, "reload failure for reload %d\n", r);
	    spill_failure (chain->insn, rld[r].rclass);
	    failure = 1;
	    return;
	  }
    }

  /* Record the registers used for this insn and accumulate them into
     the overall set of spill registers.  */
  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);

  /* Copy the (possibly updated) reloads back to the chain.  */
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
}
2015 
2016 static void
2017 select_reload_regs (void)
2018 {
2019   struct insn_chain *chain;
2020 
2021   /* Try to satisfy the needs for each insn.  */
2022   for (chain = insns_need_reload; chain != 0;
2023        chain = chain->next_need_reload)
2024     find_reload_regs (chain);
2025 }
2026 
/* Delete all insns that were inserted by emit_caller_save_insns during
   this iteration.  */
static void
delete_caller_save_insns (void)
{
  struct insn_chain *c = reload_insn_chain;

  /* Walk the whole insn chain; each run of caller-save insns is
     unlinked and its chain nodes recycled.  */
  while (c != 0)
    {
      while (c != 0 && c->is_caller_save_insn)
	{
	  struct insn_chain *next = c->next;
	  rtx_insn *insn = c->insn;

	  /* Keep the list head valid when deleting the first node.  */
	  if (c == reload_insn_chain)
	    reload_insn_chain = next;
	  delete_insn (insn);

	  /* Unlink C from the doubly-linked chain.  */
	  if (next)
	    next->prev = c->prev;
	  if (c->prev)
	    c->prev->next = next;
	  /* Return the chain node to the free list for reuse.  */
	  c->next = unused_insn_chains;
	  unused_insn_chains = c;
	  c = next;
	}
      if (c != 0)
	c = c->next;
    }
}
2057 
2058 /* Handle the failure to find a register to spill.
2059    INSN should be one of the insns which needed this particular spill reg.  */
2060 
2061 static void
2062 spill_failure (rtx_insn *insn, enum reg_class rclass)
2063 {
2064   if (asm_noperands (PATTERN (insn)) >= 0)
2065     error_for_asm (insn, "can%'t find a register in class %qs while "
2066 		   "reloading %<asm%>",
2067 		   reg_class_names[rclass]);
2068   else
2069     {
2070       error ("unable to find a register to spill in class %qs",
2071 	     reg_class_names[rclass]);
2072 
2073       if (dump_file)
2074 	{
2075 	  fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2076 	  debug_reload_to_stream (dump_file);
2077 	}
2078       fatal_insn ("this is the insn:", insn);
2079     }
2080 }
2081 
/* Delete an unneeded INSN and any previous insns who sole purpose is loading
   data that is dead in INSN.  */

static void
delete_dead_insn (rtx_insn *insn)
{
  rtx_insn *prev = prev_active_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn make
     a note that we want to run DCE immediately after reload.

     We used to delete the previous insn & recurse, but that's wrong for
     block local equivalences.  Instead of trying to figure out the exact
     circumstances where we can delete the potentially dead insns, just
     let DCE do the job.  */
  if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
      && GET_CODE (PATTERN (prev)) == SET
      /* The comma expression binds PREV_DEST before REG_P tests it.  */
      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
    need_dce = 1;

  SET_INSN_DELETED (insn);
}
2108 
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.

   DONT_SHARE_P nonzero means never reuse or share a stack slot for I.  */

static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant (i) == 0
      && (reg_equiv_invariant (i) == 0
	  || reg_equiv_init (i) == 0)
      && reg_equiv_memory_loc (i) == 0)
    {
      rtx x = NULL_RTX;
      machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      /* TOTAL_SIZE covers paradoxical subreg references that are wider
	 than the pseudo's own mode.  */
      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
      int adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      /* If IRA supplied a reusable slot, nothing more to allocate.  */
      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
						         * BITS_PER_UNIT,
						         MODE_INT, 1),
				       adjust);
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about allocation a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  rtx stack_slot;

	  /* Grow MODE/TOTAL_SIZE/MIN_ALIGN to cover the existing slot so
	     the new slot can replace it for all its previous users.  */
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);
	  stack_slot = x;

	  /* Cancel the  big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc (i) = x;
    }
}
2270 
2271 /* Mark the slots in regs_ever_live for the hard regs used by
2272    pseudo-reg number REGNO, accessed in MODE.  */
2273 
2274 static void
2275 mark_home_live_1 (int regno, machine_mode mode)
2276 {
2277   int i, lim;
2278 
2279   i = reg_renumber[regno];
2280   if (i < 0)
2281     return;
2282   lim = end_hard_regno (mode, i);
2283   while (i < lim)
2284     df_set_regs_ever_live (i++, true);
2285 }
2286 
2287 /* Mark the slots in regs_ever_live for the hard regs
2288    used by pseudo-reg number REGNO.  */
2289 
2290 void
2291 mark_home_live (int regno)
2292 {
2293   if (reg_renumber[regno] >= 0)
2294     mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2295 }
2296 
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is nonzero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */

static void
set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are handled elsewhere; ignore them here.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = label_ref_label (x);

      /* fall through */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && BARRIER_P (tem))
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_TABLE_DATA:
      /* Process the labels inside the jump table's pattern.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);
      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* fall through */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
	 to indirectly and hence must have all eliminations at their
	 initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
	 at their initial offsets.  We want the first field for PARALLEL
	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (SET_SRC (x), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Handle each arm; a non-label, non-PC/RETURN arm falls out
	     of the switch to the "variable address" handling below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (label_ref_label (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (label_ref_label (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
2449 
/* This function examines every reg that occurs in X and adjusts the
   costs for its elimination which are gathered by IRA.  INSN is the
   insn in which X occurs.  We do not recurse into MEM expressions.  */

static void
note_reg_elim_costly (const_rtx x, rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      /* MEMs are explicitly excluded from this walk.  */
      if (MEM_P (x))
	iter.skip_subrtxes ();
      else if (REG_P (x)
	       && REGNO (x) >= FIRST_PSEUDO_REGISTER
	       && reg_equiv_init (REGNO (x))
	       && reg_equiv_invariant (REGNO (x)))
	{
	  /* Compute what the pseudo's invariant equivalence would cost
	     after elimination and report it to IRA, scaled by the basic
	     block's frequency and negated (it is a penalty).  */
	  rtx t = reg_equiv_invariant (REGNO (x));
	  rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
	  int cost = set_src_cost (new_rtx, Pmode,
				   optimize_bb_for_speed_p (elim_bb));
	  int freq = REG_FREQ_FROM_BB (elim_bb);

	  if (cost != 0)
	    ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
	}
    }
}
2479 
2480 /* Scan X and replace any eliminable registers (such as fp) with a
2481    replacement (such as sp), plus an offset.
2482 
2483    MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2484    much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2485    MEM, we are allowed to replace a sum of a register and the constant zero
2486    with the register, which we cannot do outside a MEM.  In addition, we need
2487    to record the fact that a register is referenced outside a MEM.
2488 
2489    If INSN is an insn, it is the insn containing X.  If we replace a REG
2490    in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2491    CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2492    the REG is being modified.
2493 
2494    Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2495    That's used when we eliminate in expressions stored in notes.
2496    This means, do not set ref_outside_mem even if the reference
2497    is outside of MEMs.
2498 
2499    If FOR_COSTS is true, we are being called before reload in order to
2500    estimate the costs of keeping registers with an equivalence unallocated.
2501 
2502    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2503    replacements done assuming all offsets are at their initial values.  If
2504    they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2505    encounter, return the actual location so that find_reloads will do
2506    the proper thing.  */
2507 
2508 static rtx
2509 eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
2510 		  bool may_use_invariant, bool for_costs)
2511 {
2512   enum rtx_code code = GET_CODE (x);
2513   struct elim_table *ep;
2514   int regno;
2515   rtx new_rtx;
2516   int i, j;
2517   const char *fmt;
2518   int copied = 0;
2519 
2520   if (! current_function_decl)
2521     return x;
2522 
2523   switch (code)
2524     {
2525     CASE_CONST_ANY:
2526     case CONST:
2527     case SYMBOL_REF:
2528     case CODE_LABEL:
2529     case PC:
2530     case CC0:
2531     case ASM_INPUT:
2532     case ADDR_VEC:
2533     case ADDR_DIFF_VEC:
2534     case RETURN:
2535       return x;
2536 
2537     case REG:
2538       regno = REGNO (x);
2539 
2540       /* First handle the case where we encounter a bare register that
2541 	 is eliminable.  Replace it with a PLUS.  */
2542       if (regno < FIRST_PSEUDO_REGISTER)
2543 	{
2544 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2545 	       ep++)
2546 	    if (ep->from_rtx == x && ep->can_eliminate)
2547 	      return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);
2548 
2549 	}
2550       else if (reg_renumber && reg_renumber[regno] < 0
2551 	       && reg_equivs
2552 	       && reg_equiv_invariant (regno))
2553 	{
2554 	  if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2555 	    return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2556 			             mem_mode, insn, true, for_costs);
2557 	  /* There exists at least one use of REGNO that cannot be
2558 	     eliminated.  Prevent the defining insn from being deleted.  */
2559 	  reg_equiv_init (regno) = NULL;
2560 	  if (!for_costs)
2561 	    alter_reg (regno, -1, true);
2562 	}
2563       return x;
2564 
2565     /* You might think handling MINUS in a manner similar to PLUS is a
2566        good idea.  It is not.  It has been tried multiple times and every
2567        time the change has had to have been reverted.
2568 
2569        Other parts of reload know a PLUS is special (gen_reload for example)
2570        and require special code to handle code a reloaded PLUS operand.
2571 
2572        Also consider backends where the flags register is clobbered by a
2573        MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2574        lea instruction comes to mind).  If we try to reload a MINUS, we
2575        may kill the flags register that was holding a useful value.
2576 
2577        So, please before trying to handle MINUS, consider reload as a
2578        whole instead of this little section as well as the backend issues.  */
2579     case PLUS:
2580       /* If this is the sum of an eliminable register and a constant, rework
2581 	 the sum.  */
2582       if (REG_P (XEXP (x, 0))
2583 	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2584 	  && CONSTANT_P (XEXP (x, 1)))
2585 	{
2586 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2587 	       ep++)
2588 	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2589 	      {
2590 		/* The only time we want to replace a PLUS with a REG (this
2591 		   occurs when the constant operand of the PLUS is the negative
2592 		   of the offset) is when we are inside a MEM.  We won't want
2593 		   to do so at other times because that would change the
2594 		   structure of the insn in a way that reload can't handle.
2595 		   We special-case the commonest situation in
2596 		   eliminate_regs_in_insn, so just replace a PLUS with a
2597 		   PLUS here, unless inside a MEM.  */
2598 		if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2599 		    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2600 		  return ep->to_rtx;
2601 		else
2602 		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
2603 				       plus_constant (Pmode, XEXP (x, 1),
2604 						      ep->previous_offset));
2605 	      }
2606 
2607 	  /* If the register is not eliminable, we are done since the other
2608 	     operand is a constant.  */
2609 	  return x;
2610 	}
2611 
2612       /* If this is part of an address, we want to bring any constant to the
2613 	 outermost PLUS.  We will do this by doing register replacement in
2614 	 our operands and seeing if a constant shows up in one of them.
2615 
2616 	 Note that there is no risk of modifying the structure of the insn,
2617 	 since we only get called for its operands, thus we are either
2618 	 modifying the address inside a MEM, or something like an address
2619 	 operand of a load-address insn.  */
2620 
2621       {
2622 	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2623 				     for_costs);
2624 	rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2625 				     for_costs);
2626 
2627 	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2628 	  {
2629 	    /* If one side is a PLUS and the other side is a pseudo that
2630 	       didn't get a hard register but has a reg_equiv_constant,
2631 	       we must replace the constant here since it may no longer
2632 	       be in the position of any operand.  */
2633 	    if (GET_CODE (new0) == PLUS && REG_P (new1)
2634 		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
2635 		&& reg_renumber[REGNO (new1)] < 0
2636 		&& reg_equivs
2637 		&& reg_equiv_constant (REGNO (new1)) != 0)
2638 	      new1 = reg_equiv_constant (REGNO (new1));
2639 	    else if (GET_CODE (new1) == PLUS && REG_P (new0)
2640 		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2641 		     && reg_renumber[REGNO (new0)] < 0
2642 		     && reg_equiv_constant (REGNO (new0)) != 0)
2643 	      new0 = reg_equiv_constant (REGNO (new0));
2644 
2645 	    new_rtx = form_sum (GET_MODE (x), new0, new1);
2646 
2647 	    /* As above, if we are not inside a MEM we do not want to
2648 	       turn a PLUS into something else.  We might try to do so here
2649 	       for an addition of 0 if we aren't optimizing.  */
2650 	    if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2651 	      return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2652 	    else
2653 	      return new_rtx;
2654 	  }
2655       }
2656       return x;
2657 
2658     case MULT:
2659       /* If this is the product of an eliminable register and a
2660 	 constant, apply the distribute law and move the constant out
2661 	 so that we have (plus (mult ..) ..).  This is needed in order
2662 	 to keep load-address insns valid.   This case is pathological.
2663 	 We ignore the possibility of overflow here.  */
2664       if (REG_P (XEXP (x, 0))
2665 	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2666 	  && CONST_INT_P (XEXP (x, 1)))
2667 	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2668 	     ep++)
2669 	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2670 	    {
2671 	      if (! mem_mode
2672 		  /* Refs inside notes or in DEBUG_INSNs don't count for
2673 		     this purpose.  */
2674 		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2675 				      || GET_CODE (insn) == INSN_LIST
2676 				      || DEBUG_INSN_P (insn))))
2677 		ep->ref_outside_mem = 1;
2678 
2679 	      return
2680 		plus_constant (Pmode,
2681 			       gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2682 			       ep->previous_offset * INTVAL (XEXP (x, 1)));
2683 	    }
2684 
2685       /* fall through */
2686 
2687     case CALL:
2688     case COMPARE:
2689     /* See comments before PLUS about handling MINUS.  */
2690     case MINUS:
2691     case DIV:      case UDIV:
2692     case MOD:      case UMOD:
2693     case AND:      case IOR:      case XOR:
2694     case ROTATERT: case ROTATE:
2695     case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2696     case NE:       case EQ:
2697     case GE:       case GT:       case GEU:    case GTU:
2698     case LE:       case LT:       case LEU:    case LTU:
2699       {
2700 	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2701 				     for_costs);
2702 	rtx new1 = XEXP (x, 1)
2703 	  ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2704 			      for_costs) : 0;
2705 
2706 	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2707 	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2708       }
2709       return x;
2710 
2711     case EXPR_LIST:
2712       /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
2713       if (XEXP (x, 0))
2714 	{
2715 	  new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2716 				      for_costs);
2717 	  if (new_rtx != XEXP (x, 0))
2718 	    {
2719 	      /* If this is a REG_DEAD note, it is not valid anymore.
2720 		 Using the eliminated version could result in creating a
2721 		 REG_DEAD note for the stack or frame pointer.  */
2722 	      if (REG_NOTE_KIND (x) == REG_DEAD)
2723 		return (XEXP (x, 1)
2724 			? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2725 					    for_costs)
2726 			: NULL_RTX);
2727 
2728 	      x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2729 	    }
2730 	}
2731 
2732       /* fall through */
2733 
2734     case INSN_LIST:
2735     case INT_LIST:
2736       /* Now do eliminations in the rest of the chain.  If this was
2737 	 an EXPR_LIST, this might result in allocating more memory than is
2738 	 strictly needed, but it simplifies the code.  */
2739       if (XEXP (x, 1))
2740 	{
2741 	  new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2742 				      for_costs);
2743 	  if (new_rtx != XEXP (x, 1))
2744 	    return
2745 	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2746 	}
2747       return x;
2748 
2749     case PRE_INC:
2750     case POST_INC:
2751     case PRE_DEC:
2752     case POST_DEC:
2753       /* We do not support elimination of a register that is modified.
2754 	 elimination_effects has already make sure that this does not
2755 	 happen.  */
2756       return x;
2757 
2758     case PRE_MODIFY:
2759     case POST_MODIFY:
2760       /* We do not support elimination of a register that is modified.
2761 	 elimination_effects has already make sure that this does not
2762 	 happen.  The only remaining case we need to consider here is
2763 	 that the increment value may be an eliminable register.  */
2764       if (GET_CODE (XEXP (x, 1)) == PLUS
2765 	  && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2766 	{
2767 	  rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2768 					  insn, true, for_costs);
2769 
2770 	  if (new_rtx != XEXP (XEXP (x, 1), 1))
2771 	    return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2772 				   gen_rtx_PLUS (GET_MODE (x),
2773 						 XEXP (x, 0), new_rtx));
2774 	}
2775       return x;
2776 
2777     case STRICT_LOW_PART:
2778     case NEG:          case NOT:
2779     case SIGN_EXTEND:  case ZERO_EXTEND:
2780     case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2781     case FLOAT:        case FIX:
2782     case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2783     case ABS:
2784     case SQRT:
2785     case FFS:
2786     case CLZ:
2787     case CTZ:
2788     case POPCOUNT:
2789     case PARITY:
2790     case BSWAP:
2791       new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2792 				  for_costs);
2793       if (new_rtx != XEXP (x, 0))
2794 	return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2795       return x;
2796 
2797     case SUBREG:
2798       /* Similar to above processing, but preserve SUBREG_BYTE.
2799 	 Convert (subreg (mem)) to (mem) if not paradoxical.
2800 	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2801 	 pseudo didn't get a hard reg, we must replace this with the
2802 	 eliminated version of the memory location because push_reload
2803 	 may do the replacement in certain circumstances.  */
2804       if (REG_P (SUBREG_REG (x))
2805 	  && !paradoxical_subreg_p (x)
2806 	  && reg_equivs
2807 	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2808 	{
2809 	  new_rtx = SUBREG_REG (x);
2810 	}
2811       else
2812 	new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2813 
2814       if (new_rtx != SUBREG_REG (x))
2815 	{
2816 	  int x_size = GET_MODE_SIZE (GET_MODE (x));
2817 	  int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2818 
2819 	  if (MEM_P (new_rtx)
2820 	      && ((x_size < new_size
2821 		   /* On RISC machines, combine can create rtl of the form
2822 		      (set (subreg:m1 (reg:m2 R) 0) ...)
2823 		      where m1 < m2, and expects something interesting to
2824 		      happen to the entire word.  Moreover, it will use the
2825 		      (reg:m2 R) later, expecting all bits to be preserved.
2826 		      So if the number of words is the same, preserve the
2827 		      subreg so that push_reload can see it.  */
2828 		   && !(WORD_REGISTER_OPERATIONS
2829 			&& (x_size - 1) / UNITS_PER_WORD
2830 			   == (new_size -1 ) / UNITS_PER_WORD))
2831 		  || x_size == new_size)
2832 	      )
2833 	    return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2834 	  else if (insn && GET_CODE (insn) == DEBUG_INSN)
2835 	    return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2836 	  else
2837 	    return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2838 	}
2839 
2840       return x;
2841 
2842     case MEM:
2843       /* Our only special processing is to pass the mode of the MEM to our
2844 	 recursive call and copy the flags.  While we are here, handle this
2845 	 case more efficiently.  */
2846 
2847       new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2848 				  for_costs);
2849       if (for_costs
2850 	  && memory_address_p (GET_MODE (x), XEXP (x, 0))
2851 	  && !memory_address_p (GET_MODE (x), new_rtx))
2852 	note_reg_elim_costly (XEXP (x, 0), insn);
2853 
2854       return replace_equiv_address_nv (x, new_rtx);
2855 
2856     case USE:
2857       /* Handle insn_list USE that a call to a pure function may generate.  */
2858       new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2859 				  for_costs);
2860       if (new_rtx != XEXP (x, 0))
2861 	return gen_rtx_USE (GET_MODE (x), new_rtx);
2862       return x;
2863 
2864     case CLOBBER:
2865     case ASM_OPERANDS:
2866       gcc_assert (insn && DEBUG_INSN_P (insn));
2867       break;
2868 
2869     case SET:
2870       gcc_unreachable ();
2871 
2872     default:
2873       break;
2874     }
2875 
2876   /* Process each of our operands recursively.  If any have changed, make a
2877      copy of the rtx.  */
2878   fmt = GET_RTX_FORMAT (code);
2879   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2880     {
2881       if (*fmt == 'e')
2882 	{
2883 	  new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2884 				      for_costs);
2885 	  if (new_rtx != XEXP (x, i) && ! copied)
2886 	    {
2887 	      x = shallow_copy_rtx (x);
2888 	      copied = 1;
2889 	    }
2890 	  XEXP (x, i) = new_rtx;
2891 	}
2892       else if (*fmt == 'E')
2893 	{
2894 	  int copied_vec = 0;
2895 	  for (j = 0; j < XVECLEN (x, i); j++)
2896 	    {
2897 	      new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2898 					  for_costs);
2899 	      if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2900 		{
2901 		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2902 					     XVEC (x, i)->elem);
2903 		  if (! copied)
2904 		    {
2905 		      x = shallow_copy_rtx (x);
2906 		      copied = 1;
2907 		    }
2908 		  XVEC (x, i) = new_v;
2909 		  copied_vec = 1;
2910 		}
2911 	      XVECEXP (x, i, j) = new_rtx;
2912 	    }
2913 	}
2914     }
2915 
2916   return x;
2917 }
2918 
2919 rtx
2920 eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2921 {
2922   if (reg_eliminate == NULL)
2923     {
2924       gcc_assert (targetm.no_register_allocation);
2925       return x;
2926     }
2927   return eliminate_regs_1 (x, mem_mode, insn, false, false);
2928 }
2929 
2930 /* Scan rtx X for modifications of elimination target registers.  Update
2931    the table of eliminables to reflect the changed state.  MEM_MODE is
2932    the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
2933 
static void
elimination_effects (rtx x, machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* Leaf rtxes: nothing here can modify an eliminable register.  */
      return;

    case REG:
      regno = REGNO (x);

      /* A bare reference to an eliminable hard register does not change
	 any offsets, but a use outside of a MEM must be recorded in
	 ref_outside_mem.  (The stale "Replace it with a PLUS" wording
	 that used to be here described eliminate_regs_1, not this
	 function -- no replacement happens here.)  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }

	}
      /* NOTE(review): unlike eliminate_regs_1, this branch does not test
	 reg_renumber for NULL before indexing it -- presumably callers
	 guarantee register allocation has run; confirm if reusing.  */
      else if (reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_constant (regno)
	       && ! function_invariant_p (reg_equiv_constant (regno)))
	/* A pseudo without a hard reg whose equivalent expression is not
	   invariant may itself mention eliminable registers; recurse.  */
	elimination_effects (reg_equiv_constant (regno), mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    int size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		/* A {PRE,POST}_MODIFY of the form (plus TO (const_int))
		   is a pure constant adjustment of TO; any other form
		   leaves the offset unknown, so the rule must die.  */
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
      gcc_fallthrough ();
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      /* Unary operations: only the single operand can have effects.  */
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* A non-paradoxical SUBREG of a pseudo that has a memory equivalent
	 will be turned into that memory location by eliminate_regs_1, so
	 it contributes no elimination effects of its own.  */
      if (REG_P (SUBREG_REG (x))
	  && (GET_MODE_SIZE (GET_MODE (x))
	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      /* Both sides of a SET are scanned outside of any MEM context.  */
      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  /* For any rtx code not handled above, recursively scan every rtx
     operand and every element of every rtx vector operand.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
3133 
3134 /* Descend through rtx X and verify that no references to eliminable registers
3135    remain.  If any do remain, mark the involved register as not
3136    eliminable.  */
3137 
3138 static void
3139 check_eliminable_occurrences (rtx x)
3140 {
3141   const char *fmt;
3142   int i;
3143   enum rtx_code code;
3144 
3145   if (x == 0)
3146     return;
3147 
3148   code = GET_CODE (x);
3149 
3150   if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3151     {
3152       struct elim_table *ep;
3153 
3154       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3155 	if (ep->from_rtx == x)
3156 	  ep->can_eliminate = 0;
3157       return;
3158     }
3159 
3160   fmt = GET_RTX_FORMAT (code);
3161   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3162     {
3163       if (*fmt == 'e')
3164 	check_eliminable_occurrences (XEXP (x, i));
3165       else if (*fmt == 'E')
3166 	{
3167 	  int j;
3168 	  for (j = 0; j < XVECLEN (x, i); j++)
3169 	    check_eliminable_occurrences (XVECEXP (x, i, j));
3170 	}
3171     }
3172 }
3173 
3174 /* Scan INSN and eliminate all eliminable registers in it.
3175 
3176    If REPLACE is nonzero, do the replacement destructively.  Also
3177    delete the insn as dead it if it is setting an eliminable register.
3178 
3179    If REPLACE is zero, do all our allocations in reload_obstack.
3180 
3181    If no eliminations were done and this insn doesn't require any elimination
3182    processing (these are not identical conditions: it might be updating sp,
3183    but not referencing fp; this needs to be seen during reload_as_needed so
3184    that the offset between fp and sp can be taken into consideration), zero
3185    is returned.  Otherwise, 1 is returned.  */
3186 
3187 static int
3188 eliminate_regs_in_insn (rtx_insn *insn, int replace)
3189 {
3190   int icode = recog_memoized (insn);
3191   rtx old_body = PATTERN (insn);
3192   int insn_is_asm = asm_noperands (old_body) >= 0;
3193   rtx old_set = single_set (insn);
3194   rtx new_body;
3195   int val = 0;
3196   int i;
3197   rtx substed_operand[MAX_RECOG_OPERANDS];
3198   rtx orig_operand[MAX_RECOG_OPERANDS];
3199   struct elim_table *ep;
3200   rtx plus_src, plus_cst_src;
3201 
3202   if (! insn_is_asm && icode < 0)
3203     {
3204       gcc_assert (DEBUG_INSN_P (insn)
3205 		  || GET_CODE (PATTERN (insn)) == USE
3206 		  || GET_CODE (PATTERN (insn)) == CLOBBER
3207 		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3208       if (DEBUG_INSN_P (insn))
3209 	INSN_VAR_LOCATION_LOC (insn)
3210 	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3211       return 0;
3212     }
3213 
3214   if (old_set != 0 && REG_P (SET_DEST (old_set))
3215       && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3216     {
3217       /* Check for setting an eliminable register.  */
3218       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3219 	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3220 	  {
3221 	    /* If this is setting the frame pointer register to the
3222 	       hardware frame pointer register and this is an elimination
3223 	       that will be done (tested above), this insn is really
3224 	       adjusting the frame pointer downward to compensate for
3225 	       the adjustment done before a nonlocal goto.  */
3226 	    if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
3227 		&& ep->from == FRAME_POINTER_REGNUM
3228 		&& ep->to == HARD_FRAME_POINTER_REGNUM)
3229 	      {
3230 		rtx base = SET_SRC (old_set);
3231 		rtx_insn *base_insn = insn;
3232 		HOST_WIDE_INT offset = 0;
3233 
3234 		while (base != ep->to_rtx)
3235 		  {
3236 		    rtx_insn *prev_insn;
3237 		    rtx prev_set;
3238 
3239 		    if (GET_CODE (base) == PLUS
3240 		        && CONST_INT_P (XEXP (base, 1)))
3241 		      {
3242 		        offset += INTVAL (XEXP (base, 1));
3243 		        base = XEXP (base, 0);
3244 		      }
3245 		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3246 			     && (prev_set = single_set (prev_insn)) != 0
3247 			     && rtx_equal_p (SET_DEST (prev_set), base))
3248 		      {
3249 		        base = SET_SRC (prev_set);
3250 		        base_insn = prev_insn;
3251 		      }
3252 		    else
3253 		      break;
3254 		  }
3255 
3256 		if (base == ep->to_rtx)
3257 		  {
3258 		    rtx src = plus_constant (Pmode, ep->to_rtx,
3259 					     offset - ep->offset);
3260 
3261 		    new_body = old_body;
3262 		    if (! replace)
3263 		      {
3264 			new_body = copy_insn (old_body);
3265 			if (REG_NOTES (insn))
3266 			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3267 		      }
3268 		    PATTERN (insn) = new_body;
3269 		    old_set = single_set (insn);
3270 
3271 		    /* First see if this insn remains valid when we
3272 		       make the change.  If not, keep the INSN_CODE
3273 		       the same and let reload fit it up.  */
3274 		    validate_change (insn, &SET_SRC (old_set), src, 1);
3275 		    validate_change (insn, &SET_DEST (old_set),
3276 				     ep->to_rtx, 1);
3277 		    if (! apply_change_group ())
3278 		      {
3279 			SET_SRC (old_set) = src;
3280 			SET_DEST (old_set) = ep->to_rtx;
3281 		      }
3282 
3283 		    val = 1;
3284 		    goto done;
3285 		  }
3286 	      }
3287 
3288 	    /* In this case this insn isn't serving a useful purpose.  We
3289 	       will delete it in reload_as_needed once we know that this
3290 	       elimination is, in fact, being done.
3291 
3292 	       If REPLACE isn't set, we can't delete this insn, but needn't
3293 	       process it since it won't be used unless something changes.  */
3294 	    if (replace)
3295 	      {
3296 		delete_dead_insn (insn);
3297 		return 1;
3298 	      }
3299 	    val = 1;
3300 	    goto done;
3301 	  }
3302     }
3303 
3304   /* We allow one special case which happens to work on all machines we
3305      currently support: a single set with the source or a REG_EQUAL
3306      note being a PLUS of an eliminable register and a constant.  */
3307   plus_src = plus_cst_src = 0;
3308   if (old_set && REG_P (SET_DEST (old_set)))
3309     {
3310       if (GET_CODE (SET_SRC (old_set)) == PLUS)
3311 	plus_src = SET_SRC (old_set);
3312       /* First see if the source is of the form (plus (...) CST).  */
3313       if (plus_src
3314 	  && CONST_INT_P (XEXP (plus_src, 1)))
3315 	plus_cst_src = plus_src;
3316       else if (REG_P (SET_SRC (old_set))
3317 	       || plus_src)
3318 	{
3319 	  /* Otherwise, see if we have a REG_EQUAL note of the form
3320 	     (plus (...) CST).  */
3321 	  rtx links;
3322 	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3323 	    {
3324 	      if ((REG_NOTE_KIND (links) == REG_EQUAL
3325 		   || REG_NOTE_KIND (links) == REG_EQUIV)
3326 		  && GET_CODE (XEXP (links, 0)) == PLUS
3327 		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3328 		{
3329 		  plus_cst_src = XEXP (links, 0);
3330 		  break;
3331 		}
3332 	    }
3333 	}
3334 
3335       /* Check that the first operand of the PLUS is a hard reg or
3336 	 the lowpart subreg of one.  */
3337       if (plus_cst_src)
3338 	{
3339 	  rtx reg = XEXP (plus_cst_src, 0);
3340 	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3341 	    reg = SUBREG_REG (reg);
3342 
3343 	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3344 	    plus_cst_src = 0;
3345 	}
3346     }
3347   if (plus_cst_src)
3348     {
3349       rtx reg = XEXP (plus_cst_src, 0);
3350       HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3351 
3352       if (GET_CODE (reg) == SUBREG)
3353 	reg = SUBREG_REG (reg);
3354 
3355       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3356 	if (ep->from_rtx == reg && ep->can_eliminate)
3357 	  {
3358 	    rtx to_rtx = ep->to_rtx;
3359 	    offset += ep->offset;
3360 	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3361 
3362 	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3363 	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3364 				    to_rtx);
3365 	    /* If we have a nonzero offset, and the source is already
3366 	       a simple REG, the following transformation would
3367 	       increase the cost of the insn by replacing a simple REG
3368 	       with (plus (reg sp) CST).  So try only when we already
3369 	       had a PLUS before.  */
3370 	    if (offset == 0 || plus_src)
3371 	      {
3372 		rtx new_src = plus_constant (GET_MODE (to_rtx),
3373 					     to_rtx, offset);
3374 
3375 		new_body = old_body;
3376 		if (! replace)
3377 		  {
3378 		    new_body = copy_insn (old_body);
3379 		    if (REG_NOTES (insn))
3380 		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3381 		  }
3382 		PATTERN (insn) = new_body;
3383 		old_set = single_set (insn);
3384 
3385 		/* First see if this insn remains valid when we make the
3386 		   change.  If not, try to replace the whole pattern with
3387 		   a simple set (this may help if the original insn was a
3388 		   PARALLEL that was only recognized as single_set due to
3389 		   REG_UNUSED notes).  If this isn't valid either, keep
3390 		   the INSN_CODE the same and let reload fix it up.  */
3391 		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3392 		  {
3393 		    rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);
3394 
3395 		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3396 		      SET_SRC (old_set) = new_src;
3397 		  }
3398 	      }
3399 	    else
3400 	      break;
3401 
3402 	    val = 1;
3403 	    /* This can't have an effect on elimination offsets, so skip right
3404 	       to the end.  */
3405 	    goto done;
3406 	  }
3407     }
3408 
3409   /* Determine the effects of this insn on elimination offsets.  */
3410   elimination_effects (old_body, VOIDmode);
3411 
3412   /* Eliminate all eliminable registers occurring in operands that
3413      can be handled by reload.  */
3414   extract_insn (insn);
3415   for (i = 0; i < recog_data.n_operands; i++)
3416     {
3417       orig_operand[i] = recog_data.operand[i];
3418       substed_operand[i] = recog_data.operand[i];
3419 
3420       /* For an asm statement, every operand is eliminable.  */
3421       if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3422 	{
3423 	  bool is_set_src, in_plus;
3424 
3425 	  /* Check for setting a register that we know about.  */
3426 	  if (recog_data.operand_type[i] != OP_IN
3427 	      && REG_P (orig_operand[i]))
3428 	    {
3429 	      /* If we are assigning to a register that can be eliminated, it
3430 		 must be as part of a PARALLEL, since the code above handles
3431 		 single SETs.  We must indicate that we can no longer
3432 		 eliminate this reg.  */
3433 	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3434 		   ep++)
3435 		if (ep->from_rtx == orig_operand[i])
3436 		  ep->can_eliminate = 0;
3437 	    }
3438 
3439 	  /* Companion to the above plus substitution, we can allow
3440 	     invariants as the source of a plain move.  */
3441 	  is_set_src = false;
3442 	  if (old_set
3443 	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
3444 	    is_set_src = true;
3445 	  in_plus = false;
3446 	  if (plus_src
3447 	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3448 		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3449 	    in_plus = true;
3450 
3451 	  substed_operand[i]
3452 	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3453 			        replace ? insn : NULL_RTX,
3454 				is_set_src || in_plus, false);
3455 	  if (substed_operand[i] != orig_operand[i])
3456 	    val = 1;
3457 	  /* Terminate the search in check_eliminable_occurrences at
3458 	     this point.  */
3459 	  *recog_data.operand_loc[i] = 0;
3460 
3461 	  /* If an output operand changed from a REG to a MEM and INSN is an
3462 	     insn, write a CLOBBER insn.  */
3463 	  if (recog_data.operand_type[i] != OP_IN
3464 	      && REG_P (orig_operand[i])
3465 	      && MEM_P (substed_operand[i])
3466 	      && replace)
3467 	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
3468 	}
3469     }
3470 
3471   for (i = 0; i < recog_data.n_dups; i++)
3472     *recog_data.dup_loc[i]
3473       = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3474 
3475   /* If any eliminable remain, they aren't eliminable anymore.  */
3476   check_eliminable_occurrences (old_body);
3477 
3478   /* Substitute the operands; the new values are in the substed_operand
3479      array.  */
3480   for (i = 0; i < recog_data.n_operands; i++)
3481     *recog_data.operand_loc[i] = substed_operand[i];
3482   for (i = 0; i < recog_data.n_dups; i++)
3483     *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3484 
3485   /* If we are replacing a body that was a (set X (plus Y Z)), try to
3486      re-recognize the insn.  We do this in case we had a simple addition
3487      but now can do this as a load-address.  This saves an insn in this
3488      common case.
3489      If re-recognition fails, the old insn code number will still be used,
3490      and some register operands may have changed into PLUS expressions.
3491      These will be handled by find_reloads by loading them into a register
3492      again.  */
3493 
3494   if (val)
3495     {
3496       /* If we aren't replacing things permanently and we changed something,
3497 	 make another copy to ensure that all the RTL is new.  Otherwise
3498 	 things can go wrong if find_reload swaps commutative operands
3499 	 and one is inside RTL that has been copied while the other is not.  */
3500       new_body = old_body;
3501       if (! replace)
3502 	{
3503 	  new_body = copy_insn (old_body);
3504 	  if (REG_NOTES (insn))
3505 	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3506 	}
3507       PATTERN (insn) = new_body;
3508 
3509       /* If we had a move insn but now we don't, rerecognize it.  This will
3510 	 cause spurious re-recognition if the old move had a PARALLEL since
3511 	 the new one still will, but we can't call single_set without
3512 	 having put NEW_BODY into the insn and the re-recognition won't
3513 	 hurt in this rare case.  */
3514       /* ??? Why this huge if statement - why don't we just rerecognize the
3515 	 thing always?  */
3516       if (! insn_is_asm
3517 	  && old_set != 0
3518 	  && ((REG_P (SET_SRC (old_set))
3519 	       && (GET_CODE (new_body) != SET
3520 		   || !REG_P (SET_SRC (new_body))))
3521 	      /* If this was a load from or store to memory, compare
3522 		 the MEM in recog_data.operand to the one in the insn.
3523 		 If they are not equal, then rerecognize the insn.  */
3524 	      || (old_set != 0
3525 		  && ((MEM_P (SET_SRC (old_set))
3526 		       && SET_SRC (old_set) != recog_data.operand[1])
3527 		      || (MEM_P (SET_DEST (old_set))
3528 			  && SET_DEST (old_set) != recog_data.operand[0])))
3529 	      /* If this was an add insn before, rerecognize.  */
3530 	      || GET_CODE (SET_SRC (old_set)) == PLUS))
3531 	{
3532 	  int new_icode = recog (PATTERN (insn), insn, 0);
3533 	  if (new_icode >= 0)
3534 	    INSN_CODE (insn) = new_icode;
3535 	}
3536     }
3537 
3538   /* Restore the old body.  If there were any changes to it, we made a copy
3539      of it while the changes were still in place, so we'll correctly return
3540      a modified insn below.  */
3541   if (! replace)
3542     {
3543       /* Restore the old body.  */
3544       for (i = 0; i < recog_data.n_operands; i++)
3545 	/* Restoring a top-level match_parallel would clobber the new_body
3546 	   we installed in the insn.  */
3547 	if (recog_data.operand_loc[i] != &PATTERN (insn))
3548 	  *recog_data.operand_loc[i] = orig_operand[i];
3549       for (i = 0; i < recog_data.n_dups; i++)
3550 	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3551     }
3552 
3553   /* Update all elimination pairs to reflect the status after the current
3554      insn.  The changes we make were determined by the earlier call to
3555      elimination_effects.
3556 
3557      We also detect cases where register elimination cannot be done,
3558      namely, if a register would be both changed and referenced outside a MEM
3559      in the resulting insn since such an insn is often undefined and, even if
3560      not, we cannot know what meaning will be given to it.  Note that it is
3561      valid to have a register used in an address in an insn that changes it
3562      (presumably with a pre- or post-increment or decrement).
3563 
3564      If anything changes, return nonzero.  */
3565 
3566   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3567     {
3568       if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3569 	ep->can_eliminate = 0;
3570 
3571       ep->ref_outside_mem = 0;
3572 
3573       if (ep->previous_offset != ep->offset)
3574 	val = 1;
3575     }
3576 
3577  done:
3578   /* If we changed something, perform elimination in REG_NOTES.  This is
3579      needed even when REPLACE is zero because a REG_DEAD note might refer
3580      to a register that we eliminate and could cause a different number
3581      of spill registers to be needed in the final reload pass than in
3582      the pre-passes.  */
3583   if (val && REG_NOTES (insn) != 0)
3584     REG_NOTES (insn)
3585       = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3586 			  false);
3587 
3588   return val;
3589 }
3590 
3591 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3592    register allocator.  INSN is the instruction we need to examine, we perform
3593    eliminations in its operands and record cases where eliminating a reg with
3594    an invariant equivalence would add extra cost.  */
3595 
3596 #pragma GCC diagnostic push
3597 #pragma GCC diagnostic warning "-Wmaybe-uninitialized"
static void
elimination_costs_in_insn (rtx_insn *insn)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  int i;
  rtx orig_operand[MAX_RECOG_OPERANDS];
  rtx orig_dup[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;
  bool sets_reg_p;

  /* An insn that is neither an asm nor recognizable must be one of the
     trivial pattern kinds asserted below; nothing to cost in that case.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      return;
    }

  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  Such sets are handled
	 fully by the real elimination pass, so they add no cost here.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  return;
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  sets_reg_p = false;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      sets_reg_p = true;
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  Save the original operand and dup
     locations first so everything can be restored afterwards.  */
  extract_insn (insn);
  int n_dups = recog_data.n_dups;
  for (i = 0; i < n_dups; i++)
    orig_dup[i] = *recog_data.dup_loc[i];

  int n_operands = recog_data.n_operands;
  for (i = 0; i < n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  /* Unlike eliminate_regs_in_insn, record the extra cost of an
	     invariant source that is not part of a reg-setting move.  */
	  if (is_set_src && !sets_reg_p)
	    note_reg_elim_costly (SET_SRC (old_set), insn);
	  in_plus = false;
	  if (plus_src && sets_reg_p
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  /* Last argument true: only estimate costs, do not build the
	     replacement rtl for real.  */
	  eliminate_regs_1 (recog_data.operand[i], VOIDmode,
			    NULL_RTX,
			    is_set_src || in_plus, true);
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;
	}
    }

  for (i = 0; i < n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Restore the old body.  */
  for (i = 0; i < n_operands; i++)
    *recog_data.operand_loc[i] = orig_operand[i];
  for (i = 0; i < n_dups; i++)
    *recog_data.dup_loc[i] = orig_dup[i];

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;
    }

  return;
}
3747 #pragma GCC diagnostic pop
3748 
3749 /* Loop through all elimination pairs.
3750    Recalculate the number not at initial offset.
3751 
3752    Compute the maximum offset (minimum offset if the stack does not
3753    grow downward) for each elimination pair.  */
3754 
3755 static void
3756 update_eliminable_offsets (void)
3757 {
3758   struct elim_table *ep;
3759 
3760   num_not_at_initial_offset = 0;
3761   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3762     {
3763       ep->previous_offset = ep->offset;
3764       if (ep->can_eliminate && ep->offset != ep->initial_offset)
3765 	num_not_at_initial_offset++;
3766     }
3767 }
3768 
3769 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3770    replacement we currently believe is valid, mark it as not eliminable if X
3771    modifies DEST in any way other than by adding a constant integer to it.
3772 
3773    If DEST is the frame pointer, we do nothing because we assume that
3774    all assignments to the hard frame pointer are nonlocal gotos and are being
3775    done at a time when they are valid and do not disturb anything else.
3776    Some machines want to eliminate a fake argument pointer with either the
3777    frame or stack pointer.  Assignments to the hard frame pointer must not
3778    prevent this elimination.
3779 
3780    Called via note_stores from reload before starting its passes to scan
3781    the insns of the function.  */
3782 
3783 static void
3784 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3785 {
3786   unsigned int i;
3787 
3788   /* A SUBREG of a hard register here is just changing its mode.  We should
3789      not see a SUBREG of an eliminable hard register, but check just in
3790      case.  */
3791   if (GET_CODE (dest) == SUBREG)
3792     dest = SUBREG_REG (dest);
3793 
3794   if (dest == hard_frame_pointer_rtx)
3795     return;
3796 
3797   for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3798     if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3799 	&& (GET_CODE (x) != SET
3800 	    || GET_CODE (SET_SRC (x)) != PLUS
3801 	    || XEXP (SET_SRC (x), 0) != dest
3802 	    || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3803       {
3804 	reg_eliminate[i].can_eliminate_previous
3805 	  = reg_eliminate[i].can_eliminate = 0;
3806 	num_eliminable--;
3807       }
3808 }
3809 
3810 /* Verify that the initial elimination offsets did not change since the
3811    last call to set_initial_elim_offsets.  This is used to catch cases
3812    where something illegal happened during reload_as_needed that could
3813    cause incorrect code to be generated if we did not check for it.  */
3814 
3815 static bool
3816 verify_initial_elim_offsets (void)
3817 {
3818   HOST_WIDE_INT t;
3819   struct elim_table *ep;
3820 
3821   if (!num_eliminable)
3822     return true;
3823 
3824   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3825     {
3826       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3827       if (t != ep->initial_offset)
3828 	return false;
3829     }
3830 
3831   return true;
3832 }
3833 
3834 /* Reset all offsets on eliminable registers to their initial values.  */
3835 
3836 static void
3837 set_initial_elim_offsets (void)
3838 {
3839   struct elim_table *ep = reg_eliminate;
3840 
3841   for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3842     {
3843       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3844       ep->previous_offset = ep->offset = ep->initial_offset;
3845     }
3846 
3847   num_not_at_initial_offset = 0;
3848 }
3849 
3850 /* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3851 
static void
set_initial_eh_label_offset (rtx label)
{
  /* Record offsets for LABEL; the final argument 1 marks this as an
     "initial" setting, matching the forced-label handling in
     set_initial_label_offsets.  */
  set_label_offsets (label, NULL, 1);
}
3857 
3858 /* Initialize the known label offsets.
3859    Set a known offset for each forced label to be at the initial offset
3860    of each elimination.  We do this because we assume that all
3861    computed jumps occur from a location where each elimination is
3862    at its initial offset.
3863    For all other labels, show that we don't know the offsets.  */
3864 
3865 static void
3866 set_initial_label_offsets (void)
3867 {
3868   memset (offsets_known_at, 0, num_labels);
3869 
3870   unsigned int i;
3871   rtx_insn *insn;
3872   FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
3873     set_label_offsets (insn, NULL, 1);
3874 
3875   for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
3876     if (x->insn ())
3877       set_label_offsets (x->insn (), NULL, 1);
3878 
3879   for_each_eh_label (set_initial_eh_label_offset);
3880 }
3881 
3882 /* Set all elimination offsets to the known values for the code label given
3883    by INSN.  */
3884 
3885 static void
3886 set_offsets_for_label (rtx_insn *insn)
3887 {
3888   unsigned int i;
3889   int label_nr = CODE_LABEL_NUMBER (insn);
3890   struct elim_table *ep;
3891 
3892   num_not_at_initial_offset = 0;
3893   for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3894     {
3895       ep->offset = ep->previous_offset
3896 		 = offsets_at[label_nr - first_label_num][i];
3897       if (ep->can_eliminate && ep->offset != ep->initial_offset)
3898 	num_not_at_initial_offset++;
3899     }
3900 }
3901 
3902 /* See if anything that happened changes which eliminations are valid.
3903    For example, on the SPARC, whether or not the frame pointer can
3904    be eliminated can depend on what registers have been used.  We need
3905    not check some conditions again (such as flag_omit_frame_pointer)
3906    since they can't have changed.  */
3907 
static void
update_eliminables (HARD_REG_SET *pset)
{
  int previous_frame_pointer_needed = frame_pointer_needed;
  struct elim_table *ep;

  /* First pass: re-ask the target whether each elimination is still
     permitted, and whether the frame pointer has become required.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
         && targetm.frame_pointer_required ())
	|| ! targetm.can_eliminate (ep->from, ep->to)
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      int new_to = -1;

      /* Only eliminations that were just disabled matter here.  */
      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* Any surviving elimination of the soft frame pointer to something
	 other than the hard frame pointer means we do not need a frame
	 pointer (stack realignment permitting).  */
      if (ep->can_eliminate
	  && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM
	  && (! SUPPORTS_STACK_ALIGNMENT
	      || ! crtl->stack_realign_needed))
	frame_pointer_needed = 0;

      /* Report newly non-eliminable source registers to the caller via
	 PSET so they get spilled.  */
      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
3983 
/* Call update_eliminables and spill any registers we can't eliminate anymore.
   Return true iff a register was spilled.  */
3986 
3987 static bool
3988 update_eliminables_and_spill (void)
3989 {
3990   int i;
3991   bool did_spill = false;
3992   HARD_REG_SET to_spill;
3993   CLEAR_HARD_REG_SET (to_spill);
3994   update_eliminables (&to_spill);
3995   AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
3996 
3997   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3998     if (TEST_HARD_REG_BIT (to_spill, i))
3999       {
4000 	spill_hard_reg (i, 1);
4001 	did_spill = true;
4002 
4003 	/* Regardless of the state of spills, if we previously had
4004 	   a register that we thought we could eliminate, but now can
4005 	   not eliminate, we must run another pass.
4006 
4007 	   Consider pseudos which have an entry in reg_equiv_* which
4008 	   reference an eliminable register.  We must make another pass
4009 	   to update reg_equiv_* so that we do not substitute in the
4010 	   old value from when we thought the elimination could be
4011 	   performed.  */
4012       }
4013   return did_spill;
4014 }
4015 
4016 /* Return true if X is used as the target register of an elimination.  */
4017 
4018 bool
4019 elimination_target_reg_p (rtx x)
4020 {
4021   struct elim_table *ep;
4022 
4023   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4024     if (ep->to_rtx == x && ep->can_eliminate)
4025       return true;
4026 
4027   return false;
4028 }
4029 
4030 /* Initialize the table of registers to eliminate.
4031    Pre-condition: global flag frame_pointer_needed has been set before
4032    calling this function.  */
4033 
4034 static void
4035 init_elim_table (void)
4036 {
4037   struct elim_table *ep;
4038   const struct elim_table_1 *ep1;
4039 
4040   if (!reg_eliminate)
4041     reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
4042 
4043   num_eliminable = 0;
4044 
4045   for (ep = reg_eliminate, ep1 = reg_eliminate_1;
4046        ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
4047     {
4048       ep->from = ep1->from;
4049       ep->to = ep1->to;
4050       ep->can_eliminate = ep->can_eliminate_previous
4051 	= (targetm.can_eliminate (ep->from, ep->to)
4052 	   && ! (ep->to == STACK_POINTER_REGNUM
4053 		 && frame_pointer_needed
4054 		 && (! SUPPORTS_STACK_ALIGNMENT
4055 		     || ! stack_realign_fp)));
4056     }
4057 
4058   /* Count the number of eliminable registers and build the FROM and TO
4059      REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
4060      gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4061      We depend on this.  */
4062   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4063     {
4064       num_eliminable += ep->can_eliminate;
4065       ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
4066       ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
4067     }
4068 }
4069 
4070 /* Find all the pseudo registers that didn't get hard regs
4071    but do have known equivalent constants or memory slots.
4072    These include parameters (known equivalent to parameter slots)
4073    and cse'd or loop-moved constant memory addresses.
4074 
4075    Record constant equivalents in reg_equiv_constant
4076    so they will be substituted by find_reloads.
4077    Record memory equivalents in reg_mem_equiv so they can
4078    be substituted eventually by altering the REG-rtx's.  */
4079 
static void
init_eliminable_invariants (rtx_insn *first, bool do_subregs)
{
  int i;
  rtx_insn *insn;

  grow_reg_equivs ();
  /* Paradoxical-subreg widths are only tracked when the caller asks.  */
  if (do_subregs)
    reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
  else
    reg_max_ref_width = NULL;

  num_eliminable_invariants = 0;

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));

/* Look for REG_EQUIV notes; record what each pseudo is equivalent
   to.  If DO_SUBREGS is true, also find all paradoxical subregs and
   find largest such for each pseudo.  FIRST is the head of the insn
   list.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
	 we'll mark them with QImode.  Make sure there are no
	 previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && GET_MODE (insn) != VOIDmode)
	PUT_MODE (insn, VOIDmode);

      if (do_subregs && NONDEBUG_INSN_P (insn))
	scan_paradoxical_subregs (PATTERN (insn));

      if (set != 0 && REG_P (SET_DEST (set)))
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  rtx x;

	  /* Only REG_EQUIV notes establish equivalences here;
	     REG_EQUAL is deliberately ignored.  */
	  if (! note)
	    continue;

	  i = REGNO (SET_DEST (set));
	  x = XEXP (note, 0);

	  /* Skip hard and virtual registers; only real pseudos can
	     have reg_equiv entries.  */
	  if (i <= LAST_VIRTUAL_REGISTER)
	    continue;

	  /* If flag_pic and we have constant, verify it's legitimate.  */
	  if (!CONSTANT_P (x)
	      || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
	    {
	      /* It can happen that a REG_EQUIV note contains a MEM
		 that is not a legitimate memory operand.  As later
		 stages of reload assume that all addresses found
		 in the reg_equiv_* arrays were originally legitimate,
		 we ignore such REG_EQUIV notes.  */
	      if (memory_operand (x, VOIDmode))
		{
		  /* Always unshare the equivalence, so we can
		     substitute into this insn without touching the
		       equivalence.  */
		  reg_equiv_memory_loc (i) = copy_rtx (x);
		}
	      else if (function_invariant_p (x))
		{
		  machine_mode mode;

		  mode = GET_MODE (SET_DEST (set));
		  if (GET_CODE (x) == PLUS)
		    {
		      /* This is PLUS of frame pointer and a constant,
			 and might be shared.  Unshare it.  */
		      reg_equiv_invariant (i) = copy_rtx (x);
		      num_eliminable_invariants++;
		    }
		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
		    {
		      reg_equiv_invariant (i) = x;
		      num_eliminable_invariants++;
		    }
		  else if (targetm.legitimate_constant_p (mode, x))
		    reg_equiv_constant (i) = x;
		  else
		    {
		      /* A constant the target cannot use directly must
			 live in the constant pool instead.  */
		      reg_equiv_memory_loc (i) = force_const_mem (mode, x);
		      if (! reg_equiv_memory_loc (i))
			reg_equiv_init (i) = NULL;
		    }
		}
	      else
		{
		  /* Neither a usable MEM nor an invariant: drop the
		     equivalence entirely.  */
		  reg_equiv_init (i) = NULL;
		  continue;
		}
	    }
	  else
	    reg_equiv_init (i) = NULL;
	}
    }

  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init (i))
	{
	  fprintf (dump_file, "init_insns for %u: ", i);
	  print_inline_rtx (dump_file, reg_equiv_init (i), 20);
	  fprintf (dump_file, "\n");
	}
}
4196 
4197 /* Indicate that we no longer have known memory locations or constants.
4198    Free all data involved in tracking these.  */
4199 
4200 static void
4201 free_reg_equiv (void)
4202 {
4203   int i;
4204 
4205   free (offsets_known_at);
4206   free (offsets_at);
4207   offsets_at = 0;
4208   offsets_known_at = 0;
4209 
4210   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4211     if (reg_equiv_alt_mem_list (i))
4212       free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4213   vec_free (reg_equivs);
4214 }
4215 
4216 /* Kick all pseudos out of hard register REGNO.
4217 
   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we are
   spilling this hard reg to produce another spill register.

   Any pseudos kicked out are recorded in the spilled_pseudos set.  */
4225 
4226 static void
4227 spill_hard_reg (unsigned int regno, int cant_eliminate)
4228 {
4229   int i;
4230 
4231   if (cant_eliminate)
4232     {
4233       SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4234       df_set_regs_ever_live (regno, true);
4235     }
4236 
4237   /* Spill every pseudo reg that was allocated to this reg
4238      or to something that overlaps this reg.  */
4239 
4240   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4241     if (reg_renumber[i] >= 0
4242 	&& (unsigned int) reg_renumber[i] <= regno
4243 	&& end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4244       SET_REGNO_REG_SET (&spilled_pseudos, i);
4245 }
4246 
4247 /* After spill_hard_reg was called and/or find_reload_regs was run for all
4248    insns that need reloads, this function is used to actually spill pseudo
4249    registers and try to reallocate them.  It also sets up the spill_regs
4250    array for use by choose_reload_regs.
4251 
4252    GLOBAL nonzero means we should attempt to reallocate any pseudo registers
4253    that we displace from hard registers.  */
4254 
4255 static int
4256 finish_spills (int global)
4257 {
4258   struct insn_chain *chain;
4259   int something_changed = 0;
4260   unsigned i;
4261   reg_set_iterator rsi;
4262 
4263   /* Build the spill_regs array for the function.  */
4264   /* If there are some registers still to eliminate and one of the spill regs
4265      wasn't ever used before, additional stack space may have to be
4266      allocated to store this register.  Thus, we may have changed the offset
4267      between the stack and frame pointers, so mark that something has changed.
4268 
4269      One might think that we need only set VAL to 1 if this is a call-used
4270      register.  However, the set of registers that must be saved by the
4271      prologue is not identical to the call-used set.  For example, the
4272      register used by the call insn for the return PC is a call-used register,
4273      but must be saved by the prologue.  */
4274 
4275   n_spills = 0;
4276   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4277     if (TEST_HARD_REG_BIT (used_spill_regs, i))
4278       {
4279 	spill_reg_order[i] = n_spills;
4280 	spill_regs[n_spills++] = i;
4281 	if (num_eliminable && ! df_regs_ever_live_p (i))
4282 	  something_changed = 1;
4283 	df_set_regs_ever_live (i, true);
4284       }
4285     else
4286       spill_reg_order[i] = -1;
4287 
4288   EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
4289     if (! ira_conflicts_p || reg_renumber[i] >= 0)
4290       {
4291 	/* Record the current hard register the pseudo is allocated to
4292 	   in pseudo_previous_regs so we avoid reallocating it to the
4293 	   same hard reg in a later pass.  */
4294 	gcc_assert (reg_renumber[i] >= 0);
4295 
4296 	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
4297 	/* Mark it as no longer having a hard register home.  */
4298 	reg_renumber[i] = -1;
4299 	if (ira_conflicts_p)
4300 	  /* Inform IRA about the change.  */
4301 	  ira_mark_allocation_change (i);
4302 	/* We will need to scan everything again.  */
4303 	something_changed = 1;
4304       }
4305 
4306   /* Retry global register allocation if possible.  */
4307   if (global && ira_conflicts_p)
4308     {
4309       unsigned int n;
4310 
4311       memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4312       /* For every insn that needs reloads, set the registers used as spill
4313 	 regs in pseudo_forbidden_regs for every pseudo live across the
4314 	 insn.  */
4315       for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4316 	{
4317 	  EXECUTE_IF_SET_IN_REG_SET
4318 	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4319 	    {
4320 	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4321 				chain->used_spill_regs);
4322 	    }
4323 	  EXECUTE_IF_SET_IN_REG_SET
4324 	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4325 	    {
4326 	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4327 				chain->used_spill_regs);
4328 	    }
4329 	}
4330 
4331       /* Retry allocating the pseudos spilled in IRA and the
4332 	 reload.  For each reg, merge the various reg sets that
4333 	 indicate which hard regs can't be used, and call
4334 	 ira_reassign_pseudos.  */
4335       for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4336 	if (reg_old_renumber[i] != reg_renumber[i])
4337 	  {
4338 	    if (reg_renumber[i] < 0)
4339 	      temp_pseudo_reg_arr[n++] = i;
4340 	    else
4341 	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4342 	  }
4343       if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4344 				bad_spill_regs_global,
4345 				pseudo_forbidden_regs, pseudo_previous_regs,
4346 				&spilled_pseudos))
4347 	something_changed = 1;
4348     }
4349   /* Fix up the register information in the insn chain.
4350      This involves deleting those of the spilled pseudos which did not get
4351      a new hard register home from the live_{before,after} sets.  */
4352   for (chain = reload_insn_chain; chain; chain = chain->next)
4353     {
4354       HARD_REG_SET used_by_pseudos;
4355       HARD_REG_SET used_by_pseudos2;
4356 
4357       if (! ira_conflicts_p)
4358 	{
4359 	  /* Don't do it for IRA because IRA and the reload still can
4360 	     assign hard registers to the spilled pseudos on next
4361 	     reload iterations.  */
4362 	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4363 	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4364 	}
4365       /* Mark any unallocated hard regs as available for spills.  That
4366 	 makes inheritance work somewhat better.  */
4367       if (chain->need_reload)
4368 	{
4369 	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4370 	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4371 	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4372 
4373 	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4374 	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4375 	  /* Value of chain->used_spill_regs from previous iteration
4376 	     may be not included in the value calculated here because
4377 	     of possible removing caller-saves insns (see function
4378 	     delete_caller_save_insns.  */
4379 	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4380 	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4381 	}
4382     }
4383 
4384   CLEAR_REG_SET (&changed_allocation_pseudos);
4385   /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
4386   for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4387     {
4388       int regno = reg_renumber[i];
4389       if (reg_old_renumber[i] == regno)
4390 	continue;
4391 
4392       SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4393 
4394       alter_reg (i, reg_old_renumber[i], false);
4395       reg_old_renumber[i] = regno;
4396       if (dump_file)
4397 	{
4398 	  if (regno == -1)
4399 	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
4400 	  else
4401 	    fprintf (dump_file, " Register %d now in %d.\n\n",
4402 		     i, reg_renumber[i]);
4403 	}
4404     }
4405 
4406   return something_changed;
4407 }
4408 
4409 /* Find all paradoxical subregs within X and update reg_max_ref_width.  */
4410 
4411 static void
4412 scan_paradoxical_subregs (rtx x)
4413 {
4414   int i;
4415   const char *fmt;
4416   enum rtx_code code = GET_CODE (x);
4417 
4418   switch (code)
4419     {
4420     case REG:
4421     case CONST:
4422     case SYMBOL_REF:
4423     case LABEL_REF:
4424     CASE_CONST_ANY:
4425     case CC0:
4426     case PC:
4427     case USE:
4428     case CLOBBER:
4429       return;
4430 
4431     case SUBREG:
4432       if (REG_P (SUBREG_REG (x))
4433 	  && (GET_MODE_SIZE (GET_MODE (x))
4434 	      > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4435 	{
4436 	  reg_max_ref_width[REGNO (SUBREG_REG (x))]
4437 	    = GET_MODE_SIZE (GET_MODE (x));
4438 	  mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4439 	}
4440       return;
4441 
4442     default:
4443       break;
4444     }
4445 
4446   fmt = GET_RTX_FORMAT (code);
4447   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4448     {
4449       if (fmt[i] == 'e')
4450 	scan_paradoxical_subregs (XEXP (x, i));
4451       else if (fmt[i] == 'E')
4452 	{
4453 	  int j;
4454 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4455 	    scan_paradoxical_subregs (XVECEXP (x, i, j));
4456 	}
4457     }
4458 }
4459 
4460 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4461    If *OP_PTR is a paradoxical subreg, try to remove that subreg
4462    and apply the corresponding narrowing subreg to *OTHER_PTR.
4463    Return true if the operands were changed, false otherwise.  */
4464 
4465 static bool
4466 strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4467 {
4468   rtx op, inner, other, tem;
4469 
4470   op = *op_ptr;
4471   if (!paradoxical_subreg_p (op))
4472     return false;
4473   inner = SUBREG_REG (op);
4474 
4475   other = *other_ptr;
4476   tem = gen_lowpart_common (GET_MODE (inner), other);
4477   if (!tem)
4478     return false;
4479 
4480   /* If the lowpart operation turned a hard register into a subreg,
4481      rather than simplifying it to another hard register, then the
4482      mode change cannot be properly represented.  For example, OTHER
4483      might be valid in its current mode, but not in the new one.  */
4484   if (GET_CODE (tem) == SUBREG
4485       && REG_P (other)
4486       && HARD_REGISTER_P (other))
4487     return false;
4488 
4489   *op_ptr = inner;
4490   *other_ptr = tem;
4491   return true;
4492 }
4493 
4494 /* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
4495    examine all of the reload insns between PREV and NEXT exclusive, and
4496    annotate all that may trap.  */
4497 
4498 static void
4499 fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4500 {
4501   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4502   if (note == NULL)
4503     return;
4504   if (!insn_could_throw_p (insn))
4505     remove_note (insn, note);
4506   copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4507 }
4508 
4509 /* Reload pseudo-registers into hard regs around each insn as needed.
4510    Additional register load insns are output before the insn that needs it
4511    and perhaps store insns after insns that modify the reloaded pseudo reg.
4512 
4513    reg_last_reload_reg and reg_reloaded_contents keep track of
4514    which registers are already available in reload registers.
4515    We update these for the reloads that we perform,
4516    as the insns are scanned.  */
4517 
static void
reload_as_needed (int live_known)
{
  struct insn_chain *chain;
#if AUTO_INC_DEC
  int i;
#endif
  rtx_note *marker;

  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
  memset (spill_reg_store, 0, sizeof spill_reg_store);
  reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
  INIT_REG_SET (&reg_has_output_reload);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);
  CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);

  /* Initialize the current elimination offsets.  */
  set_initial_elim_offsets ();

  /* Generate a marker insn that we will move around.  */
  marker = emit_note (NOTE_INSN_DELETED);
  unlink_insn_chain (marker, marker);

  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      rtx_insn *prev = 0;
      rtx_insn *insn = chain->insn;
      rtx_insn *old_next = NEXT_INSN (insn);
#if AUTO_INC_DEC
      rtx_insn *old_prev = PREV_INSN (insn);
#endif

      /* Insns that are going to be deleted need no reload processing.  */
      if (will_delete_init_insn_p (insn))
	continue;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (LABEL_P (insn))
	set_offsets_for_label (insn);

      else if (INSN_P (insn))
	{
	  regset_head regs_to_forget;
	  INIT_REG_SET (&regs_to_forget);
	  note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && MEM_P (XEXP (PATTERN (insn), 0)))
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)),
				NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.  */
	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (NOTE_P (insn))
		{
		  update_eliminable_offsets ();
		  CLEAR_REG_SET (&regs_to_forget);
		  continue;
		}
	    }

	  /* If need_elim is nonzero but need_reload is zero, one might think
	     that we could simply set n_reloads to 0.  However, find_reloads
	     could have done some manipulation of the insn (such as swapping
	     commutative operands), and these manipulations are lost during
	     the first pass for every insn that needs register elimination.
	     So the actions of find_reloads must be redone here.  */

	  if (! chain->need_elim && ! chain->need_reload
	      && ! chain->need_operand_change)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      CLEAR_REG_SET (&reg_has_output_reload);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      rtx_insn *next = NEXT_INSN (insn);

	      /* ??? PREV can get deleted by reload inheritance.
		 Work around this by emitting a marker note.  */
	      prev = PREV_INSN (insn);
	      reorder_insns_nobb (marker, marker, prev);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (chain);

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (chain);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads (insn);

	      /* PREV may have been deleted by the reload insn generation
		 above; recover it from the marker, then take the marker
		 back out of the insn chain.  */
	      prev = PREV_INSN (marker);
	      unlink_insn_chain (marker, marker);

	      /* Adjust the exception region notes for loads and stores.  */
	      if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
		fixup_eh_region_note (insn, prev, next);

	      /* Adjust the location of REG_ARGS_SIZE.  */
	      rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
	      if (p)
		{
		  remove_note (insn, p);
		  fixup_args_size_notes (prev, PREV_INSN (next),
					 INTVAL (XEXP (p, 0)));
		}

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */
	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (rtx_insn *p = NEXT_INSN (prev);
		     p != next;
		     p = NEXT_INSN (p))
		  if (p != insn && INSN_P (p)
		      && GET_CODE (PATTERN (p)) != USE
		      && (recog_memoized (p) < 0
			  || (extract_insn (p),
			      !(constrain_operands (1,
				  get_enabled_alternatives (p))))))
		    {
		      error_for_asm (insn,
				     "%<asm%> operand requires "
				     "impossible reload");
		      delete_insn (p);
		    }
	    }

	  if (num_eliminable && chain->need_elim)
	    update_eliminable_offsets ();

	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  forget_marked_reloads (&regs_to_forget);
	  CLEAR_REG_SET (&regs_to_forget);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
	    if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1, NULL);

#if AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     REG_INC notes have been changed by reloading:
	     find_reloads_address_1 records substitutions for them,
	     which have been performed by subst_reloads above.  */
	  for (i = n_reloads - 1; i >= 0; i--)
	    {
	      rtx in_reg = rld[i].in_reg;
	      if (in_reg)
		{
		  enum rtx_code code = GET_CODE (in_reg);
		  /* PRE_INC / PRE_DEC will have the reload register ending up
		     with the same value as the stack slot, but that doesn't
		     hold true for POST_INC / POST_DEC.  Either we have to
		     convert the memory access to a true POST_INC / POST_DEC,
		     or we can't use the reload register for inheritance.  */
		  if ((code == POST_INC || code == POST_DEC)
		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
					    REGNO (rld[i].reg_rtx))
		      /* Make sure it is the inc/dec pseudo, and not
			 some other (e.g. output operand) pseudo.  */
		      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			  == REGNO (XEXP (in_reg, 0))))

		    {
		      rtx reload_reg = rld[i].reg_rtx;
		      machine_mode mode = GET_MODE (reload_reg);
		      int n = 0;
		      rtx_insn *p;

		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
			{
			  /* We really want to ignore REG_INC notes here, so
			     use PATTERN (p) as argument to reg_set_p .  */
			  if (reg_set_p (reload_reg, PATTERN (p)))
			    break;
			  n = count_occurrences (PATTERN (p), reload_reg, 0);
			  if (! n)
			    continue;
			  if (n == 1)
			    {
			      rtx replace_reg
				= gen_rtx_fmt_e (code, mode, reload_reg);

			      validate_replace_rtx_group (reload_reg,
							  replace_reg, p);
			      n = verify_changes (0);

			      /* We must also verify that the constraints
				 are met after the replacement.  Make sure
				 extract_insn is only called for an insn
				 where the replacements were found to be
				 valid so far. */
			      if (n)
				{
				  extract_insn (p);
				  n = constrain_operands (1,
				    get_enabled_alternatives (p));
				}

			      /* If the constraints were not met, then
				 undo the replacement, else confirm it.  */
			      if (!n)
				cancel_changes (0);
			      else
				confirm_change_group ();
			    }
			  break;
			}
		      /* At this point N is 1 iff the POST_INC / POST_DEC
			 replacement above was made and validated.  */
		      if (n == 1)
			{
			  add_reg_note (p, REG_INC, reload_reg);
			  /* Mark this as having an output reload so that the
			     REG_INC processing code below won't invalidate
			     the reload for inheritance.  */
			  SET_HARD_REG_BIT (reg_is_output_reload,
					    REGNO (reload_reg));
			  SET_REGNO_REG_SET (&reg_has_output_reload,
					     REGNO (XEXP (in_reg, 0)));
			}
		      else
			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
					      NULL);
		    }
		  else if ((code == PRE_INC || code == PRE_DEC)
			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
						 REGNO (rld[i].reg_rtx))
			   /* Make sure it is the inc/dec pseudo, and not
			      some other (e.g. output operand) pseudo.  */
			   && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			       == REGNO (XEXP (in_reg, 0))))
		    {
		      SET_HARD_REG_BIT (reg_is_output_reload,
					REGNO (rld[i].reg_rtx));
		      SET_REGNO_REG_SET (&reg_has_output_reload,
					 REGNO (XEXP (in_reg, 0)));
		    }
		  else if (code == PRE_INC || code == PRE_DEC
			   || code == POST_INC || code == POST_DEC)
		    {
		      int in_regno = REGNO (XEXP (in_reg, 0));

		      if (reg_last_reload_reg[in_regno] != NULL_RTX)
			{
			  int in_hard_regno;
			  bool forget_p = true;

			  in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
			  if (TEST_HARD_REG_BIT (reg_reloaded_valid,
						 in_hard_regno))
			    {
			      for (rtx_insn *x = (old_prev ?
						  NEXT_INSN (old_prev) : insn);
				   x != old_next;
				   x = NEXT_INSN (x))
				if (x == reg_reloaded_insn[in_hard_regno])
				  {
				    forget_p = false;
				    break;
				  }
			    }
			  /* If for some reasons, we didn't set up
			     reg_last_reload_reg in this insn,
			     invalidate inheritance from previous
			     insns for the incremented/decremented
			     register.  Such registers will be not in
			     reg_has_output_reload.  Invalidate it
			     also if the corresponding element in
			     reg_reloaded_insn is also
			     invalidated.  */
			  if (forget_p)
			    forget_old_reloads_1 (XEXP (in_reg, 0),
						  NULL_RTX, NULL);
			}
		    }
		}
	    }
	  /* If a pseudo that got a hard register is auto-incremented,
	     we must purge records of copying it into pseudos without
	     hard registers.  */
	  for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (rld[i].out == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (LABEL_P (insn))
	CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg, or if it contains a value that will
         be partially clobbered by the call.  */
      else if (CALL_P (insn))
	{
	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);

	  /* If this is a call to a setjmp-type function, we must not
	     reuse any reload reg contents across the call; that will
	     just be clobbered by other uses of the register in later
	     code, before the longjmp.  */
	  if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
	    CLEAR_HARD_REG_SET (reg_reloaded_valid);
	}
    }

  /* Clean up.  */
  free (reg_last_reload_reg);
  CLEAR_REG_SET (&reg_has_output_reload);
}
4870 
4871 /* Discard all record of any value reloaded from X,
4872    or reloaded in X from someplace else;
4873    unless X is an output reload reg of the current insn.
4874 
4875    X may be a hard reg (the reload reg)
4876    or it may be a pseudo reg that was reloaded from.
4877 
4878    When DATA is non-NULL just mark the registers in regset
4879    to be forgotten later.  */
4880 
4881 static void
4882 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4883 		      void *data)
4884 {
4885   unsigned int regno;
4886   unsigned int nr;
4887   regset regs = (regset) data;
4888 
4889   /* note_stores does give us subregs of hard regs,
4890      subreg_regno_offset requires a hard reg.  */
4891   while (GET_CODE (x) == SUBREG)
4892     {
4893       /* We ignore the subreg offset when calculating the regno,
4894 	 because we are using the entire underlying hard register
4895 	 below.  */
4896       x = SUBREG_REG (x);
4897     }
4898 
4899   if (!REG_P (x))
4900     return;
4901 
4902   regno = REGNO (x);
4903 
4904   if (regno >= FIRST_PSEUDO_REGISTER)
4905     nr = 1;
4906   else
4907     {
4908       unsigned int i;
4909 
4910       nr = hard_regno_nregs[regno][GET_MODE (x)];
4911       /* Storing into a spilled-reg invalidates its contents.
4912 	 This can happen if a block-local pseudo is allocated to that reg
4913 	 and it wasn't spilled because this block's total need is 0.
4914 	 Then some insn might have an optional reload and use this reg.  */
4915       if (!regs)
4916 	for (i = 0; i < nr; i++)
4917 	  /* But don't do this if the reg actually serves as an output
4918 	     reload reg in the current instruction.  */
4919 	  if (n_reloads == 0
4920 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4921 	    {
4922 	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4923 	      spill_reg_store[regno + i] = 0;
4924 	    }
4925     }
4926 
4927   if (regs)
4928     while (nr-- > 0)
4929       SET_REGNO_REG_SET (regs, regno + nr);
4930   else
4931     {
4932       /* Since value of X has changed,
4933 	 forget any value previously copied from it.  */
4934 
4935       while (nr-- > 0)
4936 	/* But don't forget a copy if this is the output reload
4937 	   that establishes the copy's validity.  */
4938 	if (n_reloads == 0
4939 	    || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4940 	  reg_last_reload_reg[regno + nr] = 0;
4941      }
4942 }
4943 
4944 /* Forget the reloads marked in regset by previous function.  */
4945 static void
4946 forget_marked_reloads (regset regs)
4947 {
4948   unsigned int reg;
4949   reg_set_iterator rsi;
4950   EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4951     {
4952       if (reg < FIRST_PSEUDO_REGISTER
4953 	  /* But don't do this if the reg actually serves as an output
4954 	     reload reg in the current instruction.  */
4955 	  && (n_reloads == 0
4956 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4957 	  {
4958 	    CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4959 	    spill_reg_store[reg] = 0;
4960 	  }
4961       if (n_reloads == 0
4962 	  || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4963 	reg_last_reload_reg[reg] = 0;
4964     }
4965 }
4966 
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is in use as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
5005 
5006 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5007    TYPE. MODE is used to indicate how many consecutive regs are
5008    actually used.  */
5009 
5010 static void
5011 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
5012 			machine_mode mode)
5013 {
5014   switch (type)
5015     {
5016     case RELOAD_OTHER:
5017       add_to_hard_reg_set (&reload_reg_used, mode, regno);
5018       break;
5019 
5020     case RELOAD_FOR_INPUT_ADDRESS:
5021       add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
5022       break;
5023 
5024     case RELOAD_FOR_INPADDR_ADDRESS:
5025       add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
5026       break;
5027 
5028     case RELOAD_FOR_OUTPUT_ADDRESS:
5029       add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
5030       break;
5031 
5032     case RELOAD_FOR_OUTADDR_ADDRESS:
5033       add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
5034       break;
5035 
5036     case RELOAD_FOR_OPERAND_ADDRESS:
5037       add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
5038       break;
5039 
5040     case RELOAD_FOR_OPADDR_ADDR:
5041       add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
5042       break;
5043 
5044     case RELOAD_FOR_OTHER_ADDRESS:
5045       add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
5046       break;
5047 
5048     case RELOAD_FOR_INPUT:
5049       add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
5050       break;
5051 
5052     case RELOAD_FOR_OUTPUT:
5053       add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
5054       break;
5055 
5056     case RELOAD_FOR_INSN:
5057       add_to_hard_reg_set (&reload_reg_used_in_insn,  mode, regno);
5058       break;
5059     }
5060 
5061   add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
5062 }
5063 
5064 /* Similarly, but show REGNO is no longer in use for a reload.  */
5065 
static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
			 enum reload_type type, machine_mode mode)
{
  /* Number of consecutive hard registers covered by MODE at REGNO.  */
  unsigned int nregs = hard_regno_nregs[regno][mode];
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  /* The in-use set corresponding to TYPE, selected by the switch below.  */
  HARD_REG_SET *used_in_set;

  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      gcc_unreachable ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (rld[i].when_needed == type
	      && (check_any || rld[i].opnum == opnum)
	      && rld[i].reg_rtx)
	    {
	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
	      unsigned int conflict_end
		= end_hard_regno (rld[i].mode, conflict_start);

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }

  /* Free whatever remains of the interval after excluding conflicts.  */
  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
5170 
/* Return 1 if hard reg REGNO is free as a reload reg for a reload of the
   sort specified by OPNUM and TYPE.

   The decision is made against the reload_reg_used* HARD_REG_SETs filled
   in for reloads already assigned for the current insn; each case below
   enumerates exactly the reload types whose lifetime overlaps that of a
   TYPE reload for operand OPNUM.  */

static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      /* Likewise for any per-operand use.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      /* An input reload remains live through the insn itself, so it
	 conflicts with anything used by the insn body or for operand
	 addresses.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  Note
	 that multiple output operands are emitted in reverse order, so
	 the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  Note that multiple output operands are
	 emitted in reverse order, so the conflicting ones are
	 those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Operand-address reloads are live while the insn executes, so
	 they conflict with every input and with the insn body itself.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Likewise for reloads of an operand-address reload's address.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* An insn reload overlaps every input and output reload as well as
	 any other use during the insn body.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      /* These come first and conflict only with each other.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
5331 
/* Return 1 if the value in reload reg REGNO, as used by the reload with
   the number RELOADNUM, is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
{
  int opnum = rld[reloadnum].opnum;
  enum reload_type type = rld[reloadnum].when_needed;
  int i;

  /* See if there is a reload with the same type for this operand, using
     the same register. This case is not handled by the code below.  */
  for (i = reloadnum + 1; i < n_reloads; i++)
    {
      rtx reg;
      int nregs;

      if (rld[i].opnum != opnum || rld[i].when_needed != type)
	continue;
      reg = rld[i].reg_rtx;
      if (reg == NULL_RTX)
	continue;
      nregs = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
      /* A later same-type, same-operand reload clobbers REGNO, so the
	 value cannot reach the end.  */
      if (regno >= REGNO (reg) && regno < REGNO (reg) + nregs)
	return 0;
    }

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_INPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_INPADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Same checks as RELOAD_FOR_OPERAND_ADDRESS, plus the operand
	 address and insn uses, which come after this reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = reload_n_operands;

      /* fall through */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_OUTPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_OUTADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5498 
5499 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5500    every register in REG.  */
5501 
5502 static bool
5503 reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5504 {
5505   unsigned int i;
5506 
5507   for (i = REGNO (reg); i < END_REGNO (reg); i++)
5508     if (!reload_reg_reaches_end_p (i, reloadnum))
5509       return false;
5510   return true;
5511 }
5512 
5513 
5514 /*  Returns whether R1 and R2 are uniquely chained: the value of one
5515     is used by the other, and that value is not used by any other
5516     reload for this insn.  This is used to partially undo the decision
5517     made in find_reloads when in the case of multiple
5518     RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5519     RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5520     reloads.  This code tries to avoid the conflict created by that
5521     change.  It might be cleaner to explicitly keep track of which
5522     RELOAD_FOR_OPADDR_ADDR reload is associated with which
5523     RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5524     this after the fact. */
5525 static bool
5526 reloads_unique_chain_p (int r1, int r2)
5527 {
5528   int i;
5529 
5530   /* We only check input reloads.  */
5531   if (! rld[r1].in || ! rld[r2].in)
5532     return false;
5533 
5534   /* Avoid anything with output reloads.  */
5535   if (rld[r1].out || rld[r2].out)
5536     return false;
5537 
5538   /* "chained" means one reload is a component of the other reload,
5539      not the same as the other reload.  */
5540   if (rld[r1].opnum != rld[r2].opnum
5541       || rtx_equal_p (rld[r1].in, rld[r2].in)
5542       || rld[r1].optional || rld[r2].optional
5543       || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5544 	    || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5545     return false;
5546 
5547   /* The following loop assumes that r1 is the reload that feeds r2.  */
5548   if (r1 > r2)
5549     std::swap (r1, r2);
5550 
5551   for (i = 0; i < n_reloads; i ++)
5552     /* Look for input reloads that aren't our two */
5553     if (i != r1 && i != r2 && rld[i].in)
5554       {
5555 	/* If our reload is mentioned at all, it isn't a simple chain.  */
5556 	if (reg_mentioned_p (rld[r1].in, rld[i].in))
5557 	  return false;
5558       }
5559   return true;
5560 }
5561 
5562 /* The recursive function change all occurrences of WHAT in *WHERE
5563    to REPL.  */
5564 static void
5565 substitute (rtx *where, const_rtx what, rtx repl)
5566 {
5567   const char *fmt;
5568   int i;
5569   enum rtx_code code;
5570 
5571   if (*where == 0)
5572     return;
5573 
5574   if (*where == what || rtx_equal_p (*where, what))
5575     {
5576       /* Record the location of the changed rtx.  */
5577       substitute_stack.safe_push (where);
5578       *where = repl;
5579       return;
5580     }
5581 
5582   code = GET_CODE (*where);
5583   fmt = GET_RTX_FORMAT (code);
5584   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5585     {
5586       if (fmt[i] == 'E')
5587 	{
5588 	  int j;
5589 
5590 	  for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5591 	    substitute (&XVECEXP (*where, i, j), what, repl);
5592 	}
5593       else if (fmt[i] == 'e')
5594 	substitute (&XEXP (*where, i), what, repl);
5595     }
5596 }
5597 
/* The function returns TRUE if chain of reload R1 and R2 (in any
   order) can be evaluated without usage of intermediate register for
   the reload containing another reload.  It is important to see
   gen_reload to understand what the function is trying to do.  As an
   example, let us have reload chain

      r2: const
      r1: <something> + const

   and reload R2 got reload reg HR.  The function returns true if
   there is a correct insn HR = HR + <something>.  Otherwise,
   gen_reload will use intermediate register (and this is the reload
   reg for R1) to reload <something>.

   We need this function to find a conflict for chain reloads.  In our
   example, if HR = HR + <something> is incorrect insn, then we cannot
   use HR as a reload register for R2.  If we do use it then we get a
   wrong code:

      HR = const
      HR = <something>
      HR = HR + HR

*/
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard registers.  */
  bool result = true;
  int regno, code;
  rtx out, in;
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();

  /* Make r2 a component of r1.  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    std::swap (r1, r2);

  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  /* Use whichever of the two reloads already has a hard register
     assigned as the candidate reload reg.  */
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Build the would-be insn source: R1's input with R2's value replaced
     by the shared reload register.  substitute records each change on
     substitute_stack so we can undo it below.  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  strip_paradoxical_subreg (&in, &out);

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Tentatively emit the insn and ask recog whether some insn
	 pattern matches it.  */
      insn = emit_insn (gen_rtx_SET (out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1, get_enabled_alternatives (insn));
	}

      /* The emitted insn was only a probe; remove it again.  */
      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!substitute_stack.is_empty ())
    {
      rtx *where = substitute_stack.pop ();
      *where = rld[r2].in;
    }

  return result;
}
5682 
5683 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5684    Return 0 otherwise.
5685 
5686    This function uses the same algorithm as reload_reg_free_p above.  */
5687 
5688 static int
5689 reloads_conflict (int r1, int r2)
5690 {
5691   enum reload_type r1_type = rld[r1].when_needed;
5692   enum reload_type r2_type = rld[r2].when_needed;
5693   int r1_opnum = rld[r1].opnum;
5694   int r2_opnum = rld[r2].opnum;
5695 
5696   /* RELOAD_OTHER conflicts with everything.  */
5697   if (r2_type == RELOAD_OTHER)
5698     return 1;
5699 
5700   /* Otherwise, check conflicts differently for each type.  */
5701 
5702   switch (r1_type)
5703     {
5704     case RELOAD_FOR_INPUT:
5705       return (r2_type == RELOAD_FOR_INSN
5706 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5707 	      || r2_type == RELOAD_FOR_OPADDR_ADDR
5708 	      || r2_type == RELOAD_FOR_INPUT
5709 	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5710 		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5711 		  && r2_opnum > r1_opnum));
5712 
5713     case RELOAD_FOR_INPUT_ADDRESS:
5714       return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5715 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5716 
5717     case RELOAD_FOR_INPADDR_ADDRESS:
5718       return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5719 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5720 
5721     case RELOAD_FOR_OUTPUT_ADDRESS:
5722       return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5723 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5724 
5725     case RELOAD_FOR_OUTADDR_ADDRESS:
5726       return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5727 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5728 
5729     case RELOAD_FOR_OPERAND_ADDRESS:
5730       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5731 	      || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5732 		  && (!reloads_unique_chain_p (r1, r2)
5733 		      || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5734 
5735     case RELOAD_FOR_OPADDR_ADDR:
5736       return (r2_type == RELOAD_FOR_INPUT
5737 	      || r2_type == RELOAD_FOR_OPADDR_ADDR);
5738 
5739     case RELOAD_FOR_OUTPUT:
5740       return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5741 	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5742 		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5743 		  && r2_opnum >= r1_opnum));
5744 
5745     case RELOAD_FOR_INSN:
5746       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5747 	      || r2_type == RELOAD_FOR_INSN
5748 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5749 
5750     case RELOAD_FOR_OTHER_ADDRESS:
5751       return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5752 
5753     case RELOAD_OTHER:
5754       return 1;
5755 
5756     default:
5757       gcc_unreachable ();
5758     }
5759 }
5760 
/* Indexed by reload number, 1 if the incoming value was
   inherited from a previous insn rather than loaded afresh.  */
static char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
static rtx_insn *reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
static rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
static int reload_spill_index[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
static rtx reload_reg_rtx_for_input[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5782 
/* Subroutine of free_for_value_p, used to check a single register REGNO.
   START_REGNO is the starting regno of the full reload register
   (possibly comprising multiple hard registers) that we are considering.
   OPNUM, TYPE, VALUE, OUT, RELOADNUM and IGNORE_ADDRESS_RELOADS have the
   same meaning as for free_for_value_p.  */

static int
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
			     enum reload_type type, rtx value, rtx out,
			     int reloadnum, int ignore_address_reloads)
{
  int time1;
  /* Set if we see an input reload that must not share its reload register
     with any new earlyclobber, but might otherwise share the reload
     register with an output or input-output reload.  */
  int check_earlyclobber = 0;
  int i;
  int copy = 0;

  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  /* OUT == const0_rtx is a marker meaning "test copying FROM regno",
     not using it as the reload register (see free_for_value_p).  */
  if (out == const0_rtx)
    {
      copy = 1;
      out = NULL_RTX;
    }

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
      time1 = copy ? 0 : 1;
      break;
    case RELOAD_OTHER:
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
      break;
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
	 respectively, to the time values for these, we get distinct time
	 values.  To get distinct time values for each operand, we have to
	 multiply opnum by at least three.  We round that up to four because
	 multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 3;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
	 executes (inclusive).  */
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 4
	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
	 is executed.  */
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
    }

  /* Compare against every other reload whose reload register covers
     REGNO, rejecting any whose lifetime would overlap ours.  */
  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = rld[i].reg_rtx;
      if (reg && REG_P (reg)
	  && ((unsigned) regno - true_regnum (reg)
	      <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
	  && i != reloadnum)
	{
	  rtx other_input = rld[i].in;

	  /* If the other reload loads the same input value, that
	     will not cause a conflict only if it's loading it into
	     the same register.  */
	  if (true_regnum (reg) != start_regno)
	    other_input = NULL_RTX;
	  if (! other_input || ! rtx_equal_p (other_input, value)
	      || rld[i].out || out)
	    {
	      int time2;
	      switch (rld[i].when_needed)
		{
		case RELOAD_FOR_OTHER_ADDRESS:
		  time2 = 0;
		  break;
		case RELOAD_FOR_INPADDR_ADDRESS:
		  /* find_reloads makes sure that a
		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
		     by at most one - the first -
		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
		     address reload is inherited, the address address reload
		     goes away, so we can ignore this conflict.  */
		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
			 Then the address address is still needed to store
			 back the new address.  */
		      && ! rld[reloadnum].out)
		    continue;
		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
		     reloads go away.  */
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      /* Unless we are reloading an auto_inc expression.  */
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 2;
		  break;
		case RELOAD_FOR_INPUT_ADDRESS:
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 3;
		  break;
		case RELOAD_FOR_INPUT:
		  time2 = rld[i].opnum * 4 + 4;
		  check_earlyclobber = 1;
		  break;
		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
		     == MAX_RECOG_OPERAND * 4  */
		case RELOAD_FOR_OPADDR_ADDR:
		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
		  break;
		case RELOAD_FOR_OPERAND_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
		  check_earlyclobber = 1;
		  break;
		case RELOAD_FOR_INSN:
		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
		  break;
		case RELOAD_FOR_OUTPUT:
		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
		     instruction is executed.  */
		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
		  break;
		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
		     value.  */
		case RELOAD_FOR_OUTADDR_ADDRESS:
		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
		  break;
		case RELOAD_FOR_OUTPUT_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
		  break;
		case RELOAD_OTHER:
		  /* If there is no conflict in the input part, handle this
		     like an output reload.  */
		  if (! rld[i].in || rtx_equal_p (other_input, value))
		    {
		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
		      /* Earlyclobbered outputs must conflict with inputs.  */
		      if (earlyclobber_operand_p (rld[i].out))
			time2 = MAX_RECOG_OPERANDS * 4 + 3;

		      break;
		    }
		  time2 = 1;
		  /* RELOAD_OTHER might be live beyond instruction execution,
		     but this is not obvious when we set time2 = 1.  So check
		     here if there might be a problem with the new reload
		     clobbering the register used by the RELOAD_OTHER.  */
		  if (out)
		    return 0;
		  break;
		default:
		  return 0;
		}
	      /* Conflict if our lifetime extends into the other reload's,
		 or if we have an output that would clash with a reload
		 that lives past insn execution.  */
	      if ((time1 >= time2
		   && (! rld[i].in || rld[i].out
		       || ! rtx_equal_p (other_input, value)))
		  || (out && rld[reloadnum].out_reg
		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
		return 0;
	    }
	}
    }

  /* Earlyclobbered outputs must conflict with inputs.  */
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
    return 0;

  return 1;
}
6002 
6003 /* Return 1 if the value in reload reg REGNO, as used by a reload
6004    needed for the part of the insn specified by OPNUM and TYPE,
6005    may be used to load VALUE into it.
6006 
6007    MODE is the mode in which the register is used, this is needed to
6008    determine how many hard regs to test.
6009 
6010    Other read-only reloads with the same value do not conflict
6011    unless OUT is nonzero and these other reloads have to live while
6012    output reloads live.
6013    If OUT is CONST0_RTX, this is a special case: it means that the
6014    test should not be for using register REGNO as reload register, but
6015    for copying from register REGNO into the reload register.
6016 
6017    RELOADNUM is the number of the reload we want to load this value for;
6018    a reload does not conflict with itself.
6019 
6020    When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
6021    reloads that load an address for the very reload we are considering.
6022 
6023    The caller has to make sure that there is no conflict with the return
6024    register.  */
6025 
6026 static int
6027 free_for_value_p (int regno, machine_mode mode, int opnum,
6028 		  enum reload_type type, rtx value, rtx out, int reloadnum,
6029 		  int ignore_address_reloads)
6030 {
6031   int nregs = hard_regno_nregs[regno][mode];
6032   while (nregs-- > 0)
6033     if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
6034 				       value, out, reloadnum,
6035 				       ignore_address_reloads))
6036       return 0;
6037   return 1;
6038 }
6039 
6040 /* Return nonzero if the rtx X is invariant over the current function.  */
6041 /* ??? Actually, the places where we use this expect exactly what is
6042    tested here, and not everything that is function invariant.  In
6043    particular, the frame pointer and arg pointer are special cased;
6044    pic_offset_table_rtx is not, and we must not spill these things to
6045    memory.  */
6046 
6047 int
6048 function_invariant_p (const_rtx x)
6049 {
6050   if (CONSTANT_P (x))
6051     return 1;
6052   if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6053     return 1;
6054   if (GET_CODE (x) == PLUS
6055       && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6056       && GET_CODE (XEXP (x, 1)) == CONST_INT)
6057     return 1;
6058   return 0;
6059 }
6060 
6061 /* Determine whether the reload reg X overlaps any rtx'es used for
6062    overriding inheritance.  Return nonzero if so.  */
6063 
6064 static int
6065 conflicts_with_override (rtx x)
6066 {
6067   int i;
6068   for (i = 0; i < n_reloads; i++)
6069     if (reload_override_in[i]
6070 	&& reg_overlap_mentioned_p (x, reload_override_in[i]))
6071       return 1;
6072   return 0;
6073 }
6074 
6075 /* Give an error message saying we failed to find a reload for INSN,
6076    and clear out reload R.  */
6077 static void
6078 failed_reload (rtx_insn *insn, int r)
6079 {
6080   if (asm_noperands (PATTERN (insn)) < 0)
6081     /* It's the compiler's fault.  */
6082     fatal_insn ("could not find a spill register", insn);
6083 
6084   /* It's the user's fault; the operand's mode and constraint
6085      don't match.  Disable this reload so we don't crash in final.  */
6086   error_for_asm (insn,
6087 		 "%<asm%> operand constraint incompatible with operand size");
6088   rld[r].in = 0;
6089   rld[r].out = 0;
6090   rld[r].reg_rtx = 0;
6091   rld[r].optional = 1;
6092   rld[r].secondary_p = 1;
6093 }
6094 
6095 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6096    for reload R.  If it's valid, get an rtx for it.  Return nonzero if
6097    successful.  */
6098 static int
6099 set_reload_reg (int i, int r)
6100 {
6101   /* regno is 'set but not used' if HARD_REGNO_MODE_OK doesn't use its first
6102      parameter.  */
6103   int regno ATTRIBUTE_UNUSED;
6104   rtx reg = spill_reg_rtx[i];
6105 
6106   if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6107     spill_reg_rtx[i] = reg
6108       = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6109 
6110   regno = true_regnum (reg);
6111 
6112   /* Detect when the reload reg can't hold the reload mode.
6113      This used to be one `if', but Sequent compiler can't handle that.  */
6114   if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
6115     {
6116       machine_mode test_mode = VOIDmode;
6117       if (rld[r].in)
6118 	test_mode = GET_MODE (rld[r].in);
6119       /* If rld[r].in has VOIDmode, it means we will load it
6120 	 in whatever mode the reload reg has: to wit, rld[r].mode.
6121 	 We have already tested that for validity.  */
6122       /* Aside from that, we need to test that the expressions
6123 	 to reload from or into have modes which are valid for this
6124 	 reload register.  Otherwise the reload insns would be invalid.  */
6125       if (! (rld[r].in != 0 && test_mode != VOIDmode
6126 	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
6127 	if (! (rld[r].out != 0
6128 	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
6129 	  {
6130 	    /* The reg is OK.  */
6131 	    last_spill_reg = i;
6132 
6133 	    /* Mark as in use for this insn the reload regs we use
6134 	       for this.  */
6135 	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6136 				    rld[r].when_needed, rld[r].mode);
6137 
6138 	    rld[r].reg_rtx = reg;
6139 	    reload_spill_index[r] = spill_regs[i];
6140 	    return 1;
6141 	  }
6142     }
6143   return 0;
6144 }
6145 
6146 /* Find a spill register to use as a reload register for reload R.
6147    LAST_RELOAD is nonzero if this is the last reload for the insn being
6148    processed.
6149 
6150    Set rld[R].reg_rtx to the register allocated.
6151 
6152    We return 1 if successful, or 0 if we couldn't find a spill reg and
6153    we didn't change anything.  */
6154 
static int
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
		     int last_reload)
{
  int i, pass, count;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = rld[r].nregs > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use three passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers which are not "bad", then
     finally any register.

     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 3; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      /* last_spill_reg is file-scope state updated by set_reload_reg;
	 starting just past it implements the round-robin.  */
      i = last_spill_reg;

      for (count = 0; count < n_spills; count++)
	{
	  int rclass = (int) rld[r].rclass;
	  int regnum;

	  /* Advance I with wrap-around over the spill-reg table.  */
	  i++;
	  if (i >= n_spills)
	    i -= n_spills;
	  regnum = spill_regs[i];

	  /* A candidate must either be free for this reload's opnum and
	     type, or already hold the needed input value; it must also
	     belong to the reload's class and be able to hold its mode.  */
	  if ((reload_reg_free_p (regnum, rld[r].opnum,
				  rld[r].when_needed)
	       || (rld[r].in
		   /* We check reload_reg_used to make sure we
		      don't clobber the return register.  */
		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
					rld[r].when_needed, rld[r].in,
					rld[r].out, r, 1)))
	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
	      && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 regnum)
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      regnum))))
	    {
	      int nr = hard_regno_nregs[regnum][rld[r].mode];

	      /* During the second pass we want to avoid reload registers
		 which are "bad" for this reload.  */
	      if (pass == 1
		  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
		continue;

	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = rld[r].nregs;
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.  Count NR down towards 1; if any of
		 the higher-numbered regs in the group is unusable, the
		 inner break leaves NR > 1 and the candidate is rejected.  */
	      while (nr > 1)
		{
		  int regno = regnum + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, rld[r].opnum,
					      rld[r].when_needed)))
		    break;
		  nr--;
		}
	      /* NR == 1 here means the whole group checked out; accept I.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on the current pass, omit later passes.
	 (COUNT < n_spills exactly when the inner loop exited via break.)  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count >= n_spills)
    return 0;

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  return set_reload_reg (i, r);
}
6281 
6282 /* Initialize all the tables needed to allocate reload registers.
6283    CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6284    is the array we use to restore the reg_rtx field for every reload.  */
6285 
static void
choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
{
  int i;

  /* Restore each reload's reg_rtx from the caller's saved copy; this
     lets choose_reload_regs retry (e.g. without inheritance) from a
     clean slate.  */
  for (i = 0; i < n_reloads; i++)
    rld[i].reg_rtx = save_reload_reg_rtx[i];

  /* Forget any inheritance decisions from a previous attempt.  */
  memset (reload_inherited, 0, MAX_RELOADS);
  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));

  /* Clear all per-insn "reload reg in use" tracking sets.  */
  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  /* Recompute the set of hard regs referenced around this insn: regs
     live through it, regs dead or set in it, and the hard regs backing
     any pseudos in those regsets.  */
  CLEAR_HARD_REG_SET (reg_used_in_insn);
  {
    HARD_REG_SET tmp;
    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
  }

  /* Clear the per-operand in-use sets for every operand position.  */
  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
    }

  /* Everything outside this insn's spill-reg set is unavailable.  */
  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);

  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

  for (i = 0; i < n_reloads; i++)
    /* If we have already decided to use a certain register,
       don't use it in another way.  */
    if (rld[i].reg_rtx)
      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
			      rld[i].when_needed, rld[i].mode);
}
6337 
6338 #ifdef SECONDARY_MEMORY_NEEDED
6339 /* If X is not a subreg, return it unmodified.  If it is a subreg,
6340    look up whether we made a replacement for the SUBREG_REG.  Return
6341    either the replacement or the SUBREG_REG.  */
6342 
6343 static rtx
6344 replaced_subreg (rtx x)
6345 {
6346   if (GET_CODE (x) == SUBREG)
6347     return find_replacement (&SUBREG_REG (x));
6348   return x;
6349 }
6350 #endif
6351 
6352 /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6353    mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6354    SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6355    otherwise it is NULL.  */
6356 
6357 static int
6358 compute_reload_subreg_offset (machine_mode outermode,
6359 			      rtx subreg,
6360 			      machine_mode innermode)
6361 {
6362   int outer_offset;
6363   machine_mode middlemode;
6364 
6365   if (!subreg)
6366     return subreg_lowpart_offset (outermode, innermode);
6367 
6368   outer_offset = SUBREG_BYTE (subreg);
6369   middlemode = GET_MODE (SUBREG_REG (subreg));
6370 
6371   /* If SUBREG is paradoxical then return the normal lowpart offset
6372      for OUTERMODE and INNERMODE.  Our caller has already checked
6373      that OUTERMODE fits in INNERMODE.  */
6374   if (outer_offset == 0
6375       && GET_MODE_SIZE (outermode) > GET_MODE_SIZE (middlemode))
6376     return subreg_lowpart_offset (outermode, innermode);
6377 
6378   /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
6379      plus the normal lowpart offset for MIDDLEMODE and INNERMODE.  */
6380   return outer_offset + subreg_lowpart_offset (middlemode, innermode);
6381 }
6382 
6383 /* Assign hard reg targets for the pseudo-registers we must reload
6384    into hard regs for this insn.
6385    Also output the instructions to copy them in and out of the hard regs.
6386 
6387    For machines with register classes, we are responsible for
6388    finding a reload reg in the proper class.  */
6389 
6390 static void
6391 choose_reload_regs (struct insn_chain *chain)
6392 {
6393   rtx_insn *insn = chain->insn;
6394   int i, j;
6395   unsigned int max_group_size = 1;
6396   enum reg_class group_class = NO_REGS;
6397   int pass, win, inheritance;
6398 
6399   rtx save_reload_reg_rtx[MAX_RELOADS];
6400 
6401   /* In order to be certain of getting the registers we need,
6402      we must sort the reloads into order of increasing register class.
6403      Then our grabbing of reload registers will parallel the process
6404      that provided the reload registers.
6405 
6406      Also note whether any of the reloads wants a consecutive group of regs.
6407      If so, record the maximum size of the group desired and what
6408      register class contains all the groups needed by this insn.  */
6409 
6410   for (j = 0; j < n_reloads; j++)
6411     {
6412       reload_order[j] = j;
6413       if (rld[j].reg_rtx != NULL_RTX)
6414 	{
6415 	  gcc_assert (REG_P (rld[j].reg_rtx)
6416 		      && HARD_REGISTER_P (rld[j].reg_rtx));
6417 	  reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6418 	}
6419       else
6420 	reload_spill_index[j] = -1;
6421 
6422       if (rld[j].nregs > 1)
6423 	{
6424 	  max_group_size = MAX (rld[j].nregs, max_group_size);
6425 	  group_class
6426 	    = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6427 	}
6428 
6429       save_reload_reg_rtx[j] = rld[j].reg_rtx;
6430     }
6431 
6432   if (n_reloads > 1)
6433     qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6434 
6435   /* If -O, try first with inheritance, then turning it off.
6436      If not -O, don't do inheritance.
6437      Using inheritance when not optimizing leads to paradoxes
6438      with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6439      because one side of the comparison might be inherited.  */
6440   win = 0;
6441   for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6442     {
6443       choose_reload_regs_init (chain, save_reload_reg_rtx);
6444 
6445       /* Process the reloads in order of preference just found.
6446 	 Beyond this point, subregs can be found in reload_reg_rtx.
6447 
6448 	 This used to look for an existing reloaded home for all of the
6449 	 reloads, and only then perform any new reloads.  But that could lose
6450 	 if the reloads were done out of reg-class order because a later
6451 	 reload with a looser constraint might have an old home in a register
6452 	 needed by an earlier reload with a tighter constraint.
6453 
6454 	 To solve this, we make two passes over the reloads, in the order
6455 	 described above.  In the first pass we try to inherit a reload
6456 	 from a previous insn.  If there is a later reload that needs a
6457 	 class that is a proper subset of the class being processed, we must
6458 	 also allocate a spill register during the first pass.
6459 
6460 	 Then make a second pass over the reloads to allocate any reloads
6461 	 that haven't been given registers yet.  */
6462 
6463       for (j = 0; j < n_reloads; j++)
6464 	{
6465 	  int r = reload_order[j];
6466 	  rtx search_equiv = NULL_RTX;
6467 
6468 	  /* Ignore reloads that got marked inoperative.  */
6469 	  if (rld[r].out == 0 && rld[r].in == 0
6470 	      && ! rld[r].secondary_p)
6471 	    continue;
6472 
6473 	  /* If find_reloads chose to use reload_in or reload_out as a reload
6474 	     register, we don't need to chose one.  Otherwise, try even if it
6475 	     found one since we might save an insn if we find the value lying
6476 	     around.
6477 	     Try also when reload_in is a pseudo without a hard reg.  */
6478 	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
6479 	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6480 		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6481 		      && !MEM_P (rld[r].in)
6482 		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6483 	    continue;
6484 
6485 #if 0 /* No longer needed for correct operation.
6486 	 It might give better code, or might not; worth an experiment?  */
6487 	  /* If this is an optional reload, we can't inherit from earlier insns
6488 	     until we are sure that any non-optional reloads have been allocated.
6489 	     The following code takes advantage of the fact that optional reloads
6490 	     are at the end of reload_order.  */
6491 	  if (rld[r].optional != 0)
6492 	    for (i = 0; i < j; i++)
6493 	      if ((rld[reload_order[i]].out != 0
6494 		   || rld[reload_order[i]].in != 0
6495 		   || rld[reload_order[i]].secondary_p)
6496 		  && ! rld[reload_order[i]].optional
6497 		  && rld[reload_order[i]].reg_rtx == 0)
6498 		allocate_reload_reg (chain, reload_order[i], 0);
6499 #endif
6500 
6501 	  /* First see if this pseudo is already available as reloaded
6502 	     for a previous insn.  We cannot try to inherit for reloads
6503 	     that are smaller than the maximum number of registers needed
6504 	     for groups unless the register we would allocate cannot be used
6505 	     for the groups.
6506 
6507 	     We could check here to see if this is a secondary reload for
6508 	     an object that is already in a register of the desired class.
6509 	     This would avoid the need for the secondary reload register.
6510 	     But this is complex because we can't easily determine what
6511 	     objects might want to be loaded via this reload.  So let a
6512 	     register be allocated here.  In `emit_reload_insns' we suppress
6513 	     one of the loads in the case described above.  */
6514 
6515 	  if (inheritance)
6516 	    {
6517 	      int byte = 0;
6518 	      int regno = -1;
6519 	      machine_mode mode = VOIDmode;
6520 	      rtx subreg = NULL_RTX;
6521 
6522 	      if (rld[r].in == 0)
6523 		;
6524 	      else if (REG_P (rld[r].in))
6525 		{
6526 		  regno = REGNO (rld[r].in);
6527 		  mode = GET_MODE (rld[r].in);
6528 		}
6529 	      else if (REG_P (rld[r].in_reg))
6530 		{
6531 		  regno = REGNO (rld[r].in_reg);
6532 		  mode = GET_MODE (rld[r].in_reg);
6533 		}
6534 	      else if (GET_CODE (rld[r].in_reg) == SUBREG
6535 		       && REG_P (SUBREG_REG (rld[r].in_reg)))
6536 		{
6537 		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
6538 		  if (regno < FIRST_PSEUDO_REGISTER)
6539 		    regno = subreg_regno (rld[r].in_reg);
6540 		  else
6541 		    {
6542 		      subreg = rld[r].in_reg;
6543 		      byte = SUBREG_BYTE (subreg);
6544 		    }
6545 		  mode = GET_MODE (rld[r].in_reg);
6546 		}
6547 #if AUTO_INC_DEC
6548 	      else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6549 		       && REG_P (XEXP (rld[r].in_reg, 0)))
6550 		{
6551 		  regno = REGNO (XEXP (rld[r].in_reg, 0));
6552 		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6553 		  rld[r].out = rld[r].in;
6554 		}
6555 #endif
6556 #if 0
6557 	      /* This won't work, since REGNO can be a pseudo reg number.
6558 		 Also, it takes much more hair to keep track of all the things
6559 		 that can invalidate an inherited reload of part of a pseudoreg.  */
6560 	      else if (GET_CODE (rld[r].in) == SUBREG
6561 		       && REG_P (SUBREG_REG (rld[r].in)))
6562 		regno = subreg_regno (rld[r].in);
6563 #endif
6564 
6565 	      if (regno >= 0
6566 		  && reg_last_reload_reg[regno] != 0
6567 		  && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
6568 		      >= GET_MODE_SIZE (mode) + byte)
6569 #ifdef CANNOT_CHANGE_MODE_CLASS
6570 		  /* Verify that the register it's in can be used in
6571 		     mode MODE.  */
6572 		  && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6573 						GET_MODE (reg_last_reload_reg[regno]),
6574 						mode)
6575 #endif
6576 		  )
6577 		{
6578 		  enum reg_class rclass = rld[r].rclass, last_class;
6579 		  rtx last_reg = reg_last_reload_reg[regno];
6580 
6581 		  i = REGNO (last_reg);
6582 		  byte = compute_reload_subreg_offset (mode,
6583 						       subreg,
6584 						       GET_MODE (last_reg));
6585 		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6586 		  last_class = REGNO_REG_CLASS (i);
6587 
6588 		  if (reg_reloaded_contents[i] == regno
6589 		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6590 		      && HARD_REGNO_MODE_OK (i, rld[r].mode)
6591 		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6592 			  /* Even if we can't use this register as a reload
6593 			     register, we might use it for reload_override_in,
6594 			     if copying it to the desired class is cheap
6595 			     enough.  */
6596 			  || ((register_move_cost (mode, last_class, rclass)
6597 			       < memory_move_cost (mode, rclass, true))
6598 			      && (secondary_reload_class (1, rclass, mode,
6599 							  last_reg)
6600 				  == NO_REGS)
6601 #ifdef SECONDARY_MEMORY_NEEDED
6602 			      && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6603 							    mode)
6604 #endif
6605 			      ))
6606 
6607 		      && (rld[r].nregs == max_group_size
6608 			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6609 						  i))
6610 		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6611 					   rld[r].when_needed, rld[r].in,
6612 					   const0_rtx, r, 1))
6613 		    {
6614 		      /* If a group is needed, verify that all the subsequent
6615 			 registers still have their values intact.  */
6616 		      int nr = hard_regno_nregs[i][rld[r].mode];
6617 		      int k;
6618 
6619 		      for (k = 1; k < nr; k++)
6620 			if (reg_reloaded_contents[i + k] != regno
6621 			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6622 			  break;
6623 
6624 		      if (k == nr)
6625 			{
6626 			  int i1;
6627 			  int bad_for_class;
6628 
6629 			  last_reg = (GET_MODE (last_reg) == mode
6630 				      ? last_reg : gen_rtx_REG (mode, i));
6631 
6632 			  bad_for_class = 0;
6633 			  for (k = 0; k < nr; k++)
6634 			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6635 								  i+k);
6636 
6637 			  /* We found a register that contains the
6638 			     value we need.  If this register is the
6639 			     same as an `earlyclobber' operand of the
6640 			     current insn, just mark it as a place to
6641 			     reload from since we can't use it as the
6642 			     reload register itself.  */
6643 
6644 			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
6645 			    if (reg_overlap_mentioned_for_reload_p
6646 				(reg_last_reload_reg[regno],
6647 				 reload_earlyclobbers[i1]))
6648 			      break;
6649 
6650 			  if (i1 != n_earlyclobbers
6651 			      || ! (free_for_value_p (i, rld[r].mode,
6652 						      rld[r].opnum,
6653 						      rld[r].when_needed, rld[r].in,
6654 						      rld[r].out, r, 1))
6655 			      /* Don't use it if we'd clobber a pseudo reg.  */
6656 			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6657 				  && rld[r].out
6658 				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6659 			      /* Don't clobber the frame pointer.  */
6660 			      || (i == HARD_FRAME_POINTER_REGNUM
6661 				  && frame_pointer_needed
6662 				  && rld[r].out)
6663 			      /* Don't really use the inherited spill reg
6664 				 if we need it wider than we've got it.  */
6665 			      || (GET_MODE_SIZE (rld[r].mode)
6666 				  > GET_MODE_SIZE (mode))
6667 			      || bad_for_class
6668 
6669 			      /* If find_reloads chose reload_out as reload
6670 				 register, stay with it - that leaves the
6671 				 inherited register for subsequent reloads.  */
6672 			      || (rld[r].out && rld[r].reg_rtx
6673 				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6674 			    {
6675 			      if (! rld[r].optional)
6676 				{
6677 				  reload_override_in[r] = last_reg;
6678 				  reload_inheritance_insn[r]
6679 				    = reg_reloaded_insn[i];
6680 				}
6681 			    }
6682 			  else
6683 			    {
6684 			      int k;
6685 			      /* We can use this as a reload reg.  */
6686 			      /* Mark the register as in use for this part of
6687 				 the insn.  */
6688 			      mark_reload_reg_in_use (i,
6689 						      rld[r].opnum,
6690 						      rld[r].when_needed,
6691 						      rld[r].mode);
6692 			      rld[r].reg_rtx = last_reg;
6693 			      reload_inherited[r] = 1;
6694 			      reload_inheritance_insn[r]
6695 				= reg_reloaded_insn[i];
6696 			      reload_spill_index[r] = i;
6697 			      for (k = 0; k < nr; k++)
6698 				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6699 						  i + k);
6700 			    }
6701 			}
6702 		    }
6703 		}
6704 	    }
6705 
6706 	  /* Here's another way to see if the value is already lying around.  */
6707 	  if (inheritance
6708 	      && rld[r].in != 0
6709 	      && ! reload_inherited[r]
6710 	      && rld[r].out == 0
6711 	      && (CONSTANT_P (rld[r].in)
6712 		  || GET_CODE (rld[r].in) == PLUS
6713 		  || REG_P (rld[r].in)
6714 		  || MEM_P (rld[r].in))
6715 	      && (rld[r].nregs == max_group_size
6716 		  || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6717 	    search_equiv = rld[r].in;
6718 
6719 	  if (search_equiv)
6720 	    {
6721 	      rtx equiv
6722 		= find_equiv_reg (search_equiv, insn, rld[r].rclass,
6723 				  -1, NULL, 0, rld[r].mode);
6724 	      int regno = 0;
6725 
6726 	      if (equiv != 0)
6727 		{
6728 		  if (REG_P (equiv))
6729 		    regno = REGNO (equiv);
6730 		  else
6731 		    {
6732 		      /* This must be a SUBREG of a hard register.
6733 			 Make a new REG since this might be used in an
6734 			 address and not all machines support SUBREGs
6735 			 there.  */
6736 		      gcc_assert (GET_CODE (equiv) == SUBREG);
6737 		      regno = subreg_regno (equiv);
6738 		      equiv = gen_rtx_REG (rld[r].mode, regno);
6739 		      /* If we choose EQUIV as the reload register, but the
6740 			 loop below decides to cancel the inheritance, we'll
6741 			 end up reloading EQUIV in rld[r].mode, not the mode
6742 			 it had originally.  That isn't safe when EQUIV isn't
6743 			 available as a spill register since its value might
6744 			 still be live at this point.  */
6745 		      for (i = regno; i < regno + (int) rld[r].nregs; i++)
6746 			if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6747 			  equiv = 0;
6748 		    }
6749 		}
6750 
6751 	      /* If we found a spill reg, reject it unless it is free
6752 		 and of the desired class.  */
6753 	      if (equiv != 0)
6754 		{
6755 		  int regs_used = 0;
6756 		  int bad_for_class = 0;
6757 		  int max_regno = regno + rld[r].nregs;
6758 
6759 		  for (i = regno; i < max_regno; i++)
6760 		    {
6761 		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6762 						      i);
6763 		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6764 							   i);
6765 		    }
6766 
6767 		  if ((regs_used
6768 		       && ! free_for_value_p (regno, rld[r].mode,
6769 					      rld[r].opnum, rld[r].when_needed,
6770 					      rld[r].in, rld[r].out, r, 1))
6771 		      || bad_for_class)
6772 		    equiv = 0;
6773 		}
6774 
6775 	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6776 		equiv = 0;
6777 
6778 	      /* We found a register that contains the value we need.
6779 		 If this register is the same as an `earlyclobber' operand
6780 		 of the current insn, just mark it as a place to reload from
6781 		 since we can't use it as the reload register itself.  */
6782 
6783 	      if (equiv != 0)
6784 		for (i = 0; i < n_earlyclobbers; i++)
6785 		  if (reg_overlap_mentioned_for_reload_p (equiv,
6786 							  reload_earlyclobbers[i]))
6787 		    {
6788 		      if (! rld[r].optional)
6789 			reload_override_in[r] = equiv;
6790 		      equiv = 0;
6791 		      break;
6792 		    }
6793 
6794 	      /* If the equiv register we have found is explicitly clobbered
6795 		 in the current insn, it depends on the reload type if we
6796 		 can use it, use it for reload_override_in, or not at all.
6797 		 In particular, we then can't use EQUIV for a
6798 		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
6799 
6800 	      if (equiv != 0)
6801 		{
6802 		  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6803 		    switch (rld[r].when_needed)
6804 		      {
6805 		      case RELOAD_FOR_OTHER_ADDRESS:
6806 		      case RELOAD_FOR_INPADDR_ADDRESS:
6807 		      case RELOAD_FOR_INPUT_ADDRESS:
6808 		      case RELOAD_FOR_OPADDR_ADDR:
6809 			break;
6810 		      case RELOAD_OTHER:
6811 		      case RELOAD_FOR_INPUT:
6812 		      case RELOAD_FOR_OPERAND_ADDRESS:
6813 			if (! rld[r].optional)
6814 			  reload_override_in[r] = equiv;
6815 			/* Fall through.  */
6816 		      default:
6817 			equiv = 0;
6818 			break;
6819 		      }
6820 		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6821 		    switch (rld[r].when_needed)
6822 		      {
6823 		      case RELOAD_FOR_OTHER_ADDRESS:
6824 		      case RELOAD_FOR_INPADDR_ADDRESS:
6825 		      case RELOAD_FOR_INPUT_ADDRESS:
6826 		      case RELOAD_FOR_OPADDR_ADDR:
6827 		      case RELOAD_FOR_OPERAND_ADDRESS:
6828 		      case RELOAD_FOR_INPUT:
6829 			break;
6830 		      case RELOAD_OTHER:
6831 			if (! rld[r].optional)
6832 			  reload_override_in[r] = equiv;
6833 			/* Fall through.  */
6834 		      default:
6835 			equiv = 0;
6836 			break;
6837 		      }
6838 		}
6839 
6840 	      /* If we found an equivalent reg, say no code need be generated
6841 		 to load it, and use it as our reload reg.  */
6842 	      if (equiv != 0
6843 		  && (regno != HARD_FRAME_POINTER_REGNUM
6844 		      || !frame_pointer_needed))
6845 		{
6846 		  int nr = hard_regno_nregs[regno][rld[r].mode];
6847 		  int k;
6848 		  rld[r].reg_rtx = equiv;
6849 		  reload_spill_index[r] = regno;
6850 		  reload_inherited[r] = 1;
6851 
6852 		  /* If reg_reloaded_valid is not set for this register,
6853 		     there might be a stale spill_reg_store lying around.
6854 		     We must clear it, since otherwise emit_reload_insns
6855 		     might delete the store.  */
6856 		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6857 		    spill_reg_store[regno] = NULL;
6858 		  /* If any of the hard registers in EQUIV are spill
6859 		     registers, mark them as in use for this insn.  */
6860 		  for (k = 0; k < nr; k++)
6861 		    {
6862 		      i = spill_reg_order[regno + k];
6863 		      if (i >= 0)
6864 			{
6865 			  mark_reload_reg_in_use (regno, rld[r].opnum,
6866 						  rld[r].when_needed,
6867 						  rld[r].mode);
6868 			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6869 					    regno + k);
6870 			}
6871 		    }
6872 		}
6873 	    }
6874 
6875 	  /* If we found a register to use already, or if this is an optional
6876 	     reload, we are done.  */
6877 	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6878 	    continue;
6879 
6880 #if 0
6881 	  /* No longer needed for correct operation.  Might or might
6882 	     not give better code on the average.  Want to experiment?  */
6883 
6884 	  /* See if there is a later reload that has a class different from our
6885 	     class that intersects our class or that requires less register
6886 	     than our reload.  If so, we must allocate a register to this
6887 	     reload now, since that reload might inherit a previous reload
6888 	     and take the only available register in our class.  Don't do this
6889 	     for optional reloads since they will force all previous reloads
6890 	     to be allocated.  Also don't do this for reloads that have been
6891 	     turned off.  */
6892 
6893 	  for (i = j + 1; i < n_reloads; i++)
6894 	    {
6895 	      int s = reload_order[i];
6896 
6897 	      if ((rld[s].in == 0 && rld[s].out == 0
6898 		   && ! rld[s].secondary_p)
6899 		  || rld[s].optional)
6900 		continue;
6901 
6902 	      if ((rld[s].rclass != rld[r].rclass
6903 		   && reg_classes_intersect_p (rld[r].rclass,
6904 					       rld[s].rclass))
6905 		  || rld[s].nregs < rld[r].nregs)
6906 		break;
6907 	    }
6908 
6909 	  if (i == n_reloads)
6910 	    continue;
6911 
6912 	  allocate_reload_reg (chain, r, j == n_reloads - 1);
6913 #endif
6914 	}
6915 
6916       /* Now allocate reload registers for anything non-optional that
6917 	 didn't get one yet.  */
6918       for (j = 0; j < n_reloads; j++)
6919 	{
6920 	  int r = reload_order[j];
6921 
6922 	  /* Ignore reloads that got marked inoperative.  */
6923 	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6924 	    continue;
6925 
6926 	  /* Skip reloads that already have a register allocated or are
6927 	     optional.  */
6928 	  if (rld[r].reg_rtx != 0 || rld[r].optional)
6929 	    continue;
6930 
6931 	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6932 	    break;
6933 	}
6934 
6935       /* If that loop got all the way, we have won.  */
6936       if (j == n_reloads)
6937 	{
6938 	  win = 1;
6939 	  break;
6940 	}
6941 
6942       /* Loop around and try without any inheritance.  */
6943     }
6944 
6945   if (! win)
6946     {
6947       /* First undo everything done by the failed attempt
6948 	 to allocate with inheritance.  */
6949       choose_reload_regs_init (chain, save_reload_reg_rtx);
6950 
6951       /* Some sanity tests to verify that the reloads found in the first
6952 	 pass are identical to the ones we have now.  */
6953       gcc_assert (chain->n_reloads == n_reloads);
6954 
6955       for (i = 0; i < n_reloads; i++)
6956 	{
6957 	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6958 	    continue;
6959 	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6960 	  for (j = 0; j < n_spills; j++)
6961 	    if (spill_regs[j] == chain->rld[i].regno)
6962 	      if (! set_reload_reg (j, i))
6963 		failed_reload (chain->insn, i);
6964 	}
6965     }
6966 
6967   /* If we thought we could inherit a reload, because it seemed that
6968      nothing else wanted the same reload register earlier in the insn,
6969      verify that assumption, now that all reloads have been assigned.
6970      Likewise for reloads where reload_override_in has been set.  */
6971 
6972   /* If doing expensive optimizations, do one preliminary pass that doesn't
6973      cancel any inheritance, but removes reloads that have been needed only
6974      for reloads that we know can be inherited.  */
6975   for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6976     {
6977       for (j = 0; j < n_reloads; j++)
6978 	{
6979 	  int r = reload_order[j];
6980 	  rtx check_reg;
6981 #ifdef SECONDARY_MEMORY_NEEDED
6982 	  rtx tem;
6983 #endif
6984 	  if (reload_inherited[r] && rld[r].reg_rtx)
6985 	    check_reg = rld[r].reg_rtx;
6986 	  else if (reload_override_in[r]
6987 		   && (REG_P (reload_override_in[r])
6988 		       || GET_CODE (reload_override_in[r]) == SUBREG))
6989 	    check_reg = reload_override_in[r];
6990 	  else
6991 	    continue;
6992 	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6993 				  rld[r].opnum, rld[r].when_needed, rld[r].in,
6994 				  (reload_inherited[r]
6995 				   ? rld[r].out : const0_rtx),
6996 				  r, 1))
6997 	    {
6998 	      if (pass)
6999 		continue;
7000 	      reload_inherited[r] = 0;
7001 	      reload_override_in[r] = 0;
7002 	    }
7003 	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
7004 	     reload_override_in, then we do not need its related
7005 	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
7006 	     likewise for other reload types.
7007 	     We handle this by removing a reload when its only replacement
7008 	     is mentioned in reload_in of the reload we are going to inherit.
7009 	     A special case are auto_inc expressions; even if the input is
7010 	     inherited, we still need the address for the output.  We can
7011 	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
7012 	     If we succeeded removing some reload and we are doing a preliminary
7013 	     pass just to remove such reloads, make another pass, since the
7014 	     removal of one reload might allow us to inherit another one.  */
7015 	  else if (rld[r].in
7016 		   && rld[r].out != rld[r].in
7017 		   && remove_address_replacements (rld[r].in))
7018 	    {
7019 	      if (pass)
7020 	        pass = 2;
7021 	    }
7022 #ifdef SECONDARY_MEMORY_NEEDED
7023 	  /* If we needed a memory location for the reload, we also have to
7024 	     remove its related reloads.  */
7025 	  else if (rld[r].in
7026 		   && rld[r].out != rld[r].in
7027 		   && (tem = replaced_subreg (rld[r].in), REG_P (tem))
7028 		   && REGNO (tem) < FIRST_PSEUDO_REGISTER
7029 		   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem)),
7030 					       rld[r].rclass, rld[r].inmode)
7031 		   && remove_address_replacements
7032 		      (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
7033 					  rld[r].when_needed)))
7034 	    {
7035 	      if (pass)
7036 	        pass = 2;
7037 	    }
7038 #endif
7039 	}
7040     }
7041 
7042   /* Now that reload_override_in is known valid,
7043      actually override reload_in.  */
7044   for (j = 0; j < n_reloads; j++)
7045     if (reload_override_in[j])
7046       rld[j].in = reload_override_in[j];
7047 
7048   /* If this reload won't be done because it has been canceled or is
7049      optional and not inherited, clear reload_reg_rtx so other
7050      routines (such as subst_reloads) don't get confused.  */
7051   for (j = 0; j < n_reloads; j++)
7052     if (rld[j].reg_rtx != 0
7053 	&& ((rld[j].optional && ! reload_inherited[j])
7054 	    || (rld[j].in == 0 && rld[j].out == 0
7055 		&& ! rld[j].secondary_p)))
7056       {
7057 	int regno = true_regnum (rld[j].reg_rtx);
7058 
7059 	if (spill_reg_order[regno] >= 0)
7060 	  clear_reload_reg_in_use (regno, rld[j].opnum,
7061 				   rld[j].when_needed, rld[j].mode);
7062 	rld[j].reg_rtx = 0;
7063 	reload_spill_index[j] = -1;
7064       }
7065 
7066   /* Record which pseudos and which spill regs have output reloads.  */
7067   for (j = 0; j < n_reloads; j++)
7068     {
7069       int r = reload_order[j];
7070 
7071       i = reload_spill_index[r];
7072 
7073       /* I is nonneg if this reload uses a register.
7074 	 If rld[r].reg_rtx is 0, this is an optional reload
7075 	 that we opted to ignore.  */
7076       if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
7077 	  && rld[r].reg_rtx != 0)
7078 	{
7079 	  int nregno = REGNO (rld[r].out_reg);
7080 	  int nr = 1;
7081 
7082 	  if (nregno < FIRST_PSEUDO_REGISTER)
7083 	    nr = hard_regno_nregs[nregno][rld[r].mode];
7084 
7085 	  while (--nr >= 0)
7086 	    SET_REGNO_REG_SET (&reg_has_output_reload,
7087 			       nregno + nr);
7088 
7089 	  if (i >= 0)
7090 	    add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
7091 
7092 	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
7093 		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
7094 		      || rld[r].when_needed == RELOAD_FOR_INSN);
7095 	}
7096     }
7097 }
7098 
7099 /* Deallocate the reload register for reload R.  This is called from
7100    remove_address_replacements.  */
7101 
7102 void
7103 deallocate_reload_reg (int r)
7104 {
7105   int regno;
7106 
7107   if (! rld[r].reg_rtx)
7108     return;
7109   regno = true_regnum (rld[r].reg_rtx);
7110   rld[r].reg_rtx = 0;
7111   if (spill_reg_order[regno] >= 0)
7112     clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7113 			     rld[r].mode);
7114   reload_spill_index[r] = -1;
7115 }
7116 
/* These arrays are filled by emit_reload_insns and its subroutines.
   Each holds the insn sequence built for one kind of reload (see the
   RELOAD_FOR_* dispatch in emit_input_reload_insns); the per-operand
   arrays are indexed by operand number.  */
static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *other_input_address_reload_insns = 0;
static rtx_insn *other_input_reload_insns = 0;
static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *operand_reload_insns = 0;
static rtx_insn *other_operand_reload_insns = 0;
static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];

/* Values to be put in spill_reg_store are put here first.  Instructions
   must only be placed here if the associated reload register reaches
   the end of the instruction's reload sequence.  */
static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
static HARD_REG_SET reg_reloaded_died;
7135 
7136 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7137    of class NEW_CLASS with mode NEW_MODE.  Or alternatively, if alt_reload_reg
7138    is nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7139    adjusted register, and return true.  Otherwise, return false.  */
7140 static bool
7141 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7142 			    enum reg_class new_class,
7143 			    machine_mode new_mode)
7144 
7145 {
7146   rtx reg;
7147 
7148   for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7149     {
7150       unsigned regno = REGNO (reg);
7151 
7152       if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7153 	continue;
7154       if (GET_MODE (reg) != new_mode)
7155 	{
7156 	  if (!HARD_REGNO_MODE_OK (regno, new_mode))
7157 	    continue;
7158 	  if (hard_regno_nregs[regno][new_mode]
7159 	      > hard_regno_nregs[regno][GET_MODE (reg)])
7160 	    continue;
7161 	  reg = reload_adjust_reg_for_mode (reg, new_mode);
7162 	}
7163       *reload_reg = reg;
7164       return true;
7165     }
7166   return false;
7167 }
7168 
7169 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7170    pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7171    nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7172    adjusted register, and return true.  Otherwise, return false.  */
7173 static bool
7174 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7175 			     enum insn_code icode)
7176 
7177 {
7178   enum reg_class new_class = scratch_reload_class (icode);
7179   machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7180 
7181   return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7182 				     new_class, new_mode);
7183 }
7184 
/* Generate insns to perform reload RL, which is for the insn in CHAIN and
   has the number J.  OLD contains the value to be used as input.  The
   generated insns are not emitted directly; they are accumulated on one
   of the type-specific sequences (input_reload_insns etc.) selected from
   RL->when_needed.  */

static void
emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
			 rtx old, int j)
{
  rtx_insn *insn = chain->insn;
  rtx reloadreg;
  rtx oldequiv_reg = 0;
  rtx oldequiv = 0;
  /* Nonzero once one of the special cases below has fully handled the
     reload; suppresses the generic gen_reload at the end.  */
  int special = 0;
  machine_mode mode;
  rtx_insn **where;

  /* delete_output_reload is only invoked properly if old contains
     the original pseudo register.  Since this is replaced with a
     hard reg when RELOAD_OVERRIDE_IN is set, see if we can
     find the pseudo in RELOAD_IN_REG.  This is also used to
     determine whether a secondary reload is needed.  */
  if (reload_override_in[j]
      && (REG_P (rl->in_reg)
	  || (GET_CODE (rl->in_reg) == SUBREG
	      && REG_P (SUBREG_REG (rl->in_reg)))))
    {
      oldequiv = old;
      old = rl->in_reg;
    }
  if (oldequiv == 0)
    oldequiv = old;
  else if (REG_P (oldequiv))
    oldequiv_reg = oldequiv;
  else if (GET_CODE (oldequiv) == SUBREG)
    oldequiv_reg = SUBREG_REG (oldequiv);

  reloadreg = reload_reg_rtx_for_input[j];
  mode = GET_MODE (reloadreg);

  /* If we are reloading from a register that was recently stored in
     with an output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize && REG_P (oldequiv)
      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
      && spill_reg_store[REGNO (oldequiv)]
      && REG_P (old)
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
			  rl->out_reg)))
    delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);

  /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
     OLDEQUIV.  */

  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
    oldequiv = SUBREG_REG (oldequiv);
  if (GET_MODE (oldequiv) != VOIDmode
      && mode != GET_MODE (oldequiv))
    oldequiv = gen_lowpart_SUBREG (mode, oldequiv);

  /* Switch to the right place to emit the reload insns.  Each reload
     type has its own sequence bucket so the sequences can later be
     concatenated in the required order.  */
  switch (rl->when_needed)
    {
    case RELOAD_OTHER:
      where = &other_input_reload_insns;
      break;
    case RELOAD_FOR_INPUT:
      where = &input_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      where = &input_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPADDR_ADDRESS:
      where = &inpaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      where = &output_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      where = &outaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      where = &operand_reload_insns;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      where = &other_operand_reload_insns;
      break;
    case RELOAD_FOR_OTHER_ADDRESS:
      where = &other_input_address_reload_insns;
      break;
    default:
      gcc_unreachable ();
    }

  push_to_sequence (*where);

  /* Auto-increment addresses must be reloaded in a special way.  They
     are recognized by RL->out being set while RL->out_reg is not.  */
  if (rl->out && ! rl->out_reg)
    {
      /* We are not going to bother supporting the case where a
	 incremented register can't be copied directly from
	 OLDEQUIV since this seems highly unlikely.  */
      gcc_assert (rl->secondary_in_reload < 0);

      if (reload_inherited[j])
	oldequiv = reloadreg;

      old = XEXP (rl->in_reg, 0);

      /* Prevent normal processing of this reload.  */
      special = 1;
      /* Output a special code sequence for this case.  */
      inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
    }

  /* If we are reloading a pseudo-register that was set by the previous
     insn, see if we can get rid of that pseudo-register entirely
     by redirecting the previous insn into our reload register.  */

  else if (optimize && REG_P (old)
	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
	   && dead_or_set_p (insn, old)
	   /* This is unsafe if some other reload
	      uses the same reg first.  */
	   && ! conflicts_with_override (reloadreg)
	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
				rl->when_needed, old, rl->out, j, 0))
    {
      rtx_insn *temp = PREV_INSN (insn);
      while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
	temp = PREV_INSN (temp);
      if (temp
	  && NONJUMP_INSN_P (temp)
	  && GET_CODE (PATTERN (temp)) == SET
	  && SET_DEST (PATTERN (temp)) == old
	  /* Make sure we can access insn_operand_constraint.  */
	  && asm_noperands (PATTERN (temp)) < 0
	  /* This is unsafe if operand occurs more than once in current
	     insn.  Perhaps some occurrences aren't reloaded.  */
	  && count_occurrences (PATTERN (insn), old, 0) == 1)
	{
	  /* This inner OLD shadows the parameter; both denote the same
	     pseudo here, since SET_DEST (PATTERN (temp)) == old was
	     just tested above.  */
	  rtx old = SET_DEST (PATTERN (temp));
	  /* Store into the reload register instead of the pseudo.  */
	  SET_DEST (PATTERN (temp)) = reloadreg;

	  /* Verify that resulting insn is valid.

	     Note that we have replaced the destination of TEMP with
	     RELOADREG.  If TEMP references RELOADREG within an
	     autoincrement addressing mode, then the resulting insn
	     is ill-formed and we must reject this optimization.  */
	  extract_insn (temp);
	  if (constrain_operands (1, get_enabled_alternatives (temp))
	      && (!AUTO_INC_DEC || ! find_reg_note (temp, REG_INC, reloadreg)))
	    {
	      /* If the previous insn is an output reload, the source is
		 a reload register, and its spill_reg_store entry will
		 contain the previous destination.  This is now
		 invalid.  */
	      if (REG_P (SET_SRC (PATTERN (temp)))
		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
		{
		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
		}

	      /* If these are the only uses of the pseudo reg,
		 pretend for GDB it lives in the reload reg we used.  */
	      if (REG_N_DEATHS (REGNO (old)) == 1
		  && REG_N_SETS (REGNO (old)) == 1)
		{
		  reg_renumber[REGNO (old)] = REGNO (reloadreg);
		  if (ira_conflicts_p)
		    /* Inform IRA about the change.  */
		    ira_mark_allocation_change (REGNO (old));
		  alter_reg (REGNO (old), -1, false);
		}
	      special = 1;

	      /* Adjust any debug insns between temp and insn.  */
	      while ((temp = NEXT_INSN (temp)) != insn)
		if (DEBUG_INSN_P (temp))
		  INSN_VAR_LOCATION_LOC (temp)
		    = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (temp),
					    old, reloadreg);
		else
		  gcc_assert (NOTE_P (temp));
	    }
	  else
	    {
	      /* The substituted insn did not satisfy its constraints;
		 restore the original destination.  */
	      SET_DEST (PATTERN (temp)) = old;
	    }
	}
    }

  /* We can't do that, so output an insn to load RELOADREG.  */

  /* If we have a secondary reload, pick up the secondary register
     and icode, if any.  If OLDEQUIV and OLD are different or
     if this is an in-out reload, recompute whether or not we
     still need a secondary register and what the icode should
     be.  If we still need a secondary register and the class or
     icode is different, go back to reloading from OLD if using
     OLDEQUIV means that we got the wrong type of register.  We
     cannot have different class or icode due to an in-out reload
     because we don't make such reloads when both the input and
     output need secondary reload registers.  */

  if (! special && rl->secondary_in_reload >= 0)
    {
      rtx second_reload_reg = 0;
      rtx third_reload_reg = 0;
      int secondary_reload = rl->secondary_in_reload;
      rtx real_oldequiv = oldequiv;
      rtx real_old = old;
      rtx tmp;
      enum insn_code icode;
      enum insn_code tertiary_icode = CODE_FOR_nothing;

      /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
	 and similarly for OLD.
	 See comments in get_secondary_reload in reload.c.  */
      /* If it is a pseudo that cannot be replaced with its
	 equivalent MEM, we must fall back to reload_in, which
	 will have all the necessary substitutions registered.
	 Likewise for a pseudo that can't be replaced with its
	 equivalent constant.

	 Take extra care for subregs of such pseudos.  Note that
	 we cannot use reg_equiv_mem in this case because it is
	 not in the right mode.  */

      tmp = oldequiv;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
	      || reg_equiv_constant (REGNO (tmp)) != 0))
	{
	  if (! reg_equiv_mem (REGNO (tmp))
	      || num_not_at_initial_offset
	      || GET_CODE (oldequiv) == SUBREG)
	    real_oldequiv = rl->in;
	  else
	    real_oldequiv = reg_equiv_mem (REGNO (tmp));
	}

      tmp = old;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
	      || reg_equiv_constant (REGNO (tmp)) != 0))
	{
	  if (! reg_equiv_mem (REGNO (tmp))
	      || num_not_at_initial_offset
	      || GET_CODE (old) == SUBREG)
	    real_old = rl->in;
	  else
	    real_old = reg_equiv_mem (REGNO (tmp));
	}

      second_reload_reg = rld[secondary_reload].reg_rtx;
      if (rld[secondary_reload].secondary_in_reload >= 0)
	{
	  int tertiary_reload = rld[secondary_reload].secondary_in_reload;

	  third_reload_reg = rld[tertiary_reload].reg_rtx;
	  tertiary_icode = rld[secondary_reload].secondary_in_icode;
	  /* We'd have to add more code for quartary reloads.  */
	  gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
	}
      icode = rl->secondary_in_icode;

      if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
	  || (rl->in != 0 && rl->out != 0))
	{
	  secondary_reload_info sri, sri2;
	  enum reg_class new_class, new_t_class;

	  sri.icode = CODE_FOR_nothing;
	  sri.prev_sri = NULL;
	  new_class
	    = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
							 rl->rclass, mode,
							 &sri);

	  if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	    second_reload_reg = 0;
	  else if (new_class == NO_REGS)
	    {
	      if (reload_adjust_reg_for_icode (&second_reload_reg,
					       third_reload_reg,
					       (enum insn_code) sri.icode))
		{
		  icode = (enum insn_code) sri.icode;
		  third_reload_reg = 0;
		}
	      else
		{
		  oldequiv = old;
		  real_oldequiv = real_old;
		}
	    }
	  else if (sri.icode != CODE_FOR_nothing)
	    /* We currently lack a way to express this in reloads.  */
	    gcc_unreachable ();
	  else
	    {
	      sri2.icode = CODE_FOR_nothing;
	      sri2.prev_sri = &sri;
	      new_t_class
		= (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
							     new_class, mode,
							     &sri);
	      if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
		{
		  if (reload_adjust_reg_for_temp (&second_reload_reg,
						  third_reload_reg,
						  new_class, mode))
		    {
		      third_reload_reg = 0;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
		{
		  rtx intermediate = second_reload_reg;

		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
						  new_class, mode)
		      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
						      ((enum insn_code)
						       sri2.icode)))
		    {
		      second_reload_reg = intermediate;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
		{
		  rtx intermediate = second_reload_reg;

		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
						  new_class, mode)
		      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
						      new_t_class, mode))
		    {
		      second_reload_reg = intermediate;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else
		{
		  /* This could be handled more intelligently too.  */
		  oldequiv = old;
		  real_oldequiv = real_old;
		}
	    }
	}

      /* If we still need a secondary reload register, check
	 to see if it is being used as a scratch or intermediate
	 register and generate code appropriately.  If we need
	 a scratch register, use REAL_OLDEQUIV since the form of
	 the insn may depend on the actual address if it is
	 a MEM.  */

      if (second_reload_reg)
	{
	  if (icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (!third_reload_reg);

	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
					  second_reload_reg));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need a scratch register to load the
		 intermediate register (a tertiary reload).  */
	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (second_reload_reg, real_oldequiv,
			       third_reload_reg)));
		}
	      else if (third_reload_reg)
		{
		  gen_reload (third_reload_reg, real_oldequiv,
			      rl->opnum,
			      rl->when_needed);
		  gen_reload (second_reload_reg, third_reload_reg,
			      rl->opnum,
			      rl->when_needed);
		}
	      else
		gen_reload (second_reload_reg, real_oldequiv,
			    rl->opnum,
			    rl->when_needed);

	      oldequiv = second_reload_reg;
	    }
	}
    }

  if (! special && ! rtx_equal_p (reloadreg, oldequiv))
    {
      rtx real_oldequiv = oldequiv;

      /* As in the secondary-reload handling above, a pseudo with a MEM
	 or constant equivalence (or a subreg of one), or an unusable
	 constant, cannot be used directly; fall back to rl->in, which
	 has the necessary substitutions registered.  */
      if ((REG_P (oldequiv)
	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
	   && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
	       || reg_equiv_constant (REGNO (oldequiv)) != 0))
	  || (GET_CODE (oldequiv) == SUBREG
	      && REG_P (SUBREG_REG (oldequiv))
	      && (REGNO (SUBREG_REG (oldequiv))
		  >= FIRST_PSEUDO_REGISTER)
	      && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
		  || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
	  || (CONSTANT_P (oldequiv)
	      && (targetm.preferred_reload_class (oldequiv,
						  REGNO_REG_CLASS (REGNO (reloadreg)))
		  == NO_REGS)))
	real_oldequiv = rl->in;
      gen_reload (reloadreg, real_oldequiv, rl->opnum,
		  rl->when_needed);
    }

  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  /* End this sequence.  */
  *where = get_insns ();
  end_sequence ();

  /* Update reload_override_in so that delete_address_reloads_1
     can see the actual register usage.  */
  if (oldequiv_reg)
    reload_override_in[j] = oldequiv;
}
7645 
/* Generate insns for the output reload RL, which is for the insn described
   by CHAIN and has the number J.  The insns are emitted into the sequence
   slot selected by RL->when_needed / RL->opnum, to be spliced after the
   insn by emit_reload_insns.  */
static void
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
			  int j)
{
  rtx reloadreg;
  rtx_insn *insn = chain->insn;
  /* Nonzero once the store to OLD has already been emitted through a
     secondary-reload pattern, so the generic copy at the end must be
     suppressed.  */
  int special = 0;
  rtx old = rl->out;
  machine_mode mode;
  rtx_insn *p;
  rtx rl_reg_rtx;

  /* RELOAD_OTHER reloads collect into their own fresh sequence; all
     others append to the per-operand output sequence.  */
  if (rl->when_needed == RELOAD_OTHER)
    start_sequence ();
  else
    push_to_sequence (output_reload_insns[rl->opnum]);

  rl_reg_rtx = reload_reg_rtx_for_output[j];
  mode = GET_MODE (rl_reg_rtx);

  reloadreg = rl_reg_rtx;

  /* If we need two reload regs, set RELOADREG to the intermediate
     one, since it will be stored into OLD.  We might need a secondary
     register only for an input reload, so check again here.  */

  if (rl->secondary_out_reload >= 0)
    {
      rtx real_old = old;
      int secondary_reload = rl->secondary_out_reload;
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;

      /* If OLD is a spilled pseudo with a known stack slot, store to
	 the memory equivalent rather than the pseudo itself.  */
      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_mem (REGNO (old)) != 0)
	real_old = reg_equiv_mem (REGNO (old));

      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
	{
	  rtx second_reloadreg = reloadreg;
	  reloadreg = rld[secondary_reload].reg_rtx;

	  /* See if RELOADREG is to be used as a scratch register
	     or as an intermediate register.  */
	  if (rl->secondary_out_icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (tertiary_reload < 0);

	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
			  (real_old, second_reloadreg, reloadreg)));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need both a scratch and intermediate reload
		 register.  */

	      enum insn_code tertiary_icode
		= rld[secondary_reload].secondary_out_icode;

	      /* We'd have to add more code for quaternary reloads.  */
	      gcc_assert (tertiary_reload < 0
			  || rld[tertiary_reload].secondary_out_reload < 0);

	      if (GET_MODE (reloadreg) != mode)
		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);

	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		  /* Copy primary reload reg to secondary reload reg.
		     (Note that these have been swapped above, then
		     secondary reload reg to OLD using our insn.)  */

		  /* If REAL_OLD is a paradoxical SUBREG, remove it
		     and try to put the opposite SUBREG on
		     RELOADREG.  */
		  strip_paradoxical_subreg (&real_old, &reloadreg);

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (real_old, reloadreg, third_reloadreg)));
		  special = 1;
		}

	      else
		{
		  /* Copy between the reload regs here and then to
		     OUT later.  */

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  if (tertiary_reload >= 0)
		    {
		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		      gen_reload (third_reloadreg, reloadreg,
				  rl->opnum, rl->when_needed);
		      reloadreg = third_reloadreg;
		    }
		}
	    }
	}
    }

  /* Output the last reload insn.  */
  if (! special)
    {
      rtx set;

      /* Don't output the last reload if OLD is not the dest of
	 INSN and is in the src and is clobbered by INSN.  */
      if (! flag_expensive_optimizations
	  || !REG_P (old)
	  || !(set = single_set (insn))
	  || rtx_equal_p (old, SET_DEST (set))
	  || !reg_mentioned_p (old, SET_SRC (set))
	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
	gen_reload (old, reloadreg, rl->opnum,
		    rl->when_needed);
    }

  /* Look at all insns we emitted, just to be safe.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);

	/* If this output reload doesn't come from a spill reg,
	   clear any memory of reloaded copies of the pseudo reg.
	   If this output reload comes from a spill reg,
	   reg_has_output_reload will make this do nothing.  */
	note_stores (pat, forget_old_reloads_1, NULL);

	if (reg_mentioned_p (rl_reg_rtx, pat))
	  {
	    rtx set = single_set (insn);
	    if (reload_spill_index[j] < 0
		&& set
		&& SET_SRC (set) == rl_reg_rtx)
	      {
		/* INSN copies the reload reg somewhere; record the reload
		   reg as the spill reg for this reload so it can be
		   inherited later.  */
		int src = REGNO (SET_SRC (set));

		reload_spill_index[j] = src;
		SET_HARD_REG_BIT (reg_is_output_reload, src);
		if (find_regno_note (insn, REG_DEAD, src))
		  SET_HARD_REG_BIT (reg_reloaded_died, src);
	      }
	    if (HARD_REGISTER_P (rl_reg_rtx))
	      {
		int s = rl->secondary_out_reload;
		set = single_set (p);
		/* If this reload copies only to the secondary reload
		   register, the secondary reload does the actual
		   store.  */
		if (s >= 0 && set == NULL_RTX)
		  /* We can't tell what function the secondary reload
		     has and where the actual store to the pseudo is
		     made; leave new_spill_reg_store alone.  */
		  ;
		else if (s >= 0
			 && SET_SRC (set) == rl_reg_rtx
			 && SET_DEST (set) == rld[s].reg_rtx)
		  {
		    /* Usually the next instruction will be the
		       secondary reload insn;  if we can confirm
		       that it is, setting new_spill_reg_store to
		       that insn will allow an extra optimization.  */
		    rtx s_reg = rld[s].reg_rtx;
		    rtx_insn *next = NEXT_INSN (p);
		    rld[s].out = rl->out;
		    rld[s].out_reg = rl->out_reg;
		    set = single_set (next);
		    if (set && SET_SRC (set) == s_reg
			&& reload_reg_rtx_reaches_end_p (s_reg, s))
		      {
			SET_HARD_REG_BIT (reg_is_output_reload,
					  REGNO (s_reg));
			new_spill_reg_store[REGNO (s_reg)] = next;
		      }
		  }
		else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
	      }
	  }
      }

  /* Stash the finished sequence where emit_reload_insns will pick it up
     and splice it after INSN.  */
  if (rl->when_needed == RELOAD_OTHER)
    {
      emit_insn (other_output_reload_insns[rl->opnum]);
      other_output_reload_insns[rl->opnum] = get_insns ();
    }
  else
    output_reload_insns[rl->opnum] = get_insns ();

  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  end_sequence ();
}
7851 
/* Do input reloading for reload RL, which is for the insn described by CHAIN
   and has the number J.  Chooses the mode to reload in, records the reload
   register in reload_reg_rtx_for_input[J], emits the input-reload insns if
   needed, and may delete a now-redundant earlier output reload.  */
static void
do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx_insn *insn = chain->insn;
  /* Prefer the original register over a MEM that replaced it, so the
     mode chosen below reflects the operand rather than its stack slot.  */
  rtx old = (rl->in && MEM_P (rl->in)
	     ? rl->in_reg : rl->in);
  rtx reg_rtx = rl->reg_rtx;

  if (old && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 This is very tricky because we have three to choose from.
	 There is the mode the insn operand wants (rl->inmode).
	 There is the mode of the reload register RELOADREG.
	 There is the intrinsic mode of the operand, which we could find
	 by stripping some SUBREGs.
	 It turns out that RELOADREG's mode is irrelevant:
	 we can change that arbitrarily.

	 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
	 then the reload reg may not support QImode moves, so use SImode.
	 If foo is in memory due to spilling a pseudo reg, this is safe,
	 because the QImode value is in the least significant part of a
	 slot big enough for a SImode.  If foo is some other sort of
	 memory reference, then it is impossible to reload this case,
	 so previous passes had better make sure this never happens.

	 Then consider a one-word union which has SImode and one of its
	 members is a float, being fetched as (SUBREG:SF union:SI).
	 We must fetch that as SFmode because we could be loading into
	 a float-only register.  In this case OLD's mode is correct.

	 Consider an immediate integer: it has VOIDmode.  Here we need
	 to get a mode from something else.

	 In some cases, there is a fourth mode, the operand's
	 containing mode.  If the insn specifies a containing mode for
	 this operand, it overrides all others.

	 I am not sure whether the algorithm here is always right,
	 but it does the right things in those cases.  */

      mode = GET_MODE (old);
      if (mode == VOIDmode)
	mode = rl->inmode;

      /* We cannot use gen_lowpart_common since it can do the wrong thing
	 when REG_RTX has a multi-word mode.  Note that REG_RTX must
	 always be a REG here.  */
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  /* Record the (possibly mode-adjusted) reload register for this input
     reload; emit_reload_insns reads this later.  */
  reload_reg_rtx_for_input[j] = reg_rtx;

  if (old != 0
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
      && ! rtx_equal_p (reg_rtx, old)
      && reg_rtx != 0)
    emit_input_reload_insns (chain, rld + j, old, j);

  /* When inheriting a wider reload, we have a MEM in rl->in,
     e.g. inheriting a SImode output reload for
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
  if (optimize && reload_inherited[j] && rl->in
      && MEM_P (rl->in)
      && MEM_P (rl->in_reg)
      && reload_spill_index[j] >= 0
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];

  /* If we are reloading a register that was recently stored in with an
     output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize
      && (reload_inherited[j] || reload_override_in[j])
      && reg_rtx
      && REG_P (reg_rtx)
      && spill_reg_store[REGNO (reg_rtx)] != 0
#if 0
      /* There doesn't seem to be any reason to restrict this to pseudos
	 and doing so loses in the case where we are copying from a
	 register of the wrong class.  */
      && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
#endif
      /* The insn might have already some references to stackslots
	 replaced by MEMs, while reload_out_reg still names the
	 original pseudo.  */
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
    delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
}
7950 
/* Do output reloading for reload RL, which is for the insn described by
   CHAIN and has the number J.  Chooses the mode to reload in, records the
   reload register in reload_reg_rtx_for_output[J], tries to delete a
   previous store made redundant by this one, and finally emits the
   output-reload insns unless the value provably dies here.
   ??? At some point we need to support handling output reloads of
   JUMP_INSNs or insns that set cc0.  */
static void
do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx note, old;
  rtx_insn *insn = chain->insn;
  /* If this is an output reload that stores something that is
     not loaded in this same reload, see if we can eliminate a previous
     store.  */
  rtx pseudo = rl->out_reg;
  rtx reg_rtx = rl->reg_rtx;

  if (rl->out && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 See comments above (for input reloading).  */
      mode = GET_MODE (rl->out);
      if (mode == VOIDmode)
	{
	  /* VOIDmode should never happen for an output.  */
	  if (asm_noperands (PATTERN (insn)) < 0)
	    /* It's the compiler's fault.  */
	    fatal_insn ("VOIDmode on an output", insn);
	  error_for_asm (insn, "output operand is constant in %<asm%>");
	  /* Prevent crash--use something we know is valid.  */
	  mode = word_mode;
	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
	}
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  /* Record the (possibly mode-adjusted) reload register for this output
     reload; emit_output_reload_insns and emit_reload_insns read this.  */
  reload_reg_rtx_for_output[j] = reg_rtx;

  if (pseudo
      && optimize
      && REG_P (pseudo)
      && ! rtx_equal_p (rl->in_reg, pseudo)
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
      && reg_last_reload_reg[REGNO (pseudo)])
    {
      int pseudo_no = REGNO (pseudo);
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);

      /* We don't need to test full validity of last_regno for
	 inherit here; we only want to know if the store actually
	 matches the pseudo.  */
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
	  && reg_reloaded_contents[last_regno] == pseudo_no
	  && spill_reg_store[last_regno]
	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
	delete_output_reload (insn, j, last_regno, reg_rtx);
    }

  /* No copy is needed when there is nothing to store, no reload
     register, or the destination already is the reload register.  */
  old = rl->out_reg;
  if (old == 0
      || reg_rtx == 0
      || rtx_equal_p (old, reg_rtx))
    return;

  /* An output operand that dies right away does need a reload,
     but need not be copied from it.  Show the new location in the
     REG_UNUSED note.  */
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
    {
      XEXP (note, 0) = reg_rtx;
      return;
    }
  /* Likewise for a SUBREG of an operand that dies.  */
  else if (GET_CODE (old) == SUBREG
	   && REG_P (SUBREG_REG (old))
	   && 0 != (note = find_reg_note (insn, REG_UNUSED,
					  SUBREG_REG (old))))
    {
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
      return;
    }
  else if (GET_CODE (old) == SCRATCH)
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
       but we don't want to make an output reload.  */
    return;

  /* If is a JUMP_INSN, we can't support output reloads yet.  */
  gcc_assert (NONJUMP_INSN_P (insn));

  emit_output_reload_insns (chain, rld + j, j);
}
8043 
8044 /* A reload copies values of MODE from register SRC to register DEST.
8045    Return true if it can be treated for inheritance purposes like a
8046    group of reloads, each one reloading a single hard register.  The
8047    caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8048    occupy the same number of hard registers.  */
8049 
8050 static bool
8051 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8052 		     int src ATTRIBUTE_UNUSED,
8053 		     machine_mode mode ATTRIBUTE_UNUSED)
8054 {
8055 #ifdef CANNOT_CHANGE_MODE_CLASS
8056   return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8057 	  && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8058 #else
8059   return true;
8060 #endif
8061 }
8062 
/* Output insns to reload values in and out of the chosen reload regs.  */

static void
emit_reload_insns (struct insn_chain *chain)
{
  rtx_insn *insn = chain->insn;

  int j;

  CLEAR_HARD_REG_SET (reg_reloaded_died);

  /* Reset the per-operand sequence slots that do_input_reload /
     do_output_reload will fill in.  */
  for (j = 0; j < reload_n_operands; j++)
    input_reload_insns[j] = input_address_reload_insns[j]
      = inpaddr_address_reload_insns[j]
      = output_reload_insns[j] = output_address_reload_insns[j]
      = outaddr_address_reload_insns[j]
      = other_output_reload_insns[j] = 0;
  other_input_address_reload_insns = 0;
  other_input_reload_insns = 0;
  operand_reload_insns = 0;
  other_operand_reload_insns = 0;

  /* Dump reloads into the dump file.  */
  if (dump_file)
    {
      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
      debug_reload_to_stream (dump_file);
    }

  /* Forget any store insns previously recorded for this insn's reload
     registers; emit_output_reload_insns refills new_spill_reg_store.  */
  for (j = 0; j < n_reloads; j++)
    if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
      {
	unsigned int i;

	for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
	  new_spill_reg_store[i] = 0;
      }

  /* Now output the instructions to copy the data into and out of the
     reload registers.  Do these in the order that the reloads were reported,
     since reloads of base and index registers precede reloads of operands
     and the operands may need the base and index registers reloaded.  */

  for (j = 0; j < n_reloads; j++)
    {
      do_input_reload (chain, rld + j, j);
      do_output_reload (chain, rld + j, j);
    }

  /* Now write all the insns we made for reloads in the order expected by
     the allocation functions.  Prior to the insn being reloaded, we write
     the following reloads:

     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.

     RELOAD_OTHER reloads.

     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
     RELOAD_FOR_INPUT reload for the operand.

     RELOAD_FOR_OPADDR_ADDRS reloads.

     RELOAD_FOR_OPERAND_ADDRESS reloads.

     After the insn being reloaded, we write the following:

     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
     reloads for the operand.  The RELOAD_OTHER output reloads are
     output in descending order by reload number.  */

  emit_insn_before (other_input_address_reload_insns, insn);
  emit_insn_before (other_input_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insn_before (inpaddr_address_reload_insns[j], insn);
      emit_insn_before (input_address_reload_insns[j], insn);
      emit_insn_before (input_reload_insns[j], insn);
    }

  emit_insn_before (other_operand_reload_insns, insn);
  emit_insn_before (operand_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      rtx_insn *x = emit_insn_after (outaddr_address_reload_insns[j], insn);
      x = emit_insn_after (output_address_reload_insns[j], x);
      x = emit_insn_after (output_reload_insns[j], x);
      emit_insn_after (other_output_reload_insns[j], x);
    }

  /* For all the spill regs newly reloaded in this instruction,
     record what they were reloaded from, so subsequent instructions
     can inherit the reloads.

     Update spill_reg_store for the reloads of this insn.
     Copy the elements that were updated in the loop above.  */

  for (j = 0; j < n_reloads; j++)
    {
      int r = reload_order[j];
      int i = reload_spill_index[r];

      /* If this is a non-inherited input reload from a pseudo, we must
	 clear any memory of a previous store to the same pseudo.  Only do
	 something if there will not be an output reload for the pseudo
	 being reloaded.  */
      if (rld[r].in_reg != 0
	  && ! (reload_inherited[r] || reload_override_in[r]))
	{
	  rtx reg = rld[r].in_reg;

	  if (GET_CODE (reg) == SUBREG)
	    reg = SUBREG_REG (reg);

	  if (REG_P (reg)
	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	      && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
	    {
	      int nregno = REGNO (reg);

	      if (reg_last_reload_reg[nregno])
		{
		  int last_regno = REGNO (reg_last_reload_reg[nregno]);

		  if (reg_reloaded_contents[last_regno] == nregno)
		    spill_reg_store[last_regno] = 0;
		}
	    }
	}

      /* I is nonneg if this reload used a register.
	 If rld[r].reg_rtx is 0, this is an optional reload
	 that we opted to ignore.  */

      if (i >= 0 && rld[r].reg_rtx != 0)
	{
	  int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
	  int k;

	  /* For a multi register reload, we need to check if all or part
	     of the value lives to the end.  */
	  for (k = 0; k < nr; k++)
	    if (reload_reg_reaches_end_p (i + k, r))
	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);

	  /* Maybe the spill reg contains a copy of reload_out.  */
	  if (rld[r].out != 0
	      && (REG_P (rld[r].out)
		  || (rld[r].out_reg
		      ? REG_P (rld[r].out_reg)
		      /* The reload value is an auto-modification of
			 some kind.  For PRE_INC, POST_INC, PRE_DEC
			 and POST_DEC, we record an equivalence
			 between the reload register and the operand
			 on the optimistic assumption that we can make
			 the equivalence hold.  reload_as_needed must
			 then either make it hold or invalidate the
			 equivalence.

			 PRE_MODIFY and POST_MODIFY addresses are reloaded
			 somewhat differently, and allowing them here leads
			 to problems.  */
		      : (GET_CODE (rld[r].out) != POST_MODIFY
			 && GET_CODE (rld[r].out) != PRE_MODIFY))))
	    {
	      rtx reg;

	      reg = reload_reg_rtx_for_output[r];
	      if (reload_reg_rtx_reaches_end_p (reg, r))
		{
		  machine_mode mode = GET_MODE (reg);
		  int regno = REGNO (reg);
		  int nregs = hard_regno_nregs[regno][mode];
		  rtx out = (REG_P (rld[r].out)
			     ? rld[r].out
			     : rld[r].out_reg
			     ? rld[r].out_reg
/* AUTO_INC */		     : XEXP (rld[r].in_reg, 0));
		  int out_regno = REGNO (out);
		  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
				   : hard_regno_nregs[out_regno][mode]);
		  bool piecemeal;

		  spill_reg_store[regno] = new_spill_reg_store[regno];
		  spill_reg_stored_to[regno] = out;
		  reg_last_reload_reg[out_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
			       && nregs == out_nregs
			       && inherit_piecemeal_p (out_regno, regno, mode));

		  /* If OUT_REGNO is a hard register, it may occupy more than
		     one register.  If it does, say what is in the
		     rest of the registers assuming that both registers
		     agree on how many words the object takes.  If not,
		     invalidate the subsequent registers.  */

		  if (HARD_REGISTER_NUM_P (out_regno))
		    for (k = 1; k < out_nregs; k++)
		      reg_last_reload_reg[out_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Now do the inverse operation.  */
		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
			   ? out_regno
			   : out_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    regno + k);
		    }
		}
	    }
	  /* Maybe the spill reg contains a copy of reload_in.  Only do
	     something if there will not be an output reload for
	     the register being reloaded.  */
	  else if (rld[r].out_reg == 0
		   && rld[r].in != 0
		   && ((REG_P (rld[r].in)
			&& !HARD_REGISTER_P (rld[r].in)
			&& !REGNO_REG_SET_P (&reg_has_output_reload,
					     REGNO (rld[r].in)))
		       || (REG_P (rld[r].in_reg)
			   && !REGNO_REG_SET_P (&reg_has_output_reload,
						REGNO (rld[r].in_reg))))
		   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
	    {
	      rtx reg;

	      reg = reload_reg_rtx_for_input[r];
	      if (reload_reg_rtx_reaches_end_p (reg, r))
		{
		  machine_mode mode;
		  int regno;
		  int nregs;
		  int in_regno;
		  int in_nregs;
		  rtx in;
		  bool piecemeal;

		  mode = GET_MODE (reg);
		  regno = REGNO (reg);
		  nregs = hard_regno_nregs[regno][mode];
		  /* Prefer the pseudo as the inherited value; fall back to
		     in_reg, and for auto-inc addresses to the register
		     inside the address.  */
		  if (REG_P (rld[r].in)
		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
		    in = rld[r].in;
		  else if (REG_P (rld[r].in_reg))
		    in = rld[r].in_reg;
		  else
		    in = XEXP (rld[r].in_reg, 0);
		  in_regno = REGNO (in);

		  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
			      : hard_regno_nregs[in_regno][mode]);

		  reg_last_reload_reg[in_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
			       && nregs == in_nregs
			       && inherit_piecemeal_p (regno, in_regno, mode));

		  if (HARD_REGISTER_NUM_P (in_regno))
		    for (k = 1; k < in_nregs; k++)
		      reg_last_reload_reg[in_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Unless we inherited this reload, show we haven't
		     recently done a store.
		     Previous stores of inherited auto_inc expressions
		     also have to be discarded.  */
		  if (! reload_inherited[r]
		      || (rld[r].out && ! rld[r].out_reg))
		    spill_reg_store[regno] = 0;

		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
			   ? in_regno
			   : in_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    regno + k);
		    }
		}
	    }
	}

      /* The following if-statement was #if 0'd in 1.34 (or before...).
	 It's reenabled in 1.35 because supposedly nothing else
	 deals with this problem.  */

      /* If a register gets output-reloaded from a non-spill register,
	 that invalidates any previous reloaded copy of it.
	 But forget_old_reloads_1 won't get to see it, because
	 it thinks only about the original insn.  So invalidate it here.
	 Also do the same thing for RELOAD_OTHER constraints where the
	 output is discarded.  */
      if (i < 0
	  && ((rld[r].out != 0
	       && (REG_P (rld[r].out)
		   || (MEM_P (rld[r].out)
		       && REG_P (rld[r].out_reg))))
	      || (rld[r].out == 0 && rld[r].out_reg
		  && REG_P (rld[r].out_reg))))
	{
	  rtx out = ((rld[r].out && REG_P (rld[r].out))
		     ? rld[r].out : rld[r].out_reg);
	  int out_regno = REGNO (out);
	  machine_mode mode = GET_MODE (out);

	  /* REG_RTX is now set or clobbered by the main instruction.
	     As the comment above explains, forget_old_reloads_1 only
	     sees the original instruction, and there is no guarantee
	     that the original instruction also clobbered REG_RTX.
	     For example, if find_reloads sees that the input side of
	     a matched operand pair dies in this instruction, it may
	     use the input register as the reload register.

	     Calling forget_old_reloads_1 is a waste of effort if
	     REG_RTX is also the output register.

	     If we know that REG_RTX holds the value of a pseudo
	     register, the code after the call will record that fact.  */
	  if (rld[r].reg_rtx && rld[r].reg_rtx != out)
	    forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);

	  if (!HARD_REGISTER_NUM_P (out_regno))
	    {
	      rtx src_reg;
	      rtx_insn *store_insn = NULL;

	      reg_last_reload_reg[out_regno] = 0;

	      /* If we can find a hard register that is stored, record
		 the storing insn so that we may delete this insn with
		 delete_output_reload.  */
	      src_reg = reload_reg_rtx_for_output[r];

	      if (src_reg)
		{
		  if (reload_reg_rtx_reaches_end_p (src_reg, r))
		    store_insn = new_spill_reg_store[REGNO (src_reg)];
		  else
		    src_reg = NULL_RTX;
		}
	      else
		{
		  /* If this is an optional reload, try to find the
		     source reg from an input reload.  */
		  rtx set = single_set (insn);
		  if (set && SET_DEST (set) == rld[r].out)
		    {
		      int k;

		      src_reg = SET_SRC (set);
		      store_insn = insn;
		      for (k = 0; k < n_reloads; k++)
			{
			  if (rld[k].in == src_reg)
			    {
			      src_reg = reload_reg_rtx_for_input[k];
			      break;
			    }
			}
		    }
		}
	      if (src_reg && REG_P (src_reg)
		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
		{
		  int src_regno, src_nregs, k;
		  rtx note;

		  gcc_assert (GET_MODE (src_reg) == mode);
		  src_regno = REGNO (src_reg);
		  src_nregs = hard_regno_nregs[src_regno][mode];
		  /* The place where to find a death note varies with
		     PRESERVE_DEATH_INFO_REGNO_P .  The condition is not
		     necessarily checked exactly in the code that moves
		     notes, so just check both locations.  */
		  note = find_regno_note (insn, REG_DEAD, src_regno);
		  if (! note && store_insn)
		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
		  for (k = 0; k < src_nregs; k++)
		    {
		      spill_reg_store[src_regno + k] = store_insn;
		      spill_reg_stored_to[src_regno + k] = out;
		      reg_reloaded_contents[src_regno + k] = out_regno;
		      reg_reloaded_insn[src_regno + k] = store_insn;
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
		      if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
							  mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  src_regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    src_regno + k);
		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
		      /* NOTE(review): the next two statements index by
			 src_regno, not src_regno + k, unlike the rest of
			 this loop — verify whether that is intentional.  */
		      if (note)
			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
		    }
		  reg_last_reload_reg[out_regno] = src_reg;
		  /* We have to set reg_has_output_reload here, or else
		     forget_old_reloads_1 will clear reg_last_reload_reg
		     right away.  */
		  SET_REGNO_REG_SET (&reg_has_output_reload,
				     out_regno);
		}
	    }
	  else
	    {
	      int k, out_nregs = hard_regno_nregs[out_regno][mode];

	      for (k = 0; k < out_nregs; k++)
		reg_last_reload_reg[out_regno + k] = 0;
	    }
	}
    }
  IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
}
8504 
8505 /* Go through the motions to emit INSN and test if it is strictly valid.
8506    Return the emitted insn if valid, else return NULL.  */
8507 
8508 static rtx_insn *
8509 emit_insn_if_valid_for_reload (rtx pat)
8510 {
8511   rtx_insn *last = get_last_insn ();
8512   int code;
8513 
8514   rtx_insn *insn = emit_insn (pat);
8515   code = recog_memoized (insn);
8516 
8517   if (code >= 0)
8518     {
8519       extract_insn (insn);
8520       /* We want constrain operands to treat this insn strictly in its
8521 	 validity determination, i.e., the way it would after reload has
8522 	 completed.  */
8523       if (constrain_operands (1, get_enabled_alternatives (insn)))
8524 	return insn;
8525     }
8526 
8527   delete_insns_since (last);
8528   return NULL;
8529 }
8530 
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx_insn *
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
{
  /* Checkpoint: everything emitted after LAST belongs to this reload.  */
  rtx_insn *last = get_last_insn ();
  rtx_insn *tem;
#ifdef SECONDARY_MEMORY_NEEDED
  rtx tem1, tem2;
#endif

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (!strip_paradoxical_subreg (&in, &out))
    strip_paradoxical_subreg (&out, &in);

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Or we can be asked to reload an unary operand that was a fragment of
     an addressing mode, into a register.  If it isn't recognized as-is,
     we try making the unop operand and the reload-register the same:
     (set reg:X (unop:X expr:Y))
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem;
      rtx_insn *insn;
      enum insn_code code;

      /* Substitute any pending replacements into the two addends before
	 forming the add, so the emitted insn sees the final operands.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      /* First attempt: one three-operand add insn.  */
      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
      if (insn)
	return insn;

      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload OP1.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      code = optab_handler (add_optab, GET_MODE (out));

      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && !insn_operand_matches (code, 2, op1)))
	tem = op0, op0 = op1, op1 = tem;

      /* Copy the preferred addend into OUT, then add the other one.  */
      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
      if (insn)
	{
	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	  set_dst_reg_note (insn, REG_EQUIV, in, out);
	  return insn;
	}

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      gcc_assert (!reg_overlap_mentioned_p (out, op0));
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      set_dst_reg_note (insn, REG_EQUIV, in, out);
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
	    (REG_P (tem1) && REG_P (tem2)))
	   && REGNO (tem1) < FIRST_PSEUDO_REGISTER
	   && REGNO (tem2) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem1)),
				       REGNO_REG_CLASS (REGNO (tem2)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));

      /* Route the value through the secondary memory: IN -> LOC -> OUT.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif
  else if (REG_P (out) && UNARY_P (in))
    {
      rtx op1;
      rtx out_moded;
      rtx_insn *set;

      op1 = find_replacement (&XEXP (in, 0));
      if (op1 != XEXP (in, 0))
	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);

      /* First, try a plain SET.  */
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
      if (set)
	return set;

      /* If that failed, move the inner operand to the reload
	 register, and try the same unop with the inner expression
	 replaced with the reload register.  */

      if (GET_MODE (op1) != GET_MODE (out))
	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out))
	;
      else
	out_moded = out;

      gen_reload (out_moded, op1, opnum, type);

      rtx temp = gen_rtx_SET (out, gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
						  out_moded));
      rtx_insn *insn = emit_insn_if_valid_for_reload (temp);
      if (insn)
	{
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return insn;
	}

      /* Both strategies failed; this reload cannot be performed.  */
      fatal_insn ("failure trying to reload:", set);
    }
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    {
      tem = emit_insn (gen_move_insn (out, in));
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
      mark_jump_label (in, tem, 0);
    }

  else if (targetm.have_reload_load_address ())
    emit_insn (targetm.gen_reload_load_address (out, in));

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
8763 
/* Delete a previously made output-reload whose result we now believe
   is not needed.  First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.
   NEW_RELOAD_REG is reload register that reload J is using for REG.  */

static void
delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
		      rtx new_reload_reg)
{
  rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;
  int n_inherited = 0;
  rtx substed;
  unsigned regno;
  int nregs;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (output_reload_insn->deleted ())
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc (REGNO (reg));

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  Count, in N_INHERITED, the occurrences
     covered by inheritance/override (or by reload J itself); any other
     reload reading REG means we must keep the store.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;

      if (AUTO_INC_DEC && rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);

      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    return;
	}
    }
  /* Count every appearance of REG in INSN, including in the call usage
     list and in the eliminated form of its memory equivalent.  */
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  regno = REGNO (reg);
  if (regno >= FIRST_PSEUDO_REGISTER)
    nregs = 1;
  else
    nregs = hard_regno_nregs[regno][GET_MODE (reg)];

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx_insn *i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      /* The pseudo still has other uses; only drop this one store.  */
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
8945 
8946 /* We are going to delete DEAD_INSN.  Recursively delete loads of
8947    reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8948    CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
8949 static void
8950 delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
8951 {
8952   rtx set = single_set (dead_insn);
8953   rtx set2, dst;
8954   rtx_insn *prev, *next;
8955   if (set)
8956     {
8957       rtx dst = SET_DEST (set);
8958       if (MEM_P (dst))
8959 	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8960     }
8961   /* If we deleted the store from a reloaded post_{in,de}c expression,
8962      we can delete the matching adds.  */
8963   prev = PREV_INSN (dead_insn);
8964   next = NEXT_INSN (dead_insn);
8965   if (! prev || ! next)
8966     return;
8967   set = single_set (next);
8968   set2 = single_set (prev);
8969   if (! set || ! set2
8970       || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8971       || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8972       || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8973     return;
8974   dst = SET_DEST (set);
8975   if (! rtx_equal_p (dst, SET_DEST (set2))
8976       || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8977       || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8978       || (INTVAL (XEXP (SET_SRC (set), 1))
8979 	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
8980     return;
8981   delete_related_insns (prev);
8982   delete_related_insns (next);
8983 }
8984 
/* Subfunction of delete_address_reloads: process registers found in X.
   X is part of DEAD_INSN's (address) expression; if X is a spill register
   whose setting insn is now unneeded, delete that insn (recursively
   deleting the address reloads it used in turn).  */
static void
delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
{
  rtx_insn *prev, *i2;
  rtx set, dst;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* For a non-REG rtx, recurse into every sub-expression.  */
  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only spill (reload) registers are candidates for deletion.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* The setter must be an insn generated by this reload pass.  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* PREV is dead: recursively clean up its own address reloads, mark the
     reload register's contents unknown, and delete the insn.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
9093 
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;
  rtx_insn *add_insn;
  int code;
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* Determine the increment: for {PRE,POST}_MODIFY it is the second
     operand of the embedded PLUS; otherwise it is INC_AMOUNT, negated
     for decrements.  */
  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = GEN_INT (inc_amount);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      /* Checkpoint so a failed attempt can be deleted cleanly.  */
      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  if (constrain_operands (1, get_enabled_alternatives (add_insn)))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));
	      return;
	    }
	}
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      /* NOTE(review): the condition tests IN but the move copies REAL_IN;
	 presumably equivalent here since IN == VALUE implies
	 REAL_IN == INCLOC -- confirm against upstream history.  */
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      if (CONST_INT_P (inc))
	emit_insn (gen_add2_insn (reloadreg,
				  gen_int_mode (-INTVAL (inc),
						GET_MODE (reloadreg))));
      else
	emit_insn (gen_sub2_insn (reloadreg, inc));
    }
}
9202 
9203 static void
9204 add_auto_inc_notes (rtx_insn *insn, rtx x)
9205 {
9206   enum rtx_code code = GET_CODE (x);
9207   const char *fmt;
9208   int i, j;
9209 
9210   if (code == MEM && auto_inc_p (XEXP (x, 0)))
9211     {
9212       add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9213       return;
9214     }
9215 
9216   /* Scan all the operand sub-expressions.  */
9217   fmt = GET_RTX_FORMAT (code);
9218   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9219     {
9220       if (fmt[i] == 'e')
9221 	add_auto_inc_notes (insn, XEXP (x, i));
9222       else if (fmt[i] == 'E')
9223 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9224 	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
9225     }
9226 }
9227