xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/reload1.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2    Copyright (C) 1987-2015 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 
25 #include "machmode.h"
26 #include "hard-reg-set.h"
27 #include "rtl-error.h"
28 #include "tm_p.h"
29 #include "obstack.h"
30 #include "insn-config.h"
31 #include "ggc.h"
32 #include "flags.h"
33 #include "hashtab.h"
34 #include "hash-set.h"
35 #include "vec.h"
36 #include "input.h"
37 #include "function.h"
38 #include "symtab.h"
39 #include "rtl.h"
40 #include "statistics.h"
41 #include "double-int.h"
42 #include "real.h"
43 #include "fixed-value.h"
44 #include "alias.h"
45 #include "wide-int.h"
46 #include "inchash.h"
47 #include "tree.h"
48 #include "expmed.h"
49 #include "dojump.h"
50 #include "explow.h"
51 #include "calls.h"
52 #include "emit-rtl.h"
53 #include "varasm.h"
54 #include "stmt.h"
55 #include "expr.h"
56 #include "insn-codes.h"
57 #include "optabs.h"
58 #include "regs.h"
59 #include "addresses.h"
60 #include "predict.h"
61 #include "dominance.h"
62 #include "cfg.h"
63 #include "cfgrtl.h"
64 #include "cfgbuild.h"
65 #include "basic-block.h"
66 #include "df.h"
67 #include "reload.h"
68 #include "recog.h"
69 #include "except.h"
70 #include "ira.h"
71 #include "target.h"
72 #include "dumpfile.h"
73 #include "rtl-iter.h"
74 
75 /* This file contains the reload pass of the compiler, which is
76    run after register allocation has been done.  It checks that
77    each insn is valid (operands required to be in registers really
78    are in registers of the proper class) and fixes up invalid ones
79    by copying values temporarily into registers for the insns
80    that need them.
81 
82    The results of register allocation are described by the vector
83    reg_renumber; the insns still contain pseudo regs, but reg_renumber
84    can be used to find which hard reg, if any, a pseudo reg is in.
85 
86    The technique we always use is to free up a few hard regs that are
87    called ``reload regs'', and for each place where a pseudo reg
88    must be in a hard reg, copy it temporarily into one of the reload regs.
89 
90    Reload regs are allocated locally for every instruction that needs
91    reloads.  When there are pseudos which are allocated to a register that
92    has been chosen as a reload reg, such pseudos must be ``spilled''.
93    This means that they go to other hard regs, or to stack slots if no other
94    available hard regs can be found.  Spilling can invalidate more
95    insns, requiring additional need for reloads, so we must keep checking
96    until the process stabilizes.
97 
98    For machines with different classes of registers, we must keep track
99    of the register class needed for each reload, and make sure that
100    we allocate enough reload registers of each class.
101 
102    The file reload.c contains the code that checks one insn for
103    validity and reports the reloads that it needs.  This file
104    is in charge of scanning the entire rtl code, accumulating the
105    reload needs, spilling, assigning reload registers to use for
106    fixing up each insn, and generating the new insns to copy values
107    into the reload registers.  */
108 
109 struct target_reload default_target_reload;
110 #if SWITCHABLE_TARGET
111 struct target_reload *this_target_reload = &default_target_reload;
112 #endif
113 
114 #define spill_indirect_levels			\
115   (this_target_reload->x_spill_indirect_levels)
116 
117 /* During reload_as_needed, element N contains a REG rtx for the hard reg
118    into which reg N has been reloaded (perhaps for a previous insn).  */
119 static rtx *reg_last_reload_reg;
120 
121 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
122    for an output reload that stores into reg N.  */
123 static regset_head reg_has_output_reload;
124 
125 /* Indicates which hard regs are reload-registers for an output reload
126    in the current insn.  */
127 static HARD_REG_SET reg_is_output_reload;
128 
129 /* Widest width in which each pseudo reg is referred to (via subreg).  */
130 static unsigned int *reg_max_ref_width;
131 
132 /* Vector to remember old contents of reg_renumber before spilling.  */
133 static short *reg_old_renumber;
134 
135 /* During reload_as_needed, element N contains the last pseudo regno reloaded
136    into hard register N.  If that pseudo reg occupied more than one register,
137    reg_reloaded_contents points to that pseudo for each spill register in
138    use; all of these must remain set for an inheritance to occur.  */
139 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
140 
141 /* During reload_as_needed, element N contains the insn for which
142    hard register N was last used.   Its contents are significant only
143    when reg_reloaded_valid is set for this register.  */
144 static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
145 
146 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
147 static HARD_REG_SET reg_reloaded_valid;
148 /* Indicate if the register was dead at the end of the reload.
149    This is only valid if reg_reloaded_contents is set and valid.  */
150 static HARD_REG_SET reg_reloaded_dead;
151 
152 /* Indicate whether the register's current value is one that is not
153    safe to retain across a call, even for registers that are normally
154    call-saved.  This is only meaningful for members of reg_reloaded_valid.  */
155 static HARD_REG_SET reg_reloaded_call_part_clobbered;
156 
157 /* Number of spill-regs so far; number of valid elements of spill_regs.  */
158 static int n_spills;
159 
160 /* In parallel with spill_regs, contains REG rtx's for those regs.
161    Holds the last rtx used for any given reg, or 0 if it has never
162    been used for spilling yet.  This rtx is reused, provided it has
163    the proper mode.  */
164 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
165 
166 /* In parallel with spill_regs, contains nonzero for a spill reg
167    that was stored after the last time it was used.
168    The precise value is the insn generated to do the store.  */
169 static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
170 
171 /* This is the register that was stored with spill_reg_store.  This is a
172    copy of reload_out / reload_out_reg when the value was stored; if
173    reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
174 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
175 
176 /* This table is the inverse mapping of spill_regs:
177    indexed by hard reg number,
178    it contains the position of that reg in spill_regs,
179    or -1 for something that is not in spill_regs.
180 
181    ?!?  This is no longer accurate.  */
182 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
183 
184 /* This reg set indicates registers that can't be used as spill registers for
185    the currently processed insn.  These are the hard registers which are live
186    during the insn, but not allocated to pseudos, as well as fixed
187    registers.  */
188 static HARD_REG_SET bad_spill_regs;
189 
190 /* These are the hard registers that can't be used as spill register for any
191    insn.  This includes registers used for user variables and registers that
192    we can't eliminate.  A register that appears in this set also can't be used
193    to retry register allocation.  */
194 static HARD_REG_SET bad_spill_regs_global;
195 
196 /* Describes order of use of registers for reloading
197    of spilled pseudo-registers.  `n_spills' is the number of
198    elements that are actually valid; new ones are added at the end.
199 
200    Both spill_regs and spill_reg_order are used on two occasions:
201    once during find_reload_regs, where they keep track of the spill registers
202    for a single insn, but also during reload_as_needed where they show all
203    the registers ever used by reload.  For the latter case, the information
204    is calculated during finish_spills.  */
205 static short spill_regs[FIRST_PSEUDO_REGISTER];
206 
207 /* This vector of reg sets indicates, for each pseudo, which hard registers
208    may not be used for retrying global allocation because the register was
209    formerly spilled from one of them.  If we allowed reallocating a pseudo to
210    a register that it was already allocated to, reload might not
211    terminate.  */
212 static HARD_REG_SET *pseudo_previous_regs;
213 
214 /* This vector of reg sets indicates, for each pseudo, which hard
215    registers may not be used for retrying global allocation because they
216    are used as spill registers during one of the insns in which the
217    pseudo is live.  */
218 static HARD_REG_SET *pseudo_forbidden_regs;
219 
220 /* All hard regs that have been used as spill registers for any insn are
221    marked in this set.  */
222 static HARD_REG_SET used_spill_regs;
223 
224 /* Index of last register assigned as a spill register.  We allocate in
225    a round-robin fashion.  */
226 static int last_spill_reg;
227 
228 /* Record the stack slot for each spilled hard register.  */
229 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
230 
231 /* Width allocated so far for that stack slot.  */
232 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
233 
234 /* Record which pseudos needed to be spilled.  */
235 static regset_head spilled_pseudos;
236 
237 /* Record which pseudos changed their allocation in finish_spills.  */
238 static regset_head changed_allocation_pseudos;
239 
240 /* Used for communication between order_regs_for_reload and count_pseudo.
241    Used to avoid counting one pseudo twice.  */
242 static regset_head pseudos_counted;
243 
244 /* First uid used by insns created by reload in this function.
245    Used in find_equiv_reg.  */
246 int reload_first_uid;
247 
248 /* Flag set by local-alloc or global-alloc if anything is live in
249    a call-clobbered reg across calls.  */
250 int caller_save_needed;
251 
252 /* Set to 1 while reload_as_needed is operating.
253    Required by some machines to handle any generated moves differently.  */
254 int reload_in_progress = 0;
255 
256 /* This obstack is used for allocation of rtl during register elimination.
257    The allocated storage can be freed once find_reloads has processed the
258    insn.  */
259 static struct obstack reload_obstack;
260 
261 /* Points to the beginning of the reload_obstack.  All insn_chain structures
262    are allocated first.  */
263 static char *reload_startobj;
264 
265 /* The point after all insn_chain structures.  Used to quickly deallocate
266    memory allocated in copy_reloads during calculate_needs_all_insns.  */
267 static char *reload_firstobj;
268 
269 /* This points before all local rtl generated by register elimination.
270    Used to quickly free all memory after processing one insn.  */
271 static char *reload_insn_firstobj;
272 
273 /* List of insn_chain instructions, one for every insn that reload needs to
274    examine.  */
275 struct insn_chain *reload_insn_chain;
276 
277 /* TRUE if we potentially left dead insns in the insn stream and want to
278    run DCE immediately after reload, FALSE otherwise.  */
279 static bool need_dce;
280 
281 /* List of all insns needing reloads.  */
282 static struct insn_chain *insns_need_reload;
283 
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.   If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  HOST_WIDE_INT initial_offset;	/* Initial difference between values.  */
  int can_eliminate;		/* Nonzero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
				   target hook in previous scan over insns
				   made by reload.  */
  HOST_WIDE_INT offset;		/* Current offset between the two regs.  */
  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
};

/* Table of the eliminations currently being considered; one entry per
   (from, to) pair.  NULL until initialized.  */
static struct elim_table *reg_eliminate = 0;
310 
/* This is an intermediate structure to initialize the table.  It has
   exactly the members provided by ELIMINABLE_REGS.  */
static const struct elim_table_1
{
  const int from;		/* Register number to be eliminated.  */
  const int to;			/* Register number used as replacement.  */
} reg_eliminate_1[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

/* Number of elimination (from, to) pairs the target defines.  */
#define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
330 
331 /* Record the number of pending eliminations that have an offset not equal
332    to their initial offset.  If nonzero, we use a new copy of each
333    replacement result in any insns encountered.  */
334 int num_not_at_initial_offset;
335 
336 /* Count the number of registers that we may be able to eliminate.  */
337 static int num_eliminable;
338 /* And the number of registers that are equivalent to a constant that
339    can be eliminated to frame_pointer / arg_pointer + constant.  */
340 static int num_eliminable_invariants;
341 
342 /* For each label, we record the offset of each elimination.  If we reach
343    a label by more than one path and an offset differs, we cannot do the
344    elimination.  This information is indexed by the difference of the
345    number of the label and the first label number.  We can't offset the
346    pointer itself as this can cause problems on machines with segmented
347    memory.  The first table is an array of flags that records whether we
348    have yet encountered a label and the second table is an array of arrays,
349    one entry in the latter array for each elimination.  */
350 
351 static int first_label_num;
352 static char *offsets_known_at;
353 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
354 
355 vec<reg_equivs_t, va_gc> *reg_equivs;
356 
357 /* Stack of addresses where an rtx has been changed.  We can undo the
358    changes by popping items off the stack and restoring the original
359    value at each location.
360 
361    We use this simplistic undo capability rather than copy_rtx as copy_rtx
362    will not make a deep copy of a normally sharable rtx, such as
363    (const (plus (symbol_ref) (const_int))).  If such an expression appears
364    as R1 in gen_reload_chain_without_interm_reg_p, then a shared
365    rtx expression would be changed.  See PR 42431.  */
366 
367 typedef rtx *rtx_p;
368 static vec<rtx_p> substitute_stack;
369 
370 /* Number of labels in the current function.  */
371 
372 static int num_labels;
373 
374 static void replace_pseudos_in (rtx *, machine_mode, rtx);
375 static void maybe_fix_stack_asms (void);
376 static void copy_reloads (struct insn_chain *);
377 static void calculate_needs_all_insns (int);
378 static int find_reg (struct insn_chain *, int);
379 static void find_reload_regs (struct insn_chain *);
380 static void select_reload_regs (void);
381 static void delete_caller_save_insns (void);
382 
383 static void spill_failure (rtx_insn *, enum reg_class);
384 static void count_spilled_pseudo (int, int, int);
385 static void delete_dead_insn (rtx_insn *);
386 static void alter_reg (int, int, bool);
387 static void set_label_offsets (rtx, rtx_insn *, int);
388 static void check_eliminable_occurrences (rtx);
389 static void elimination_effects (rtx, machine_mode);
390 static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
391 static int eliminate_regs_in_insn (rtx_insn *, int);
392 static void update_eliminable_offsets (void);
393 static void mark_not_eliminable (rtx, const_rtx, void *);
394 static void set_initial_elim_offsets (void);
395 static bool verify_initial_elim_offsets (void);
396 static void set_initial_label_offsets (void);
397 static void set_offsets_for_label (rtx_insn *);
398 static void init_eliminable_invariants (rtx_insn *, bool);
399 static void init_elim_table (void);
400 static void free_reg_equiv (void);
401 static void update_eliminables (HARD_REG_SET *);
402 static bool update_eliminables_and_spill (void);
403 static void elimination_costs_in_insn (rtx_insn *);
404 static void spill_hard_reg (unsigned int, int);
405 static int finish_spills (int);
406 static void scan_paradoxical_subregs (rtx);
407 static void count_pseudo (int);
408 static void order_regs_for_reload (struct insn_chain *);
409 static void reload_as_needed (int);
410 static void forget_old_reloads_1 (rtx, const_rtx, void *);
411 static void forget_marked_reloads (regset);
412 static int reload_reg_class_lower (const void *, const void *);
413 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
414 				    machine_mode);
415 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
416 				     machine_mode);
417 static int reload_reg_free_p (unsigned int, int, enum reload_type);
418 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
419 					rtx, rtx, int, int);
420 static int free_for_value_p (int, machine_mode, int, enum reload_type,
421 			     rtx, rtx, int, int);
422 static int allocate_reload_reg (struct insn_chain *, int, int);
423 static int conflicts_with_override (rtx);
424 static void failed_reload (rtx_insn *, int);
425 static int set_reload_reg (int, int);
426 static void choose_reload_regs_init (struct insn_chain *, rtx *);
427 static void choose_reload_regs (struct insn_chain *);
428 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
429 				     rtx, int);
430 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
431 				      int);
432 static void do_input_reload (struct insn_chain *, struct reload *, int);
433 static void do_output_reload (struct insn_chain *, struct reload *, int);
434 static void emit_reload_insns (struct insn_chain *);
435 static void delete_output_reload (rtx_insn *, int, int, rtx);
436 static void delete_address_reloads (rtx_insn *, rtx_insn *);
437 static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
438 static void inc_for_reload (rtx, rtx, rtx, int);
439 #ifdef AUTO_INC_DEC
440 static void add_auto_inc_notes (rtx_insn *, rtx);
441 #endif
442 static void substitute (rtx *, const_rtx, rtx);
443 static bool gen_reload_chain_without_interm_reg_p (int, int);
444 static int reloads_conflict (int, int);
445 static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
446 static rtx_insn *emit_insn_if_valid_for_reload (rtx);
447 
448 /* Initialize the reload pass.  This is called at the beginning of compilation
449    and may be called again if the target is reinitialized.  */
450 
451 void
452 init_reload (void)
453 {
454   int i;
455 
456   /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
457      Set spill_indirect_levels to the number of levels such addressing is
458      permitted, zero if it is not permitted at all.  */
459 
460   rtx tem
461     = gen_rtx_MEM (Pmode,
462 		   gen_rtx_PLUS (Pmode,
463 				 gen_rtx_REG (Pmode,
464 					      LAST_VIRTUAL_REGISTER + 1),
465 				 gen_int_mode (4, Pmode)));
466   spill_indirect_levels = 0;
467 
468   while (memory_address_p (QImode, tem))
469     {
470       spill_indirect_levels++;
471       tem = gen_rtx_MEM (Pmode, tem);
472     }
473 
474   /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */
475 
476   tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
477   indirect_symref_ok = memory_address_p (QImode, tem);
478 
479   /* See if reg+reg is a valid (and offsettable) address.  */
480 
481   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
482     {
483       tem = gen_rtx_PLUS (Pmode,
484 			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
485 			  gen_rtx_REG (Pmode, i));
486 
487       /* This way, we make sure that reg+reg is an offsettable address.  */
488       tem = plus_constant (Pmode, tem, 4);
489 
490       if (memory_address_p (QImode, tem))
491 	{
492 	  double_reg_address_ok = 1;
493 	  break;
494 	}
495     }
496 
497   /* Initialize obstack for our rtl allocation.  */
498   if (reload_startobj == NULL)
499     {
500       gcc_obstack_init (&reload_obstack);
501       reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
502     }
503 
504   INIT_REG_SET (&spilled_pseudos);
505   INIT_REG_SET (&changed_allocation_pseudos);
506   INIT_REG_SET (&pseudos_counted);
507 }
508 
509 /* List of insn chains that are currently unused.  */
510 static struct insn_chain *unused_insn_chains = 0;
511 
512 /* Allocate an empty insn_chain structure.  */
513 struct insn_chain *
514 new_insn_chain (void)
515 {
516   struct insn_chain *c;
517 
518   if (unused_insn_chains == 0)
519     {
520       c = XOBNEW (&reload_obstack, struct insn_chain);
521       INIT_REG_SET (&c->live_throughout);
522       INIT_REG_SET (&c->dead_or_set);
523     }
524   else
525     {
526       c = unused_insn_chains;
527       unused_insn_chains = c->next;
528     }
529   c->is_caller_save_insn = 0;
530   c->need_operand_change = 0;
531   c->need_reload = 0;
532   c->need_elim = 0;
533   return c;
534 }
535 
536 /* Small utility function to set all regs in hard reg set TO which are
537    allocated to pseudos in regset FROM.  */
538 
539 void
540 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
541 {
542   unsigned int regno;
543   reg_set_iterator rsi;
544 
545   EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
546     {
547       int r = reg_renumber[regno];
548 
549       if (r < 0)
550 	{
551 	  /* reload_combine uses the information from DF_LIVE_IN,
552 	     which might still contain registers that have not
553 	     actually been allocated since they have an
554 	     equivalence.  */
555 	  gcc_assert (ira_conflicts_p || reload_completed);
556 	}
557       else
558 	add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
559     }
560 }
561 
562 /* Replace all pseudos found in LOC with their corresponding
563    equivalences.  */
564 
565 static void
566 replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
567 {
568   rtx x = *loc;
569   enum rtx_code code;
570   const char *fmt;
571   int i, j;
572 
573   if (! x)
574     return;
575 
576   code = GET_CODE (x);
577   if (code == REG)
578     {
579       unsigned int regno = REGNO (x);
580 
581       if (regno < FIRST_PSEUDO_REGISTER)
582 	return;
583 
584       x = eliminate_regs_1 (x, mem_mode, usage, true, false);
585       if (x != *loc)
586 	{
587 	  *loc = x;
588 	  replace_pseudos_in (loc, mem_mode, usage);
589 	  return;
590 	}
591 
592       if (reg_equiv_constant (regno))
593 	*loc = reg_equiv_constant (regno);
594       else if (reg_equiv_invariant (regno))
595 	*loc = reg_equiv_invariant (regno);
596       else if (reg_equiv_mem (regno))
597 	*loc = reg_equiv_mem (regno);
598       else if (reg_equiv_address (regno))
599 	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
600       else
601 	{
602 	  gcc_assert (!REG_P (regno_reg_rtx[regno])
603 		      || REGNO (regno_reg_rtx[regno]) != regno);
604 	  *loc = regno_reg_rtx[regno];
605 	}
606 
607       return;
608     }
609   else if (code == MEM)
610     {
611       replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
612       return;
613     }
614 
615   /* Process each of our operands recursively.  */
616   fmt = GET_RTX_FORMAT (code);
617   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
618     if (*fmt == 'e')
619       replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
620     else if (*fmt == 'E')
621       for (j = 0; j < XVECLEN (x, i); j++)
622 	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
623 }
624 
625 /* Determine if the current function has an exception receiver block
626    that reaches the exit block via non-exceptional edges  */
627 
628 static bool
629 has_nonexceptional_receiver (void)
630 {
631   edge e;
632   edge_iterator ei;
633   basic_block *tos, *worklist, bb;
634 
635   /* If we're not optimizing, then just err on the safe side.  */
636   if (!optimize)
637     return true;
638 
639   /* First determine which blocks can reach exit via normal paths.  */
640   tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
641 
642   FOR_EACH_BB_FN (bb, cfun)
643     bb->flags &= ~BB_REACHABLE;
644 
645   /* Place the exit block on our worklist.  */
646   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
647   *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
648 
649   /* Iterate: find everything reachable from what we've already seen.  */
650   while (tos != worklist)
651     {
652       bb = *--tos;
653 
654       FOR_EACH_EDGE (e, ei, bb->preds)
655 	if (!(e->flags & EDGE_ABNORMAL))
656 	  {
657 	    basic_block src = e->src;
658 
659 	    if (!(src->flags & BB_REACHABLE))
660 	      {
661 		src->flags |= BB_REACHABLE;
662 		*tos++ = src;
663 	      }
664 	  }
665     }
666   free (worklist);
667 
668   /* Now see if there's a reachable block with an exceptional incoming
669      edge.  */
670   FOR_EACH_BB_FN (bb, cfun)
671     if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
672       return true;
673 
674   /* No exceptional block reached exit unexceptionally.  */
675   return false;
676 }
677 
678 /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
679    zero elements) to MAX_REG_NUM elements.
680 
681    Initialize all new fields to NULL and update REG_EQUIVS_SIZE.  */
682 void
683 grow_reg_equivs (void)
684 {
685   int old_size = vec_safe_length (reg_equivs);
686   int max_regno = max_reg_num ();
687   int i;
688   reg_equivs_t ze;
689 
690   memset (&ze, 0, sizeof (reg_equivs_t));
691   vec_safe_reserve (reg_equivs, max_regno);
692   for (i = old_size; i < max_regno; i++)
693     reg_equivs->quick_insert (i, ze);
694 }
695 
696 
697 /* Global variables used by reload and its subroutines.  */
698 
699 /* The current basic block while in calculate_elim_costs_all_insns.  */
700 static basic_block elim_bb;
701 
702 /* Set during calculate_needs if an insn needs register elimination.  */
703 static int something_needs_elimination;
704 /* Set during calculate_needs if an insn needs an operand changed.  */
705 static int something_needs_operands_changed;
706 /* Set by alter_regs if we spilled a register to the stack.  */
707 static bool something_was_spilled;
708 
709 /* Nonzero means we couldn't get enough spill regs.  */
710 static int failure;
711 
712 /* Temporary array of pseudo-register number.  */
713 static int *temp_pseudo_reg_arr;
714 
715 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
716    If that insn didn't set the register (i.e., it copied the register to
717    memory), just delete that insn instead of the equivalencing insn plus
718    anything now dead.  If we call delete_dead_insn on that insn, we may
719    delete the insn that actually sets the register if the register dies
720    there and that is incorrect.  */
721 static void
722 remove_init_insns ()
723 {
724   for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
725     {
726       if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
727 	{
728 	  rtx list;
729 	  for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
730 	    {
731 	      rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
732 
733 	      /* If we already deleted the insn or if it may trap, we can't
734 		 delete it.  The latter case shouldn't happen, but can
735 		 if an insn has a variable address, gets a REG_EH_REGION
736 		 note added to it, and then gets converted into a load
737 		 from a constant address.  */
738 	      if (NOTE_P (equiv_insn)
739 		  || can_throw_internal (equiv_insn))
740 		;
741 	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
742 		delete_dead_insn (equiv_insn);
743 	      else
744 		SET_INSN_DELETED (equiv_insn);
745 	    }
746 	}
747     }
748 }
749 
750 /* Return true if remove_init_insns will delete INSN.  */
751 static bool
752 will_delete_init_insn_p (rtx_insn *insn)
753 {
754   rtx set = single_set (insn);
755   if (!set || !REG_P (SET_DEST (set)))
756     return false;
757   unsigned regno = REGNO (SET_DEST (set));
758 
759   if (can_throw_internal (insn))
760     return false;
761 
762   if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
763     return false;
764 
765   for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
766     {
767       rtx equiv_insn = XEXP (list, 0);
768       if (equiv_insn == insn)
769 	return true;
770     }
771   return false;
772 }
773 
774 /* Main entry point for the reload pass.
775 
776    FIRST is the first insn of the function being compiled.
777 
778    GLOBAL nonzero means we were called from global_alloc
779    and should attempt to reallocate any pseudoregs that we
780    displace from hard regs we will use for reloads.
781    If GLOBAL is zero, we do not have enough information to do that,
782    so any pseudo reg that is spilled must go to the stack.
783 
784    Return value is TRUE if reload likely left dead insns in the
785    stream and a DCE pass should be run to eliminate them.  Else the
786    return value is FALSE.  */
787 
788 bool
789 reload (rtx_insn *first, int global)
790 {
791   int i, n;
792   rtx_insn *insn;
793   struct elim_table *ep;
794   basic_block bb;
795   bool inserted;
796 
797   /* Make sure even insns with volatile mem refs are recognizable.  */
798   init_recog ();
799 
800   failure = 0;
801 
802   reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
803 
804   /* Make sure that the last insn in the chain
805      is not something that needs reloading.  */
806   emit_note (NOTE_INSN_DELETED);
807 
808   /* Enable find_equiv_reg to distinguish insns made by reload.  */
809   reload_first_uid = get_max_uid ();
810 
811 #ifdef SECONDARY_MEMORY_NEEDED
812   /* Initialize the secondary memory table.  */
813   clear_secondary_mem ();
814 #endif
815 
816   /* We don't have a stack slot for any spill reg yet.  */
817   memset (spill_stack_slot, 0, sizeof spill_stack_slot);
818   memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
819 
820   /* Initialize the save area information for caller-save, in case some
821      are needed.  */
822   init_save_areas ();
823 
824   /* Compute which hard registers are now in use
825      as homes for pseudo registers.
826      This is done here rather than (eg) in global_alloc
827      because this point is reached even if not optimizing.  */
828   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
829     mark_home_live (i);
830 
831   /* A function that has a nonlocal label that can reach the exit
832      block via non-exceptional paths must save all call-saved
833      registers.  */
834   if (cfun->has_nonlocal_label
835       && has_nonexceptional_receiver ())
836     crtl->saves_all_registers = 1;
837 
838   if (crtl->saves_all_registers)
839     for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
840       if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
841 	df_set_regs_ever_live (i, true);
842 
843   /* Find all the pseudo registers that didn't get hard regs
844      but do have known equivalent constants or memory slots.
845      These include parameters (known equivalent to parameter slots)
846      and cse'd or loop-moved constant memory addresses.
847 
848      Record constant equivalents in reg_equiv_constant
849      so they will be substituted by find_reloads.
850      Record memory equivalents in reg_mem_equiv so they can
851      be substituted eventually by altering the REG-rtx's.  */
852 
853   grow_reg_equivs ();
854   reg_old_renumber = XCNEWVEC (short, max_regno);
855   memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
856   pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
857   pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
858 
859   CLEAR_HARD_REG_SET (bad_spill_regs_global);
860 
861   init_eliminable_invariants (first, true);
862   init_elim_table ();
863 
864   /* Alter each pseudo-reg rtx to contain its hard reg number.  Assign
865      stack slots to the pseudos that lack hard regs or equivalents.
866      Do not touch virtual registers.  */
867 
868   temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
869   for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
870     temp_pseudo_reg_arr[n++] = i;
871 
872   if (ira_conflicts_p)
873     /* Ask IRA to order pseudo-registers for better stack slot
874        sharing.  */
875     ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
876 
877   for (i = 0; i < n; i++)
878     alter_reg (temp_pseudo_reg_arr[i], -1, false);
879 
880   /* If we have some registers we think can be eliminated, scan all insns to
881      see if there is an insn that sets one of these registers to something
882      other than itself plus a constant.  If so, the register cannot be
883      eliminated.  Doing this scan here eliminates an extra pass through the
884      main reload loop in the most common case where register elimination
885      cannot be done.  */
886   for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
887     if (INSN_P (insn))
888       note_stores (PATTERN (insn), mark_not_eliminable, NULL);
889 
890   maybe_fix_stack_asms ();
891 
892   insns_need_reload = 0;
893   something_needs_elimination = 0;
894 
895   /* Initialize to -1, which means take the first spill register.  */
896   last_spill_reg = -1;
897 
898   /* Spill any hard regs that we know we can't eliminate.  */
899   CLEAR_HARD_REG_SET (used_spill_regs);
900   /* There can be multiple ways to eliminate a register;
901      they should be listed adjacently.
902      Elimination for any register fails only if all possible ways fail.  */
903   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
904     {
905       int from = ep->from;
906       int can_eliminate = 0;
907       do
908 	{
909           can_eliminate |= ep->can_eliminate;
910           ep++;
911 	}
912       while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
913       if (! can_eliminate)
914 	spill_hard_reg (from, 1);
915     }
916 
917 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
918   if (frame_pointer_needed)
919     spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
920 #endif
921   finish_spills (global);
922 
923   /* From now on, we may need to generate moves differently.  We may also
924      allow modifications of insns which cause them to not be recognized.
925      Any such modifications will be cleaned up during reload itself.  */
926   reload_in_progress = 1;
927 
928   /* This loop scans the entire function each go-round
929      and repeats until one repetition spills no additional hard regs.  */
930   for (;;)
931     {
932       int something_changed;
933       int did_spill;
934       HOST_WIDE_INT starting_frame_size;
935 
936       starting_frame_size = get_frame_size ();
937       something_was_spilled = false;
938 
939       set_initial_elim_offsets ();
940       set_initial_label_offsets ();
941 
942       /* For each pseudo register that has an equivalent location defined,
943 	 try to eliminate any eliminable registers (such as the frame pointer)
944 	 assuming initial offsets for the replacement register, which
945 	 is the normal case.
946 
947 	 If the resulting location is directly addressable, substitute
948 	 the MEM we just got directly for the old REG.
949 
950 	 If it is not addressable but is a constant or the sum of a hard reg
951 	 and constant, it is probably not addressable because the constant is
952 	 out of range, in that case record the address; we will generate
953 	 hairy code to compute the address in a register each time it is
954 	 needed.  Similarly if it is a hard register, but one that is not
955 	 valid as an address register.
956 
957 	 If the location is not addressable, but does not have one of the
958 	 above forms, assign a stack slot.  We have to do this to avoid the
959 	 potential of producing lots of reloads if, e.g., a location involves
960 	 a pseudo that didn't get a hard register and has an equivalent memory
961 	 location that also involves a pseudo that didn't get a hard register.
962 
963 	 Perhaps at some point we will improve reload_when_needed handling
964 	 so this problem goes away.  But that's very hairy.  */
965 
966       for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
967 	if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
968 	  {
969 	    rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
970 				    NULL_RTX);
971 
972 	    if (strict_memory_address_addr_space_p
973 		  (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
974 		   MEM_ADDR_SPACE (x)))
975 	      reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
976 	    else if (CONSTANT_P (XEXP (x, 0))
977 		     || (REG_P (XEXP (x, 0))
978 			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
979 		     || (GET_CODE (XEXP (x, 0)) == PLUS
980 			 && REG_P (XEXP (XEXP (x, 0), 0))
981 			 && (REGNO (XEXP (XEXP (x, 0), 0))
982 			     < FIRST_PSEUDO_REGISTER)
983 			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
984 	      reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
985 	    else
986 	      {
987 		/* Make a new stack slot.  Then indicate that something
988 		   changed so we go back and recompute offsets for
989 		   eliminable registers because the allocation of memory
990 		   below might change some offset.  reg_equiv_{mem,address}
991 		   will be set up for this pseudo on the next pass around
992 		   the loop.  */
993 		reg_equiv_memory_loc (i) = 0;
994 		reg_equiv_init (i) = 0;
995 		alter_reg (i, -1, true);
996 	      }
997 	  }
998 
999       if (caller_save_needed)
1000 	setup_save_areas ();
1001 
1002       if (starting_frame_size && crtl->stack_alignment_needed)
1003 	{
1004 	  /* If we have a stack frame, we must align it now.  The
1005 	     stack size may be a part of the offset computation for
1006 	     register elimination.  So if this changes the stack size,
1007 	     then repeat the elimination bookkeeping.  We don't
1008 	     realign when there is no stack, as that will cause a
1009 	     stack frame when none is needed should
1010 	     STARTING_FRAME_OFFSET not be already aligned to
1011 	     STACK_BOUNDARY.  */
1012 	  assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
1013 	}
1014       /* If we allocated another stack slot, redo elimination bookkeeping.  */
1015       if (something_was_spilled || starting_frame_size != get_frame_size ())
1016 	{
1017 	  update_eliminables_and_spill ();
1018 	  continue;
1019 	}
1020 
1021       if (caller_save_needed)
1022 	{
1023 	  save_call_clobbered_regs ();
1024 	  /* That might have allocated new insn_chain structures.  */
1025 	  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1026 	}
1027 
1028       calculate_needs_all_insns (global);
1029 
1030       if (! ira_conflicts_p)
1031 	/* Don't do it for IRA.  We need this info because we don't
1032 	   change live_throughout and dead_or_set for chains when IRA
1033 	   is used.  */
1034 	CLEAR_REG_SET (&spilled_pseudos);
1035 
1036       did_spill = 0;
1037 
1038       something_changed = 0;
1039 
1040       /* If we allocated any new memory locations, make another pass
1041 	 since it might have changed elimination offsets.  */
1042       if (something_was_spilled || starting_frame_size != get_frame_size ())
1043 	something_changed = 1;
1044 
1045       /* Even if the frame size remained the same, we might still have
1046 	 changed elimination offsets, e.g. if find_reloads called
1047 	 force_const_mem requiring the back end to allocate a constant
1048 	 pool base register that needs to be saved on the stack.  */
1049       else if (!verify_initial_elim_offsets ())
1050 	something_changed = 1;
1051 
1052       if (update_eliminables_and_spill ())
1053 	{
1054 	  did_spill = 1;
1055 	  something_changed = 1;
1056 	}
1057 
1058       select_reload_regs ();
1059       if (failure)
1060 	goto failed;
1061 
1062       if (insns_need_reload != 0 || did_spill)
1063 	something_changed |= finish_spills (global);
1064 
1065       if (! something_changed)
1066 	break;
1067 
1068       if (caller_save_needed)
1069 	delete_caller_save_insns ();
1070 
1071       obstack_free (&reload_obstack, reload_firstobj);
1072     }
1073 
1074   /* If global-alloc was run, notify it of any register eliminations we have
1075      done.  */
1076   if (global)
1077     for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1078       if (ep->can_eliminate)
1079 	mark_elimination (ep->from, ep->to);
1080 
1081   remove_init_insns ();
1082 
1083   /* Use the reload registers where necessary
1084      by generating move instructions to move the must-be-register
1085      values into or out of the reload registers.  */
1086 
1087   if (insns_need_reload != 0 || something_needs_elimination
1088       || something_needs_operands_changed)
1089     {
1090       HOST_WIDE_INT old_frame_size = get_frame_size ();
1091 
1092       reload_as_needed (global);
1093 
1094       gcc_assert (old_frame_size == get_frame_size ());
1095 
1096       gcc_assert (verify_initial_elim_offsets ());
1097     }
1098 
1099   /* If we were able to eliminate the frame pointer, show that it is no
1100      longer live at the start of any basic block.  If it ls live by
1101      virtue of being in a pseudo, that pseudo will be marked live
1102      and hence the frame pointer will be known to be live via that
1103      pseudo.  */
1104 
1105   if (! frame_pointer_needed)
1106     FOR_EACH_BB_FN (bb, cfun)
1107       bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1108 
1109   /* Come here (with failure set nonzero) if we can't get enough spill
1110      regs.  */
1111  failed:
1112 
1113   CLEAR_REG_SET (&changed_allocation_pseudos);
1114   CLEAR_REG_SET (&spilled_pseudos);
1115   reload_in_progress = 0;
1116 
1117   /* Now eliminate all pseudo regs by modifying them into
1118      their equivalent memory references.
1119      The REG-rtx's for the pseudos are modified in place,
1120      so all insns that used to refer to them now refer to memory.
1121 
1122      For a reg that has a reg_equiv_address, all those insns
1123      were changed by reloading so that no insns refer to it any longer;
1124      but the DECL_RTL of a variable decl may refer to it,
1125      and if so this causes the debugging info to mention the variable.  */
1126 
1127   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1128     {
1129       rtx addr = 0;
1130 
1131       if (reg_equiv_mem (i))
1132 	addr = XEXP (reg_equiv_mem (i), 0);
1133 
1134       if (reg_equiv_address (i))
1135 	addr = reg_equiv_address (i);
1136 
1137       if (addr)
1138 	{
1139 	  if (reg_renumber[i] < 0)
1140 	    {
1141 	      rtx reg = regno_reg_rtx[i];
1142 
1143 	      REG_USERVAR_P (reg) = 0;
1144 	      PUT_CODE (reg, MEM);
1145 	      XEXP (reg, 0) = addr;
1146 	      if (reg_equiv_memory_loc (i))
1147 		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1148 	      else
1149 		MEM_ATTRS (reg) = 0;
1150 	      MEM_NOTRAP_P (reg) = 1;
1151 	    }
1152 	  else if (reg_equiv_mem (i))
1153 	    XEXP (reg_equiv_mem (i), 0) = addr;
1154 	}
1155 
1156       /* We don't want complex addressing modes in debug insns
1157 	 if simpler ones will do, so delegitimize equivalences
1158 	 in debug insns.  */
1159       if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1160 	{
1161 	  rtx reg = regno_reg_rtx[i];
1162 	  rtx equiv = 0;
1163 	  df_ref use, next;
1164 
1165 	  if (reg_equiv_constant (i))
1166 	    equiv = reg_equiv_constant (i);
1167 	  else if (reg_equiv_invariant (i))
1168 	    equiv = reg_equiv_invariant (i);
1169 	  else if (reg && MEM_P (reg))
1170 	    equiv = targetm.delegitimize_address (reg);
1171 	  else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1172 	    equiv = reg;
1173 
1174 	  if (equiv == reg)
1175 	    continue;
1176 
1177 	  for (use = DF_REG_USE_CHAIN (i); use; use = next)
1178 	    {
1179 	      insn = DF_REF_INSN (use);
1180 
1181 	      /* Make sure the next ref is for a different instruction,
1182 		 so that we're not affected by the rescan.  */
1183 	      next = DF_REF_NEXT_REG (use);
1184 	      while (next && DF_REF_INSN (next) == insn)
1185 		next = DF_REF_NEXT_REG (next);
1186 
1187 	      if (DEBUG_INSN_P (insn))
1188 		{
1189 		  if (!equiv)
1190 		    {
1191 		      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1192 		      df_insn_rescan_debug_internal (insn);
1193 		    }
1194 		  else
1195 		    INSN_VAR_LOCATION_LOC (insn)
1196 		      = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1197 					      reg, equiv);
1198 		}
1199 	    }
1200 	}
1201     }
1202 
1203   /* We must set reload_completed now since the cleanup_subreg_operands call
1204      below will re-recognize each insn and reload may have generated insns
1205      which are only valid during and after reload.  */
1206   reload_completed = 1;
1207 
1208   /* Make a pass over all the insns and delete all USEs which we inserted
1209      only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1210      notes.  Delete all CLOBBER insns, except those that refer to the return
1211      value and the special mem:BLK CLOBBERs added to prevent the scheduler
1212      from misarranging variable-array code, and simplify (subreg (reg))
1213      operands.  Strip and regenerate REG_INC notes that may have been moved
1214      around.  */
1215 
1216   for (insn = first; insn; insn = NEXT_INSN (insn))
1217     if (INSN_P (insn))
1218       {
1219 	rtx *pnote;
1220 
1221 	if (CALL_P (insn))
1222 	  replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1223 			      VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1224 
1225 	if ((GET_CODE (PATTERN (insn)) == USE
1226 	     /* We mark with QImode USEs introduced by reload itself.  */
1227 	     && (GET_MODE (insn) == QImode
1228 		 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1229 	    || (GET_CODE (PATTERN (insn)) == CLOBBER
1230 		&& (!MEM_P (XEXP (PATTERN (insn), 0))
1231 		    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1232 		    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1233 			&& XEXP (XEXP (PATTERN (insn), 0), 0)
1234 				!= stack_pointer_rtx))
1235 		&& (!REG_P (XEXP (PATTERN (insn), 0))
1236 		    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1237 	  {
1238 	    delete_insn (insn);
1239 	    continue;
1240 	  }
1241 
1242 	/* Some CLOBBERs may survive until here and still reference unassigned
1243 	   pseudos with const equivalent, which may in turn cause ICE in later
1244 	   passes if the reference remains in place.  */
1245 	if (GET_CODE (PATTERN (insn)) == CLOBBER)
1246 	  replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1247 			      VOIDmode, PATTERN (insn));
1248 
1249 	/* Discard obvious no-ops, even without -O.  This optimization
1250 	   is fast and doesn't interfere with debugging.  */
1251 	if (NONJUMP_INSN_P (insn)
1252 	    && GET_CODE (PATTERN (insn)) == SET
1253 	    && REG_P (SET_SRC (PATTERN (insn)))
1254 	    && REG_P (SET_DEST (PATTERN (insn)))
1255 	    && (REGNO (SET_SRC (PATTERN (insn)))
1256 		== REGNO (SET_DEST (PATTERN (insn)))))
1257 	  {
1258 	    delete_insn (insn);
1259 	    continue;
1260 	  }
1261 
1262 	pnote = &REG_NOTES (insn);
1263 	while (*pnote != 0)
1264 	  {
1265 	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
1266 		|| REG_NOTE_KIND (*pnote) == REG_UNUSED
1267 		|| REG_NOTE_KIND (*pnote) == REG_INC)
1268 	      *pnote = XEXP (*pnote, 1);
1269 	    else
1270 	      pnote = &XEXP (*pnote, 1);
1271 	  }
1272 
1273 #ifdef AUTO_INC_DEC
1274 	add_auto_inc_notes (insn, PATTERN (insn));
1275 #endif
1276 
1277 	/* Simplify (subreg (reg)) if it appears as an operand.  */
1278 	cleanup_subreg_operands (insn);
1279 
1280 	/* Clean up invalid ASMs so that they don't confuse later passes.
1281 	   See PR 21299.  */
1282 	if (asm_noperands (PATTERN (insn)) >= 0)
1283 	  {
1284 	    extract_insn (insn);
1285 	    if (!constrain_operands (1, get_enabled_alternatives (insn)))
1286 	      {
1287 		error_for_asm (insn,
1288 			       "%<asm%> operand has impossible constraints");
1289 		delete_insn (insn);
1290 		continue;
1291 	      }
1292 	  }
1293       }
1294 
1295   /* If we are doing generic stack checking, give a warning if this
1296      function's frame size is larger than we expect.  */
1297   if (flag_stack_check == GENERIC_STACK_CHECK)
1298     {
1299       HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1300       static int verbose_warned = 0;
1301 
1302       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1303 	if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1304 	  size += UNITS_PER_WORD;
1305 
1306       if (size > STACK_CHECK_MAX_FRAME_SIZE)
1307 	{
1308 	  warning (0, "frame size too large for reliable stack checking");
1309 	  if (! verbose_warned)
1310 	    {
1311 	      warning (0, "try reducing the number of local variables");
1312 	      verbose_warned = 1;
1313 	    }
1314 	}
1315     }
1316 
1317   free (temp_pseudo_reg_arr);
1318 
1319   /* Indicate that we no longer have known memory locations or constants.  */
1320   free_reg_equiv ();
1321 
1322   free (reg_max_ref_width);
1323   free (reg_old_renumber);
1324   free (pseudo_previous_regs);
1325   free (pseudo_forbidden_regs);
1326 
1327   CLEAR_HARD_REG_SET (used_spill_regs);
1328   for (i = 0; i < n_spills; i++)
1329     SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1330 
1331   /* Free all the insn_chain structures at once.  */
1332   obstack_free (&reload_obstack, reload_startobj);
1333   unused_insn_chains = 0;
1334 
1335   inserted = fixup_abnormal_edges ();
1336 
1337   /* We've possibly turned single trapping insn into multiple ones.  */
1338   if (cfun->can_throw_non_call_exceptions)
1339     {
1340       sbitmap blocks;
1341       blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
1342       bitmap_ones (blocks);
1343       find_many_sub_basic_blocks (blocks);
1344       sbitmap_free (blocks);
1345     }
1346 
1347   if (inserted)
1348     commit_edge_insertions ();
1349 
1350   /* Replacing pseudos with their memory equivalents might have
1351      created shared rtx.  Subsequent passes would get confused
1352      by this, so unshare everything here.  */
1353   unshare_all_rtl_again (first);
1354 
1355 #ifdef STACK_BOUNDARY
1356   /* init_emit has set the alignment of the hard frame pointer
1357      to STACK_BOUNDARY.  It is very likely no longer valid if
1358      the hard frame pointer was used for register allocation.  */
1359   if (!frame_pointer_needed)
1360     REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1361 #endif
1362 
1363   substitute_stack.release ();
1364 
1365   gcc_assert (bitmap_empty_p (&spilled_pseudos));
1366 
1367   reload_completed = !failure;
1368 
1369   return need_dce;
1370 }
1371 
/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.  */

static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns with operands are of interest here.  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  /* '#' starts a comment that runs to the next comma or
		     the end of the constraint string; skip it.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case 'g':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  enum constraint_num cn = lookup_constraint (p);
		  if (insn_extra_address_constraint (cn))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					     ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [reg_class_for_constraint (cn)];
		  break;
		}
	      /* Constraint letters may be multi-character; advance by
		 the full length of the current one.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1478 
1479 /* Copy the global variables n_reloads and rld into the corresponding elts
1480    of CHAIN.  */
1481 static void
1482 copy_reloads (struct insn_chain *chain)
1483 {
1484   chain->n_reloads = n_reloads;
1485   chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1486   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1487   reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1488 }
1489 
/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.  */
static void
calculate_needs_all_insns (int global)
{
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx_insn *insn = chain->insn;

      /* Fetch the successor now; CHAIN may be unlinked below.  */
      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  /* Save the insn's body so it can be restored if elimination
	     replacements must be discarded below.  */
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;

	  /* Skip insns that only set an equivalence.  */
	  if (will_delete_init_insn_p (insn))
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
		       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
		       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
				       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  /* Recycle the chain structure on the free list.  */
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  if (n_reloads != 0)
	    {
	      copy_reloads (chain);
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  /* Terminate the insns_need_reload list.  */
  *pprev_reload = 0;
}
1613 
/* This function is called from the register allocator to set up estimates
   for the cost of eliminating pseudos which have REG_EQUIV equivalences to
   an invariant.  The structure is similar to calculate_needs_all_insns.  */

void
calculate_elim_costs_all_insns (void)
{
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant (REGNO (SET_DEST (set)))
		      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx init = reg_equiv_init (regno);
		  if (init)
		    {
		      /* Estimate what the equivalence is worth: the cost
			 of the initializing expression after elimination,
			 weighted by the block's execution frequency.  */
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      int cost = set_src_cost (t, optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }
  /* Report the accumulated costs to IRA.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant (i))
	{
	  if (reg_equiv_init (i))
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  free (reg_equiv_init_cost);
  free (offsets_known_at);
  free (offsets_at);
  offsets_at = NULL;
  offsets_known_at = NULL;
}
1709 
1710 /* Comparison function for qsort to decide which of two reloads
1711    should be handled first.  *P1 and *P2 are the reload numbers.  */
1712 
1713 static int
1714 reload_reg_class_lower (const void *r1p, const void *r2p)
1715 {
1716   int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1717   int t;
1718 
1719   /* Consider required reloads before optional ones.  */
1720   t = rld[r1].optional - rld[r2].optional;
1721   if (t != 0)
1722     return t;
1723 
1724   /* Count all solitary classes before non-solitary ones.  */
1725   t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1726        - (reg_class_size[(int) rld[r1].rclass] == 1));
1727   if (t != 0)
1728     return t;
1729 
1730   /* Aside from solitaires, consider all multi-reg groups first.  */
1731   t = rld[r2].nregs - rld[r1].nregs;
1732   if (t != 0)
1733     return t;
1734 
1735   /* Consider reloads in order of increasing reg-class number.  */
1736   t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1737   if (t != 0)
1738     return t;
1739 
1740   /* If reloads are equally urgent, sort by reload number,
1741      so that the results of qsort leave nothing to chance.  */
1742   return r1 - r2;
1743 }
1744 
/* The cost of spilling each hard reg, indexed by hard regno.  */
static int spill_cost[FIRST_PSEUDO_REGISTER];

/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   is only charged against the first hard reg of a multi-reg pseudo.  */
static int spill_add_cost[FIRST_PSEUDO_REGISTER];

/* Map of hard regno to the pseudo regno currently occupying the hard
   reg, or -1 if no pseudo occupies it (see order_regs_for_reload).  */
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1756 
1757 /* Update the spill cost arrays, considering that pseudo REG is live.  */
1758 
1759 static void
1760 count_pseudo (int reg)
1761 {
1762   int freq = REG_FREQ (reg);
1763   int r = reg_renumber[reg];
1764   int nregs;
1765 
1766   /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
1767   if (ira_conflicts_p && r < 0)
1768     return;
1769 
1770   if (REGNO_REG_SET_P (&pseudos_counted, reg)
1771       || REGNO_REG_SET_P (&spilled_pseudos, reg))
1772     return;
1773 
1774   SET_REGNO_REG_SET (&pseudos_counted, reg);
1775 
1776   gcc_assert (r >= 0);
1777 
1778   spill_add_cost[r] += freq;
1779   nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1780   while (nregs-- > 0)
1781     {
1782       hard_regno_to_pseudo_regno[r + nregs] = reg;
1783       spill_cost[r + nregs] += freq;
1784     }
1785 }
1786 
1787 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1788    contents of BAD_SPILL_REGS for the insn described by CHAIN.  */
1789 
1790 static void
1791 order_regs_for_reload (struct insn_chain *chain)
1792 {
1793   unsigned i;
1794   HARD_REG_SET used_by_pseudos;
1795   HARD_REG_SET used_by_pseudos2;
1796   reg_set_iterator rsi;
1797 
1798   COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1799 
1800   memset (spill_cost, 0, sizeof spill_cost);
1801   memset (spill_add_cost, 0, sizeof spill_add_cost);
1802   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1803     hard_regno_to_pseudo_regno[i] = -1;
1804 
1805   /* Count number of uses of each hard reg by pseudo regs allocated to it
1806      and then order them by decreasing use.  First exclude hard registers
1807      that are live in or across this insn.  */
1808 
1809   REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1810   REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1811   IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1812   IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1813 
1814   /* Now find out which pseudos are allocated to it, and update
1815      hard_reg_n_uses.  */
1816   CLEAR_REG_SET (&pseudos_counted);
1817 
1818   EXECUTE_IF_SET_IN_REG_SET
1819     (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1820     {
1821       count_pseudo (i);
1822     }
1823   EXECUTE_IF_SET_IN_REG_SET
1824     (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1825     {
1826       count_pseudo (i);
1827     }
1828   CLEAR_REG_SET (&pseudos_counted);
1829 }
1830 
/* Vector of reload-numbers showing the order in which the reloads should
   be processed; filled in and sorted by find_reload_regs.  */
static short reload_order[MAX_RELOADS];

/* Set of hard regs chosen as spill regs for the insn currently being
   processed; bits are set by find_reg and the whole set is copied into
   the insn chain by find_reload_regs.  */
static HARD_REG_SET used_spill_regs_local;
1837 
1838 /* We decided to spill hard register SPILLED, which has a size of
1839    SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
1840    is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1841    update SPILL_COST/SPILL_ADD_COST.  */
1842 
1843 static void
1844 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1845 {
1846   int freq = REG_FREQ (reg);
1847   int r = reg_renumber[reg];
1848   int nregs;
1849 
1850   /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
1851   if (ira_conflicts_p && r < 0)
1852     return;
1853 
1854   gcc_assert (r >= 0);
1855 
1856   nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1857 
1858   if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1859       || spilled + spilled_nregs <= r || r + nregs <= spilled)
1860     return;
1861 
1862   SET_REGNO_REG_SET (&spilled_pseudos, reg);
1863 
1864   spill_add_cost[r] -= freq;
1865   while (nregs-- > 0)
1866     {
1867       hard_regno_to_pseudo_regno[r + nregs] = -1;
1868       spill_cost[r + nregs] -= freq;
1869     }
1870 }
1871 
/* Find a reload register to use for reload number ORDER (an index into
   reload_order) of the insn described by CHAIN.  On success, record the
   chosen hard register in rld[rnum].regno/nregs, mark its regs in
   used_spill_regs_local and return 1; return 0 if no usable register
   was found.  */

static int
find_reg (struct insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  /* Hard regs that must not be used for this reload at all.  */
  HARD_REG_SET not_usable;
  /* Hard regs already claimed by conflicting reloads of this insn.  */
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  /* Scratch lists of pseudos occupying a candidate register, passed to
     IRA for comparison; terminated by -1.  */
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  /* Unusable: local and global bad spill regs, plus everything outside
     the reload's register class.  */
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);

  /* Collect the registers already assigned to earlier-ordered reloads
     that conflict with this one.  */
  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
	for (j = 0; j < rld[other].nregs; j++)
	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  /* Scan all hard registers (in allocation order when the target
     defines one) looking for the cheapest usable candidate.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
	  && HARD_REGNO_MODE_OK (regno, rl->mode))
	{
	  int this_cost = spill_cost[regno];
	  int ok = 1;
	  unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];

	  /* A multi-register reload needs every one of its hard regs
	     usable; accumulate the extra cost of the trailing regs.  */
	  for (j = 1; j < this_nregs; j++)
	    {
	      this_cost += spill_add_cost[regno + j];
	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
		ok = 0;
	    }
	  if (! ok)
	    continue;

	  if (ira_conflicts_p)
	    {
	      /* Ask IRA to find a better pseudo-register for
		 spilling.  Build the -1-terminated list of pseudos
		 occupying this candidate's hard regs.  */
	      for (n = j = 0; j < this_nregs; j++)
		{
		  int r = hard_regno_to_pseudo_regno[regno + j];

		  if (r < 0)
		    continue;
		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
		    regno_pseudo_regs[n++] = r;
		}
	      regno_pseudo_regs[n++] = -1;
	      if (best_reg < 0
		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
						      best_regno_pseudo_regs,
						      rl->in, rl->out,
						      chain->insn))
		{
		  best_reg = regno;
		  /* Remember the pseudo list of the current best choice
		     for later comparisons.  */
		  for (j = 0;; j++)
		    {
		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
		      if (regno_pseudo_regs[j] < 0)
			break;
		    }
		}
	      continue;
	    }

	  /* Slightly prefer a register that already holds the value
	     being reloaded in or out, since no copy is then needed.  */
	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
	    this_cost--;
	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
	    this_cost--;
	  if (this_cost < best_cost
	      /* Among registers with equal cost, prefer caller-saved ones, or
		 use REG_ALLOC_ORDER if it is defined.  */
	      || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
		  && (inv_reg_alloc_order[regno]
		      < inv_reg_alloc_order[best_reg])
#else
		  && call_used_regs[regno]
		  && ! call_used_regs[best_reg]
#endif
		  ))
	    {
	      best_reg = regno;
	      best_cost = this_cost;
	    }
	}
    }
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  /* Commit to BEST_REG: record it in the reload.  */
  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
  rl->regno = best_reg;

  /* Mark as spilled every pseudo that occupied the chosen hard regs,
     updating the cost arrays accordingly.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  for (i = 0; i < rl->nregs; i++)
    {
      /* After count_spilled_pseudo, nothing may still claim these regs.  */
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }
  return 1;
}
2012 
/* Find more reload regs to satisfy the remaining need of an insn, which
   is given by CHAIN.
   Do it by ascending class number, since otherwise a reg
   might be spilled for a big class and might fail to count
   for a smaller class even though it belongs to that class.
   On failure sets the global FAILURE flag and returns early.  */

static void
find_reload_regs (struct insn_chain *chain)
{
  int i;

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.  */
  for (i = 0; i < chain->n_reloads; i++)
    {
      /* Show whether this reload already has a hard reg.  */
      if (chain->rld[i].reg_rtx)
	{
	  int regno = REGNO (chain->rld[i].reg_rtx);
	  chain->rld[i].regno = regno;
	  chain->rld[i].nregs
	    = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
	}
      else
	chain->rld[i].regno = -1;
      reload_order[i] = i;
    }

  /* Work on a copy of the chain's reloads in the global rld array,
     which find_reg operates on.  */
  n_reloads = chain->n_reloads;
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));

  CLEAR_HARD_REG_SET (used_spill_regs_local);

  if (dump_file)
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Compute the order of preference for hard registers to spill.  */

  order_regs_for_reload (chain);

  for (i = 0; i < n_reloads; i++)
    {
      int r = reload_order[i];

      /* Ignore reloads that got marked inoperative.  Reloads that
	 already have a register (regno != -1) need no spill reg.  */
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
	  && ! rld[r].optional
	  && rld[r].regno == -1)
	if (! find_reg (chain, i))
	  {
	    if (dump_file)
	      fprintf (dump_file, "reload failure for reload %d\n", r);
	    spill_failure (chain->insn, rld[r].rclass);
	    failure = 1;
	    return;
	  }
    }

  /* Record which hard regs this insn's reloads spilled, both in the
     chain and in the global used_spill_regs set.  */
  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);

  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
}
2080 
2081 static void
2082 select_reload_regs (void)
2083 {
2084   struct insn_chain *chain;
2085 
2086   /* Try to satisfy the needs for each insn.  */
2087   for (chain = insns_need_reload; chain != 0;
2088        chain = chain->next_need_reload)
2089     find_reload_regs (chain);
2090 }
2091 
/* Delete all insns that were inserted by emit_caller_save_insns during
   this iteration.  Their insn_chain structures are unlinked from
   reload_insn_chain and returned to the unused_insn_chains free list.  */
static void
delete_caller_save_insns (void)
{
  struct insn_chain *c = reload_insn_chain;

  while (c != 0)
    {
      /* Delete each maximal run of caller-save insns.  */
      while (c != 0 && c->is_caller_save_insn)
	{
	  struct insn_chain *next = c->next;
	  rtx_insn *insn = c->insn;

	  /* Keep the list head valid if we are deleting the first
	     element.  */
	  if (c == reload_insn_chain)
	    reload_insn_chain = next;
	  delete_insn (insn);

	  /* Unlink C from the doubly-linked chain.  */
	  if (next)
	    next->prev = c->prev;
	  if (c->prev)
	    c->prev->next = next;
	  /* Recycle the chain structure.  */
	  c->next = unused_insn_chains;
	  unused_insn_chains = c;
	  c = next;
	}
      if (c != 0)
	c = c->next;
    }
}
2122 
2123 /* Handle the failure to find a register to spill.
2124    INSN should be one of the insns which needed this particular spill reg.  */
2125 
2126 static void
2127 spill_failure (rtx_insn *insn, enum reg_class rclass)
2128 {
2129   if (asm_noperands (PATTERN (insn)) >= 0)
2130     error_for_asm (insn, "can%'t find a register in class %qs while "
2131 		   "reloading %<asm%>",
2132 		   reg_class_names[rclass]);
2133   else
2134     {
2135       error ("unable to find a register to spill in class %qs",
2136 	     reg_class_names[rclass]);
2137 
2138       if (dump_file)
2139 	{
2140 	  fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2141 	  debug_reload_to_stream (dump_file);
2142 	}
2143       fatal_insn ("this is the insn:", insn);
2144     }
2145 }
2146 
2147 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2148    data that is dead in INSN.  */
2149 
2150 static void
2151 delete_dead_insn (rtx_insn *insn)
2152 {
2153   rtx_insn *prev = prev_active_insn (insn);
2154   rtx prev_dest;
2155 
2156   /* If the previous insn sets a register that dies in our insn make
2157      a note that we want to run DCE immediately after reload.
2158 
2159      We used to delete the previous insn & recurse, but that's wrong for
2160      block local equivalences.  Instead of trying to figure out the exact
2161      circumstances where we can delete the potentially dead insns, just
2162      let DCE do the job.  */
2163   if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
2164       && GET_CODE (PATTERN (prev)) == SET
2165       && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2166       && reg_mentioned_p (prev_dest, PATTERN (insn))
2167       && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2168       && ! side_effects_p (SET_SRC (PATTERN (prev))))
2169     need_dce = 1;
2170 
2171   SET_INSN_DELETED (insn);
2172 }
2173 
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.

   If DONT_SHARE_P is true, the stack slot allocated here is never
   shared with or reused from another pseudo.  */

static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant (i) == 0
      && (reg_equiv_invariant (i) == 0
	  || reg_equiv_init (i) == 0)
      && reg_equiv_memory_loc (i) == 0)
    {
      rtx x = NULL_RTX;
      machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      /* The total size must also cover the widest paradoxical-subreg
	 access recorded in reg_max_ref_width.  */
      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
      /* Byte offset used below to address the low part of the slot on
	 big-endian targets.  */
      int adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      /* If IRA gave us a slot to reuse, we are done choosing one.  */
      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
						         * BITS_PER_UNIT,
						         MODE_INT, 1),
				       adjust);
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about allocation a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  rtx stack_slot;

	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);
	  stack_slot = x;

	  /* Cancel the  big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  /* Record the slot so later spills from FROM_REG can share it.  */
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc (i) = x;
    }
}
2335 
2336 /* Mark the slots in regs_ever_live for the hard regs used by
2337    pseudo-reg number REGNO, accessed in MODE.  */
2338 
2339 static void
2340 mark_home_live_1 (int regno, machine_mode mode)
2341 {
2342   int i, lim;
2343 
2344   i = reg_renumber[regno];
2345   if (i < 0)
2346     return;
2347   lim = end_hard_regno (mode, i);
2348   while (i < lim)
2349     df_set_regs_ever_live (i++, true);
2350 }
2351 
2352 /* Mark the slots in regs_ever_live for the hard regs
2353    used by pseudo-reg number REGNO.  */
2354 
2355 void
2356 mark_home_live (int regno)
2357 {
2358   if (reg_renumber[regno] >= 0)
2359     mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2360 }
2361 
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is nonzero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.

   Records per-label offsets in offsets_at/offsets_known_at, and clears
   reg_eliminate[].can_eliminate when conflicting offsets are found.  */

static void
set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* References to non-local labels are ignored here.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = LABEL_REF_LABEL (x);

      /* ... fall through ...  */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && BARRIER_P (tem))
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_TABLE_DATA:
      /* Scan the table's ADDR_VEC/ADDR_DIFF_VEC pattern.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);
      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ...  */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
	 to indirectly and hence must have all eliminations at their
	 initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
	 at their initial offsets.  We want the first field for PARALLEL
	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (SET_SRC (x), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Check both arms of the conditional branch; a non-label,
	     non-PC/RETURN arm falls through to the conservative case
	     below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
2514 
/* This function examines every reg that occurs in X and adjusts the
   costs for its elimination which are gathered by IRA.  INSN is the
   insn in which X occurs.  We do not recurse into MEM expressions.  */

static void
note_reg_elim_costly (const_rtx x, rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      /* NOTE: this inner X intentionally shadows the parameter.  */
      const_rtx x = *iter;
      if (MEM_P (x))
	iter.skip_subrtxes ();
      else if (REG_P (x)
	       && REGNO (x) >= FIRST_PSEUDO_REGISTER
	       && reg_equiv_init (REGNO (x))
	       && reg_equiv_invariant (REGNO (x)))
	{
	  /* Estimate what the pseudo's invariant equivalence would cost
	     after elimination, and charge that (scaled by block
	     frequency) against keeping the equivalence.  */
	  rtx t = reg_equiv_invariant (REGNO (x));
	  rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
	  int cost = set_src_cost (new_rtx, optimize_bb_for_speed_p (elim_bb));
	  int freq = REG_FREQ_FROM_BB (elim_bb);

	  if (cost != 0)
	    ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
	}
    }
}
2543 
2544 /* Scan X and replace any eliminable registers (such as fp) with a
2545    replacement (such as sp), plus an offset.
2546 
2547    MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2548    much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2549    MEM, we are allowed to replace a sum of a register and the constant zero
2550    with the register, which we cannot do outside a MEM.  In addition, we need
2551    to record the fact that a register is referenced outside a MEM.
2552 
2553    If INSN is an insn, it is the insn containing X.  If we replace a REG
2554    in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2555    CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2556    the REG is being modified.
2557 
2558    Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2559    That's used when we eliminate in expressions stored in notes.
2560    This means, do not set ref_outside_mem even if the reference
2561    is outside of MEMs.
2562 
2563    If FOR_COSTS is true, we are being called before reload in order to
2564    estimate the costs of keeping registers with an equivalence unallocated.
2565 
2566    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2567    replacements done assuming all offsets are at their initial values.  If
2568    they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2569    encounter, return the actual location so that find_reloads will do
2570    the proper thing.  */
2571 
static rtx
eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
		  bool may_use_invariant, bool for_costs)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  rtx new_rtx;
  int i, j;
  const char *fmt;
  /* Nonzero once X has been shallow-copied by the generic traversal at
     the bottom, so we copy it at most once.  */
  int copied = 0;

  /* Outside of a function body there is nothing to eliminate
     (presumably no frame exists to refer to -- confirm).  */
  if (! current_function_decl)
    return x;

  switch (code)
    {
    /* Leaf rtxes that can never contain an eliminable register.  */
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return x;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
	 is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);

	}
      /* A pseudo that did not get a hard register but has an invariant
	 equivalence: substitute the (eliminated) invariant if allowed.  */
      else if (reg_renumber && reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_invariant (regno))
	{
	  if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
	    return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
			             mem_mode, insn, true, for_costs);
	  /* There exists at least one use of REGNO that cannot be
	     eliminated.  Prevent the defining insn from being deleted.  */
	  reg_equiv_init (regno) = NULL_RTX;
	  if (!for_costs)
	    alter_reg (regno, -1, true);
	}
      return x;

    /* You might think handling MINUS in a manner similar to PLUS is a
       good idea.  It is not.  It has been tried multiple times and every
       time the change has had to have been reverted.

       Other parts of reload know a PLUS is special (gen_reload for example)
       and require special code to handle code a reloaded PLUS operand.

       Also consider backends where the flags register is clobbered by a
       MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
       lea instruction comes to mind).  If we try to reload a MINUS, we
       may kill the flags register that was holding a useful value.

       So, please before trying to handle MINUS, consider reload as a
       whole instead of this little section as well as the backend issues.  */
    case PLUS:
      /* If this is the sum of an eliminable register and a constant, rework
	 the sum.  */
      if (REG_P (XEXP (x, 0))
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONSTANT_P (XEXP (x, 1)))
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	      {
		/* The only time we want to replace a PLUS with a REG (this
		   occurs when the constant operand of the PLUS is the negative
		   of the offset) is when we are inside a MEM.  We won't want
		   to do so at other times because that would change the
		   structure of the insn in a way that reload can't handle.
		   We special-case the commonest situation in
		   eliminate_regs_in_insn, so just replace a PLUS with a
		   PLUS here, unless inside a MEM.  */
		if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
		    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
		  return ep->to_rtx;
		else
		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
				       plus_constant (Pmode, XEXP (x, 1),
						      ep->previous_offset));
	      }

	  /* If the register is not eliminable, we are done since the other
	     operand is a constant.  */
	  return x;
	}

      /* If this is part of an address, we want to bring any constant to the
	 outermost PLUS.  We will do this by doing register replacement in
	 our operands and seeing if a constant shows up in one of them.

	 Note that there is no risk of modifying the structure of the insn,
	 since we only get called for its operands, thus we are either
	 modifying the address inside a MEM, or something like an address
	 operand of a load-address insn.  */

      {
	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
				     for_costs);
	rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
				     for_costs);

	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
	  {
	    /* If one side is a PLUS and the other side is a pseudo that
	       didn't get a hard register but has a reg_equiv_constant,
	       we must replace the constant here since it may no longer
	       be in the position of any operand.  */
	    if (GET_CODE (new0) == PLUS && REG_P (new1)
		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
		&& reg_renumber[REGNO (new1)] < 0
		&& reg_equivs
		&& reg_equiv_constant (REGNO (new1)) != 0)
	      new1 = reg_equiv_constant (REGNO (new1));
	    else if (GET_CODE (new1) == PLUS && REG_P (new0)
		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
		     && reg_renumber[REGNO (new0)] < 0
		     && reg_equiv_constant (REGNO (new0)) != 0)
	      new0 = reg_equiv_constant (REGNO (new0));

	    new_rtx = form_sum (GET_MODE (x), new0, new1);

	    /* As above, if we are not inside a MEM we do not want to
	       turn a PLUS into something else.  We might try to do so here
	       for an addition of 0 if we aren't optimizing.  */
	    if (! mem_mode && GET_CODE (new_rtx) != PLUS)
	      return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
	    else
	      return new_rtx;
	  }
      }
      return x;

    case MULT:
      /* If this is the product of an eliminable register and a
	 constant, apply the distribute law and move the constant out
	 so that we have (plus (mult ..) ..).  This is needed in order
	 to keep load-address insns valid.   This case is pathological.
	 We ignore the possibility of overflow here.  */
      if (REG_P (XEXP (x, 0))
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONST_INT_P (XEXP (x, 1)))
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	    {
	      if (! mem_mode
		  /* Refs inside notes or in DEBUG_INSNs don't count for
		     this purpose.  */
		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
				      || GET_CODE (insn) == INSN_LIST
				      || DEBUG_INSN_P (insn))))
		ep->ref_outside_mem = 1;

	      return
		plus_constant (Pmode,
			       gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
			       ep->previous_offset * INTVAL (XEXP (x, 1)));
	    }

      /* ... fall through ...  */

    case CALL:
    case COMPARE:
    /* See comments before PLUS about handling MINUS.  */
    case MINUS:
    case DIV:      case UDIV:
    case MOD:      case UMOD:
    case AND:      case IOR:      case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE:       case EQ:
    case GE:       case GT:       case GEU:    case GTU:
    case LE:       case LT:       case LEU:    case LTU:
      {
	/* Generic binary operation: eliminate in each operand and rebuild
	   only if something changed.  Operand 1 may be absent for some
	   codes, so guard against a null XEXP.  */
	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
				     for_costs);
	rtx new1 = XEXP (x, 1)
	  ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
			      for_costs) : 0;

	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
      }
      return x;

    case EXPR_LIST:
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
      if (XEXP (x, 0))
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
				      for_costs);
	  if (new_rtx != XEXP (x, 0))
	    {
	      /* If this is a REG_DEAD note, it is not valid anymore.
		 Using the eliminated version could result in creating a
		 REG_DEAD note for the stack or frame pointer.  */
	      if (REG_NOTE_KIND (x) == REG_DEAD)
		return (XEXP (x, 1)
			? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
					    for_costs)
			: NULL_RTX);

	      x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
	    }
	}

      /* ... fall through ...  */

    case INSN_LIST:
    case INT_LIST:
      /* Now do eliminations in the rest of the chain.  If this was
	 an EXPR_LIST, this might result in allocating more memory than is
	 strictly needed, but it simplifies the code.  */
      if (XEXP (x, 1))
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
				      for_costs);
	  if (new_rtx != XEXP (x, 1))
	    return
	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
	}
      return x;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
      /* We do not support elimination of a register that is modified.
	 elimination_effects has already made sure that this does not
	 happen.  */
      return x;

    case PRE_MODIFY:
    case POST_MODIFY:
      /* We do not support elimination of a register that is modified.
	 elimination_effects has already made sure that this does not
	 happen.  The only remaining case we need to consider here is
	 that the increment value may be an eliminable register.  */
      if (GET_CODE (XEXP (x, 1)) == PLUS
	  && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
	{
	  rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
					  insn, true, for_costs);

	  if (new_rtx != XEXP (XEXP (x, 1), 1))
	    return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
				   gen_rtx_PLUS (GET_MODE (x),
						 XEXP (x, 0), new_rtx));
	}
      return x;

    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      /* Generic unary operation: eliminate in the sole operand and
	 rebuild only if it changed.  */
      new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
				  for_costs);
      if (new_rtx != XEXP (x, 0))
	return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
      return x;

    case SUBREG:
      /* Similar to above processing, but preserve SUBREG_BYTE.
	 Convert (subreg (mem)) to (mem) if not paradoxical.
	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
	 pseudo didn't get a hard reg, we must replace this with the
	 eliminated version of the memory location because push_reload
	 may do the replacement in certain circumstances.  */
      if (REG_P (SUBREG_REG (x))
	  && !paradoxical_subreg_p (x)
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	{
	  new_rtx = SUBREG_REG (x);
	}
      else
	new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);

      if (new_rtx != SUBREG_REG (x))
	{
	  int x_size = GET_MODE_SIZE (GET_MODE (x));
	  int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));

	  if (MEM_P (new_rtx)
	      && ((x_size < new_size
#ifdef WORD_REGISTER_OPERATIONS
		   /* On these machines, combine can create rtl of the form
		      (set (subreg:m1 (reg:m2 R) 0) ...)
		      where m1 < m2, and expects something interesting to
		      happen to the entire word.  Moreover, it will use the
		      (reg:m2 R) later, expecting all bits to be preserved.
		      So if the number of words is the same, preserve the
		      subreg so that push_reload can see it.  */
		   && ! ((x_size - 1) / UNITS_PER_WORD
			 == (new_size -1 ) / UNITS_PER_WORD)
#endif
		   )
		  || x_size == new_size)
	      )
	    return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
	  else
	    return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
	}

      return x;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call and copy the flags.  While we are here, handle this
	 case more efficiently.  */

      new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
				  for_costs);
      /* When estimating costs, flag addresses that elimination turned
	 from valid into invalid -- they will need a reload.  */
      if (for_costs
	  && memory_address_p (GET_MODE (x), XEXP (x, 0))
	  && !memory_address_p (GET_MODE (x), new_rtx))
	note_reg_elim_costly (XEXP (x, 0), insn);

      return replace_equiv_address_nv (x, new_rtx);

    case USE:
      /* Handle insn_list USE that a call to a pure function may generate.  */
      new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
				  for_costs);
      if (new_rtx != XEXP (x, 0))
	return gen_rtx_USE (GET_MODE (x), new_rtx);
      return x;

    case CLOBBER:
    case ASM_OPERANDS:
      /* Only reachable for debug insns (the assert enforces this); fall
	 through to the generic operand traversal below.  */
      gcc_assert (insn && DEBUG_INSN_P (insn));
      break;

    case SET:
      gcc_unreachable ();

    default:
      break;
    }

  /* Process each of our operands recursively.  If any have changed, make a
     copy of the rtx.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
				      for_costs);
	  if (new_rtx != XEXP (x, i) && ! copied)
	    {
	      x = shallow_copy_rtx (x);
	      copied = 1;
	    }
	  XEXP (x, i) = new_rtx;
	}
      else if (*fmt == 'E')
	{
	  /* Like COPIED, but per-vector: copy the rtvec at most once.  */
	  int copied_vec = 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
					  for_costs);
	      if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
		{
		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
					     XVEC (x, i)->elem);
		  if (! copied)
		    {
		      x = shallow_copy_rtx (x);
		      copied = 1;
		    }
		  XVEC (x, i) = new_v;
		  copied_vec = 1;
		}
	      XVECEXP (x, i, j) = new_rtx;
	    }
	}
    }

  return x;
}
2982 
2983 rtx
2984 eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2985 {
2986   if (reg_eliminate == NULL)
2987     {
2988       gcc_assert (targetm.no_register_allocation);
2989       return x;
2990     }
2991   return eliminate_regs_1 (x, mem_mode, insn, false, false);
2992 }
2993 
2994 /* Scan rtx X for modifications of elimination target registers.  Update
2995    the table of eliminables to reflect the changed state.  MEM_MODE is
2996    the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
2997 
static void
elimination_effects (rtx x, machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    /* Leaf rtxes that cannot modify an eliminable register.  */
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare eliminable hard
	 register: record whether it is referenced outside a MEM.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }

	}
      /* For an unallocated pseudo with a non-invariant constant
	 equivalence, scan the equivalence for effects too.  */
      else if (reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_constant (regno)
	       && ! function_invariant_p (reg_equiv_constant (regno)))
	elimination_effects (reg_equiv_constant (regno), mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    int size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		/* Only (to_rtx = to_rtx + const) keeps the rule alive;
		   any other modification kills it.  */
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* A non-paradoxical subreg of a pseudo with a memory equivalence
	 will be replaced wholesale elsewhere, so it has no effects to
	 record here.  */
      if (REG_P (SUBREG_REG (x))
	  && (GET_MODE_SIZE (GET_MODE (x))
	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  /* Generic case: recurse into every rtx operand and vector element.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
3196 
3197 /* Descend through rtx X and verify that no references to eliminable registers
3198    remain.  If any do remain, mark the involved register as not
3199    eliminable.  */
3200 
3201 static void
3202 check_eliminable_occurrences (rtx x)
3203 {
3204   const char *fmt;
3205   int i;
3206   enum rtx_code code;
3207 
3208   if (x == 0)
3209     return;
3210 
3211   code = GET_CODE (x);
3212 
3213   if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3214     {
3215       struct elim_table *ep;
3216 
3217       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3218 	if (ep->from_rtx == x)
3219 	  ep->can_eliminate = 0;
3220       return;
3221     }
3222 
3223   fmt = GET_RTX_FORMAT (code);
3224   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3225     {
3226       if (*fmt == 'e')
3227 	check_eliminable_occurrences (XEXP (x, i));
3228       else if (*fmt == 'E')
3229 	{
3230 	  int j;
3231 	  for (j = 0; j < XVECLEN (x, i); j++)
3232 	    check_eliminable_occurrences (XVECEXP (x, i, j));
3233 	}
3234     }
3235 }
3236 
3237 /* Scan INSN and eliminate all eliminable registers in it.
3238 
3239    If REPLACE is nonzero, do the replacement destructively.  Also
3240    delete the insn as dead it if it is setting an eliminable register.
3241 
3242    If REPLACE is zero, do all our allocations in reload_obstack.
3243 
3244    If no eliminations were done and this insn doesn't require any elimination
3245    processing (these are not identical conditions: it might be updating sp,
3246    but not referencing fp; this needs to be seen during reload_as_needed so
3247    that the offset between fp and sp can be taken into consideration), zero
3248    is returned.  Otherwise, 1 is returned.  */
3249 
3250 static int
3251 eliminate_regs_in_insn (rtx_insn *insn, int replace)
3252 {
3253   int icode = recog_memoized (insn);
3254   rtx old_body = PATTERN (insn);
3255   int insn_is_asm = asm_noperands (old_body) >= 0;
3256   rtx old_set = single_set (insn);
3257   rtx new_body;
3258   int val = 0;
3259   int i;
3260   rtx substed_operand[MAX_RECOG_OPERANDS];
3261   rtx orig_operand[MAX_RECOG_OPERANDS];
3262   struct elim_table *ep;
3263   rtx plus_src, plus_cst_src;
3264 
3265   if (! insn_is_asm && icode < 0)
3266     {
3267       gcc_assert (DEBUG_INSN_P (insn)
3268 		  || GET_CODE (PATTERN (insn)) == USE
3269 		  || GET_CODE (PATTERN (insn)) == CLOBBER
3270 		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3271       if (DEBUG_INSN_P (insn))
3272 	INSN_VAR_LOCATION_LOC (insn)
3273 	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3274       return 0;
3275     }
3276 
3277   if (old_set != 0 && REG_P (SET_DEST (old_set))
3278       && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3279     {
3280       /* Check for setting an eliminable register.  */
3281       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3282 	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3283 	  {
3284 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3285 	    /* If this is setting the frame pointer register to the
3286 	       hardware frame pointer register and this is an elimination
3287 	       that will be done (tested above), this insn is really
3288 	       adjusting the frame pointer downward to compensate for
3289 	       the adjustment done before a nonlocal goto.  */
3290 	    if (ep->from == FRAME_POINTER_REGNUM
3291 		&& ep->to == HARD_FRAME_POINTER_REGNUM)
3292 	      {
3293 		rtx base = SET_SRC (old_set);
3294 		rtx_insn *base_insn = insn;
3295 		HOST_WIDE_INT offset = 0;
3296 
3297 		while (base != ep->to_rtx)
3298 		  {
3299 		    rtx_insn *prev_insn;
3300 		    rtx prev_set;
3301 
3302 		    if (GET_CODE (base) == PLUS
3303 		        && CONST_INT_P (XEXP (base, 1)))
3304 		      {
3305 		        offset += INTVAL (XEXP (base, 1));
3306 		        base = XEXP (base, 0);
3307 		      }
3308 		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3309 			     && (prev_set = single_set (prev_insn)) != 0
3310 			     && rtx_equal_p (SET_DEST (prev_set), base))
3311 		      {
3312 		        base = SET_SRC (prev_set);
3313 		        base_insn = prev_insn;
3314 		      }
3315 		    else
3316 		      break;
3317 		  }
3318 
3319 		if (base == ep->to_rtx)
3320 		  {
3321 		    rtx src = plus_constant (Pmode, ep->to_rtx,
3322 					     offset - ep->offset);
3323 
3324 		    new_body = old_body;
3325 		    if (! replace)
3326 		      {
3327 			new_body = copy_insn (old_body);
3328 			if (REG_NOTES (insn))
3329 			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3330 		      }
3331 		    PATTERN (insn) = new_body;
3332 		    old_set = single_set (insn);
3333 
3334 		    /* First see if this insn remains valid when we
3335 		       make the change.  If not, keep the INSN_CODE
3336 		       the same and let reload fit it up.  */
3337 		    validate_change (insn, &SET_SRC (old_set), src, 1);
3338 		    validate_change (insn, &SET_DEST (old_set),
3339 				     ep->to_rtx, 1);
3340 		    if (! apply_change_group ())
3341 		      {
3342 			SET_SRC (old_set) = src;
3343 			SET_DEST (old_set) = ep->to_rtx;
3344 		      }
3345 
3346 		    val = 1;
3347 		    goto done;
3348 		  }
3349 	      }
3350 #endif
3351 
3352 	    /* In this case this insn isn't serving a useful purpose.  We
3353 	       will delete it in reload_as_needed once we know that this
3354 	       elimination is, in fact, being done.
3355 
3356 	       If REPLACE isn't set, we can't delete this insn, but needn't
3357 	       process it since it won't be used unless something changes.  */
3358 	    if (replace)
3359 	      {
3360 		delete_dead_insn (insn);
3361 		return 1;
3362 	      }
3363 	    val = 1;
3364 	    goto done;
3365 	  }
3366     }
3367 
3368   /* We allow one special case which happens to work on all machines we
3369      currently support: a single set with the source or a REG_EQUAL
3370      note being a PLUS of an eliminable register and a constant.  */
3371   plus_src = plus_cst_src = 0;
3372   if (old_set && REG_P (SET_DEST (old_set)))
3373     {
3374       if (GET_CODE (SET_SRC (old_set)) == PLUS)
3375 	plus_src = SET_SRC (old_set);
3376       /* First see if the source is of the form (plus (...) CST).  */
3377       if (plus_src
3378 	  && CONST_INT_P (XEXP (plus_src, 1)))
3379 	plus_cst_src = plus_src;
3380       else if (REG_P (SET_SRC (old_set))
3381 	       || plus_src)
3382 	{
3383 	  /* Otherwise, see if we have a REG_EQUAL note of the form
3384 	     (plus (...) CST).  */
3385 	  rtx links;
3386 	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3387 	    {
3388 	      if ((REG_NOTE_KIND (links) == REG_EQUAL
3389 		   || REG_NOTE_KIND (links) == REG_EQUIV)
3390 		  && GET_CODE (XEXP (links, 0)) == PLUS
3391 		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3392 		{
3393 		  plus_cst_src = XEXP (links, 0);
3394 		  break;
3395 		}
3396 	    }
3397 	}
3398 
3399       /* Check that the first operand of the PLUS is a hard reg or
3400 	 the lowpart subreg of one.  */
3401       if (plus_cst_src)
3402 	{
3403 	  rtx reg = XEXP (plus_cst_src, 0);
3404 	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3405 	    reg = SUBREG_REG (reg);
3406 
3407 	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3408 	    plus_cst_src = 0;
3409 	}
3410     }
3411   if (plus_cst_src)
3412     {
3413       rtx reg = XEXP (plus_cst_src, 0);
3414       HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3415 
3416       if (GET_CODE (reg) == SUBREG)
3417 	reg = SUBREG_REG (reg);
3418 
3419       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3420 	if (ep->from_rtx == reg && ep->can_eliminate)
3421 	  {
3422 	    rtx to_rtx = ep->to_rtx;
3423 	    offset += ep->offset;
3424 	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3425 
3426 	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3427 	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3428 				    to_rtx);
3429 	    /* If we have a nonzero offset, and the source is already
3430 	       a simple REG, the following transformation would
3431 	       increase the cost of the insn by replacing a simple REG
3432 	       with (plus (reg sp) CST).  So try only when we already
3433 	       had a PLUS before.  */
3434 	    if (offset == 0 || plus_src)
3435 	      {
3436 		rtx new_src = plus_constant (GET_MODE (to_rtx),
3437 					     to_rtx, offset);
3438 
3439 		new_body = old_body;
3440 		if (! replace)
3441 		  {
3442 		    new_body = copy_insn (old_body);
3443 		    if (REG_NOTES (insn))
3444 		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3445 		  }
3446 		PATTERN (insn) = new_body;
3447 		old_set = single_set (insn);
3448 
3449 		/* First see if this insn remains valid when we make the
3450 		   change.  If not, try to replace the whole pattern with
3451 		   a simple set (this may help if the original insn was a
3452 		   PARALLEL that was only recognized as single_set due to
3453 		   REG_UNUSED notes).  If this isn't valid either, keep
3454 		   the INSN_CODE the same and let reload fix it up.  */
3455 		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3456 		  {
3457 		    rtx new_pat = gen_rtx_SET (VOIDmode,
3458 					       SET_DEST (old_set), new_src);
3459 
3460 		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3461 		      SET_SRC (old_set) = new_src;
3462 		  }
3463 	      }
3464 	    else
3465 	      break;
3466 
3467 	    val = 1;
3468 	    /* This can't have an effect on elimination offsets, so skip right
3469 	       to the end.  */
3470 	    goto done;
3471 	  }
3472     }
3473 
3474   /* Determine the effects of this insn on elimination offsets.  */
3475   elimination_effects (old_body, VOIDmode);
3476 
3477   /* Eliminate all eliminable registers occurring in operands that
3478      can be handled by reload.  */
3479   extract_insn (insn);
3480   for (i = 0; i < recog_data.n_operands; i++)
3481     {
3482       orig_operand[i] = recog_data.operand[i];
3483       substed_operand[i] = recog_data.operand[i];
3484 
3485       /* For an asm statement, every operand is eliminable.  */
3486       if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3487 	{
3488 	  bool is_set_src, in_plus;
3489 
3490 	  /* Check for setting a register that we know about.  */
3491 	  if (recog_data.operand_type[i] != OP_IN
3492 	      && REG_P (orig_operand[i]))
3493 	    {
3494 	      /* If we are assigning to a register that can be eliminated, it
3495 		 must be as part of a PARALLEL, since the code above handles
3496 		 single SETs.  We must indicate that we can no longer
3497 		 eliminate this reg.  */
3498 	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3499 		   ep++)
3500 		if (ep->from_rtx == orig_operand[i])
3501 		  ep->can_eliminate = 0;
3502 	    }
3503 
3504 	  /* Companion to the above plus substitution, we can allow
3505 	     invariants as the source of a plain move.  */
3506 	  is_set_src = false;
3507 	  if (old_set
3508 	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
3509 	    is_set_src = true;
3510 	  in_plus = false;
3511 	  if (plus_src
3512 	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3513 		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3514 	    in_plus = true;
3515 
3516 	  substed_operand[i]
3517 	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3518 			        replace ? insn : NULL_RTX,
3519 				is_set_src || in_plus, false);
3520 	  if (substed_operand[i] != orig_operand[i])
3521 	    val = 1;
3522 	  /* Terminate the search in check_eliminable_occurrences at
3523 	     this point.  */
3524 	  *recog_data.operand_loc[i] = 0;
3525 
3526 	  /* If an output operand changed from a REG to a MEM and INSN is an
3527 	     insn, write a CLOBBER insn.  */
3528 	  if (recog_data.operand_type[i] != OP_IN
3529 	      && REG_P (orig_operand[i])
3530 	      && MEM_P (substed_operand[i])
3531 	      && replace)
3532 	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
3533 	}
3534     }
3535 
3536   for (i = 0; i < recog_data.n_dups; i++)
3537     *recog_data.dup_loc[i]
3538       = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3539 
3540   /* If any eliminable remain, they aren't eliminable anymore.  */
3541   check_eliminable_occurrences (old_body);
3542 
3543   /* Substitute the operands; the new values are in the substed_operand
3544      array.  */
3545   for (i = 0; i < recog_data.n_operands; i++)
3546     *recog_data.operand_loc[i] = substed_operand[i];
3547   for (i = 0; i < recog_data.n_dups; i++)
3548     *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3549 
3550   /* If we are replacing a body that was a (set X (plus Y Z)), try to
3551      re-recognize the insn.  We do this in case we had a simple addition
3552      but now can do this as a load-address.  This saves an insn in this
3553      common case.
3554      If re-recognition fails, the old insn code number will still be used,
3555      and some register operands may have changed into PLUS expressions.
3556      These will be handled by find_reloads by loading them into a register
3557      again.  */
3558 
3559   if (val)
3560     {
3561       /* If we aren't replacing things permanently and we changed something,
3562 	 make another copy to ensure that all the RTL is new.  Otherwise
3563 	 things can go wrong if find_reload swaps commutative operands
3564 	 and one is inside RTL that has been copied while the other is not.  */
3565       new_body = old_body;
3566       if (! replace)
3567 	{
3568 	  new_body = copy_insn (old_body);
3569 	  if (REG_NOTES (insn))
3570 	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3571 	}
3572       PATTERN (insn) = new_body;
3573 
3574       /* If we had a move insn but now we don't, rerecognize it.  This will
3575 	 cause spurious re-recognition if the old move had a PARALLEL since
3576 	 the new one still will, but we can't call single_set without
3577 	 having put NEW_BODY into the insn and the re-recognition won't
3578 	 hurt in this rare case.  */
3579       /* ??? Why this huge if statement - why don't we just rerecognize the
3580 	 thing always?  */
3581       if (! insn_is_asm
3582 	  && old_set != 0
3583 	  && ((REG_P (SET_SRC (old_set))
3584 	       && (GET_CODE (new_body) != SET
3585 		   || !REG_P (SET_SRC (new_body))))
3586 	      /* If this was a load from or store to memory, compare
3587 		 the MEM in recog_data.operand to the one in the insn.
3588 		 If they are not equal, then rerecognize the insn.  */
3589 	      || (old_set != 0
3590 		  && ((MEM_P (SET_SRC (old_set))
3591 		       && SET_SRC (old_set) != recog_data.operand[1])
3592 		      || (MEM_P (SET_DEST (old_set))
3593 			  && SET_DEST (old_set) != recog_data.operand[0])))
3594 	      /* If this was an add insn before, rerecognize.  */
3595 	      || GET_CODE (SET_SRC (old_set)) == PLUS))
3596 	{
3597 	  int new_icode = recog (PATTERN (insn), insn, 0);
3598 	  if (new_icode >= 0)
3599 	    INSN_CODE (insn) = new_icode;
3600 	}
3601     }
3602 
3603   /* Restore the old body.  If there were any changes to it, we made a copy
3604      of it while the changes were still in place, so we'll correctly return
3605      a modified insn below.  */
3606   if (! replace)
3607     {
3608       /* Restore the old body.  */
3609       for (i = 0; i < recog_data.n_operands; i++)
3610 	/* Restoring a top-level match_parallel would clobber the new_body
3611 	   we installed in the insn.  */
3612 	if (recog_data.operand_loc[i] != &PATTERN (insn))
3613 	  *recog_data.operand_loc[i] = orig_operand[i];
3614       for (i = 0; i < recog_data.n_dups; i++)
3615 	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3616     }
3617 
3618   /* Update all elimination pairs to reflect the status after the current
3619      insn.  The changes we make were determined by the earlier call to
3620      elimination_effects.
3621 
3622      We also detect cases where register elimination cannot be done,
3623      namely, if a register would be both changed and referenced outside a MEM
3624      in the resulting insn since such an insn is often undefined and, even if
3625      not, we cannot know what meaning will be given to it.  Note that it is
3626      valid to have a register used in an address in an insn that changes it
3627      (presumably with a pre- or post-increment or decrement).
3628 
3629      If anything changes, return nonzero.  */
3630 
3631   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3632     {
3633       if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3634 	ep->can_eliminate = 0;
3635 
3636       ep->ref_outside_mem = 0;
3637 
3638       if (ep->previous_offset != ep->offset)
3639 	val = 1;
3640     }
3641 
3642  done:
3643   /* If we changed something, perform elimination in REG_NOTES.  This is
3644      needed even when REPLACE is zero because a REG_DEAD note might refer
3645      to a register that we eliminate and could cause a different number
3646      of spill registers to be needed in the final reload pass than in
3647      the pre-passes.  */
3648   if (val && REG_NOTES (insn) != 0)
3649     REG_NOTES (insn)
3650       = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3651 			  false);
3652 
3653   return val;
3654 }
3655 
3656 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3657    register allocator.  INSN is the instruction we need to examine, we perform
3658    eliminations in its operands and record cases where eliminating a reg with
3659    an invariant equivalence would add extra cost.  */
3660 
static void
elimination_costs_in_insn (rtx_insn *insn)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  int i;
  rtx orig_operand[MAX_RECOG_OPERANDS];
  rtx orig_dup[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;
  bool sets_reg_p;

  /* An unrecognizable insn that isn't an asm must be one of the
     harmless pattern kinds asserted below; there is nothing to cost.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      return;
    }

  /* A single set of an eliminable hard register is handled by the
     elimination machinery itself; no extra cost to record.  */
  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  return;
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  sets_reg_p = false;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      sets_reg_p = true;
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  /* Save the duplicate operand locations so they can be restored after
     the trial substitutions below.  */
  for (i = 0; i < recog_data.n_dups; i++)
    orig_dup[i] = *recog_data.dup_loc[i];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  /* A set source that is not a plain register copy is where an
	     eliminated reg would be costly to substitute; record it.  */
	  if (is_set_src && !sets_reg_p)
	    note_reg_elim_costly (SET_SRC (old_set), insn);
	  in_plus = false;
	  if (plus_src && sets_reg_p
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  /* Trial substitution only (last argument true): we want the
	     cost notes, not a permanently modified insn.  */
	  eliminate_regs_1 (recog_data.operand[i], VOIDmode,
			    NULL_RTX,
			    is_set_src || in_plus, true);
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;
	}
    }

  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Restore the old body.  */
  for (i = 0; i < recog_data.n_operands; i++)
    *recog_data.operand_loc[i] = orig_operand[i];
  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i] = orig_dup[i];

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;
    }

  return;
}
3808 
3809 /* Loop through all elimination pairs.
3810    Recalculate the number not at initial offset.
3811 
3812    Compute the maximum offset (minimum offset if the stack does not
3813    grow downward) for each elimination pair.  */
3814 
3815 static void
3816 update_eliminable_offsets (void)
3817 {
3818   struct elim_table *ep;
3819 
3820   num_not_at_initial_offset = 0;
3821   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3822     {
3823       ep->previous_offset = ep->offset;
3824       if (ep->can_eliminate && ep->offset != ep->initial_offset)
3825 	num_not_at_initial_offset++;
3826     }
3827 }
3828 
3829 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3830    replacement we currently believe is valid, mark it as not eliminable if X
3831    modifies DEST in any way other than by adding a constant integer to it.
3832 
3833    If DEST is the frame pointer, we do nothing because we assume that
3834    all assignments to the hard frame pointer are nonlocal gotos and are being
3835    done at a time when they are valid and do not disturb anything else.
3836    Some machines want to eliminate a fake argument pointer with either the
3837    frame or stack pointer.  Assignments to the hard frame pointer must not
3838    prevent this elimination.
3839 
3840    Called via note_stores from reload before starting its passes to scan
3841    the insns of the function.  */
3842 
3843 static void
3844 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3845 {
3846   unsigned int i;
3847 
3848   /* A SUBREG of a hard register here is just changing its mode.  We should
3849      not see a SUBREG of an eliminable hard register, but check just in
3850      case.  */
3851   if (GET_CODE (dest) == SUBREG)
3852     dest = SUBREG_REG (dest);
3853 
3854   if (dest == hard_frame_pointer_rtx)
3855     return;
3856 
3857   for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3858     if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3859 	&& (GET_CODE (x) != SET
3860 	    || GET_CODE (SET_SRC (x)) != PLUS
3861 	    || XEXP (SET_SRC (x), 0) != dest
3862 	    || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3863       {
3864 	reg_eliminate[i].can_eliminate_previous
3865 	  = reg_eliminate[i].can_eliminate = 0;
3866 	num_eliminable--;
3867       }
3868 }
3869 
3870 /* Verify that the initial elimination offsets did not change since the
3871    last call to set_initial_elim_offsets.  This is used to catch cases
3872    where something illegal happened during reload_as_needed that could
3873    cause incorrect code to be generated if we did not check for it.  */
3874 
3875 static bool
3876 verify_initial_elim_offsets (void)
3877 {
3878   HOST_WIDE_INT t;
3879 
3880   if (!num_eliminable)
3881     return true;
3882 
3883 #ifdef ELIMINABLE_REGS
3884   {
3885    struct elim_table *ep;
3886 
3887    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3888      {
3889        INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3890        if (t != ep->initial_offset)
3891 	 return false;
3892      }
3893   }
3894 #else
3895   INITIAL_FRAME_POINTER_OFFSET (t);
3896   if (t != reg_eliminate[0].initial_offset)
3897     return false;
3898 #endif
3899 
3900   return true;
3901 }
3902 
3903 /* Reset all offsets on eliminable registers to their initial values.  */
3904 
3905 static void
3906 set_initial_elim_offsets (void)
3907 {
3908   struct elim_table *ep = reg_eliminate;
3909 
3910 #ifdef ELIMINABLE_REGS
3911   for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3912     {
3913       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3914       ep->previous_offset = ep->offset = ep->initial_offset;
3915     }
3916 #else
3917   INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3918   ep->previous_offset = ep->offset = ep->initial_offset;
3919 #endif
3920 
3921   num_not_at_initial_offset = 0;
3922 }
3923 
3924 /* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3925 
static void
set_initial_eh_label_offset (rtx label)
{
  /* Record LABEL as reached with every elimination at its initial
     offset (the nonzero third argument; see set_initial_label_offsets).  */
  set_label_offsets (label, NULL, 1);
}
3931 
3932 /* Initialize the known label offsets.
3933    Set a known offset for each forced label to be at the initial offset
3934    of each elimination.  We do this because we assume that all
3935    computed jumps occur from a location where each elimination is
3936    at its initial offset.
3937    For all other labels, show that we don't know the offsets.  */
3938 
3939 static void
3940 set_initial_label_offsets (void)
3941 {
3942   memset (offsets_known_at, 0, num_labels);
3943 
3944   for (rtx_insn_list *x = forced_labels; x; x = x->next ())
3945     if (x->insn ())
3946       set_label_offsets (x->insn (), NULL, 1);
3947 
3948   for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
3949     if (x->insn ())
3950       set_label_offsets (x->insn (), NULL, 1);
3951 
3952   for_each_eh_label (set_initial_eh_label_offset);
3953 }
3954 
3955 /* Set all elimination offsets to the known values for the code label given
3956    by INSN.  */
3957 
3958 static void
3959 set_offsets_for_label (rtx_insn *insn)
3960 {
3961   unsigned int i;
3962   int label_nr = CODE_LABEL_NUMBER (insn);
3963   struct elim_table *ep;
3964 
3965   num_not_at_initial_offset = 0;
3966   for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3967     {
3968       ep->offset = ep->previous_offset
3969 		 = offsets_at[label_nr - first_label_num][i];
3970       if (ep->can_eliminate && ep->offset != ep->initial_offset)
3971 	num_not_at_initial_offset++;
3972     }
3973 }
3974 
3975 /* See if anything that happened changes which eliminations are valid.
3976    For example, on the SPARC, whether or not the frame pointer can
3977    be eliminated can depend on what registers have been used.  We need
3978    not check some conditions again (such as flag_omit_frame_pointer)
3979    since they can't have changed.  */
3980 
static void
update_eliminables (HARD_REG_SET *pset)
{
  int previous_frame_pointer_needed = frame_pointer_needed;
  struct elim_table *ep;

  /* Re-test each elimination against conditions that may have changed
     since the last pass.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
         && targetm.frame_pointer_required ())
#ifdef ELIMINABLE_REGS
	|| ! targetm.can_eliminate (ep->from, ep->to)
#endif
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      int new_to = -1;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* A still-valid elimination of the frame pointer to something other
	 than the hard frame pointer means the frame pointer isn't needed,
	 unless stack realignment forces its use.  */
      if (ep->can_eliminate
	  && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM
	  && (! SUPPORTS_STACK_ALIGNMENT
	      || ! crtl->stack_realign_needed))
	frame_pointer_needed = 0;

      /* A pair that just became non-eliminable: tell the caller to spill
	 its FROM register via *PSET.  */
      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
4058 
4059 /* Call update_eliminables an spill any registers we can't eliminate anymore.
4060    Return true iff a register was spilled.  */
4061 
4062 static bool
4063 update_eliminables_and_spill (void)
4064 {
4065   int i;
4066   bool did_spill = false;
4067   HARD_REG_SET to_spill;
4068   CLEAR_HARD_REG_SET (to_spill);
4069   update_eliminables (&to_spill);
4070   AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
4071 
4072   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4073     if (TEST_HARD_REG_BIT (to_spill, i))
4074       {
4075 	spill_hard_reg (i, 1);
4076 	did_spill = true;
4077 
4078 	/* Regardless of the state of spills, if we previously had
4079 	   a register that we thought we could eliminate, but now can
4080 	   not eliminate, we must run another pass.
4081 
4082 	   Consider pseudos which have an entry in reg_equiv_* which
4083 	   reference an eliminable register.  We must make another pass
4084 	   to update reg_equiv_* so that we do not substitute in the
4085 	   old value from when we thought the elimination could be
4086 	   performed.  */
4087       }
4088   return did_spill;
4089 }
4090 
4091 /* Return true if X is used as the target register of an elimination.  */
4092 
4093 bool
4094 elimination_target_reg_p (rtx x)
4095 {
4096   struct elim_table *ep;
4097 
4098   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4099     if (ep->to_rtx == x && ep->can_eliminate)
4100       return true;
4101 
4102   return false;
4103 }
4104 
4105 /* Initialize the table of registers to eliminate.
4106    Pre-condition: global flag frame_pointer_needed has been set before
4107    calling this function.  */
4108 
static void
init_elim_table (void)
{
  struct elim_table *ep;
#ifdef ELIMINABLE_REGS
  const struct elim_table_1 *ep1;
#endif

  /* Lazily allocate the (zero-initialized) table on first use.  */
  if (!reg_eliminate)
    reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);

  num_eliminable = 0;

#ifdef ELIMINABLE_REGS
  /* Copy each pair from the target's static table, asking the target
     whether the pair is currently eliminable.  Eliminating to the stack
     pointer is disallowed when a frame pointer is needed (unless stack
     realignment permits it).  */
  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
    {
      ep->from = ep1->from;
      ep->to = ep1->to;
      ep->can_eliminate = ep->can_eliminate_previous
	= (targetm.can_eliminate (ep->from, ep->to)
	   && ! (ep->to == STACK_POINTER_REGNUM
		 && frame_pointer_needed
		 && (! SUPPORTS_STACK_ALIGNMENT
		     || ! stack_realign_fp)));
    }
#else
  /* Without ELIMINABLE_REGS there is a single frame-pointer-to-stack-
     pointer pair, valid only when no frame pointer is needed.  */
  reg_eliminate[0].from = reg_eliminate_1[0].from;
  reg_eliminate[0].to = reg_eliminate_1[0].to;
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
    }
}
4153 
4154 /* Find all the pseudo registers that didn't get hard regs
4155    but do have known equivalent constants or memory slots.
4156    These include parameters (known equivalent to parameter slots)
4157    and cse'd or loop-moved constant memory addresses.
4158 
4159    Record constant equivalents in reg_equiv_constant
4160    so they will be substituted by find_reloads.
4161    Record memory equivalents in reg_mem_equiv so they can
4162    be substituted eventually by altering the REG-rtx's.  */
4163 
static void
init_eliminable_invariants (rtx_insn *first, bool do_subregs)
{
  int i;
  rtx_insn *insn;

  grow_reg_equivs ();
  /* reg_max_ref_width is only needed when scanning paradoxical subregs.  */
  if (do_subregs)
    reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
  else
    reg_max_ref_width = NULL;

  num_eliminable_invariants = 0;

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent
     to.  If DO_SUBREGS is true, also find all paradoxical subregs and
     find largest such for each pseudo.  FIRST is the head of the insn
     list.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
	 we'll mark them with QImode.  Make sure there are no
	 previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && GET_MODE (insn) != VOIDmode)
	PUT_MODE (insn, VOIDmode);

      if (do_subregs && NONDEBUG_INSN_P (insn))
	scan_paradoxical_subregs (PATTERN (insn));

      if (set != 0 && REG_P (SET_DEST (set)))
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  rtx x;

	  if (! note)
	    continue;

	  i = REGNO (SET_DEST (set));
	  x = XEXP (note, 0);

	  /* Only genuine pseudos get equivalences recorded.  */
	  if (i <= LAST_VIRTUAL_REGISTER)
	    continue;

	  /* If flag_pic and we have constant, verify it's legitimate.  */
	  if (!CONSTANT_P (x)
	      || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
	    {
	      /* It can happen that a REG_EQUIV note contains a MEM
		 that is not a legitimate memory operand.  As later
		 stages of reload assume that all addresses found
		 in the reg_equiv_* arrays were originally legitimate,
		 we ignore such REG_EQUIV notes.  */
	      if (memory_operand (x, VOIDmode))
		{
		  /* Always unshare the equivalence, so we can
		     substitute into this insn without touching the
		     equivalence.  */
		  reg_equiv_memory_loc (i) = copy_rtx (x);
		}
	      else if (function_invariant_p (x))
		{
		  machine_mode mode;

		  mode = GET_MODE (SET_DEST (set));
		  if (GET_CODE (x) == PLUS)
		    {
		      /* This is PLUS of frame pointer and a constant,
			 and might be shared.  Unshare it.  */
		      reg_equiv_invariant (i) = copy_rtx (x);
		      num_eliminable_invariants++;
		    }
		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
		    {
		      reg_equiv_invariant (i) = x;
		      num_eliminable_invariants++;
		    }
		  else if (targetm.legitimate_constant_p (mode, x))
		    reg_equiv_constant (i) = x;
		  else
		    {
		      /* An invariant that isn't a legitimate constant is
			 forced into the constant pool and used from memory;
			 if that fails, drop the equivalencing insn too.  */
		      reg_equiv_memory_loc (i) = force_const_mem (mode, x);
		      if (! reg_equiv_memory_loc (i))
			reg_equiv_init (i) = NULL_RTX;
		    }
		}
	      else
		{
		  reg_equiv_init (i) = NULL_RTX;
		  continue;
		}
	    }
	  else
	    reg_equiv_init (i) = NULL_RTX;
	}
    }

  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init (i))
	{
	  fprintf (dump_file, "init_insns for %u: ", i);
	  print_inline_rtx (dump_file, reg_equiv_init (i), 20);
	  fprintf (dump_file, "\n");
	}
}
4280 
4281 /* Indicate that we no longer have known memory locations or constants.
4282    Free all data involved in tracking these.  */
4283 
4284 static void
4285 free_reg_equiv (void)
4286 {
4287   int i;
4288 
4289   free (offsets_known_at);
4290   free (offsets_at);
4291   offsets_at = 0;
4292   offsets_known_at = 0;
4293 
4294   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4295     if (reg_equiv_alt_mem_list (i))
4296       free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4297   vec_free (reg_equivs);
4298 }
4299 
4300 /* Kick all pseudos out of hard register REGNO.
4301 
4302    If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4303    because we found we can't eliminate some register.  In the case, no pseudos
4304    are allowed to be in the register, even if they are only in a block that
4305    doesn't require spill registers, unlike the case when we are spilling this
4306    hard reg to produce another spill register.
4307 
   Pseudos that must be kicked out are marked in spilled_pseudos.  */
4309 
4310 static void
4311 spill_hard_reg (unsigned int regno, int cant_eliminate)
4312 {
4313   int i;
4314 
4315   if (cant_eliminate)
4316     {
4317       SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4318       df_set_regs_ever_live (regno, true);
4319     }
4320 
4321   /* Spill every pseudo reg that was allocated to this reg
4322      or to something that overlaps this reg.  */
4323 
4324   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4325     if (reg_renumber[i] >= 0
4326 	&& (unsigned int) reg_renumber[i] <= regno
4327 	&& end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4328       SET_REGNO_REG_SET (&spilled_pseudos, i);
4329 }
4330 
4331 /* After find_reload_regs has been run for all insn that need reloads,
4332    and/or spill_hard_regs was called, this function is used to actually
4333    spill pseudo registers and try to reallocate them.  It also sets up the
4334    spill_regs array for use by choose_reload_regs.  */
4335 
static int
finish_spills (int global)
{
  struct insn_chain *chain;
  int something_changed = 0;
  unsigned i;
  reg_set_iterator rsi;

  /* Build the spill_regs array for the function.  */
  /* If there are some registers still to eliminate and one of the spill regs
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */

  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
      {
	spill_reg_order[i] = n_spills;
	spill_regs[n_spills++] = i;
	if (num_eliminable && ! df_regs_ever_live_p (i))
	  something_changed = 1;
	df_set_regs_ever_live (i, true);
      }
    else
      spill_reg_order[i] = -1;

  /* Deallocate the spilled pseudos; when IRA is in use, only those that
     still have a hard register assignment.  */
  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
    if (! ira_conflicts_p || reg_renumber[i] >= 0)
      {
	/* Record the current hard register the pseudo is allocated to
	   in pseudo_previous_regs so we avoid reallocating it to the
	   same hard reg in a later pass.  */
	gcc_assert (reg_renumber[i] >= 0);

	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	if (ira_conflicts_p)
	  /* Inform IRA about the change.  */
	  ira_mark_allocation_change (i);
	/* We will need to scan everything again.  */
	something_changed = 1;
      }

  /* Retry global register allocation if possible.  */
  if (global && ira_conflicts_p)
    {
      unsigned int n;

      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
      /* For every insn that needs reloads, set the registers used as spill
	 regs in pseudo_forbidden_regs for every pseudo live across the
	 insn.  */
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
	{
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	}

      /* Retry allocating the pseudos spilled in IRA and the
	 reload.  For each reg, merge the various reg sets that
	 indicate which hard regs can't be used, and call
	 ira_reassign_pseudos.  */
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
	if (reg_old_renumber[i] != reg_renumber[i])
	  {
	    if (reg_renumber[i] < 0)
	      temp_pseudo_reg_arr[n++] = i;
	    else
	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
	  }
      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
				bad_spill_regs_global,
				pseudo_forbidden_regs, pseudo_previous_regs,
				&spilled_pseudos))
	something_changed = 1;
    }
  /* Fix up the register information in the insn chain.
     This involves deleting those of the spilled pseudos which did not get
     a new hard register home from the live_{before,after} sets.  */
  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      HARD_REG_SET used_by_pseudos;
      HARD_REG_SET used_by_pseudos2;

      if (! ira_conflicts_p)
	{
	  /* Don't do it for IRA because IRA and the reload still can
	     assign hard registers to the spilled pseudos on next
	     reload iterations.  */
	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
	}
      /* Mark any unallocated hard regs as available for spills.  That
	 makes inheritance work somewhat better.  */
      if (chain->need_reload)
	{
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);

	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
	  /* Value of chain->used_spill_regs from previous iteration
	     may be not included in the value calculated here because
	     of possible removing caller-saves insns (see function
	     delete_caller_save_insns).  */
	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
	}
    }

  CLEAR_REG_SET (&changed_allocation_pseudos);
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (reg_old_renumber[i] == regno)
	continue;

      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);

      alter_reg (i, reg_old_renumber[i], false);
      reg_old_renumber[i] = regno;
      if (dump_file)
	{
	  if (regno == -1)
	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
	  else
	    fprintf (dump_file, " Register %d now in %d.\n\n",
		     i, reg_renumber[i]);
	}
    }

  return something_changed;
}
4489 
4490 /* Find all paradoxical subregs within X and update reg_max_ref_width.  */
4491 
4492 static void
4493 scan_paradoxical_subregs (rtx x)
4494 {
4495   int i;
4496   const char *fmt;
4497   enum rtx_code code = GET_CODE (x);
4498 
4499   switch (code)
4500     {
4501     case REG:
4502     case CONST:
4503     case SYMBOL_REF:
4504     case LABEL_REF:
4505     CASE_CONST_ANY:
4506     case CC0:
4507     case PC:
4508     case USE:
4509     case CLOBBER:
4510       return;
4511 
4512     case SUBREG:
4513       if (REG_P (SUBREG_REG (x))
4514 	  && (GET_MODE_SIZE (GET_MODE (x))
4515 	      > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4516 	{
4517 	  reg_max_ref_width[REGNO (SUBREG_REG (x))]
4518 	    = GET_MODE_SIZE (GET_MODE (x));
4519 	  mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4520 	}
4521       return;
4522 
4523     default:
4524       break;
4525     }
4526 
4527   fmt = GET_RTX_FORMAT (code);
4528   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4529     {
4530       if (fmt[i] == 'e')
4531 	scan_paradoxical_subregs (XEXP (x, i));
4532       else if (fmt[i] == 'E')
4533 	{
4534 	  int j;
4535 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4536 	    scan_paradoxical_subregs (XVECEXP (x, i, j));
4537 	}
4538     }
4539 }
4540 
4541 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4542    If *OP_PTR is a paradoxical subreg, try to remove that subreg
4543    and apply the corresponding narrowing subreg to *OTHER_PTR.
4544    Return true if the operands were changed, false otherwise.  */
4545 
4546 static bool
4547 strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4548 {
4549   rtx op, inner, other, tem;
4550 
4551   op = *op_ptr;
4552   if (!paradoxical_subreg_p (op))
4553     return false;
4554   inner = SUBREG_REG (op);
4555 
4556   other = *other_ptr;
4557   tem = gen_lowpart_common (GET_MODE (inner), other);
4558   if (!tem)
4559     return false;
4560 
4561   /* If the lowpart operation turned a hard register into a subreg,
4562      rather than simplifying it to another hard register, then the
4563      mode change cannot be properly represented.  For example, OTHER
4564      might be valid in its current mode, but not in the new one.  */
4565   if (GET_CODE (tem) == SUBREG
4566       && REG_P (other)
4567       && HARD_REGISTER_P (other))
4568     return false;
4569 
4570   *op_ptr = inner;
4571   *other_ptr = tem;
4572   return true;
4573 }
4574 
4575 /* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
4576    examine all of the reload insns between PREV and NEXT exclusive, and
4577    annotate all that may trap.  */
4578 
4579 static void
4580 fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4581 {
4582   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4583   if (note == NULL)
4584     return;
4585   if (!insn_could_throw_p (insn))
4586     remove_note (insn, note);
4587   copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4588 }
4589 
4590 /* Reload pseudo-registers into hard regs around each insn as needed.
4591    Additional register load insns are output before the insn that needs it
4592    and perhaps store insns after insns that modify the reloaded pseudo reg.
4593 
4594    reg_last_reload_reg and reg_reloaded_contents keep track of
4595    which registers are already available in reload registers.
4596    We update these for the reloads that we perform,
4597    as the insns are scanned.  */
4598 
static void
reload_as_needed (int live_known)
{
  struct insn_chain *chain;
#if defined (AUTO_INC_DEC)
  int i;
#endif
  rtx_note *marker;

  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
  memset (spill_reg_store, 0, sizeof spill_reg_store);
  reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
  INIT_REG_SET (&reg_has_output_reload);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);
  CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);

  /* Start from the elimination offsets that hold at function entry.  */
  set_initial_elim_offsets ();

  /* Generate a marker insn that we will move around.  */
  marker = emit_note (NOTE_INSN_DELETED);
  unlink_insn_chain (marker, marker);

  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      rtx_insn *prev = 0;
      rtx_insn *insn = chain->insn;
      rtx_insn *old_next = NEXT_INSN (insn);
#ifdef AUTO_INC_DEC
      rtx_insn *old_prev = PREV_INSN (insn);
#endif

      /* Skip insns that will be deleted (see will_delete_init_insn_p).  */
      if (will_delete_init_insn_p (insn))
	continue;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (LABEL_P (insn))
	set_offsets_for_label (insn);

      else if (INSN_P (insn))
	{
	  regset_head regs_to_forget;
	  INIT_REG_SET (&regs_to_forget);
	  note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && MEM_P (XEXP (PATTERN (insn), 0)))
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)),
				NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.  */
	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (NOTE_P (insn))
		{
		  update_eliminable_offsets ();
		  CLEAR_REG_SET (&regs_to_forget);
		  continue;
		}
	    }

	  /* If need_elim is nonzero but need_reload is zero, one might think
	     that we could simply set n_reloads to 0.  However, find_reloads
	     could have done some manipulation of the insn (such as swapping
	     commutative operands), and these manipulations are lost during
	     the first pass for every insn that needs register elimination.
	     So the actions of find_reloads must be redone here.  */

	  if (! chain->need_elim && ! chain->need_reload
	      && ! chain->need_operand_change)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      CLEAR_REG_SET (&reg_has_output_reload);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      rtx_insn *next = NEXT_INSN (insn);

	      /* ??? PREV can get deleted by reload inheritance.
		 Work around this by emitting a marker note.  */
	      prev = PREV_INSN (insn);
	      reorder_insns_nobb (marker, marker, prev);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (chain);

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (chain);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads (insn);

	      /* Recover the insn before the reload insns via the marker
		 note, now that inheritance may have deleted PREV.  */
	      prev = PREV_INSN (marker);
	      unlink_insn_chain (marker, marker);

	      /* Adjust the exception region notes for loads and stores.  */
	      if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
		fixup_eh_region_note (insn, prev, next);

	      /* Adjust the location of REG_ARGS_SIZE.  */
	      rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
	      if (p)
		{
		  remove_note (insn, p);
		  fixup_args_size_notes (prev, PREV_INSN (next),
					 INTVAL (XEXP (p, 0)));
		}

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */
	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (rtx_insn *p = NEXT_INSN (prev);
		     p != next;
		     p = NEXT_INSN (p))
		  if (p != insn && INSN_P (p)
		      && GET_CODE (PATTERN (p)) != USE
		      && (recog_memoized (p) < 0
			  || (extract_insn (p),
			      !(constrain_operands (1,
				  get_enabled_alternatives (p))))))
		    {
		      error_for_asm (insn,
				     "%<asm%> operand requires "
				     "impossible reload");
		      delete_insn (p);
		    }
	    }

	  if (num_eliminable && chain->need_elim)
	    update_eliminable_offsets ();

	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in it
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  forget_marked_reloads (&regs_to_forget);
	  CLEAR_REG_SET (&regs_to_forget);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
	    if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1, NULL);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     REG_INC notes have been changed by reloading:
	     find_reloads_address_1 records substitutions for them,
	     which have been performed by subst_reloads above.  */
	  for (i = n_reloads - 1; i >= 0; i--)
	    {
	      rtx in_reg = rld[i].in_reg;
	      if (in_reg)
		{
		  enum rtx_code code = GET_CODE (in_reg);
		  /* PRE_INC / PRE_DEC will have the reload register ending up
		     with the same value as the stack slot, but that doesn't
		     hold true for POST_INC / POST_DEC.  Either we have to
		     convert the memory access to a true POST_INC / POST_DEC,
		     or we can't use the reload register for inheritance.  */
		  if ((code == POST_INC || code == POST_DEC)
		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
					    REGNO (rld[i].reg_rtx))
		      /* Make sure it is the inc/dec pseudo, and not
			 some other (e.g. output operand) pseudo.  */
		      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			  == REGNO (XEXP (in_reg, 0))))

		    {
		      rtx reload_reg = rld[i].reg_rtx;
		      machine_mode mode = GET_MODE (reload_reg);
		      int n = 0;
		      rtx_insn *p;

		      /* Look for a single use of the reload reg among the
			 insns emitted for this reload; if found, try to
			 fold the auto-inc into that use.  */
		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
			{
			  /* We really want to ignore REG_INC notes here, so
			     use PATTERN (p) as argument to reg_set_p.  */
			  if (reg_set_p (reload_reg, PATTERN (p)))
			    break;
			  n = count_occurrences (PATTERN (p), reload_reg, 0);
			  if (! n)
			    continue;
			  if (n == 1)
			    {
			      rtx replace_reg
				= gen_rtx_fmt_e (code, mode, reload_reg);

			      validate_replace_rtx_group (reload_reg,
							  replace_reg, p);
			      n = verify_changes (0);

			      /* We must also verify that the constraints
				 are met after the replacement.  Make sure
				 extract_insn is only called for an insn
				 where the replacements were found to be
				 valid so far. */
			      if (n)
				{
				  extract_insn (p);
				  n = constrain_operands (1,
				    get_enabled_alternatives (p));
				}

			      /* If the constraints were not met, then
				 undo the replacement, else confirm it.  */
			      if (!n)
				cancel_changes (0);
			      else
				confirm_change_group ();
			    }
			  break;
			}
		      if (n == 1)
			{
			  add_reg_note (p, REG_INC, reload_reg);
			  /* Mark this as having an output reload so that the
			     REG_INC processing code below won't invalidate
			     the reload for inheritance.  */
			  SET_HARD_REG_BIT (reg_is_output_reload,
					    REGNO (reload_reg));
			  SET_REGNO_REG_SET (&reg_has_output_reload,
					     REGNO (XEXP (in_reg, 0)));
			}
		      else
			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
					      NULL);
		    }
		  else if ((code == PRE_INC || code == PRE_DEC)
			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
						 REGNO (rld[i].reg_rtx))
			   /* Make sure it is the inc/dec pseudo, and not
			      some other (e.g. output operand) pseudo.  */
			   && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			       == REGNO (XEXP (in_reg, 0))))
		    {
		      SET_HARD_REG_BIT (reg_is_output_reload,
					REGNO (rld[i].reg_rtx));
		      SET_REGNO_REG_SET (&reg_has_output_reload,
					 REGNO (XEXP (in_reg, 0)));
		    }
		  else if (code == PRE_INC || code == PRE_DEC
			   || code == POST_INC || code == POST_DEC)
		    {
		      int in_regno = REGNO (XEXP (in_reg, 0));

		      if (reg_last_reload_reg[in_regno] != NULL_RTX)
			{
			  int in_hard_regno;
			  bool forget_p = true;

			  in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
			  if (TEST_HARD_REG_BIT (reg_reloaded_valid,
						 in_hard_regno))
			    {
			      for (rtx_insn *x = (old_prev ?
						  NEXT_INSN (old_prev) : insn);
				   x != old_next;
				   x = NEXT_INSN (x))
				if (x == reg_reloaded_insn[in_hard_regno])
				  {
				    forget_p = false;
				    break;
				  }
			    }
			  /* If for some reasons, we didn't set up
			     reg_last_reload_reg in this insn,
			     invalidate inheritance from previous
			     insns for the incremented/decremented
			     register.  Such registers will be not in
			     reg_has_output_reload.  Invalidate it
			     also if the corresponding element in
			     reg_reloaded_insn is also
			     invalidated.  */
			  if (forget_p)
			    forget_old_reloads_1 (XEXP (in_reg, 0),
						  NULL_RTX, NULL);
			}
		    }
		}
	    }
	  /* If a pseudo that got a hard register is auto-incremented,
	     we must purge records of copying it into pseudos without
	     hard registers.  */
	  for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (rld[i].out == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (LABEL_P (insn))
	CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg, or if it contains a value that will
	 be partially clobbered by the call.  */
      else if (CALL_P (insn))
	{
	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);

	  /* If this is a call to a setjmp-type function, we must not
	     reuse any reload reg contents across the call; that will
	     just be clobbered by other uses of the register in later
	     code, before the longjmp.  */
	  if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
	    CLEAR_HARD_REG_SET (reg_reloaded_valid);
	}
    }

  /* Clean up.  */
  free (reg_last_reload_reg);
  CLEAR_REG_SET (&reg_has_output_reload);
}
4951 
4952 /* Discard all record of any value reloaded from X,
4953    or reloaded in X from someplace else;
4954    unless X is an output reload reg of the current insn.
4955 
4956    X may be a hard reg (the reload reg)
4957    or it may be a pseudo reg that was reloaded from.
4958 
4959    When DATA is non-NULL just mark the registers in regset
4960    to be forgotten later.  */
4961 
4962 static void
4963 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4964 		      void *data)
4965 {
4966   unsigned int regno;
4967   unsigned int nr;
4968   regset regs = (regset) data;
4969 
4970   /* note_stores does give us subregs of hard regs,
4971      subreg_regno_offset requires a hard reg.  */
4972   while (GET_CODE (x) == SUBREG)
4973     {
4974       /* We ignore the subreg offset when calculating the regno,
4975 	 because we are using the entire underlying hard register
4976 	 below.  */
4977       x = SUBREG_REG (x);
4978     }
4979 
4980   if (!REG_P (x))
4981     return;
4982 
4983   regno = REGNO (x);
4984 
4985   if (regno >= FIRST_PSEUDO_REGISTER)
4986     nr = 1;
4987   else
4988     {
4989       unsigned int i;
4990 
4991       nr = hard_regno_nregs[regno][GET_MODE (x)];
4992       /* Storing into a spilled-reg invalidates its contents.
4993 	 This can happen if a block-local pseudo is allocated to that reg
4994 	 and it wasn't spilled because this block's total need is 0.
4995 	 Then some insn might have an optional reload and use this reg.  */
4996       if (!regs)
4997 	for (i = 0; i < nr; i++)
4998 	  /* But don't do this if the reg actually serves as an output
4999 	     reload reg in the current instruction.  */
5000 	  if (n_reloads == 0
5001 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
5002 	    {
5003 	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
5004 	      spill_reg_store[regno + i] = 0;
5005 	    }
5006     }
5007 
5008   if (regs)
5009     while (nr-- > 0)
5010       SET_REGNO_REG_SET (regs, regno + nr);
5011   else
5012     {
5013       /* Since value of X has changed,
5014 	 forget any value previously copied from it.  */
5015 
5016       while (nr-- > 0)
5017 	/* But don't forget a copy if this is the output reload
5018 	   that establishes the copy's validity.  */
5019 	if (n_reloads == 0
5020 	    || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
5021 	  reg_last_reload_reg[regno + nr] = 0;
5022      }
5023 }
5024 
5025 /* Forget the reloads marked in regset by previous function.  */
5026 static void
5027 forget_marked_reloads (regset regs)
5028 {
5029   unsigned int reg;
5030   reg_set_iterator rsi;
5031   EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
5032     {
5033       if (reg < FIRST_PSEUDO_REGISTER
5034 	  /* But don't do this if the reg actually serves as an output
5035 	     reload reg in the current instruction.  */
5036 	  && (n_reloads == 0
5037 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
5038 	  {
5039 	    CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
5040 	    spill_reg_store[reg] = 0;
5041 	  }
5042       if (n_reloads == 0
5043 	  || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
5044 	reg_last_reload_reg[reg] = 0;
5045     }
5046 }
5047 
5048 /* The following HARD_REG_SETs indicate when each hard register is
5049    used for a reload of various parts of the current insn.  */
5050 
/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
5086 
5087 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5088    TYPE. MODE is used to indicate how many consecutive regs are
5089    actually used.  */
5090 
5091 static void
5092 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
5093 			machine_mode mode)
5094 {
5095   switch (type)
5096     {
5097     case RELOAD_OTHER:
5098       add_to_hard_reg_set (&reload_reg_used, mode, regno);
5099       break;
5100 
5101     case RELOAD_FOR_INPUT_ADDRESS:
5102       add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
5103       break;
5104 
5105     case RELOAD_FOR_INPADDR_ADDRESS:
5106       add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
5107       break;
5108 
5109     case RELOAD_FOR_OUTPUT_ADDRESS:
5110       add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
5111       break;
5112 
5113     case RELOAD_FOR_OUTADDR_ADDRESS:
5114       add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
5115       break;
5116 
5117     case RELOAD_FOR_OPERAND_ADDRESS:
5118       add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
5119       break;
5120 
5121     case RELOAD_FOR_OPADDR_ADDR:
5122       add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
5123       break;
5124 
5125     case RELOAD_FOR_OTHER_ADDRESS:
5126       add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
5127       break;
5128 
5129     case RELOAD_FOR_INPUT:
5130       add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
5131       break;
5132 
5133     case RELOAD_FOR_OUTPUT:
5134       add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
5135       break;
5136 
5137     case RELOAD_FOR_INSN:
5138       add_to_hard_reg_set (&reload_reg_used_in_insn,  mode, regno);
5139       break;
5140     }
5141 
5142   add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
5143 }
5144 
5145 /* Similarly, but show REGNO is no longer in use for a reload.  */
5146 
static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
			 enum reload_type type, machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs[regno][mode];
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  /* Select the usage set that matches TYPE, mirroring the dispatch in
     mark_reload_reg_in_use.  */
  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      gcc_unreachable ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (rld[i].when_needed == type
	      && (check_any || rld[i].opnum == opnum)
	      && rld[i].reg_rtx)
	    {
	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
	      unsigned int conflict_end
		= end_hard_regno (rld[i].mode, conflict_start);

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }

  /* Free whatever remains of the interval after the exclusions above.  */
  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
5251 
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  */

static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      /* Conflicts with the insn itself and with operand-address
	 reloads of either flavor.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  Note
	 that multiple output operands are emitted in reverse order, so
	 the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  Note that multiple output operands are
	 emitted in reverse order, so the conflicting ones are
	 those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with all inputs, with the insn itself, and with other
	 operand-address reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Conflicts with all inputs and with other opaddr-addr reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with all inputs, all outputs, other RELOAD_FOR_INSN
	 reloads, and operand-address reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
5412 
/* Return 1 if the value in reload reg REGNO, as used by the reload with
   the number RELOADNUM, is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
{
  int opnum = rld[reloadnum].opnum;
  enum reload_type type = rld[reloadnum].when_needed;
  int i;

  /* See if there is a reload with the same type for this operand, using
     the same register. This case is not handled by the code below.  */
  for (i = reloadnum + 1; i < n_reloads; i++)
    {
      rtx reg;
      int nregs;

      if (rld[i].opnum != opnum || rld[i].when_needed != type)
	continue;
      reg = rld[i].reg_rtx;
      if (reg == NULL_RTX)
	continue;
      nregs = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
      /* A later reload of the same operand and type covers REGNO, so
	 our value is clobbered before the end of the insn.  */
      if (regno >= REGNO (reg) && regno < REGNO (reg) + nregs)
	return 0;
    }

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_INPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_INPADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Check outputs and their addresses, plus the operand-address
	 reloads, the insn itself, and RELOAD_OTHER uses.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = reload_n_operands;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_OUTPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_OUTADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5579 
5580 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5581    every register in REG.  */
5582 
5583 static bool
5584 reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5585 {
5586   unsigned int i;
5587 
5588   for (i = REGNO (reg); i < END_REGNO (reg); i++)
5589     if (!reload_reg_reaches_end_p (i, reloadnum))
5590       return false;
5591   return true;
5592 }
5593 
5594 
5595 /*  Returns whether R1 and R2 are uniquely chained: the value of one
5596     is used by the other, and that value is not used by any other
5597     reload for this insn.  This is used to partially undo the decision
5598     made in find_reloads when in the case of multiple
5599     RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5600     RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5601     reloads.  This code tries to avoid the conflict created by that
5602     change.  It might be cleaner to explicitly keep track of which
5603     RELOAD_FOR_OPADDR_ADDR reload is associated with which
5604     RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5605     this after the fact. */
5606 static bool
5607 reloads_unique_chain_p (int r1, int r2)
5608 {
5609   int i;
5610 
5611   /* We only check input reloads.  */
5612   if (! rld[r1].in || ! rld[r2].in)
5613     return false;
5614 
5615   /* Avoid anything with output reloads.  */
5616   if (rld[r1].out || rld[r2].out)
5617     return false;
5618 
5619   /* "chained" means one reload is a component of the other reload,
5620      not the same as the other reload.  */
5621   if (rld[r1].opnum != rld[r2].opnum
5622       || rtx_equal_p (rld[r1].in, rld[r2].in)
5623       || rld[r1].optional || rld[r2].optional
5624       || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5625 	    || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5626     return false;
5627 
5628   /* The following loop assumes that r1 is the reload that feeds r2.  */
5629   if (r1 > r2)
5630     {
5631       int tmp = r2;
5632       r2 = r1;
5633       r1 = tmp;
5634     }
5635 
5636   for (i = 0; i < n_reloads; i ++)
5637     /* Look for input reloads that aren't our two */
5638     if (i != r1 && i != r2 && rld[i].in)
5639       {
5640 	/* If our reload is mentioned at all, it isn't a simple chain.  */
5641 	if (reg_mentioned_p (rld[r1].in, rld[i].in))
5642 	  return false;
5643       }
5644   return true;
5645 }
5646 
5647 /* The recursive function change all occurrences of WHAT in *WHERE
5648    to REPL.  */
5649 static void
5650 substitute (rtx *where, const_rtx what, rtx repl)
5651 {
5652   const char *fmt;
5653   int i;
5654   enum rtx_code code;
5655 
5656   if (*where == 0)
5657     return;
5658 
5659   if (*where == what || rtx_equal_p (*where, what))
5660     {
5661       /* Record the location of the changed rtx.  */
5662       substitute_stack.safe_push (where);
5663       *where = repl;
5664       return;
5665     }
5666 
5667   code = GET_CODE (*where);
5668   fmt = GET_RTX_FORMAT (code);
5669   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5670     {
5671       if (fmt[i] == 'E')
5672 	{
5673 	  int j;
5674 
5675 	  for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5676 	    substitute (&XVECEXP (*where, i, j), what, repl);
5677 	}
5678       else if (fmt[i] == 'e')
5679 	substitute (&XEXP (*where, i), what, repl);
5680     }
5681 }
5682 
/* The function returns TRUE if chain of reload R1 and R2 (in any
   order) can be evaluated without usage of intermediate register for
   the reload containing another reload.  It is important to see
   gen_reload to understand what the function is trying to do.  As an
   example, let us have reload chain

      r2: const
      r1: <something> + const

   and reload R2 got reload reg HR.  The function returns true if
   there is a correct insn HR = HR + <something>.  Otherwise,
   gen_reload will use intermediate register (and this is the reload
   reg for R1) to reload <something>.

   We need this function to find a conflict for chain reloads.  In our
   example, if HR = HR + <something> is incorrect insn, then we cannot
   use HR as a reload register for R2.  If we do use it then we get a
   wrong code:

      HR = const
      HR = <something>
      HR = HR + HR

*/
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard register.  */
  bool result = true;
  int regno, n, code;
  rtx out, in;
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();

  /* Make r2 a component of r1.  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    {
      n = r1;
      r1 = r2;
      r2 = n;
    }
  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  /* Use whichever of the two reloads already has a hard register.  */
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Replace the inner reload's value inside IN by its reload register,
     forming the combined expression gen_reload would have to emit.  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  strip_paradoxical_subreg (&in, &out);

  /* Only the "OUT = reg/subreg/mem + reg/subreg/const/mem" shape is
     tested here; any other shape leaves RESULT at its initial value.  */
  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Emit a trial insn and ask the backend whether it recognizes it.  */
      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1, get_enabled_alternatives (insn));
	}

      /* Remove the trial insn again.  */
      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!substitute_stack.is_empty ())
    {
      rtx *where = substitute_stack.pop ();
      *where = rld[r2].in;
    }

  return result;
}
5770 
5771 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5772    Return 0 otherwise.
5773 
5774    This function uses the same algorithm as reload_reg_free_p above.  */
5775 
5776 static int
5777 reloads_conflict (int r1, int r2)
5778 {
5779   enum reload_type r1_type = rld[r1].when_needed;
5780   enum reload_type r2_type = rld[r2].when_needed;
5781   int r1_opnum = rld[r1].opnum;
5782   int r2_opnum = rld[r2].opnum;
5783 
5784   /* RELOAD_OTHER conflicts with everything.  */
5785   if (r2_type == RELOAD_OTHER)
5786     return 1;
5787 
5788   /* Otherwise, check conflicts differently for each type.  */
5789 
5790   switch (r1_type)
5791     {
5792     case RELOAD_FOR_INPUT:
5793       return (r2_type == RELOAD_FOR_INSN
5794 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5795 	      || r2_type == RELOAD_FOR_OPADDR_ADDR
5796 	      || r2_type == RELOAD_FOR_INPUT
5797 	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5798 		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5799 		  && r2_opnum > r1_opnum));
5800 
5801     case RELOAD_FOR_INPUT_ADDRESS:
5802       return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5803 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5804 
5805     case RELOAD_FOR_INPADDR_ADDRESS:
5806       return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5807 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5808 
5809     case RELOAD_FOR_OUTPUT_ADDRESS:
5810       return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5811 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5812 
5813     case RELOAD_FOR_OUTADDR_ADDRESS:
5814       return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5815 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5816 
5817     case RELOAD_FOR_OPERAND_ADDRESS:
5818       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5819 	      || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5820 		  && (!reloads_unique_chain_p (r1, r2)
5821 		      || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5822 
5823     case RELOAD_FOR_OPADDR_ADDR:
5824       return (r2_type == RELOAD_FOR_INPUT
5825 	      || r2_type == RELOAD_FOR_OPADDR_ADDR);
5826 
5827     case RELOAD_FOR_OUTPUT:
5828       return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5829 	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5830 		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5831 		  && r2_opnum >= r1_opnum));
5832 
5833     case RELOAD_FOR_INSN:
5834       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5835 	      || r2_type == RELOAD_FOR_INSN
5836 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5837 
5838     case RELOAD_FOR_OTHER_ADDRESS:
5839       return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5840 
5841     case RELOAD_OTHER:
5842       return 1;
5843 
5844     default:
5845       gcc_unreachable ();
5846     }
5847 }
5848 
/* Indexed by reload number, 1 if the incoming value was inherited from
   a previous insn rather than loaded afresh.  */
static char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
static rtx_insn *reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
static rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
static int reload_spill_index[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
static rtx reload_reg_rtx_for_input[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5870 
5871 /* Subroutine of free_for_value_p, used to check a single register.
5872    START_REGNO is the starting regno of the full reload register
5873    (possibly comprising multiple hard registers) that we are considering.  */
5874 
5875 static int
5876 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5877 			     enum reload_type type, rtx value, rtx out,
5878 			     int reloadnum, int ignore_address_reloads)
5879 {
5880   int time1;
5881   /* Set if we see an input reload that must not share its reload register
5882      with any new earlyclobber, but might otherwise share the reload
5883      register with an output or input-output reload.  */
5884   int check_earlyclobber = 0;
5885   int i;
5886   int copy = 0;
5887 
5888   if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5889     return 0;
5890 
5891   if (out == const0_rtx)
5892     {
5893       copy = 1;
5894       out = NULL_RTX;
5895     }
5896 
5897   /* We use some pseudo 'time' value to check if the lifetimes of the
5898      new register use would overlap with the one of a previous reload
5899      that is not read-only or uses a different value.
5900      The 'time' used doesn't have to be linear in any shape or form, just
5901      monotonic.
5902      Some reload types use different 'buckets' for each operand.
5903      So there are MAX_RECOG_OPERANDS different time values for each
5904      such reload type.
5905      We compute TIME1 as the time when the register for the prospective
5906      new reload ceases to be live, and TIME2 for each existing
5907      reload as the time when that the reload register of that reload
5908      becomes live.
5909      Where there is little to be gained by exact lifetime calculations,
5910      we just make conservative assumptions, i.e. a longer lifetime;
5911      this is done in the 'default:' cases.  */
5912   switch (type)
5913     {
5914     case RELOAD_FOR_OTHER_ADDRESS:
5915       /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
5916       time1 = copy ? 0 : 1;
5917       break;
5918     case RELOAD_OTHER:
5919       time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5920       break;
5921       /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5922 	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
5923 	 respectively, to the time values for these, we get distinct time
5924 	 values.  To get distinct time values for each operand, we have to
5925 	 multiply opnum by at least three.  We round that up to four because
5926 	 multiply by four is often cheaper.  */
5927     case RELOAD_FOR_INPADDR_ADDRESS:
5928       time1 = opnum * 4 + 2;
5929       break;
5930     case RELOAD_FOR_INPUT_ADDRESS:
5931       time1 = opnum * 4 + 3;
5932       break;
5933     case RELOAD_FOR_INPUT:
5934       /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5935 	 executes (inclusive).  */
5936       time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5937       break;
5938     case RELOAD_FOR_OPADDR_ADDR:
5939       /* opnum * 4 + 4
5940 	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5941       time1 = MAX_RECOG_OPERANDS * 4 + 1;
5942       break;
5943     case RELOAD_FOR_OPERAND_ADDRESS:
5944       /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5945 	 is executed.  */
5946       time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5947       break;
5948     case RELOAD_FOR_OUTADDR_ADDRESS:
5949       time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5950       break;
5951     case RELOAD_FOR_OUTPUT_ADDRESS:
5952       time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5953       break;
5954     default:
5955       time1 = MAX_RECOG_OPERANDS * 5 + 5;
5956     }
5957 
5958   for (i = 0; i < n_reloads; i++)
5959     {
5960       rtx reg = rld[i].reg_rtx;
5961       if (reg && REG_P (reg)
5962 	  && ((unsigned) regno - true_regnum (reg)
5963 	      <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5964 	  && i != reloadnum)
5965 	{
5966 	  rtx other_input = rld[i].in;
5967 
5968 	  /* If the other reload loads the same input value, that
5969 	     will not cause a conflict only if it's loading it into
5970 	     the same register.  */
5971 	  if (true_regnum (reg) != start_regno)
5972 	    other_input = NULL_RTX;
5973 	  if (! other_input || ! rtx_equal_p (other_input, value)
5974 	      || rld[i].out || out)
5975 	    {
5976 	      int time2;
5977 	      switch (rld[i].when_needed)
5978 		{
5979 		case RELOAD_FOR_OTHER_ADDRESS:
5980 		  time2 = 0;
5981 		  break;
5982 		case RELOAD_FOR_INPADDR_ADDRESS:
5983 		  /* find_reloads makes sure that a
5984 		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5985 		     by at most one - the first -
5986 		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
5987 		     address reload is inherited, the address address reload
5988 		     goes away, so we can ignore this conflict.  */
5989 		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5990 		      && ignore_address_reloads
5991 		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5992 			 Then the address address is still needed to store
5993 			 back the new address.  */
5994 		      && ! rld[reloadnum].out)
5995 		    continue;
5996 		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5997 		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5998 		     reloads go away.  */
5999 		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
6000 		      && ignore_address_reloads
6001 		      /* Unless we are reloading an auto_inc expression.  */
6002 		      && ! rld[reloadnum].out)
6003 		    continue;
6004 		  time2 = rld[i].opnum * 4 + 2;
6005 		  break;
6006 		case RELOAD_FOR_INPUT_ADDRESS:
6007 		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
6008 		      && ignore_address_reloads
6009 		      && ! rld[reloadnum].out)
6010 		    continue;
6011 		  time2 = rld[i].opnum * 4 + 3;
6012 		  break;
6013 		case RELOAD_FOR_INPUT:
6014 		  time2 = rld[i].opnum * 4 + 4;
6015 		  check_earlyclobber = 1;
6016 		  break;
6017 		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
6018 		     == MAX_RECOG_OPERAND * 4  */
6019 		case RELOAD_FOR_OPADDR_ADDR:
6020 		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
6021 		      && ignore_address_reloads
6022 		      && ! rld[reloadnum].out)
6023 		    continue;
6024 		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
6025 		  break;
6026 		case RELOAD_FOR_OPERAND_ADDRESS:
6027 		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
6028 		  check_earlyclobber = 1;
6029 		  break;
6030 		case RELOAD_FOR_INSN:
6031 		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
6032 		  break;
6033 		case RELOAD_FOR_OUTPUT:
6034 		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
6035 		     instruction is executed.  */
6036 		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
6037 		  break;
6038 		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
6039 		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
6040 		     value.  */
6041 		case RELOAD_FOR_OUTADDR_ADDRESS:
6042 		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
6043 		      && ignore_address_reloads
6044 		      && ! rld[reloadnum].out)
6045 		    continue;
6046 		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
6047 		  break;
6048 		case RELOAD_FOR_OUTPUT_ADDRESS:
6049 		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
6050 		  break;
6051 		case RELOAD_OTHER:
6052 		  /* If there is no conflict in the input part, handle this
6053 		     like an output reload.  */
6054 		  if (! rld[i].in || rtx_equal_p (other_input, value))
6055 		    {
6056 		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
6057 		      /* Earlyclobbered outputs must conflict with inputs.  */
6058 		      if (earlyclobber_operand_p (rld[i].out))
6059 			time2 = MAX_RECOG_OPERANDS * 4 + 3;
6060 
6061 		      break;
6062 		    }
6063 		  time2 = 1;
6064 		  /* RELOAD_OTHER might be live beyond instruction execution,
6065 		     but this is not obvious when we set time2 = 1.  So check
6066 		     here if there might be a problem with the new reload
6067 		     clobbering the register used by the RELOAD_OTHER.  */
6068 		  if (out)
6069 		    return 0;
6070 		  break;
6071 		default:
6072 		  return 0;
6073 		}
6074 	      if ((time1 >= time2
6075 		   && (! rld[i].in || rld[i].out
6076 		       || ! rtx_equal_p (other_input, value)))
6077 		  || (out && rld[reloadnum].out_reg
6078 		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
6079 		return 0;
6080 	    }
6081 	}
6082     }
6083 
6084   /* Earlyclobbered outputs must conflict with inputs.  */
6085   if (check_earlyclobber && out && earlyclobber_operand_p (out))
6086     return 0;
6087 
6088   return 1;
6089 }
6090 
6091 /* Return 1 if the value in reload reg REGNO, as used by a reload
6092    needed for the part of the insn specified by OPNUM and TYPE,
6093    may be used to load VALUE into it.
6094 
6095    MODE is the mode in which the register is used, this is needed to
6096    determine how many hard regs to test.
6097 
6098    Other read-only reloads with the same value do not conflict
6099    unless OUT is nonzero and these other reloads have to live while
6100    output reloads live.
6101    If OUT is CONST0_RTX, this is a special case: it means that the
6102    test should not be for using register REGNO as reload register, but
6103    for copying from register REGNO into the reload register.
6104 
6105    RELOADNUM is the number of the reload we want to load this value for;
6106    a reload does not conflict with itself.
6107 
6108    When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
6109    reloads that load an address for the very reload we are considering.
6110 
6111    The caller has to make sure that there is no conflict with the return
6112    register.  */
6113 
6114 static int
6115 free_for_value_p (int regno, machine_mode mode, int opnum,
6116 		  enum reload_type type, rtx value, rtx out, int reloadnum,
6117 		  int ignore_address_reloads)
6118 {
6119   int nregs = hard_regno_nregs[regno][mode];
6120   while (nregs-- > 0)
6121     if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
6122 				       value, out, reloadnum,
6123 				       ignore_address_reloads))
6124       return 0;
6125   return 1;
6126 }
6127 
6128 /* Return nonzero if the rtx X is invariant over the current function.  */
6129 /* ??? Actually, the places where we use this expect exactly what is
6130    tested here, and not everything that is function invariant.  In
6131    particular, the frame pointer and arg pointer are special cased;
6132    pic_offset_table_rtx is not, and we must not spill these things to
6133    memory.  */
6134 
6135 int
6136 function_invariant_p (const_rtx x)
6137 {
6138   if (CONSTANT_P (x))
6139     return 1;
6140   if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6141     return 1;
6142   if (GET_CODE (x) == PLUS
6143       && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6144       && GET_CODE (XEXP (x, 1)) == CONST_INT)
6145     return 1;
6146   return 0;
6147 }
6148 
6149 /* Determine whether the reload reg X overlaps any rtx'es used for
6150    overriding inheritance.  Return nonzero if so.  */
6151 
6152 static int
6153 conflicts_with_override (rtx x)
6154 {
6155   int i;
6156   for (i = 0; i < n_reloads; i++)
6157     if (reload_override_in[i]
6158 	&& reg_overlap_mentioned_p (x, reload_override_in[i]))
6159       return 1;
6160   return 0;
6161 }
6162 
6163 /* Give an error message saying we failed to find a reload for INSN,
6164    and clear out reload R.  */
6165 static void
6166 failed_reload (rtx_insn *insn, int r)
6167 {
6168   if (asm_noperands (PATTERN (insn)) < 0)
6169     /* It's the compiler's fault.  */
6170     fatal_insn ("could not find a spill register", insn);
6171 
6172   /* It's the user's fault; the operand's mode and constraint
6173      don't match.  Disable this reload so we don't crash in final.  */
6174   error_for_asm (insn,
6175 		 "%<asm%> operand constraint incompatible with operand size");
6176   rld[r].in = 0;
6177   rld[r].out = 0;
6178   rld[r].reg_rtx = 0;
6179   rld[r].optional = 1;
6180   rld[r].secondary_p = 1;
6181 }
6182 
6183 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6184    for reload R.  If it's valid, get an rtx for it.  Return nonzero if
6185    successful.  */
6186 static int
6187 set_reload_reg (int i, int r)
6188 {
6189   /* regno is 'set but not used' if HARD_REGNO_MODE_OK doesn't use its first
6190      parameter.  */
6191   int regno ATTRIBUTE_UNUSED;
6192   rtx reg = spill_reg_rtx[i];
6193 
6194   if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6195     spill_reg_rtx[i] = reg
6196       = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6197 
6198   regno = true_regnum (reg);
6199 
6200   /* Detect when the reload reg can't hold the reload mode.
6201      This used to be one `if', but Sequent compiler can't handle that.  */
6202   if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
6203     {
6204       machine_mode test_mode = VOIDmode;
6205       if (rld[r].in)
6206 	test_mode = GET_MODE (rld[r].in);
6207       /* If rld[r].in has VOIDmode, it means we will load it
6208 	 in whatever mode the reload reg has: to wit, rld[r].mode.
6209 	 We have already tested that for validity.  */
6210       /* Aside from that, we need to test that the expressions
6211 	 to reload from or into have modes which are valid for this
6212 	 reload register.  Otherwise the reload insns would be invalid.  */
6213       if (! (rld[r].in != 0 && test_mode != VOIDmode
6214 	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
6215 	if (! (rld[r].out != 0
6216 	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
6217 	  {
6218 	    /* The reg is OK.  */
6219 	    last_spill_reg = i;
6220 
6221 	    /* Mark as in use for this insn the reload regs we use
6222 	       for this.  */
6223 	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6224 				    rld[r].when_needed, rld[r].mode);
6225 
6226 	    rld[r].reg_rtx = reg;
6227 	    reload_spill_index[r] = spill_regs[i];
6228 	    return 1;
6229 	  }
6230     }
6231   return 0;
6232 }
6233 
6234 /* Find a spill register to use as a reload register for reload R.
6235    LAST_RELOAD is nonzero if this is the last reload for the insn being
6236    processed.
6237 
6238    Set rld[R].reg_rtx to the register allocated.
6239 
6240    We return 1 if successful, or 0 if we couldn't find a spill reg and
6241    we didn't change anything.  */
6242 
static int
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
		     int last_reload)
{
  int i, pass, count;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = rld[r].nregs > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use three passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers which are not "bad", then
     finally any register.

     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 3; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      i = last_spill_reg;

      for (count = 0; count < n_spills; count++)
	{
	  int rclass = (int) rld[r].rclass;
	  int regnum;

	  /* Step to the next spill reg, wrapping around at n_spills.  */
	  i++;
	  if (i >= n_spills)
	    i -= n_spills;
	  regnum = spill_regs[i];

	  /* Accept REGNUM only if it is free for this reload (or already
	     holds the input value without conflict), belongs to the
	     required class, can hold the reload mode, and obeys the
	     sharing policy of the current pass.  */
	  if ((reload_reg_free_p (regnum, rld[r].opnum,
				  rld[r].when_needed)
	       || (rld[r].in
		   /* We check reload_reg_used to make sure we
		      don't clobber the return register.  */
		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
					rld[r].when_needed, rld[r].in,
					rld[r].out, r, 1)))
	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
	      && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 regnum)
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      regnum))))
	    {
	      int nr = hard_regno_nregs[regnum][rld[r].mode];

	      /* During the second pass we want to avoid reload registers
		 which are "bad" for this reload.  */
	      if (pass == 1
		  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
		continue;

	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = rld[r].nregs;
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.  */
	      while (nr > 1)
		{
		  int regno = regnum + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, rld[r].opnum,
					      rld[r].when_needed)))
		    break;
		  nr--;
		}
	      /* NR == 1 here means every member of the group checked out.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on the current pass, omit later passes.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count >= n_spills)
    return 0;

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  return set_reload_reg (i, r);
}
6369 
6370 /* Initialize all the tables needed to allocate reload registers.
6371    CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6372    is the array we use to restore the reg_rtx field for every reload.  */
6373 
6374 static void
6375 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
6376 {
6377   int i;
6378 
6379   for (i = 0; i < n_reloads; i++)
6380     rld[i].reg_rtx = save_reload_reg_rtx[i];
6381 
6382   memset (reload_inherited, 0, MAX_RELOADS);
6383   memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
6384   memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
6385 
6386   CLEAR_HARD_REG_SET (reload_reg_used);
6387   CLEAR_HARD_REG_SET (reload_reg_used_at_all);
6388   CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
6389   CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
6390   CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
6391   CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
6392 
6393   CLEAR_HARD_REG_SET (reg_used_in_insn);
6394   {
6395     HARD_REG_SET tmp;
6396     REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
6397     IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6398     REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
6399     IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6400     compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
6401     compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
6402   }
6403 
6404   for (i = 0; i < reload_n_operands; i++)
6405     {
6406       CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
6407       CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
6408       CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
6409       CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
6410       CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
6411       CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
6412     }
6413 
6414   COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
6415 
6416   CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
6417 
6418   for (i = 0; i < n_reloads; i++)
6419     /* If we have already decided to use a certain register,
6420        don't use it in another way.  */
6421     if (rld[i].reg_rtx)
6422       mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
6423 			      rld[i].when_needed, rld[i].mode);
6424 }
6425 
#ifdef SECONDARY_MEMORY_NEEDED
/* If X is not a subreg, return it unmodified.  If it is a subreg,
   look up whether we made a replacement for the SUBREG_REG.  Return
   either the replacement or the SUBREG_REG.  */

static rtx
replaced_subreg (rtx x)
{
  return (GET_CODE (x) == SUBREG
	  ? find_replacement (&SUBREG_REG (x))
	  : x);
}
#endif
6439 
6440 /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6441    mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6442    SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6443    otherwise it is NULL.  */
6444 
6445 static int
6446 compute_reload_subreg_offset (machine_mode outermode,
6447 			      rtx subreg,
6448 			      machine_mode innermode)
6449 {
6450   int outer_offset;
6451   machine_mode middlemode;
6452 
6453   if (!subreg)
6454     return subreg_lowpart_offset (outermode, innermode);
6455 
6456   outer_offset = SUBREG_BYTE (subreg);
6457   middlemode = GET_MODE (SUBREG_REG (subreg));
6458 
6459   /* If SUBREG is paradoxical then return the normal lowpart offset
6460      for OUTERMODE and INNERMODE.  Our caller has already checked
6461      that OUTERMODE fits in INNERMODE.  */
6462   if (outer_offset == 0
6463       && GET_MODE_SIZE (outermode) > GET_MODE_SIZE (middlemode))
6464     return subreg_lowpart_offset (outermode, innermode);
6465 
6466   /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
6467      plus the normal lowpart offset for MIDDLEMODE and INNERMODE.  */
6468   return outer_offset + subreg_lowpart_offset (middlemode, innermode);
6469 }
6470 
6471 /* Assign hard reg targets for the pseudo-registers we must reload
6472    into hard regs for this insn.
6473    Also output the instructions to copy them in and out of the hard regs.
6474 
6475    For machines with register classes, we are responsible for
6476    finding a reload reg in the proper class.  */
6477 
6478 static void
6479 choose_reload_regs (struct insn_chain *chain)
6480 {
6481   rtx_insn *insn = chain->insn;
6482   int i, j;
6483   unsigned int max_group_size = 1;
6484   enum reg_class group_class = NO_REGS;
6485   int pass, win, inheritance;
6486 
6487   rtx save_reload_reg_rtx[MAX_RELOADS];
6488 
6489   /* In order to be certain of getting the registers we need,
6490      we must sort the reloads into order of increasing register class.
6491      Then our grabbing of reload registers will parallel the process
6492      that provided the reload registers.
6493 
6494      Also note whether any of the reloads wants a consecutive group of regs.
6495      If so, record the maximum size of the group desired and what
6496      register class contains all the groups needed by this insn.  */
6497 
6498   for (j = 0; j < n_reloads; j++)
6499     {
6500       reload_order[j] = j;
6501       if (rld[j].reg_rtx != NULL_RTX)
6502 	{
6503 	  gcc_assert (REG_P (rld[j].reg_rtx)
6504 		      && HARD_REGISTER_P (rld[j].reg_rtx));
6505 	  reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6506 	}
6507       else
6508 	reload_spill_index[j] = -1;
6509 
6510       if (rld[j].nregs > 1)
6511 	{
6512 	  max_group_size = MAX (rld[j].nregs, max_group_size);
6513 	  group_class
6514 	    = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6515 	}
6516 
6517       save_reload_reg_rtx[j] = rld[j].reg_rtx;
6518     }
6519 
6520   if (n_reloads > 1)
6521     qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6522 
6523   /* If -O, try first with inheritance, then turning it off.
6524      If not -O, don't do inheritance.
6525      Using inheritance when not optimizing leads to paradoxes
6526      with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6527      because one side of the comparison might be inherited.  */
6528   win = 0;
6529   for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6530     {
6531       choose_reload_regs_init (chain, save_reload_reg_rtx);
6532 
6533       /* Process the reloads in order of preference just found.
6534 	 Beyond this point, subregs can be found in reload_reg_rtx.
6535 
6536 	 This used to look for an existing reloaded home for all of the
6537 	 reloads, and only then perform any new reloads.  But that could lose
6538 	 if the reloads were done out of reg-class order because a later
6539 	 reload with a looser constraint might have an old home in a register
6540 	 needed by an earlier reload with a tighter constraint.
6541 
6542 	 To solve this, we make two passes over the reloads, in the order
6543 	 described above.  In the first pass we try to inherit a reload
6544 	 from a previous insn.  If there is a later reload that needs a
6545 	 class that is a proper subset of the class being processed, we must
6546 	 also allocate a spill register during the first pass.
6547 
6548 	 Then make a second pass over the reloads to allocate any reloads
6549 	 that haven't been given registers yet.  */
6550 
6551       for (j = 0; j < n_reloads; j++)
6552 	{
6553 	  int r = reload_order[j];
6554 	  rtx search_equiv = NULL_RTX;
6555 
6556 	  /* Ignore reloads that got marked inoperative.  */
6557 	  if (rld[r].out == 0 && rld[r].in == 0
6558 	      && ! rld[r].secondary_p)
6559 	    continue;
6560 
6561 	  /* If find_reloads chose to use reload_in or reload_out as a reload
6562 	     register, we don't need to chose one.  Otherwise, try even if it
6563 	     found one since we might save an insn if we find the value lying
6564 	     around.
6565 	     Try also when reload_in is a pseudo without a hard reg.  */
6566 	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
6567 	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6568 		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6569 		      && !MEM_P (rld[r].in)
6570 		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6571 	    continue;
6572 
6573 #if 0 /* No longer needed for correct operation.
6574 	 It might give better code, or might not; worth an experiment?  */
6575 	  /* If this is an optional reload, we can't inherit from earlier insns
6576 	     until we are sure that any non-optional reloads have been allocated.
6577 	     The following code takes advantage of the fact that optional reloads
6578 	     are at the end of reload_order.  */
6579 	  if (rld[r].optional != 0)
6580 	    for (i = 0; i < j; i++)
6581 	      if ((rld[reload_order[i]].out != 0
6582 		   || rld[reload_order[i]].in != 0
6583 		   || rld[reload_order[i]].secondary_p)
6584 		  && ! rld[reload_order[i]].optional
6585 		  && rld[reload_order[i]].reg_rtx == 0)
6586 		allocate_reload_reg (chain, reload_order[i], 0);
6587 #endif
6588 
6589 	  /* First see if this pseudo is already available as reloaded
6590 	     for a previous insn.  We cannot try to inherit for reloads
6591 	     that are smaller than the maximum number of registers needed
6592 	     for groups unless the register we would allocate cannot be used
6593 	     for the groups.
6594 
6595 	     We could check here to see if this is a secondary reload for
6596 	     an object that is already in a register of the desired class.
6597 	     This would avoid the need for the secondary reload register.
6598 	     But this is complex because we can't easily determine what
6599 	     objects might want to be loaded via this reload.  So let a
6600 	     register be allocated here.  In `emit_reload_insns' we suppress
6601 	     one of the loads in the case described above.  */
6602 
6603 	  if (inheritance)
6604 	    {
6605 	      int byte = 0;
6606 	      int regno = -1;
6607 	      machine_mode mode = VOIDmode;
6608 	      rtx subreg = NULL_RTX;
6609 
6610 	      if (rld[r].in == 0)
6611 		;
6612 	      else if (REG_P (rld[r].in))
6613 		{
6614 		  regno = REGNO (rld[r].in);
6615 		  mode = GET_MODE (rld[r].in);
6616 		}
6617 	      else if (REG_P (rld[r].in_reg))
6618 		{
6619 		  regno = REGNO (rld[r].in_reg);
6620 		  mode = GET_MODE (rld[r].in_reg);
6621 		}
6622 	      else if (GET_CODE (rld[r].in_reg) == SUBREG
6623 		       && REG_P (SUBREG_REG (rld[r].in_reg)))
6624 		{
6625 		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
6626 		  if (regno < FIRST_PSEUDO_REGISTER)
6627 		    regno = subreg_regno (rld[r].in_reg);
6628 		  else
6629 		    {
6630 		      subreg = rld[r].in_reg;
6631 		      byte = SUBREG_BYTE (subreg);
6632 		    }
6633 		  mode = GET_MODE (rld[r].in_reg);
6634 		}
6635 #ifdef AUTO_INC_DEC
6636 	      else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6637 		       && REG_P (XEXP (rld[r].in_reg, 0)))
6638 		{
6639 		  regno = REGNO (XEXP (rld[r].in_reg, 0));
6640 		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6641 		  rld[r].out = rld[r].in;
6642 		}
6643 #endif
6644 #if 0
6645 	      /* This won't work, since REGNO can be a pseudo reg number.
6646 		 Also, it takes much more hair to keep track of all the things
6647 		 that can invalidate an inherited reload of part of a pseudoreg.  */
6648 	      else if (GET_CODE (rld[r].in) == SUBREG
6649 		       && REG_P (SUBREG_REG (rld[r].in)))
6650 		regno = subreg_regno (rld[r].in);
6651 #endif
6652 
6653 	      if (regno >= 0
6654 		  && reg_last_reload_reg[regno] != 0
6655 		  && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
6656 		      >= GET_MODE_SIZE (mode) + byte)
6657 #ifdef CANNOT_CHANGE_MODE_CLASS
6658 		  /* Verify that the register it's in can be used in
6659 		     mode MODE.  */
6660 		  && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6661 						GET_MODE (reg_last_reload_reg[regno]),
6662 						mode)
6663 #endif
6664 		  )
6665 		{
6666 		  enum reg_class rclass = rld[r].rclass, last_class;
6667 		  rtx last_reg = reg_last_reload_reg[regno];
6668 
6669 		  i = REGNO (last_reg);
6670 		  byte = compute_reload_subreg_offset (mode,
6671 						       subreg,
6672 						       GET_MODE (last_reg));
6673 		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6674 		  last_class = REGNO_REG_CLASS (i);
6675 
6676 		  if (reg_reloaded_contents[i] == regno
6677 		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6678 		      && HARD_REGNO_MODE_OK (i, rld[r].mode)
6679 		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6680 			  /* Even if we can't use this register as a reload
6681 			     register, we might use it for reload_override_in,
6682 			     if copying it to the desired class is cheap
6683 			     enough.  */
6684 			  || ((register_move_cost (mode, last_class, rclass)
6685 			       < memory_move_cost (mode, rclass, true))
6686 			      && (secondary_reload_class (1, rclass, mode,
6687 							  last_reg)
6688 				  == NO_REGS)
6689 #ifdef SECONDARY_MEMORY_NEEDED
6690 			      && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6691 							    mode)
6692 #endif
6693 			      ))
6694 
6695 		      && (rld[r].nregs == max_group_size
6696 			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6697 						  i))
6698 		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6699 					   rld[r].when_needed, rld[r].in,
6700 					   const0_rtx, r, 1))
6701 		    {
6702 		      /* If a group is needed, verify that all the subsequent
6703 			 registers still have their values intact.  */
6704 		      int nr = hard_regno_nregs[i][rld[r].mode];
6705 		      int k;
6706 
6707 		      for (k = 1; k < nr; k++)
6708 			if (reg_reloaded_contents[i + k] != regno
6709 			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6710 			  break;
6711 
6712 		      if (k == nr)
6713 			{
6714 			  int i1;
6715 			  int bad_for_class;
6716 
6717 			  last_reg = (GET_MODE (last_reg) == mode
6718 				      ? last_reg : gen_rtx_REG (mode, i));
6719 
6720 			  bad_for_class = 0;
6721 			  for (k = 0; k < nr; k++)
6722 			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6723 								  i+k);
6724 
6725 			  /* We found a register that contains the
6726 			     value we need.  If this register is the
6727 			     same as an `earlyclobber' operand of the
6728 			     current insn, just mark it as a place to
6729 			     reload from since we can't use it as the
6730 			     reload register itself.  */
6731 
6732 			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
6733 			    if (reg_overlap_mentioned_for_reload_p
6734 				(reg_last_reload_reg[regno],
6735 				 reload_earlyclobbers[i1]))
6736 			      break;
6737 
6738 			  if (i1 != n_earlyclobbers
6739 			      || ! (free_for_value_p (i, rld[r].mode,
6740 						      rld[r].opnum,
6741 						      rld[r].when_needed, rld[r].in,
6742 						      rld[r].out, r, 1))
6743 			      /* Don't use it if we'd clobber a pseudo reg.  */
6744 			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6745 				  && rld[r].out
6746 				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6747 			      /* Don't clobber the frame pointer.  */
6748 			      || (i == HARD_FRAME_POINTER_REGNUM
6749 				  && frame_pointer_needed
6750 				  && rld[r].out)
6751 			      /* Don't really use the inherited spill reg
6752 				 if we need it wider than we've got it.  */
6753 			      || (GET_MODE_SIZE (rld[r].mode)
6754 				  > GET_MODE_SIZE (mode))
6755 			      || bad_for_class
6756 
6757 			      /* If find_reloads chose reload_out as reload
6758 				 register, stay with it - that leaves the
6759 				 inherited register for subsequent reloads.  */
6760 			      || (rld[r].out && rld[r].reg_rtx
6761 				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6762 			    {
6763 			      if (! rld[r].optional)
6764 				{
6765 				  reload_override_in[r] = last_reg;
6766 				  reload_inheritance_insn[r]
6767 				    = reg_reloaded_insn[i];
6768 				}
6769 			    }
6770 			  else
6771 			    {
6772 			      int k;
6773 			      /* We can use this as a reload reg.  */
6774 			      /* Mark the register as in use for this part of
6775 				 the insn.  */
6776 			      mark_reload_reg_in_use (i,
6777 						      rld[r].opnum,
6778 						      rld[r].when_needed,
6779 						      rld[r].mode);
6780 			      rld[r].reg_rtx = last_reg;
6781 			      reload_inherited[r] = 1;
6782 			      reload_inheritance_insn[r]
6783 				= reg_reloaded_insn[i];
6784 			      reload_spill_index[r] = i;
6785 			      for (k = 0; k < nr; k++)
6786 				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6787 						  i + k);
6788 			    }
6789 			}
6790 		    }
6791 		}
6792 	    }
6793 
6794 	  /* Here's another way to see if the value is already lying around.  */
6795 	  if (inheritance
6796 	      && rld[r].in != 0
6797 	      && ! reload_inherited[r]
6798 	      && rld[r].out == 0
6799 	      && (CONSTANT_P (rld[r].in)
6800 		  || GET_CODE (rld[r].in) == PLUS
6801 		  || REG_P (rld[r].in)
6802 		  || MEM_P (rld[r].in))
6803 	      && (rld[r].nregs == max_group_size
6804 		  || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6805 	    search_equiv = rld[r].in;
6806 
6807 	  if (search_equiv)
6808 	    {
6809 	      rtx equiv
6810 		= find_equiv_reg (search_equiv, insn, rld[r].rclass,
6811 				  -1, NULL, 0, rld[r].mode);
6812 	      int regno = 0;
6813 
6814 	      if (equiv != 0)
6815 		{
6816 		  if (REG_P (equiv))
6817 		    regno = REGNO (equiv);
6818 		  else
6819 		    {
6820 		      /* This must be a SUBREG of a hard register.
6821 			 Make a new REG since this might be used in an
6822 			 address and not all machines support SUBREGs
6823 			 there.  */
6824 		      gcc_assert (GET_CODE (equiv) == SUBREG);
6825 		      regno = subreg_regno (equiv);
6826 		      equiv = gen_rtx_REG (rld[r].mode, regno);
6827 		      /* If we choose EQUIV as the reload register, but the
6828 			 loop below decides to cancel the inheritance, we'll
6829 			 end up reloading EQUIV in rld[r].mode, not the mode
6830 			 it had originally.  That isn't safe when EQUIV isn't
6831 			 available as a spill register since its value might
6832 			 still be live at this point.  */
6833 		      for (i = regno; i < regno + (int) rld[r].nregs; i++)
6834 			if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6835 			  equiv = 0;
6836 		    }
6837 		}
6838 
6839 	      /* If we found a spill reg, reject it unless it is free
6840 		 and of the desired class.  */
6841 	      if (equiv != 0)
6842 		{
6843 		  int regs_used = 0;
6844 		  int bad_for_class = 0;
6845 		  int max_regno = regno + rld[r].nregs;
6846 
6847 		  for (i = regno; i < max_regno; i++)
6848 		    {
6849 		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6850 						      i);
6851 		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6852 							   i);
6853 		    }
6854 
6855 		  if ((regs_used
6856 		       && ! free_for_value_p (regno, rld[r].mode,
6857 					      rld[r].opnum, rld[r].when_needed,
6858 					      rld[r].in, rld[r].out, r, 1))
6859 		      || bad_for_class)
6860 		    equiv = 0;
6861 		}
6862 
6863 	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6864 		equiv = 0;
6865 
6866 	      /* We found a register that contains the value we need.
6867 		 If this register is the same as an `earlyclobber' operand
6868 		 of the current insn, just mark it as a place to reload from
6869 		 since we can't use it as the reload register itself.  */
6870 
6871 	      if (equiv != 0)
6872 		for (i = 0; i < n_earlyclobbers; i++)
6873 		  if (reg_overlap_mentioned_for_reload_p (equiv,
6874 							  reload_earlyclobbers[i]))
6875 		    {
6876 		      if (! rld[r].optional)
6877 			reload_override_in[r] = equiv;
6878 		      equiv = 0;
6879 		      break;
6880 		    }
6881 
6882 	      /* If the equiv register we have found is explicitly clobbered
6883 		 in the current insn, it depends on the reload type if we
6884 		 can use it, use it for reload_override_in, or not at all.
6885 		 In particular, we then can't use EQUIV for a
6886 		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
6887 
6888 	      if (equiv != 0)
6889 		{
6890 		  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6891 		    switch (rld[r].when_needed)
6892 		      {
6893 		      case RELOAD_FOR_OTHER_ADDRESS:
6894 		      case RELOAD_FOR_INPADDR_ADDRESS:
6895 		      case RELOAD_FOR_INPUT_ADDRESS:
6896 		      case RELOAD_FOR_OPADDR_ADDR:
6897 			break;
6898 		      case RELOAD_OTHER:
6899 		      case RELOAD_FOR_INPUT:
6900 		      case RELOAD_FOR_OPERAND_ADDRESS:
6901 			if (! rld[r].optional)
6902 			  reload_override_in[r] = equiv;
6903 			/* Fall through.  */
6904 		      default:
6905 			equiv = 0;
6906 			break;
6907 		      }
6908 		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6909 		    switch (rld[r].when_needed)
6910 		      {
6911 		      case RELOAD_FOR_OTHER_ADDRESS:
6912 		      case RELOAD_FOR_INPADDR_ADDRESS:
6913 		      case RELOAD_FOR_INPUT_ADDRESS:
6914 		      case RELOAD_FOR_OPADDR_ADDR:
6915 		      case RELOAD_FOR_OPERAND_ADDRESS:
6916 		      case RELOAD_FOR_INPUT:
6917 			break;
6918 		      case RELOAD_OTHER:
6919 			if (! rld[r].optional)
6920 			  reload_override_in[r] = equiv;
6921 			/* Fall through.  */
6922 		      default:
6923 			equiv = 0;
6924 			break;
6925 		      }
6926 		}
6927 
6928 	      /* If we found an equivalent reg, say no code need be generated
6929 		 to load it, and use it as our reload reg.  */
6930 	      if (equiv != 0
6931 		  && (regno != HARD_FRAME_POINTER_REGNUM
6932 		      || !frame_pointer_needed))
6933 		{
6934 		  int nr = hard_regno_nregs[regno][rld[r].mode];
6935 		  int k;
6936 		  rld[r].reg_rtx = equiv;
6937 		  reload_spill_index[r] = regno;
6938 		  reload_inherited[r] = 1;
6939 
6940 		  /* If reg_reloaded_valid is not set for this register,
6941 		     there might be a stale spill_reg_store lying around.
6942 		     We must clear it, since otherwise emit_reload_insns
6943 		     might delete the store.  */
6944 		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6945 		    spill_reg_store[regno] = NULL;
6946 		  /* If any of the hard registers in EQUIV are spill
6947 		     registers, mark them as in use for this insn.  */
6948 		  for (k = 0; k < nr; k++)
6949 		    {
6950 		      i = spill_reg_order[regno + k];
6951 		      if (i >= 0)
6952 			{
6953 			  mark_reload_reg_in_use (regno, rld[r].opnum,
6954 						  rld[r].when_needed,
6955 						  rld[r].mode);
6956 			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6957 					    regno + k);
6958 			}
6959 		    }
6960 		}
6961 	    }
6962 
6963 	  /* If we found a register to use already, or if this is an optional
6964 	     reload, we are done.  */
6965 	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6966 	    continue;
6967 
6968 #if 0
6969 	  /* No longer needed for correct operation.  Might or might
6970 	     not give better code on the average.  Want to experiment?  */
6971 
6972 	  /* See if there is a later reload that has a class different from our
6973 	     class that intersects our class or that requires less register
6974 	     than our reload.  If so, we must allocate a register to this
6975 	     reload now, since that reload might inherit a previous reload
6976 	     and take the only available register in our class.  Don't do this
6977 	     for optional reloads since they will force all previous reloads
6978 	     to be allocated.  Also don't do this for reloads that have been
6979 	     turned off.  */
6980 
6981 	  for (i = j + 1; i < n_reloads; i++)
6982 	    {
6983 	      int s = reload_order[i];
6984 
6985 	      if ((rld[s].in == 0 && rld[s].out == 0
6986 		   && ! rld[s].secondary_p)
6987 		  || rld[s].optional)
6988 		continue;
6989 
6990 	      if ((rld[s].rclass != rld[r].rclass
6991 		   && reg_classes_intersect_p (rld[r].rclass,
6992 					       rld[s].rclass))
6993 		  || rld[s].nregs < rld[r].nregs)
6994 		break;
6995 	    }
6996 
6997 	  if (i == n_reloads)
6998 	    continue;
6999 
7000 	  allocate_reload_reg (chain, r, j == n_reloads - 1);
7001 #endif
7002 	}
7003 
7004       /* Now allocate reload registers for anything non-optional that
7005 	 didn't get one yet.  */
7006       for (j = 0; j < n_reloads; j++)
7007 	{
7008 	  int r = reload_order[j];
7009 
7010 	  /* Ignore reloads that got marked inoperative.  */
7011 	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
7012 	    continue;
7013 
7014 	  /* Skip reloads that already have a register allocated or are
7015 	     optional.  */
7016 	  if (rld[r].reg_rtx != 0 || rld[r].optional)
7017 	    continue;
7018 
7019 	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
7020 	    break;
7021 	}
7022 
7023       /* If that loop got all the way, we have won.  */
7024       if (j == n_reloads)
7025 	{
7026 	  win = 1;
7027 	  break;
7028 	}
7029 
7030       /* Loop around and try without any inheritance.  */
7031     }
7032 
7033   if (! win)
7034     {
7035       /* First undo everything done by the failed attempt
7036 	 to allocate with inheritance.  */
7037       choose_reload_regs_init (chain, save_reload_reg_rtx);
7038 
7039       /* Some sanity tests to verify that the reloads found in the first
7040 	 pass are identical to the ones we have now.  */
7041       gcc_assert (chain->n_reloads == n_reloads);
7042 
7043       for (i = 0; i < n_reloads; i++)
7044 	{
7045 	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
7046 	    continue;
7047 	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
7048 	  for (j = 0; j < n_spills; j++)
7049 	    if (spill_regs[j] == chain->rld[i].regno)
7050 	      if (! set_reload_reg (j, i))
7051 		failed_reload (chain->insn, i);
7052 	}
7053     }
7054 
7055   /* If we thought we could inherit a reload, because it seemed that
7056      nothing else wanted the same reload register earlier in the insn,
7057      verify that assumption, now that all reloads have been assigned.
7058      Likewise for reloads where reload_override_in has been set.  */
7059 
7060   /* If doing expensive optimizations, do one preliminary pass that doesn't
7061      cancel any inheritance, but removes reloads that have been needed only
7062      for reloads that we know can be inherited.  */
7063   for (pass = flag_expensive_optimizations; pass >= 0; pass--)
7064     {
7065       for (j = 0; j < n_reloads; j++)
7066 	{
7067 	  int r = reload_order[j];
7068 	  rtx check_reg;
7069 #ifdef SECONDARY_MEMORY_NEEDED
7070 	  rtx tem;
7071 #endif
7072 	  if (reload_inherited[r] && rld[r].reg_rtx)
7073 	    check_reg = rld[r].reg_rtx;
7074 	  else if (reload_override_in[r]
7075 		   && (REG_P (reload_override_in[r])
7076 		       || GET_CODE (reload_override_in[r]) == SUBREG))
7077 	    check_reg = reload_override_in[r];
7078 	  else
7079 	    continue;
7080 	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
7081 				  rld[r].opnum, rld[r].when_needed, rld[r].in,
7082 				  (reload_inherited[r]
7083 				   ? rld[r].out : const0_rtx),
7084 				  r, 1))
7085 	    {
7086 	      if (pass)
7087 		continue;
7088 	      reload_inherited[r] = 0;
7089 	      reload_override_in[r] = 0;
7090 	    }
7091 	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
7092 	     reload_override_in, then we do not need its related
7093 	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
7094 	     likewise for other reload types.
7095 	     We handle this by removing a reload when its only replacement
7096 	     is mentioned in reload_in of the reload we are going to inherit.
7097 	     A special case are auto_inc expressions; even if the input is
7098 	     inherited, we still need the address for the output.  We can
7099 	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
7100 	     If we succeeded removing some reload and we are doing a preliminary
7101 	     pass just to remove such reloads, make another pass, since the
7102 	     removal of one reload might allow us to inherit another one.  */
7103 	  else if (rld[r].in
7104 		   && rld[r].out != rld[r].in
7105 		   && remove_address_replacements (rld[r].in))
7106 	    {
7107 	      if (pass)
7108 	        pass = 2;
7109 	    }
7110 #ifdef SECONDARY_MEMORY_NEEDED
7111 	  /* If we needed a memory location for the reload, we also have to
7112 	     remove its related reloads.  */
7113 	  else if (rld[r].in
7114 		   && rld[r].out != rld[r].in
7115 		   && (tem = replaced_subreg (rld[r].in), REG_P (tem))
7116 		   && REGNO (tem) < FIRST_PSEUDO_REGISTER
7117 		   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem)),
7118 					       rld[r].rclass, rld[r].inmode)
7119 		   && remove_address_replacements
7120 		      (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
7121 					  rld[r].when_needed)))
7122 	    {
7123 	      if (pass)
7124 	        pass = 2;
7125 	    }
7126 #endif
7127 	}
7128     }
7129 
7130   /* Now that reload_override_in is known valid,
7131      actually override reload_in.  */
7132   for (j = 0; j < n_reloads; j++)
7133     if (reload_override_in[j])
7134       rld[j].in = reload_override_in[j];
7135 
7136   /* If this reload won't be done because it has been canceled or is
7137      optional and not inherited, clear reload_reg_rtx so other
7138      routines (such as subst_reloads) don't get confused.  */
7139   for (j = 0; j < n_reloads; j++)
7140     if (rld[j].reg_rtx != 0
7141 	&& ((rld[j].optional && ! reload_inherited[j])
7142 	    || (rld[j].in == 0 && rld[j].out == 0
7143 		&& ! rld[j].secondary_p)))
7144       {
7145 	int regno = true_regnum (rld[j].reg_rtx);
7146 
7147 	if (spill_reg_order[regno] >= 0)
7148 	  clear_reload_reg_in_use (regno, rld[j].opnum,
7149 				   rld[j].when_needed, rld[j].mode);
7150 	rld[j].reg_rtx = 0;
7151 	reload_spill_index[j] = -1;
7152       }
7153 
7154   /* Record which pseudos and which spill regs have output reloads.  */
7155   for (j = 0; j < n_reloads; j++)
7156     {
7157       int r = reload_order[j];
7158 
7159       i = reload_spill_index[r];
7160 
7161       /* I is nonneg if this reload uses a register.
7162 	 If rld[r].reg_rtx is 0, this is an optional reload
7163 	 that we opted to ignore.  */
7164       if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
7165 	  && rld[r].reg_rtx != 0)
7166 	{
7167 	  int nregno = REGNO (rld[r].out_reg);
7168 	  int nr = 1;
7169 
7170 	  if (nregno < FIRST_PSEUDO_REGISTER)
7171 	    nr = hard_regno_nregs[nregno][rld[r].mode];
7172 
7173 	  while (--nr >= 0)
7174 	    SET_REGNO_REG_SET (&reg_has_output_reload,
7175 			       nregno + nr);
7176 
7177 	  if (i >= 0)
7178 	    add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
7179 
7180 	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
7181 		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
7182 		      || rld[r].when_needed == RELOAD_FOR_INSN);
7183 	}
7184     }
7185 }
7186 
7187 /* Deallocate the reload register for reload R.  This is called from
7188    remove_address_replacements.  */
7189 
7190 void
7191 deallocate_reload_reg (int r)
7192 {
7193   int regno;
7194 
7195   if (! rld[r].reg_rtx)
7196     return;
7197   regno = true_regnum (rld[r].reg_rtx);
7198   rld[r].reg_rtx = 0;
7199   if (spill_reg_order[regno] >= 0)
7200     clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7201 			     rld[r].mode);
7202   reload_spill_index[r] = -1;
7203 }
7204 
/* These arrays are filled by emit_reload_insns and its subroutines.
   Each holds a sequence of reload insns of one category; the categories
   mirror the RELOAD_FOR_* / RELOAD_OTHER reload types, and the per-operand
   arrays are indexed by operand number.  The sequences are later spliced
   around the reloaded insn in the proper order.  */
static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *other_input_address_reload_insns = 0;
static rtx_insn *other_input_reload_insns = 0;
static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *operand_reload_insns = 0;
static rtx_insn *other_operand_reload_insns = 0;
static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];

/* Values to be put in spill_reg_store are put here first.  Instructions
   must only be placed here if the associated reload register reaches
   the end of the instruction's reload sequence.  */
static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
/* Hard registers whose contents died in the insn being reloaded.  */
static HARD_REG_SET reg_reloaded_died;
7223 
7224 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7225    of class NEW_CLASS with mode NEW_MODE.  Or alternatively, if alt_reload_reg
7226    is nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7227    adjusted register, and return true.  Otherwise, return false.  */
7228 static bool
7229 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7230 			    enum reg_class new_class,
7231 			    machine_mode new_mode)
7232 
7233 {
7234   rtx reg;
7235 
7236   for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7237     {
7238       unsigned regno = REGNO (reg);
7239 
7240       if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7241 	continue;
7242       if (GET_MODE (reg) != new_mode)
7243 	{
7244 	  if (!HARD_REGNO_MODE_OK (regno, new_mode))
7245 	    continue;
7246 	  if (hard_regno_nregs[regno][new_mode]
7247 	      > hard_regno_nregs[regno][GET_MODE (reg)])
7248 	    continue;
7249 	  reg = reload_adjust_reg_for_mode (reg, new_mode);
7250 	}
7251       *reload_reg = reg;
7252       return true;
7253     }
7254   return false;
7255 }
7256 
7257 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7258    pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7259    nonzero, if that is suitable.  On success, change *RELOAD_REG to the
7260    adjusted register, and return true.  Otherwise, return false.  */
7261 static bool
7262 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7263 			     enum insn_code icode)
7264 
7265 {
7266   enum reg_class new_class = scratch_reload_class (icode);
7267   machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7268 
7269   return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7270 				     new_class, new_mode);
7271 }
7272 
/* Generate insns to perform reload RL, which is for the insn in CHAIN and
   has the number J.  OLD contains the value to be used as input.

   The generated insns are accumulated on one of the static sequence
   variables (input_reload_insns etc.) selected by RL->when_needed, so
   that emit_reload_insns can later splice them in the right order.  */

static void
emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
			 rtx old, int j)
{
  rtx_insn *insn = chain->insn;
  rtx reloadreg;
  rtx oldequiv_reg = 0;
  rtx oldequiv = 0;
  /* Nonzero once the reload has been fully handled by one of the
     special cases below, so no ordinary copy insn must be emitted.  */
  int special = 0;
  machine_mode mode;
  /* Points at the sequence variable to which we append our insns.  */
  rtx_insn **where;

  /* delete_output_reload is only invoked properly if old contains
     the original pseudo register.  Since this is replaced with a
     hard reg when RELOAD_OVERRIDE_IN is set, see if we can
     find the pseudo in RELOAD_IN_REG.  This is also used to
     determine whether a secondary reload is needed.  */
  if (reload_override_in[j]
      && (REG_P (rl->in_reg)
	  || (GET_CODE (rl->in_reg) == SUBREG
	      && REG_P (SUBREG_REG (rl->in_reg)))))
    {
      oldequiv = old;
      old = rl->in_reg;
    }
  if (oldequiv == 0)
    oldequiv = old;
  else if (REG_P (oldequiv))
    oldequiv_reg = oldequiv;
  else if (GET_CODE (oldequiv) == SUBREG)
    oldequiv_reg = SUBREG_REG (oldequiv);

  reloadreg = reload_reg_rtx_for_input[j];
  mode = GET_MODE (reloadreg);

  /* If we are reloading from a register that was recently stored in
     with an output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize && REG_P (oldequiv)
      && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
      && spill_reg_store[REGNO (oldequiv)]
      && REG_P (old)
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
			  rl->out_reg)))
    delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);

  /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
     OLDEQUIV.  */

  while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
    oldequiv = SUBREG_REG (oldequiv);
  if (GET_MODE (oldequiv) != VOIDmode
      && mode != GET_MODE (oldequiv))
    oldequiv = gen_lowpart_SUBREG (mode, oldequiv);

  /* Switch to the right place to emit the reload insns.  Each reload
     type accumulates on its own sequence so the final splicing can
     order them correctly relative to the insn.  */
  switch (rl->when_needed)
    {
    case RELOAD_OTHER:
      where = &other_input_reload_insns;
      break;
    case RELOAD_FOR_INPUT:
      where = &input_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      where = &input_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_INPADDR_ADDRESS:
      where = &inpaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      where = &output_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      where = &outaddr_address_reload_insns[rl->opnum];
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      where = &operand_reload_insns;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      where = &other_operand_reload_insns;
      break;
    case RELOAD_FOR_OTHER_ADDRESS:
      where = &other_input_address_reload_insns;
      break;
    default:
      gcc_unreachable ();
    }

  push_to_sequence (*where);

  /* Auto-increment addresses must be reloaded in a special way.  */
  if (rl->out && ! rl->out_reg)
    {
      /* We are not going to bother supporting the case where a
	 incremented register can't be copied directly from
	 OLDEQUIV since this seems highly unlikely.  */
      gcc_assert (rl->secondary_in_reload < 0);

      if (reload_inherited[j])
	oldequiv = reloadreg;

      old = XEXP (rl->in_reg, 0);

      /* Prevent normal processing of this reload.  */
      special = 1;
      /* Output a special code sequence for this case.  */
      inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
    }

  /* If we are reloading a pseudo-register that was set by the previous
     insn, see if we can get rid of that pseudo-register entirely
     by redirecting the previous insn into our reload register.  */

  else if (optimize && REG_P (old)
	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
	   && dead_or_set_p (insn, old)
	   /* This is unsafe if some other reload
	      uses the same reg first.  */
	   && ! conflicts_with_override (reloadreg)
	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
				rl->when_needed, old, rl->out, j, 0))
    {
      rtx_insn *temp = PREV_INSN (insn);
      while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
	temp = PREV_INSN (temp);
      if (temp
	  && NONJUMP_INSN_P (temp)
	  && GET_CODE (PATTERN (temp)) == SET
	  && SET_DEST (PATTERN (temp)) == old
	  /* Make sure we can access insn_operand_constraint.  */
	  && asm_noperands (PATTERN (temp)) < 0
	  /* This is unsafe if operand occurs more than once in current
	     insn.  Perhaps some occurrences aren't reloaded.  */
	  && count_occurrences (PATTERN (insn), old, 0) == 1)
	{
	  /* Shadows the outer OLD; from here on OLD is the destination
	     of TEMP, which is the same pseudo.  */
	  rtx old = SET_DEST (PATTERN (temp));
	  /* Store into the reload register instead of the pseudo.  */
	  SET_DEST (PATTERN (temp)) = reloadreg;

	  /* Verify that resulting insn is valid.

	     Note that we have replaced the destination of TEMP with
	     RELOADREG.  If TEMP references RELOADREG within an
	     autoincrement addressing mode, then the resulting insn
	     is ill-formed and we must reject this optimization.  */
	  extract_insn (temp);
	  if (constrain_operands (1, get_enabled_alternatives (temp))
#ifdef AUTO_INC_DEC
	      && ! find_reg_note (temp, REG_INC, reloadreg)
#endif
	      )
	    {
	      /* If the previous insn is an output reload, the source is
		 a reload register, and its spill_reg_store entry will
		 contain the previous destination.  This is now
		 invalid.  */
	      if (REG_P (SET_SRC (PATTERN (temp)))
		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
		{
		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
		}

	      /* If these are the only uses of the pseudo reg,
		 pretend for GDB it lives in the reload reg we used.  */
	      if (REG_N_DEATHS (REGNO (old)) == 1
		  && REG_N_SETS (REGNO (old)) == 1)
		{
		  reg_renumber[REGNO (old)] = REGNO (reloadreg);
		  if (ira_conflicts_p)
		    /* Inform IRA about the change.  */
		    ira_mark_allocation_change (REGNO (old));
		  alter_reg (REGNO (old), -1, false);
		}
	      special = 1;

	      /* Adjust any debug insns between temp and insn.  */
	      while ((temp = NEXT_INSN (temp)) != insn)
		if (DEBUG_INSN_P (temp))
		  replace_rtx (PATTERN (temp), old, reloadreg);
		else
		  gcc_assert (NOTE_P (temp));
	    }
	  else
	    {
	      /* The constrained insn was invalid; undo the redirection.  */
	      SET_DEST (PATTERN (temp)) = old;
	    }
	}
    }

  /* We can't do that, so output an insn to load RELOADREG.  */

  /* If we have a secondary reload, pick up the secondary register
     and icode, if any.  If OLDEQUIV and OLD are different or
     if this is an in-out reload, recompute whether or not we
     still need a secondary register and what the icode should
     be.  If we still need a secondary register and the class or
     icode is different, go back to reloading from OLD if using
     OLDEQUIV means that we got the wrong type of register.  We
     cannot have different class or icode due to an in-out reload
     because we don't make such reloads when both the input and
     output need secondary reload registers.  */

  if (! special && rl->secondary_in_reload >= 0)
    {
      rtx second_reload_reg = 0;
      rtx third_reload_reg = 0;
      int secondary_reload = rl->secondary_in_reload;
      rtx real_oldequiv = oldequiv;
      rtx real_old = old;
      rtx tmp;
      enum insn_code icode;
      enum insn_code tertiary_icode = CODE_FOR_nothing;

      /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
	 and similarly for OLD.
	 See comments in get_secondary_reload in reload.c.  */
      /* If it is a pseudo that cannot be replaced with its
	 equivalent MEM, we must fall back to reload_in, which
	 will have all the necessary substitutions registered.
	 Likewise for a pseudo that can't be replaced with its
	 equivalent constant.

	 Take extra care for subregs of such pseudos.  Note that
	 we cannot use reg_equiv_mem in this case because it is
	 not in the right mode.  */

      tmp = oldequiv;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
	      || reg_equiv_constant (REGNO (tmp)) != 0))
	{
	  if (! reg_equiv_mem (REGNO (tmp))
	      || num_not_at_initial_offset
	      || GET_CODE (oldequiv) == SUBREG)
	    real_oldequiv = rl->in;
	  else
	    real_oldequiv = reg_equiv_mem (REGNO (tmp));
	}

      /* Same substitution for OLD as was just done for OLDEQUIV.  */
      tmp = old;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
	      || reg_equiv_constant (REGNO (tmp)) != 0))
	{
	  if (! reg_equiv_mem (REGNO (tmp))
	      || num_not_at_initial_offset
	      || GET_CODE (old) == SUBREG)
	    real_old = rl->in;
	  else
	    real_old = reg_equiv_mem (REGNO (tmp));
	}

      second_reload_reg = rld[secondary_reload].reg_rtx;
      if (rld[secondary_reload].secondary_in_reload >= 0)
	{
	  int tertiary_reload = rld[secondary_reload].secondary_in_reload;

	  third_reload_reg = rld[tertiary_reload].reg_rtx;
	  tertiary_icode = rld[secondary_reload].secondary_in_icode;
	  /* We'd have to add more code for quartary reloads.  */
	  gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
	}
      icode = rl->secondary_in_icode;

      if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
	  || (rl->in != 0 && rl->out != 0))
	{
	  /* Re-query the target about the secondary reload needed for
	     the value we will actually use (REAL_OLDEQUIV), since it may
	     differ from the one the reload was originally computed for.  */
	  secondary_reload_info sri, sri2;
	  enum reg_class new_class, new_t_class;

	  sri.icode = CODE_FOR_nothing;
	  sri.prev_sri = NULL;
	  new_class
	    = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
							 rl->rclass, mode,
							 &sri);

	  if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	    second_reload_reg = 0;
	  else if (new_class == NO_REGS)
	    {
	      if (reload_adjust_reg_for_icode (&second_reload_reg,
					       third_reload_reg,
					       (enum insn_code) sri.icode))
		{
		  icode = (enum insn_code) sri.icode;
		  third_reload_reg = 0;
		}
	      else
		{
		  /* Can't adapt the register; reload from OLD instead.  */
		  oldequiv = old;
		  real_oldequiv = real_old;
		}
	    }
	  else if (sri.icode != CODE_FOR_nothing)
	    /* We currently lack a way to express this in reloads.  */
	    gcc_unreachable ();
	  else
	    {
	      /* A secondary class is still required; ask whether loading
		 that intermediate register itself needs a tertiary.  */
	      sri2.icode = CODE_FOR_nothing;
	      sri2.prev_sri = &sri;
	      new_t_class
		= (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
							     new_class, mode,
							     &sri);
	      if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
		{
		  if (reload_adjust_reg_for_temp (&second_reload_reg,
						  third_reload_reg,
						  new_class, mode))
		    {
		      third_reload_reg = 0;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
		{
		  rtx intermediate = second_reload_reg;

		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
						  new_class, mode)
		      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
						      ((enum insn_code)
						       sri2.icode)))
		    {
		      second_reload_reg = intermediate;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
		{
		  rtx intermediate = second_reload_reg;

		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
						  new_class, mode)
		      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
						      new_t_class, mode))
		    {
		      second_reload_reg = intermediate;
		      tertiary_icode = (enum insn_code) sri2.icode;
		    }
		  else
		    {
		      oldequiv = old;
		      real_oldequiv = real_old;
		    }
		}
	      else
		{
		  /* This could be handled more intelligently too.  */
		  oldequiv = old;
		  real_oldequiv = real_old;
		}
	    }
	}

      /* If we still need a secondary reload register, check
	 to see if it is being used as a scratch or intermediate
	 register and generate code appropriately.  If we need
	 a scratch register, use REAL_OLDEQUIV since the form of
	 the insn may depend on the actual address if it is
	 a MEM.  */

      if (second_reload_reg)
	{
	  if (icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (!third_reload_reg);

	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
					  second_reload_reg));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need a scratch register to load the
		 intermediate register (a tertiary reload).  */
	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (second_reload_reg, real_oldequiv,
			       third_reload_reg)));
		}
	      else if (third_reload_reg)
		{
		  gen_reload (third_reload_reg, real_oldequiv,
			      rl->opnum,
			      rl->when_needed);
		  gen_reload (second_reload_reg, third_reload_reg,
			      rl->opnum,
			      rl->when_needed);
		}
	      else
		gen_reload (second_reload_reg, real_oldequiv,
			    rl->opnum,
			    rl->when_needed);

	      /* The main reload below now copies from the intermediate.  */
	      oldequiv = second_reload_reg;
	    }
	}
    }

  /* Emit the ordinary copy into RELOADREG, unless a special case above
     already produced the value or the source is already RELOADREG.  */
  if (! special && ! rtx_equal_p (reloadreg, oldequiv))
    {
      rtx real_oldequiv = oldequiv;

      if ((REG_P (oldequiv)
	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
	   && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
	       || reg_equiv_constant (REGNO (oldequiv)) != 0))
	  || (GET_CODE (oldequiv) == SUBREG
	      && REG_P (SUBREG_REG (oldequiv))
	      && (REGNO (SUBREG_REG (oldequiv))
		  >= FIRST_PSEUDO_REGISTER)
	      && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
		  || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
	  || (CONSTANT_P (oldequiv)
	      && (targetm.preferred_reload_class (oldequiv,
						  REGNO_REG_CLASS (REGNO (reloadreg)))
		  == NO_REGS)))
	real_oldequiv = rl->in;
      gen_reload (reloadreg, real_oldequiv, rl->opnum,
		  rl->when_needed);
    }

  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  /* End this sequence.  */
  *where = get_insns ();
  end_sequence ();

  /* Update reload_override_in so that delete_address_reloads_1
     can see the actual register usage.  */
  if (oldequiv_reg)
    reload_override_in[j] = oldequiv;
}
7734 
/* Generate insns for the output reload RL, which is for the insn described
   by CHAIN and has the number J.  The generated insns are accumulated in
   output_reload_insns[] / other_output_reload_insns[] for later placement
   after the reloaded insn by emit_reload_insns.  */
static void
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
			  int j)
{
  rtx reloadreg;
  rtx_insn *insn = chain->insn;
  /* SPECIAL is set when a secondary-reload insn pattern below has already
     performed the final store into OLD, so no trailing copy is needed.  */
  int special = 0;
  rtx old = rl->out;
  machine_mode mode;
  rtx_insn *p;
  rtx rl_reg_rtx;

  if (rl->when_needed == RELOAD_OTHER)
    start_sequence ();
  else
    push_to_sequence (output_reload_insns[rl->opnum]);

  rl_reg_rtx = reload_reg_rtx_for_output[j];
  mode = GET_MODE (rl_reg_rtx);

  reloadreg = rl_reg_rtx;

  /* If we need two reload regs, set RELOADREG to the intermediate
     one, since it will be stored into OLD.  We might need a secondary
     register only for an input reload, so check again here.  */

  if (rl->secondary_out_reload >= 0)
    {
      rtx real_old = old;
      int secondary_reload = rl->secondary_out_reload;
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;

      /* If OLD is a spilled pseudo with a known memory location, store
	 directly into that location.  */
      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_mem (REGNO (old)) != 0)
	real_old = reg_equiv_mem (REGNO (old));

      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
	{
	  rtx second_reloadreg = reloadreg;
	  reloadreg = rld[secondary_reload].reg_rtx;

	  /* See if RELOADREG is to be used as a scratch register
	     or as an intermediate register.  */
	  if (rl->secondary_out_icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (tertiary_reload < 0);

	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
			  (real_old, second_reloadreg, reloadreg)));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need both a scratch and intermediate reload
		 register.  */

	      enum insn_code tertiary_icode
		= rld[secondary_reload].secondary_out_icode;

	      /* We'd have to add more code for quartary reloads.  */
	      gcc_assert (tertiary_reload < 0
			  || rld[tertiary_reload].secondary_out_reload < 0);

	      if (GET_MODE (reloadreg) != mode)
		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);

	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		  /* Copy primary reload reg to secondary reload reg.
		     (Note that these have been swapped above, then
		     secondary reload reg to OLD using our insn.)  */

		  /* If REAL_OLD is a paradoxical SUBREG, remove it
		     and try to put the opposite SUBREG on
		     RELOADREG.  */
		  strip_paradoxical_subreg (&real_old, &reloadreg);

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (real_old, reloadreg, third_reloadreg)));
		  special = 1;
		}

	      else
		{
		  /* Copy between the reload regs here and then to
		     OUT later.  */

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  if (tertiary_reload >= 0)
		    {
		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		      gen_reload (third_reloadreg, reloadreg,
				  rl->opnum, rl->when_needed);
		      reloadreg = third_reloadreg;
		    }
		}
	    }
	}
    }

  /* Output the last reload insn.  */
  if (! special)
    {
      rtx set;

      /* Don't output the last reload if OLD is not the dest of
	 INSN and is in the src and is clobbered by INSN.  */
      if (! flag_expensive_optimizations
	  || !REG_P (old)
	  || !(set = single_set (insn))
	  || rtx_equal_p (old, SET_DEST (set))
	  || !reg_mentioned_p (old, SET_SRC (set))
	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
	gen_reload (old, reloadreg, rl->opnum,
		    rl->when_needed);
    }

  /* Look at all insns we emitted, just to be safe.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);

	/* If this output reload doesn't come from a spill reg,
	   clear any memory of reloaded copies of the pseudo reg.
	   If this output reload comes from a spill reg,
	   reg_has_output_reload will make this do nothing.  */
	note_stores (pat, forget_old_reloads_1, NULL);

	if (reg_mentioned_p (rl_reg_rtx, pat))
	  {
	    rtx set = single_set (insn);
	    if (reload_spill_index[j] < 0
		&& set
		&& SET_SRC (set) == rl_reg_rtx)
	      {
		int src = REGNO (SET_SRC (set));

		reload_spill_index[j] = src;
		SET_HARD_REG_BIT (reg_is_output_reload, src);
		if (find_regno_note (insn, REG_DEAD, src))
		  SET_HARD_REG_BIT (reg_reloaded_died, src);
	      }
	    if (HARD_REGISTER_P (rl_reg_rtx))
	      {
		int s = rl->secondary_out_reload;
		set = single_set (p);
		/* If this reload copies only to the secondary reload
		   register, the secondary reload does the actual
		   store.  */
		if (s >= 0 && set == NULL_RTX)
		  /* We can't tell what function the secondary reload
		     has and where the actual store to the pseudo is
		     made; leave new_spill_reg_store alone.  */
		  ;
		else if (s >= 0
			 && SET_SRC (set) == rl_reg_rtx
			 && SET_DEST (set) == rld[s].reg_rtx)
		  {
		    /* Usually the next instruction will be the
		       secondary reload insn;  if we can confirm
		       that it is, setting new_spill_reg_store to
		       that insn will allow an extra optimization.  */
		    rtx s_reg = rld[s].reg_rtx;
		    rtx_insn *next = NEXT_INSN (p);
		    rld[s].out = rl->out;
		    rld[s].out_reg = rl->out_reg;
		    set = single_set (next);
		    if (set && SET_SRC (set) == s_reg
			&& reload_reg_rtx_reaches_end_p (s_reg, s))
		      {
			SET_HARD_REG_BIT (reg_is_output_reload,
					  REGNO (s_reg));
			new_spill_reg_store[REGNO (s_reg)] = next;
		      }
		  }
		else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
	      }
	  }
      }

  /* Stash the emitted sequence where emit_reload_insns expects it.  */
  if (rl->when_needed == RELOAD_OTHER)
    {
      emit_insn (other_output_reload_insns[rl->opnum]);
      other_output_reload_insns[rl->opnum] = get_insns ();
    }
  else
    output_reload_insns[rl->opnum] = get_insns ();

  /* Carry EH region notes over to the new insns when non-call insns
     can throw.  */
  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  end_sequence ();
}
7940 
/* Do input reloading for reload RL, which is for the insn described by CHAIN
   and has the number J.  Chooses the mode to reload in, records the reload
   register in reload_reg_rtx_for_input[J], emits the input reload insns if
   needed, and tries to delete a now-redundant earlier output reload.  */
static void
do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx_insn *insn = chain->insn;
  /* If the reload value is a MEM (a spilled pseudo's stack slot), reload
     from the original register recorded in in_reg instead.  */
  rtx old = (rl->in && MEM_P (rl->in)
	     ? rl->in_reg : rl->in);
  rtx reg_rtx = rl->reg_rtx;

  if (old && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 This is very tricky because we have three to choose from.
	 There is the mode the insn operand wants (rl->inmode).
	 There is the mode of the reload register RELOADREG.
	 There is the intrinsic mode of the operand, which we could find
	 by stripping some SUBREGs.
	 It turns out that RELOADREG's mode is irrelevant:
	 we can change that arbitrarily.

	 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
	 then the reload reg may not support QImode moves, so use SImode.
	 If foo is in memory due to spilling a pseudo reg, this is safe,
	 because the QImode value is in the least significant part of a
	 slot big enough for a SImode.  If foo is some other sort of
	 memory reference, then it is impossible to reload this case,
	 so previous passes had better make sure this never happens.

	 Then consider a one-word union which has SImode and one of its
	 members is a float, being fetched as (SUBREG:SF union:SI).
	 We must fetch that as SFmode because we could be loading into
	 a float-only register.  In this case OLD's mode is correct.

	 Consider an immediate integer: it has VOIDmode.  Here we need
	 to get a mode from something else.

	 In some cases, there is a fourth mode, the operand's
	 containing mode.  If the insn specifies a containing mode for
	 this operand, it overrides all others.

	 I am not sure whether the algorithm here is always right,
	 but it does the right things in those cases.  */

      mode = GET_MODE (old);
      if (mode == VOIDmode)
	mode = rl->inmode;

      /* We cannot use gen_lowpart_common since it can do the wrong thing
	 when REG_RTX has a multi-word mode.  Note that REG_RTX must
	 always be a REG here.  */
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  reload_reg_rtx_for_input[j] = reg_rtx;

  if (old != 0
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
      && ! rtx_equal_p (reg_rtx, old)
      && reg_rtx != 0)
    emit_input_reload_insns (chain, rld + j, old, j);

  /* When inheriting a wider reload, we have a MEM in rl->in,
     e.g. inheriting a SImode output reload for
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
  if (optimize && reload_inherited[j] && rl->in
      && MEM_P (rl->in)
      && MEM_P (rl->in_reg)
      && reload_spill_index[j] >= 0
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];

  /* If we are reloading a register that was recently stored in with an
     output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize
      && (reload_inherited[j] || reload_override_in[j])
      && reg_rtx
      && REG_P (reg_rtx)
      && spill_reg_store[REGNO (reg_rtx)] != 0
#if 0
      /* There doesn't seem to be any reason to restrict this to pseudos
	 and doing so loses in the case where we are copying from a
	 register of the wrong class.  */
      && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
#endif
      /* The insn might have already some references to stackslots
	 replaced by MEMs, while reload_out_reg still names the
	 original pseudo.  */
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
    delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
}
8039 
/* Do output reloading for reload RL, which is for the insn described by
   CHAIN and has the number J.  Chooses the mode to reload in, records the
   reload register in reload_reg_rtx_for_output[J], tries to delete a prior
   redundant store, and emits the output reload insns unless the output
   operand dies immediately (then only its REG_UNUSED note is updated).
   ??? At some point we need to support handling output reloads of
   JUMP_INSNs or insns that set cc0.  */
static void
do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx note, old;
  rtx_insn *insn = chain->insn;
  /* If this is an output reload that stores something that is
     not loaded in this same reload, see if we can eliminate a previous
     store.  */
  rtx pseudo = rl->out_reg;
  rtx reg_rtx = rl->reg_rtx;

  if (rl->out && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 See comments above (for input reloading).  */
      mode = GET_MODE (rl->out);
      if (mode == VOIDmode)
	{
	  /* VOIDmode should never happen for an output.  */
	  if (asm_noperands (PATTERN (insn)) < 0)
	    /* It's the compiler's fault.  */
	    fatal_insn ("VOIDmode on an output", insn);
	  error_for_asm (insn, "output operand is constant in %<asm%>");
	  /* Prevent crash--use something we know is valid.  */
	  mode = word_mode;
	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
	}
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  reload_reg_rtx_for_output[j] = reg_rtx;

  if (pseudo
      && optimize
      && REG_P (pseudo)
      && ! rtx_equal_p (rl->in_reg, pseudo)
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
      && reg_last_reload_reg[REGNO (pseudo)])
    {
      int pseudo_no = REGNO (pseudo);
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);

      /* We don't need to test full validity of last_regno for
	 inherit here; we only want to know if the store actually
	 matches the pseudo.  */
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
	  && reg_reloaded_contents[last_regno] == pseudo_no
	  && spill_reg_store[last_regno]
	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
	delete_output_reload (insn, j, last_regno, reg_rtx);
    }

  old = rl->out_reg;
  /* Nothing to do if there is no output, no reload register, or the
     output already lives in the reload register.  */
  if (old == 0
      || reg_rtx == 0
      || rtx_equal_p (old, reg_rtx))
    return;

  /* An output operand that dies right away does need a reload,
     but need not be copied from it.  Show the new location in the
     REG_UNUSED note.  */
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
    {
      XEXP (note, 0) = reg_rtx;
      return;
    }
  /* Likewise for a SUBREG of an operand that dies.  */
  else if (GET_CODE (old) == SUBREG
	   && REG_P (SUBREG_REG (old))
	   && 0 != (note = find_reg_note (insn, REG_UNUSED,
					  SUBREG_REG (old))))
    {
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
      return;
    }
  else if (GET_CODE (old) == SCRATCH)
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
       but we don't want to make an output reload.  */
    return;

  /* If is a JUMP_INSN, we can't support output reloads yet.  */
  gcc_assert (NONJUMP_INSN_P (insn));

  emit_output_reload_insns (chain, rld + j, j);
}
8132 
8133 /* A reload copies values of MODE from register SRC to register DEST.
8134    Return true if it can be treated for inheritance purposes like a
8135    group of reloads, each one reloading a single hard register.  The
8136    caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8137    occupy the same number of hard registers.  */
8138 
8139 static bool
8140 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8141 		     int src ATTRIBUTE_UNUSED,
8142 		     machine_mode mode ATTRIBUTE_UNUSED)
8143 {
8144 #ifdef CANNOT_CHANGE_MODE_CLASS
8145   return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8146 	  && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8147 #else
8148   return true;
8149 #endif
8150 }
8151 
/* Output insns to reload values in and out of the chosen reload regs.
   Also updates the global inheritance bookkeeping (spill_reg_store,
   reg_last_reload_reg, reg_reloaded_* sets) so later insns can reuse
   the values left in reload registers.  */

static void
emit_reload_insns (struct insn_chain *chain)
{
  rtx_insn *insn = chain->insn;

  int j;

  CLEAR_HARD_REG_SET (reg_reloaded_died);

  /* Reset the per-operand sequences that do_input_reload /
     do_output_reload will fill in below.  */
  for (j = 0; j < reload_n_operands; j++)
    input_reload_insns[j] = input_address_reload_insns[j]
      = inpaddr_address_reload_insns[j]
      = output_reload_insns[j] = output_address_reload_insns[j]
      = outaddr_address_reload_insns[j]
      = other_output_reload_insns[j] = 0;
  other_input_address_reload_insns = 0;
  other_input_reload_insns = 0;
  operand_reload_insns = 0;
  other_operand_reload_insns = 0;

  /* Dump reloads into the dump file.  */
  if (dump_file)
    {
      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
      debug_reload_to_stream (dump_file);
    }

  for (j = 0; j < n_reloads; j++)
    if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
      {
	unsigned int i;

	for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
	  new_spill_reg_store[i] = 0;
      }

  /* Now output the instructions to copy the data into and out of the
     reload registers.  Do these in the order that the reloads were reported,
     since reloads of base and index registers precede reloads of operands
     and the operands may need the base and index registers reloaded.  */

  for (j = 0; j < n_reloads; j++)
    {
      do_input_reload (chain, rld + j, j);
      do_output_reload (chain, rld + j, j);
    }

  /* Now write all the insns we made for reloads in the order expected by
     the allocation functions.  Prior to the insn being reloaded, we write
     the following reloads:

     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.

     RELOAD_OTHER reloads.

     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
     RELOAD_FOR_INPUT reload for the operand.

     RELOAD_FOR_OPADDR_ADDRS reloads.

     RELOAD_FOR_OPERAND_ADDRESS reloads.

     After the insn being reloaded, we write the following:

     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
     reloads for the operand.  The RELOAD_OTHER output reloads are
     output in descending order by reload number.  */

  emit_insn_before (other_input_address_reload_insns, insn);
  emit_insn_before (other_input_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insn_before (inpaddr_address_reload_insns[j], insn);
      emit_insn_before (input_address_reload_insns[j], insn);
      emit_insn_before (input_reload_insns[j], insn);
    }

  emit_insn_before (other_operand_reload_insns, insn);
  emit_insn_before (operand_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
      x = emit_insn_after (output_address_reload_insns[j], x);
      x = emit_insn_after (output_reload_insns[j], x);
      emit_insn_after (other_output_reload_insns[j], x);
    }

  /* For all the spill regs newly reloaded in this instruction,
     record what they were reloaded from, so subsequent instructions
     can inherit the reloads.

     Update spill_reg_store for the reloads of this insn.
     Copy the elements that were updated in the loop above.  */

  for (j = 0; j < n_reloads; j++)
    {
      /* R is the reload number in allocation order; I is the spill
	 register index it used, or negative if none.  */
      int r = reload_order[j];
      int i = reload_spill_index[r];

      /* If this is a non-inherited input reload from a pseudo, we must
	 clear any memory of a previous store to the same pseudo.  Only do
	 something if there will not be an output reload for the pseudo
	 being reloaded.  */
      if (rld[r].in_reg != 0
	  && ! (reload_inherited[r] || reload_override_in[r]))
	{
	  rtx reg = rld[r].in_reg;

	  if (GET_CODE (reg) == SUBREG)
	    reg = SUBREG_REG (reg);

	  if (REG_P (reg)
	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	      && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
	    {
	      int nregno = REGNO (reg);

	      if (reg_last_reload_reg[nregno])
		{
		  int last_regno = REGNO (reg_last_reload_reg[nregno]);

		  if (reg_reloaded_contents[last_regno] == nregno)
		    spill_reg_store[last_regno] = 0;
		}
	    }
	}

      /* I is nonneg if this reload used a register.
	 If rld[r].reg_rtx is 0, this is an optional reload
	 that we opted to ignore.  */

      if (i >= 0 && rld[r].reg_rtx != 0)
	{
	  int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
	  int k;

	  /* For a multi register reload, we need to check if all or part
	     of the value lives to the end.  */
	  for (k = 0; k < nr; k++)
	    if (reload_reg_reaches_end_p (i + k, r))
	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);

	  /* Maybe the spill reg contains a copy of reload_out.  */
	  if (rld[r].out != 0
	      && (REG_P (rld[r].out)
		  || (rld[r].out_reg
		      ? REG_P (rld[r].out_reg)
		      /* The reload value is an auto-modification of
			 some kind.  For PRE_INC, POST_INC, PRE_DEC
			 and POST_DEC, we record an equivalence
			 between the reload register and the operand
			 on the optimistic assumption that we can make
			 the equivalence hold.  reload_as_needed must
			 then either make it hold or invalidate the
			 equivalence.

			 PRE_MODIFY and POST_MODIFY addresses are reloaded
			 somewhat differently, and allowing them here leads
			 to problems.  */
		      : (GET_CODE (rld[r].out) != POST_MODIFY
			 && GET_CODE (rld[r].out) != PRE_MODIFY))))
	    {
	      rtx reg;

	      reg = reload_reg_rtx_for_output[r];
	      if (reload_reg_rtx_reaches_end_p (reg, r))
		{
		  machine_mode mode = GET_MODE (reg);
		  int regno = REGNO (reg);
		  int nregs = hard_regno_nregs[regno][mode];
		  rtx out = (REG_P (rld[r].out)
			     ? rld[r].out
			     : rld[r].out_reg
			     ? rld[r].out_reg
/* AUTO_INC */		     : XEXP (rld[r].in_reg, 0));
		  int out_regno = REGNO (out);
		  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
				   : hard_regno_nregs[out_regno][mode]);
		  bool piecemeal;

		  spill_reg_store[regno] = new_spill_reg_store[regno];
		  spill_reg_stored_to[regno] = out;
		  reg_last_reload_reg[out_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
			       && nregs == out_nregs
			       && inherit_piecemeal_p (out_regno, regno, mode));

		  /* If OUT_REGNO is a hard register, it may occupy more than
		     one register.  If it does, say what is in the
		     rest of the registers assuming that both registers
		     agree on how many words the object takes.  If not,
		     invalidate the subsequent registers.  */

		  if (HARD_REGISTER_NUM_P (out_regno))
		    for (k = 1; k < out_nregs; k++)
		      reg_last_reload_reg[out_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Now do the inverse operation.  */
		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
			   ? out_regno
			   : out_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    regno + k);
		    }
		}
	    }
	  /* Maybe the spill reg contains a copy of reload_in.  Only do
	     something if there will not be an output reload for
	     the register being reloaded.  */
	  else if (rld[r].out_reg == 0
		   && rld[r].in != 0
		   && ((REG_P (rld[r].in)
			&& !HARD_REGISTER_P (rld[r].in)
			&& !REGNO_REG_SET_P (&reg_has_output_reload,
					     REGNO (rld[r].in)))
		       || (REG_P (rld[r].in_reg)
			   && !REGNO_REG_SET_P (&reg_has_output_reload,
						REGNO (rld[r].in_reg))))
		   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
	    {
	      rtx reg;

	      reg = reload_reg_rtx_for_input[r];
	      if (reload_reg_rtx_reaches_end_p (reg, r))
		{
		  machine_mode mode;
		  int regno;
		  int nregs;
		  int in_regno;
		  int in_nregs;
		  rtx in;
		  bool piecemeal;

		  mode = GET_MODE (reg);
		  regno = REGNO (reg);
		  nregs = hard_regno_nregs[regno][mode];
		  if (REG_P (rld[r].in)
		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
		    in = rld[r].in;
		  else if (REG_P (rld[r].in_reg))
		    in = rld[r].in_reg;
		  else
		    in = XEXP (rld[r].in_reg, 0);
		  in_regno = REGNO (in);

		  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
			      : hard_regno_nregs[in_regno][mode]);

		  reg_last_reload_reg[in_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
			       && nregs == in_nregs
			       && inherit_piecemeal_p (regno, in_regno, mode));

		  if (HARD_REGISTER_NUM_P (in_regno))
		    for (k = 1; k < in_nregs; k++)
		      reg_last_reload_reg[in_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Unless we inherited this reload, show we haven't
		     recently done a store.
		     Previous stores of inherited auto_inc expressions
		     also have to be discarded.  */
		  if (! reload_inherited[r]
		      || (rld[r].out && ! rld[r].out_reg))
		    spill_reg_store[regno] = 0;

		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
			   ? in_regno
			   : in_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		      if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    regno + k);
		    }
		}
	    }
	}

      /* The following if-statement was #if 0'd in 1.34 (or before...).
	 It's reenabled in 1.35 because supposedly nothing else
	 deals with this problem.  */

      /* If a register gets output-reloaded from a non-spill register,
	 that invalidates any previous reloaded copy of it.
	 But forget_old_reloads_1 won't get to see it, because
	 it thinks only about the original insn.  So invalidate it here.
	 Also do the same thing for RELOAD_OTHER constraints where the
	 output is discarded.  */
      if (i < 0
	  && ((rld[r].out != 0
	       && (REG_P (rld[r].out)
		   || (MEM_P (rld[r].out)
		       && REG_P (rld[r].out_reg))))
	      || (rld[r].out == 0 && rld[r].out_reg
		  && REG_P (rld[r].out_reg))))
	{
	  rtx out = ((rld[r].out && REG_P (rld[r].out))
		     ? rld[r].out : rld[r].out_reg);
	  int out_regno = REGNO (out);
	  machine_mode mode = GET_MODE (out);

	  /* REG_RTX is now set or clobbered by the main instruction.
	     As the comment above explains, forget_old_reloads_1 only
	     sees the original instruction, and there is no guarantee
	     that the original instruction also clobbered REG_RTX.
	     For example, if find_reloads sees that the input side of
	     a matched operand pair dies in this instruction, it may
	     use the input register as the reload register.

	     Calling forget_old_reloads_1 is a waste of effort if
	     REG_RTX is also the output register.

	     If we know that REG_RTX holds the value of a pseudo
	     register, the code after the call will record that fact.  */
	  if (rld[r].reg_rtx && rld[r].reg_rtx != out)
	    forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);

	  if (!HARD_REGISTER_NUM_P (out_regno))
	    {
	      rtx src_reg;
	      rtx_insn *store_insn = NULL;

	      reg_last_reload_reg[out_regno] = 0;

	      /* If we can find a hard register that is stored, record
		 the storing insn so that we may delete this insn with
		 delete_output_reload.  */
	      src_reg = reload_reg_rtx_for_output[r];

	      if (src_reg)
		{
		  if (reload_reg_rtx_reaches_end_p (src_reg, r))
		    store_insn = new_spill_reg_store[REGNO (src_reg)];
		  else
		    src_reg = NULL_RTX;
		}
	      else
		{
		  /* If this is an optional reload, try to find the
		     source reg from an input reload.  */
		  rtx set = single_set (insn);
		  if (set && SET_DEST (set) == rld[r].out)
		    {
		      int k;

		      src_reg = SET_SRC (set);
		      store_insn = insn;
		      for (k = 0; k < n_reloads; k++)
			{
			  if (rld[k].in == src_reg)
			    {
			      src_reg = reload_reg_rtx_for_input[k];
			      break;
			    }
			}
		    }
		}
	      if (src_reg && REG_P (src_reg)
		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
		{
		  int src_regno, src_nregs, k;
		  rtx note;

		  gcc_assert (GET_MODE (src_reg) == mode);
		  src_regno = REGNO (src_reg);
		  src_nregs = hard_regno_nregs[src_regno][mode];
		  /* The place where to find a death note varies with
		     PRESERVE_DEATH_INFO_REGNO_P .  The condition is not
		     necessarily checked exactly in the code that moves
		     notes, so just check both locations.  */
		  note = find_regno_note (insn, REG_DEAD, src_regno);
		  if (! note && store_insn)
		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
		  for (k = 0; k < src_nregs; k++)
		    {
		      spill_reg_store[src_regno + k] = store_insn;
		      spill_reg_stored_to[src_regno + k] = out;
		      reg_reloaded_contents[src_regno + k] = out_regno;
		      reg_reloaded_insn[src_regno + k] = store_insn;
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
		      if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
							  mode))
			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					  src_regno + k);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
					    src_regno + k);
		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
		      /* NOTE(review): only the first hard reg's bit is
			 toggled here (src_regno, not src_regno + k),
			 unlike the other reg_reloaded_* updates in this
			 loop — looks suspicious for multi-register
			 values; confirm against upstream before
			 changing.  */
		      if (note)
			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
		    }
		  reg_last_reload_reg[out_regno] = src_reg;
		  /* We have to set reg_has_output_reload here, or else
		     forget_old_reloads_1 will clear reg_last_reload_reg
		     right away.  */
		  SET_REGNO_REG_SET (&reg_has_output_reload,
				     out_regno);
		}
	    }
	  else
	    {
	      int k, out_nregs = hard_regno_nregs[out_regno][mode];

	      for (k = 0; k < out_nregs; k++)
		reg_last_reload_reg[out_regno + k] = 0;
	    }
	}
    }
  IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
}
8593 
8594 /* Go through the motions to emit INSN and test if it is strictly valid.
8595    Return the emitted insn if valid, else return NULL.  */
8596 
8597 static rtx_insn *
8598 emit_insn_if_valid_for_reload (rtx pat)
8599 {
8600   rtx_insn *last = get_last_insn ();
8601   int code;
8602 
8603   rtx_insn *insn = emit_insn (pat);
8604   code = recog_memoized (insn);
8605 
8606   if (code >= 0)
8607     {
8608       extract_insn (insn);
8609       /* We want constrain operands to treat this insn strictly in its
8610 	 validity determination, i.e., the way it would after reload has
8611 	 completed.  */
8612       if (constrain_operands (1, get_enabled_alternatives (insn)))
8613 	return insn;
8614     }
8615 
8616   delete_insns_since (last);
8617   return NULL;
8618 }
8619 
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx_insn *
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *tem;
#ifdef SECONDARY_MEMORY_NEEDED
  rtx tem1, tem2;
#endif

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (!strip_paradoxical_subreg (&in, &out))
    strip_paradoxical_subreg (&out, &in);

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Or we can be asked to reload an unary operand that was a fragment of
     an addressing mode, into a register.  If it isn't recognized as-is,
     we try making the unop operand and the reload-register the same:
     (set reg:X (unop:X expr:Y))
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem;
      rtx_insn *insn;
      enum insn_code code;

      /* Substitute any pending reload replacements recorded for the
	 two addends before we build insns from them.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
      if (insn)
	return insn;

      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload OP1.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      /* Look up the backend's add pattern so we can ask whether OP1 is
	 directly usable as the second operand of an add.  */
      code = optab_handler (add_optab, GET_MODE (out));

      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && !insn_operand_matches (code, 2, op1)))
	tem = op0, op0 = op1, op1 = tem;

      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
      if (insn)
	{
	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	  set_dst_reg_note (insn, REG_EQUIV, in, out);
	  return insn;
	}

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      /* OUT must not overlap OP0, or the copy below would clobber the
	 addend we still have to add.  */
      gcc_assert (!reg_overlap_mentioned_p (out, op0));
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      set_dst_reg_note (insn, REG_EQUIV, in, out);
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
	    (REG_P (tem1) && REG_P (tem2)))
	   && REGNO (tem1) < FIRST_PSEUDO_REGISTER
	   && REGNO (tem2) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem1)),
				       REGNO_REG_CLASS (REGNO (tem2)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));

      /* Move IN -> memory -> OUT via two recursive reloads.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif
  else if (REG_P (out) && UNARY_P (in))
    {
      rtx insn;
      rtx op1;
      rtx out_moded;
      rtx_insn *set;

      op1 = find_replacement (&XEXP (in, 0));
      if (op1 != XEXP (in, 0))
	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);

      /* First, try a plain SET.  */
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
      if (set)
	return set;

      /* If that failed, move the inner operand to the reload
	 register, and try the same unop with the inner expression
	 replaced with the reload register.  */

      if (GET_MODE (op1) != GET_MODE (out))
	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
      else
	out_moded = out;

      gen_reload (out_moded, op1, opnum, type);

      insn
	= gen_rtx_SET (VOIDmode, out,
		       gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
				      out_moded));
      insn = emit_insn_if_valid_for_reload (insn);
      if (insn)
	{
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return as_a <rtx_insn *> (insn);
	}

      /* Neither form was recognized; there is no way to reload this
	 unary operand on this target.  */
      fatal_insn ("failure trying to reload:", set);
    }
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    {
      tem = emit_insn (gen_move_insn (out, in));
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
      mark_jump_label (in, tem, 0);
    }

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
8857 
/* Delete a previously made output-reload whose result we now believe
   is not needed.  First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.
   NEW_RELOAD_REG is reload register that reload J is using for REG.  */

static void
delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
		      rtx new_reload_reg)
{
  rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;
  int n_inherited = 0;
  rtx substed;
  unsigned regno;
  int nregs;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (output_reload_insn->deleted ())
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc (REGNO (reg));

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;
#ifdef AUTO_INC_DEC
      if (rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);
#endif
      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    /* A non-inherited use of REG: deleting the store would lose
	       a value some reload still needs.  Give up.  */
	    return;
	}
    }
  /* Count every appearance of REG (and of its memory equivalents) in
     INSN; each one must be accounted for by an inherited reload.  */
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  /* Determine how many hard registers REG spans (a pseudo counts as 1).  */
  regno = REGNO (reg);
  if (regno >= FIRST_PSEUDO_REGISTER)
    nregs = 1;
  else
    nregs = hard_regno_nregs[regno][GET_MODE (reg)];

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx_insn *i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      /* The pseudo still lives elsewhere; only the output reload itself
	 is dead.  Delete it (and any address reloads feeding it).  */
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
9039 
9040 /* We are going to delete DEAD_INSN.  Recursively delete loads of
9041    reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
9042    CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
9043 static void
9044 delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
9045 {
9046   rtx set = single_set (dead_insn);
9047   rtx set2, dst;
9048   rtx_insn *prev, *next;
9049   if (set)
9050     {
9051       rtx dst = SET_DEST (set);
9052       if (MEM_P (dst))
9053 	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
9054     }
9055   /* If we deleted the store from a reloaded post_{in,de}c expression,
9056      we can delete the matching adds.  */
9057   prev = PREV_INSN (dead_insn);
9058   next = NEXT_INSN (dead_insn);
9059   if (! prev || ! next)
9060     return;
9061   set = single_set (next);
9062   set2 = single_set (prev);
9063   if (! set || ! set2
9064       || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
9065       || !CONST_INT_P (XEXP (SET_SRC (set), 1))
9066       || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
9067     return;
9068   dst = SET_DEST (set);
9069   if (! rtx_equal_p (dst, SET_DEST (set2))
9070       || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
9071       || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
9072       || (INTVAL (XEXP (SET_SRC (set), 1))
9073 	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
9074     return;
9075   delete_related_insns (prev);
9076   delete_related_insns (next);
9077 }
9078 
/* Subfunction of delete_address_reloads: process registers found in X.
   For each spill register in X, look for the insn that loaded it; if that
   load is now unused (its value is not needed before CURRENT_INSN), delete
   the load and recurse on the load's own source address.  */
static void
delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
{
  rtx_insn *prev, *i2;
  rtx set, dst;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* Not a register: walk all sub-expressions looking for registers.  */
  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only spill (reload) registers are candidates for deletion here.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* Don't touch insns that predate this reload pass.  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* PREV's value is unused: recurse on its source address, forget the
     cached contents of DST, and delete the load itself.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
9187 
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;
  rtx_insn *add_insn;
  int code;
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* PRE/POST_MODIFY carry the increment expression explicitly as the
     second operand of a PLUS; otherwise build it from INC_AMOUNT.  */
  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = GEN_INT (inc_amount);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  if (constrain_operands (1, get_enabled_alternatives (add_insn)))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));
	      return;
	    }
	}
      /* The direct increment was not recognized; discard it and fall
	 through to the conservative sequence below.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      if (CONST_INT_P (inc))
	emit_insn (gen_add2_insn (reloadreg,
				  gen_int_mode (-INTVAL (inc),
						GET_MODE (reloadreg))));
      else
	emit_insn (gen_sub2_insn (reloadreg, inc));
    }
}
9296 
9297 #ifdef AUTO_INC_DEC
9298 static void
9299 add_auto_inc_notes (rtx_insn *insn, rtx x)
9300 {
9301   enum rtx_code code = GET_CODE (x);
9302   const char *fmt;
9303   int i, j;
9304 
9305   if (code == MEM && auto_inc_p (XEXP (x, 0)))
9306     {
9307       add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9308       return;
9309     }
9310 
9311   /* Scan all the operand sub-expressions.  */
9312   fmt = GET_RTX_FORMAT (code);
9313   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9314     {
9315       if (fmt[i] == 'e')
9316 	add_auto_inc_notes (insn, XEXP (x, i));
9317       else if (fmt[i] == 'E')
9318 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9319 	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
9320     }
9321 }
9322 #endif
9323