xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/reload.c (revision 8ecbf5f02b752fcb7debe1a8fab1dc82602bc760)
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2    Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains subroutines used only from the file reload1.c.
21    It knows how to scan one insn for operands and values
22    that need to be copied into registers to make valid code.
23    It also finds other operands and values which are valid
24    but for which equivalent values in registers exist and
25    ought to be used instead.
26 
27    Before processing the first insn of the function, call `init_reload'.
28    init_reload actually has to be called earlier anyway.
29 
30    To scan an insn, call `find_reloads'.  This does two things:
31    1. sets up tables describing which values must be reloaded
32    for this insn, and what kind of hard regs they must be reloaded into;
33    2. optionally records the locations where those values appear in
34    the data, so they can be replaced properly later.
35    This is done only if the second arg to `find_reloads' is nonzero.
36 
37    The third arg to `find_reloads' specifies the number of levels
38    of indirect addressing supported by the machine.  If it is zero,
39    indirect addressing is not valid.  If it is one, (MEM (REG n))
40    is valid even if (REG n) did not get a hard register; if it is two,
41    (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42    hard register, and similarly for higher values.
43 
44    Then you must choose the hard regs to reload those pseudo regs into,
45    and generate appropriate load insns before this insn and perhaps
46    also store insns after this insn.  Set up the array `reload_reg_rtx'
47    to contain the REG rtx's for the registers you used.  In some
48    cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49    for certain reloads.  Then that tells you which register to use,
50    so you do not need to allocate one.  But you still do need to add extra
51    instructions to copy the value into and out of that register.
52 
53    Finally you must call `subst_reloads' to substitute the reload reg rtx's
54    into the locations already recorded.
55 
56 NOTE SIDE EFFECTS:
57 
58    find_reloads can alter the operands of the instruction it is called on.
59 
60    1. Two operands of any sort may be interchanged, if they are in a
61    commutative instruction.
62    This happens only if find_reloads thinks the instruction will compile
63    better that way.
64 
65    2. Pseudo-registers that are equivalent to constants are replaced
66    with those constants if they are not in hard registers.
67 
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71 
72 Using a reload register for several reloads in one insn:
73 
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77 
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81 
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload.  */
85 
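/* An editorial sketch, not part of the original sources: roughly how a
   driver such as reload1.c is expected to use the interface described
   above.  The argument names are placeholders for the caller's own state:

	init_reload ();
	...
	for each insn that may need reloads:
	  find_reloads (insn, replace, ind_levels, live_known, reload_reg_p);
	  ... choose a hard register for each rld[i], setting rld[i].reg_rtx ...
	  subst_reloads (insn);

   See reload1.c for the actual driver loop.  */
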
86 #define REG_OK_STRICT
87 
88 /* We do not enable this with CHECKING_P, since it is awfully slow.  */
89 #undef DEBUG_RELOAD
90 
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "memmodel.h"
100 #include "tm_p.h"
101 #include "optabs.h"
102 #include "regs.h"
103 #include "ira.h"
104 #include "recog.h"
105 #include "rtl-error.h"
106 #include "reload.h"
107 #include "addresses.h"
108 #include "params.h"
109 
110 /* True if X is a constant that can be forced into the constant pool.
111    MODE is the mode of the operand, or VOIDmode if not known.  */
112 #define CONST_POOL_OK_P(MODE, X)		\
113   ((MODE) != VOIDmode				\
114    && CONSTANT_P (X)				\
115    && GET_CODE (X) != HIGH			\
116    && !targetm.cannot_force_const_mem (MODE, X))
117 
118 /* True if RCLASS is a non-empty register class that has too few registers
119    to be safely used as a reload target class.  */
120 
121 static inline bool
122 small_register_class_p (reg_class_t rclass)
123 {
124   return (reg_class_size [(int) rclass] == 1
125 	  || (reg_class_size [(int) rclass] >= 1
126 	      && targetm.class_likely_spilled_p (rclass)));
127 }
128 
129 
130 /* All reloads of the current insn are recorded here.  See reload.h for
131    comments.  */
132 int n_reloads;
133 struct reload rld[MAX_RELOADS];
134 
135 /* All the "earlyclobber" operands of the current insn
136    are recorded here.  */
137 int n_earlyclobbers;
138 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139 
140 int reload_n_operands;
141 
142 /* Replacing reloads.
143 
144    If `replace_reloads' is nonzero, then as each reload is recorded
145    an entry is made for it in the table `replacements'.
146    Then later `subst_reloads' can look through that table and
147    perform all the replacements needed.  */
148 
149 /* Nonzero means record the places to replace.  */
150 static int replace_reloads;
151 
152 /* Each replacement is recorded with a structure like this.  */
153 struct replacement
154 {
155   rtx *where;			/* Location to store in */
156   int what;			/* which reload this is for */
157   machine_mode mode;	/* mode it must have */
158 };
159 
160 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
161 
162 /* Number of replacements currently recorded.  */
163 static int n_replacements;
164 
165 /* Used to track what is modified by an operand.  */
166 struct decomposition
167 {
168   int reg_flag;		/* Nonzero if referencing a register.  */
169   int safe;		/* Nonzero if this can't conflict with anything.  */
170   rtx base;		/* Base address for MEM.  */
171   poly_int64_pod start;	/* Starting offset or register number.  */
172   poly_int64_pod end;	/* Ending offset or register number.  */
173 };
174 
175 /* Save MEMs needed to copy from one class of registers to another.  One MEM
176    is used per mode, but normally only one or two modes are ever used.
177 
178    We keep two versions, before and after register elimination.  The one
179    after register elimination is recorded separately for each operand.  This
180    is done in case the address is not valid, to be sure that we reload each
181    operand separately.  */
182 
183 static rtx secondary_memlocs[NUM_MACHINE_MODES];
184 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
185 static int secondary_memlocs_elim_used = 0;
186 
187 /* The instruction we are doing reloads for;
188    so we can test whether a register dies in it.  */
189 static rtx_insn *this_insn;
190 
191 /* Nonzero if this instruction is a user-specified asm with operands.  */
192 static int this_insn_is_asm;
193 
194 /* If hard_regs_live_known is nonzero,
195    we can tell which hard regs are currently live,
196    at least enough to succeed in choosing dummy reloads.  */
197 static int hard_regs_live_known;
198 
199 /* Indexed by hard reg number,
200    element is nonnegative if hard reg has been spilled.
201    This vector is passed to `find_reloads' as an argument
202    and is not changed here.  */
203 static short *static_reload_reg_p;
204 
205 /* Set to 1 in subst_reg_equivs if it changes anything.  */
206 static int subst_reg_equivs_changed;
207 
208 /* On return from push_reload, holds the reload-number for the OUT
209    operand, which can differ from the reload-number for the input operand.  */
210 static int output_reloadnum;
211 
212   /* Compare two RTX's.  */
213 #define MATCHES(x, y) \
214  (x == y || (x != 0 && (REG_P (x)				\
215 			? REG_P (y) && REGNO (x) == REGNO (y)	\
216 			: rtx_equal_p (x, y) && ! side_effects_p (x))))
217 
218   /* Indicates if two reload purposes are for similar enough things that we
219      can merge their reloads.  */
220 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
221   ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
222    || ((when1) == (when2) && (op1) == (op2))		\
223    || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
224    || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
225        && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
226    || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
227        && (when2) == RELOAD_FOR_OTHER_ADDRESS))
228 
229   /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
230 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
231   ((when1) != (when2)					\
232    || ! ((op1) == (op2)					\
233 	 || (when1) == RELOAD_FOR_INPUT			\
234 	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
235 	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
236 
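  /* For example (an editorial note, not from the original sources): two
     RELOAD_FOR_OPERAND_ADDRESS reloads for different operands satisfy
     MERGABLE_RELOADS and leave MERGE_TO_OTHER false, so they can share a
     reload register and keep their reload type; merging a
     RELOAD_FOR_INPUT_ADDRESS reload with a RELOAD_OTHER one is also
     allowed, but MERGE_TO_OTHER is then true and the merged reload is
     demoted to RELOAD_OTHER.  */
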
237   /* If we are going to reload an address, compute the reload type to
238      use.  */
239 #define ADDR_TYPE(type)					\
240   ((type) == RELOAD_FOR_INPUT_ADDRESS			\
241    ? RELOAD_FOR_INPADDR_ADDRESS				\
242    : ((type) == RELOAD_FOR_OUTPUT_ADDRESS		\
243       ? RELOAD_FOR_OUTADDR_ADDRESS			\
244       : (type)))
245 
246 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
247 				  machine_mode, enum reload_type,
248 				  enum insn_code *, secondary_reload_info *);
249 static enum reg_class find_valid_class (machine_mode, machine_mode,
250 					int, unsigned int);
251 static void push_replacement (rtx *, int, machine_mode);
252 static void dup_replacements (rtx *, rtx *);
253 static void combine_reloads (void);
254 static int find_reusable_reload (rtx *, rtx, enum reg_class,
255 				 enum reload_type, int, int);
256 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
257 			      machine_mode, reg_class_t, int, int);
258 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
259 static struct decomposition decompose (rtx);
260 static int immune_p (rtx, rtx, struct decomposition);
261 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
262 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
263 				rtx_insn *, int *);
264 static rtx make_memloc (rtx, int);
265 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
266 					      addr_space_t, rtx *);
267 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
268 				 int, enum reload_type, int, rtx_insn *);
269 static rtx subst_reg_equivs (rtx, rtx_insn *);
270 static rtx subst_indexed_address (rtx);
271 static void update_auto_inc_notes (rtx_insn *, int, int);
272 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
273 				   enum rtx_code, enum rtx_code, rtx *,
274 				   int, enum reload_type,int, rtx_insn *);
275 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
276 				       machine_mode, int,
277 				       enum reload_type, int);
278 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
279 					int, rtx_insn *, int *);
280 static void copy_replacements_1 (rtx *, rtx *, int);
281 static poly_int64 find_inc_amount (rtx, rtx);
282 static int refers_to_mem_for_reload_p (rtx);
283 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
284 					 rtx, rtx *);
285 
286 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
287    list yet.  */
288 
289 static void
290 push_reg_equiv_alt_mem (int regno, rtx mem)
291 {
292   rtx it;
293 
294   for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
295     if (rtx_equal_p (XEXP (it, 0), mem))
296       return;
297 
298   reg_equiv_alt_mem_list (regno)
299     = alloc_EXPR_LIST (REG_EQUIV, mem,
300 		       reg_equiv_alt_mem_list (regno));
301 }
302 
303 /* Determine if any secondary reloads are needed for loading (if IN_P is
304    nonzero) or storing (if IN_P is zero) X to or from a reload register of
305    register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
306    are needed, push them.
307 
308    Return the reload number of the secondary reload we made, or -1 if
309    we didn't need one.  *PICODE is set to the insn_code to use if we do
310    need a secondary reload.  */
311 
312 static int
313 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
314 		       enum reg_class reload_class,
315 		       machine_mode reload_mode, enum reload_type type,
316 		       enum insn_code *picode, secondary_reload_info *prev_sri)
317 {
318   enum reg_class rclass = NO_REGS;
319   enum reg_class scratch_class;
320   machine_mode mode = reload_mode;
321   enum insn_code icode = CODE_FOR_nothing;
322   enum insn_code t_icode = CODE_FOR_nothing;
323   enum reload_type secondary_type;
324   int s_reload, t_reload = -1;
325   const char *scratch_constraint;
326   secondary_reload_info sri;
327 
328   if (type == RELOAD_FOR_INPUT_ADDRESS
329       || type == RELOAD_FOR_OUTPUT_ADDRESS
330       || type == RELOAD_FOR_INPADDR_ADDRESS
331       || type == RELOAD_FOR_OUTADDR_ADDRESS)
332     secondary_type = type;
333   else
334     secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
335 
336   *picode = CODE_FOR_nothing;
337 
338   /* If X is a paradoxical SUBREG, use the inner value to determine both the
339      mode and object being reloaded.  */
340   if (paradoxical_subreg_p (x))
341     {
342       x = SUBREG_REG (x);
343       reload_mode = GET_MODE (x);
344     }
345 
346   /* If X is a pseudo-register that has an equivalent MEM (actually, if it
347      is still a pseudo-register by now, it *must* have an equivalent MEM
348      but we don't want to assume that), use that equivalent when seeing if
349      a secondary reload is needed since whether or not a reload is needed
350      might be sensitive to the form of the MEM.  */
351 
352   if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
353       && reg_equiv_mem (REGNO (x)))
354     x = reg_equiv_mem (REGNO (x));
355 
356   sri.icode = CODE_FOR_nothing;
357   sri.prev_sri = prev_sri;
358   rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
359 						      reload_mode, &sri);
360   icode = (enum insn_code) sri.icode;
361 
362   /* If we don't need any secondary registers, done.  */
363   if (rclass == NO_REGS && icode == CODE_FOR_nothing)
364     return -1;
365 
366   if (rclass != NO_REGS)
367     t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
368 				      reload_mode, type, &t_icode, &sri);
369 
370   /* If we will be using an insn, the secondary reload is for a
371      scratch register.  */
372 
373   if (icode != CODE_FOR_nothing)
374     {
375       /* If IN_P is nonzero, the reload register will be the output in
376 	 operand 0.  If IN_P is zero, the reload register will be the input
377 	 in operand 1.  Outputs should have an initial "=", which we must
378 	 skip.  */
379 
380       /* ??? It would be useful to be able to handle only two, or more than
381 	 three, operands, but for now we can only handle the case of having
382 	 exactly three: output, input and one temp/scratch.  */
383       gcc_assert (insn_data[(int) icode].n_operands == 3);
384 
385       /* ??? We currently have no way to represent a reload that needs
386 	 an icode to reload from an intermediate tertiary reload register.
387 	 We should probably have a new field in struct reload to tag a
388 	 chain of scratch operand reloads onto.   */
389       gcc_assert (rclass == NO_REGS);
390 
391       scratch_constraint = insn_data[(int) icode].operand[2].constraint;
392       gcc_assert (*scratch_constraint == '=');
393       scratch_constraint++;
394       if (*scratch_constraint == '&')
395 	scratch_constraint++;
396       scratch_class = (reg_class_for_constraint
397 		       (lookup_constraint (scratch_constraint)));
398 
399       rclass = scratch_class;
400       mode = insn_data[(int) icode].operand[2].mode;
401     }
402 
403   /* This case isn't valid, so fail.  Reload is allowed to use the same
404      register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
405      in the case of a secondary register, we actually need two different
406      registers for correct code.  We fail here to prevent the possibility of
407      silently generating incorrect code later.
408 
409      The convention is that secondary input reloads are valid only if the
410      secondary_class is different from class.  If you have such a case, you
411    cannot use secondary reloads; you must work around the problem some
412      other way.
413 
414      Allow this when a reload_in/out pattern is being used.  I.e. assume
415      that the generated code handles this case.  */
416 
417   gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
418 	      || t_icode != CODE_FOR_nothing);
419 
420   /* See if we can reuse an existing secondary reload.  */
421   for (s_reload = 0; s_reload < n_reloads; s_reload++)
422     if (rld[s_reload].secondary_p
423 	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
424 	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
425 	&& ((in_p && rld[s_reload].inmode == mode)
426 	    || (! in_p && rld[s_reload].outmode == mode))
427 	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
428 	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
429 	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
430 	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
431 	&& (small_register_class_p (rclass)
432 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
433 	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
434 			     opnum, rld[s_reload].opnum))
435       {
436 	if (in_p)
437 	  rld[s_reload].inmode = mode;
438 	if (! in_p)
439 	  rld[s_reload].outmode = mode;
440 
441 	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
442 	  rld[s_reload].rclass = rclass;
443 
444 	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
445 	rld[s_reload].optional &= optional;
446 	rld[s_reload].secondary_p = 1;
447 	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
448 			    opnum, rld[s_reload].opnum))
449 	  rld[s_reload].when_needed = RELOAD_OTHER;
450 
451 	break;
452       }
453 
454   if (s_reload == n_reloads)
455     {
456       /* If we need a memory location to copy between the two reload regs,
457 	 set it up now.  Note that we do the input case before making
458 	 the reload and the output case after.  This is due to the
459 	 way reloads are output.  */
460 
461       if (in_p && icode == CODE_FOR_nothing
462 	  && targetm.secondary_memory_needed (mode, rclass, reload_class))
463 	{
464 	  get_secondary_mem (x, reload_mode, opnum, type);
465 
466 	  /* We may have just added new reloads.  Make sure we add
467 	     the new reload at the end.  */
468 	  s_reload = n_reloads;
469 	}
470 
471       /* We need to make a new secondary reload for this register class.  */
472       rld[s_reload].in = rld[s_reload].out = 0;
473       rld[s_reload].rclass = rclass;
474 
475       rld[s_reload].inmode = in_p ? mode : VOIDmode;
476       rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
477       rld[s_reload].reg_rtx = 0;
478       rld[s_reload].optional = optional;
479       rld[s_reload].inc = 0;
480       /* Maybe we could combine these, but it seems too tricky.  */
481       rld[s_reload].nocombine = 1;
482       rld[s_reload].in_reg = 0;
483       rld[s_reload].out_reg = 0;
484       rld[s_reload].opnum = opnum;
485       rld[s_reload].when_needed = secondary_type;
486       rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
487       rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
488       rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
489       rld[s_reload].secondary_out_icode
490 	= ! in_p ? t_icode : CODE_FOR_nothing;
491       rld[s_reload].secondary_p = 1;
492 
493       n_reloads++;
494 
495       if (! in_p && icode == CODE_FOR_nothing
496 	  && targetm.secondary_memory_needed (mode, reload_class, rclass))
497 	get_secondary_mem (x, mode, opnum, type);
498     }
499 
500   *picode = icode;
501   return s_reload;
502 }
503 
504 /* If a secondary reload is needed, return its class.  If both an intermediate
505    register and a scratch register are needed, we return the class of the
506    intermediate register.  */
507 reg_class_t
508 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
509 			rtx x)
510 {
511   enum insn_code icode;
512   secondary_reload_info sri;
513 
514   sri.icode = CODE_FOR_nothing;
515   sri.prev_sri = NULL;
516   rclass
517     = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
518   icode = (enum insn_code) sri.icode;
519 
520   /* If there are no secondary reloads at all, we return NO_REGS.
521      If an intermediate register is needed, we return its class.  */
522   if (icode == CODE_FOR_nothing || rclass != NO_REGS)
523     return rclass;
524 
525   /* No intermediate register is needed, but we have a special reload
526      pattern, which we assume for now needs a scratch register.  */
527   return scratch_reload_class (icode);
528 }
529 
530 /* ICODE is the insn_code of a reload pattern.  Check that it has exactly
531    three operands, verify that operand 2 is an output operand, and return
532    its register class.
533    ??? We'd like to be able to handle any pattern with at least 2 operands,
534    for zero or more scratch registers, but that needs more infrastructure.  */
535 enum reg_class
536 scratch_reload_class (enum insn_code icode)
537 {
538   const char *scratch_constraint;
539   enum reg_class rclass;
540 
541   gcc_assert (insn_data[(int) icode].n_operands == 3);
542   scratch_constraint = insn_data[(int) icode].operand[2].constraint;
543   gcc_assert (*scratch_constraint == '=');
544   scratch_constraint++;
545   if (*scratch_constraint == '&')
546     scratch_constraint++;
547   rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
548   gcc_assert (rclass != NO_REGS);
549   return rclass;
550 }
551 
552 /* Return a memory location that will be used to copy X in mode MODE.
553    If we haven't already made a location for this mode in this insn,
554    call find_reloads_address on the location being returned.  */
555 
556 rtx
557 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
558 		   int opnum, enum reload_type type)
559 {
560   rtx loc;
561   int mem_valid;
562 
563   /* By default, if MODE is narrower than a word, widen it to a word.
564      This is required because most machines that require these memory
565      locations do not support short loads and stores from all registers
566      (e.g., FP registers).  */
567 
568   mode = targetm.secondary_memory_needed_mode (mode);
569 
570   /* If we already have made a MEM for this operand in MODE, return it.  */
571   if (secondary_memlocs_elim[(int) mode][opnum] != 0)
572     return secondary_memlocs_elim[(int) mode][opnum];
573 
574   /* If this is the first time we've tried to get a MEM for this mode,
575      allocate a new one.  `something_changed' in reload will get set
576      by noticing that the frame size has changed.  */
577 
578   if (secondary_memlocs[(int) mode] == 0)
579     {
580 #ifdef SECONDARY_MEMORY_NEEDED_RTX
581       secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
582 #else
583       secondary_memlocs[(int) mode]
584 	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
585 #endif
586     }
587 
588   /* Get a version of the address doing any eliminations needed.  If that
589      didn't give us a new MEM, make a new one if it isn't valid.  */
590 
591   loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
592   mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
593 						  MEM_ADDR_SPACE (loc));
594 
595   if (! mem_valid && loc == secondary_memlocs[(int) mode])
596     loc = copy_rtx (loc);
597 
598   /* The only time the call below will do anything is if the stack
599      offset is too large.  In that case IND_LEVELS doesn't matter, so we
600      can just pass a zero.  Adjust the type to be the address of the
601      corresponding object.  If the address was valid, save the eliminated
602      address.  If it wasn't valid, we need to make a reload each time, so
603      don't save it.  */
604 
605   if (! mem_valid)
606     {
607       type =  (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
608 	       : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
609 	       : RELOAD_OTHER);
610 
611       find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
612 			    opnum, type, 0, 0);
613     }
614 
615   secondary_memlocs_elim[(int) mode][opnum] = loc;
616   if (secondary_memlocs_elim_used <= (int)mode)
617     secondary_memlocs_elim_used = (int)mode + 1;
618   return loc;
619 }
620 
621 /* Clear any secondary memory locations we've made.  */
622 
623 void
624 clear_secondary_mem (void)
625 {
626   memset (secondary_memlocs, 0, sizeof secondary_memlocs);
627 }
628 
629 
630 /* Find the largest class which has at least one register valid in
631    mode INNER, and which for every such register, that register number
632    plus N is also valid in OUTER (if in range) and is cheap to move
633    into REGNO.  Such a class must exist.  */
634 
635 static enum reg_class
636 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
637 		  machine_mode inner ATTRIBUTE_UNUSED, int n,
638 		  unsigned int dest_regno ATTRIBUTE_UNUSED)
639 {
640   int best_cost = -1;
641   int rclass;
642   int regno;
643   enum reg_class best_class = NO_REGS;
644   enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
645   unsigned int best_size = 0;
646   int cost;
647 
648   for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
649     {
650       int bad = 0;
651       int good = 0;
652       for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
653 	if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
654 	  {
655 	    if (targetm.hard_regno_mode_ok (regno, inner))
656 	      {
657 		good = 1;
658 		if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
659 		    && !targetm.hard_regno_mode_ok (regno + n, outer))
660 		  bad = 1;
661 	      }
662 	  }
663 
664       if (bad || !good)
665 	continue;
666       cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
667 
668       if ((reg_class_size[rclass] > best_size
669 	   && (best_cost < 0 || best_cost >= cost))
670 	  || best_cost > cost)
671 	{
672 	  best_class = (enum reg_class) rclass;
673 	  best_size = reg_class_size[rclass];
674 	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
675 					  dest_class);
676 	}
677     }
678 
679   gcc_assert (best_size != 0);
680 
681   return best_class;
682 }
683 
684 /* We are trying to reload a subreg of something that is not a register.
685    Find the largest class which contains only registers valid in
686    mode MODE.  OUTER is the mode of the subreg, DEST_CLASS the class in
687    which we would eventually like to obtain the object.  */
688 
689 static enum reg_class
690 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
691 		    machine_mode mode ATTRIBUTE_UNUSED,
692 		    enum reg_class dest_class ATTRIBUTE_UNUSED)
693 {
694   int best_cost = -1;
695   int rclass;
696   int regno;
697   enum reg_class best_class = NO_REGS;
698   unsigned int best_size = 0;
699   int cost;
700 
701   for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
702     {
703       unsigned int computed_rclass_size = 0;
704 
705       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
706         {
707           if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
708 	      && targetm.hard_regno_mode_ok (regno, mode))
709             computed_rclass_size++;
710         }
711 
712       cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
713 
714       if ((computed_rclass_size > best_size
715 	   && (best_cost < 0 || best_cost >= cost))
716 	  || best_cost > cost)
717 	{
718 	  best_class = (enum reg_class) rclass;
719 	  best_size = computed_rclass_size;
720 	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
721 					  dest_class);
722 	}
723     }
724 
725   gcc_assert (best_size != 0);
726 
727 #ifdef LIMIT_RELOAD_CLASS
728   best_class = LIMIT_RELOAD_CLASS (mode, best_class);
729 #endif
730   return best_class;
731 }
732 
733 /* Return the number of a previously made reload that can be combined with
734    a new one, or n_reloads if none of the existing reloads can be used.
735    OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
736    push_reload; they determine the kind of the new reload that we try to
737    combine.  P_IN points to the corresponding value of IN, which can be
738    modified by this function.
739    DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */
740 
741 static int
742 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
743 		      enum reload_type type, int opnum, int dont_share)
744 {
745   rtx in = *p_in;
746   int i;
747   /* We can't merge two reloads if the output of either one is
748      earlyclobbered.  */
749 
750   if (earlyclobber_operand_p (out))
751     return n_reloads;
752 
753   /* We can use an existing reload if the class is right
754      and at least one of IN and OUT is a match
755      and the other is at worst neutral.
756      (A zero compared against anything is neutral.)
757 
758      For targets with small register classes, don't use existing reloads
759      unless they are for the same thing since that can cause us to need
760      more reload registers than we otherwise would.  */
761 
762   for (i = 0; i < n_reloads; i++)
763     if ((reg_class_subset_p (rclass, rld[i].rclass)
764 	 || reg_class_subset_p (rld[i].rclass, rclass))
765 	/* If the existing reload has a register, it must fit our class.  */
766 	&& (rld[i].reg_rtx == 0
767 	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
768 				  true_regnum (rld[i].reg_rtx)))
769 	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
770 	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
771 	    || (out != 0 && MATCHES (rld[i].out, out)
772 		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
773 	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
774 	&& (small_register_class_p (rclass)
775 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
776 	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
777       return i;
778 
779   /* Reloading a plain reg for input can match a reload to postincrement
780      that reg, since the postincrement's value is the right value.
781      Likewise, it can match a preincrement reload, since we regard
782      the preincrementation as happening before any ref in this insn
783      to that register.  */
784   for (i = 0; i < n_reloads; i++)
785     if ((reg_class_subset_p (rclass, rld[i].rclass)
786 	 || reg_class_subset_p (rld[i].rclass, rclass))
787 	/* If the existing reload has a register, it must fit our
788 	   class.  */
789 	&& (rld[i].reg_rtx == 0
790 	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
791 				  true_regnum (rld[i].reg_rtx)))
792 	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
793 	&& ((REG_P (in)
794 	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
795 	     && MATCHES (XEXP (rld[i].in, 0), in))
796 	    || (REG_P (rld[i].in)
797 		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
798 		&& MATCHES (XEXP (in, 0), rld[i].in)))
799 	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
800 	&& (small_register_class_p (rclass)
801 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
802 	&& MERGABLE_RELOADS (type, rld[i].when_needed,
803 			     opnum, rld[i].opnum))
804       {
805 	/* Make sure reload_in ultimately has the increment,
806 	   not the plain register.  */
807 	if (REG_P (in))
808 	  *p_in = rld[i].in;
809 	return i;
810       }
811   return n_reloads;
812 }
813 
814 /* Return true if:
815 
816    (a) (subreg:OUTER_MODE REG ...) represents a word or subword subreg
817        of a multiword value; and
818 
819    (b) the number of *words* in REG does not match the number of *registers*
820        in REG.  */
821 
822 static bool
823 complex_word_subreg_p (machine_mode outer_mode, rtx reg)
824 {
825   machine_mode inner_mode = GET_MODE (reg);
826   poly_uint64 reg_words = REG_NREGS (reg) * UNITS_PER_WORD;
827   return (known_le (GET_MODE_SIZE (outer_mode), UNITS_PER_WORD)
828 	  && maybe_gt (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
829 	  && !known_equal_after_align_up (GET_MODE_SIZE (inner_mode),
830 					  reg_words, UNITS_PER_WORD));
831 }
832 
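/* An illustrative case (an editorial note, not a target-specific fact): with
   4-byte words, a DFmode value held entirely in one 8-byte hard register
   has REG_NREGS == 1, so reg_words == 4 while the inner mode occupies 8
   bytes; a word-sized subreg of such a register is complex in the sense of
   complex_word_subreg_p above, because the value cannot be accessed one
   word per register.  */
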
833 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
834    expression.  MODE is the mode that X will be used in.  OUTPUT is true if
835    the function is invoked for the output part of an enclosing reload.  */
836 
837 static bool
838 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
839 {
840   rtx inner;
841   int regno;
842 
843   /* Only SUBREGs are problematical.  */
844   if (GET_CODE (x) != SUBREG)
845     return false;
846 
847   inner = SUBREG_REG (x);
848 
849   /* If INNER is a constant or PLUS, then INNER will need reloading.  */
850   if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
851     return true;
852 
853   /* If INNER is not a register, then INNER will not need reloading.  */
854   if (!REG_P (inner))
855     return false;
856 
857   regno = REGNO (inner);
858 
859   /* If INNER is not a hard register, then INNER will not need reloading
860      unless it's a mode dependent memory reference.  */
861   if (regno >= FIRST_PSEUDO_REGISTER)
862     return !output
863 	   && reg_equiv_mem (regno) != 0
864 	   && mode_dependent_address_p (XEXP (reg_equiv_mem (regno), 0),
865 					MEM_ADDR_SPACE (reg_equiv_mem (regno)));
866 
867   /* If INNER is not ok for MODE, then INNER will need reloading.  */
868   if (!targetm.hard_regno_mode_ok (subreg_regno (x), mode))
869     return true;
870 
871   /* If this is for an output, and the outer part is a word or smaller,
872      INNER is larger than a word and the number of registers in INNER is
873      not the same as the number of words in INNER, then INNER will need
874      reloading (with an in-out reload).  */
875   return output && complex_word_subreg_p (mode, inner);
876 }
877 
878 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
879    requiring an extra reload register.  The caller has already found that
880    IN contains some reference to REGNO, so check that we can produce the
881    new value in a single step.  E.g. if we have
882    (set (reg r13) (plus (reg r13) (const int 1))), and there is an
883    instruction that adds one to a register, this should succeed.
884    However, if we have something like
885    (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
886    needs to be loaded into a register first, we need a separate reload
887    register.
888    Such PLUS reloads are generated by find_reloads_address_part.
889    The out-of-range PLUS expressions are usually introduced in the instruction
890    patterns by register elimination and by substituting pseudos that have no
891    home register with their function-invariant equivalents.  */
892 static int
893 can_reload_into (rtx in, int regno, machine_mode mode)
894 {
895   rtx dst;
896   rtx_insn *test_insn;
897   int r = 0;
898   struct recog_data_d save_recog_data;
899 
900   /* For matching constraints, we often get notional input reloads where
901      we want to use the original register as the reload register.  I.e.
902      technically this is a non-optional input-output reload, but IN is
903      already a valid register, and has been chosen as the reload register.
904      Speed this up, since it trivially works.  */
905   if (REG_P (in))
906     return 1;
907 
908   /* To test MEMs properly, we'd have to take into account all the reloads
909      that are already scheduled, which can become quite complicated.
910      And since we've already handled address reloads for this MEM, it
911      should always succeed anyway.  */
912   if (MEM_P (in))
913     return 1;
914 
915   /* If we can make a simple SET insn that does the job, everything should
916      be fine.  */
917   dst =  gen_rtx_REG (mode, regno);
918   test_insn = make_insn_raw (gen_rtx_SET (dst, in));
919   save_recog_data = recog_data;
920   if (recog_memoized (test_insn) >= 0)
921     {
922       extract_insn (test_insn);
923       r = constrain_operands (1, get_enabled_alternatives (test_insn));
924     }
925   recog_data = save_recog_data;
926   return r;
927 }
928 
929 /* Record one reload that needs to be performed.
930    IN is an rtx saying where the data are to be found before this instruction.
931    OUT says where they must be stored after the instruction.
932    (IN is zero for data not read, and OUT is zero for data not written.)
933    INLOC and OUTLOC point to the places in the instructions where
934    IN and OUT were found.
935    If IN and OUT are both nonzero, it means the same register must be used
936    to reload both IN and OUT.
937 
938    RCLASS is a register class required for the reloaded data.
939    INMODE is the machine mode that the instruction requires
940    for the reg that replaces IN and OUTMODE is likewise for OUT.
941 
942    If IN is zero, then OUT's location and mode should be passed as
943    INLOC and INMODE.
944 
945    STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
946 
947    OPTIONAL nonzero means this reload does not need to be performed:
948    it can be discarded if that is more convenient.
949 
950    OPNUM and TYPE say what the purpose of this reload is.
951 
952    The return value is the reload-number for this reload.
953 
954    If both IN and OUT are nonzero, in some rare cases we might
955    want to make two separate reloads.  (Actually we never do this now.)
956    Therefore, the reload-number for OUT is stored in
957    output_reloadnum when we return; the return value applies to IN.
958    Usually (presently always), when IN and OUT are nonzero,
959    the two reload-numbers are equal, but the caller should be careful to
960    distinguish them.  */
961 
962 int
963 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
964 	     enum reg_class rclass, machine_mode inmode,
965 	     machine_mode outmode, int strict_low, int optional,
966 	     int opnum, enum reload_type type)
967 {
968   int i;
969   int dont_share = 0;
970   int dont_remove_subreg = 0;
971 #ifdef LIMIT_RELOAD_CLASS
972   rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
973 #endif
974   int secondary_in_reload = -1, secondary_out_reload = -1;
975   enum insn_code secondary_in_icode = CODE_FOR_nothing;
976   enum insn_code secondary_out_icode = CODE_FOR_nothing;
977   enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
978   subreg_in_class = NO_REGS;
979 
980   /* INMODE and/or OUTMODE could be VOIDmode if no mode
981      has been specified for the operand.  In that case,
982      use the operand's mode as the mode to reload.  */
983   if (inmode == VOIDmode && in != 0)
984     inmode = GET_MODE (in);
985   if (outmode == VOIDmode && out != 0)
986     outmode = GET_MODE (out);
987 
988   /* If find_reloads and friends have so far failed to replace a
989      pseudo with its reg_equiv_constant, something went wrong
990      beforehand.
991      Note that it can't simply be done here if we missed it earlier
992      since the constant might need to be pushed into the literal pool
993      and the resulting memref would probably need further
994      reloading.  */
995   if (in != 0 && REG_P (in))
996     {
997       int regno = REGNO (in);
998 
999       gcc_assert (regno < FIRST_PSEUDO_REGISTER
1000 		  || reg_renumber[regno] >= 0
1001 		  || reg_equiv_constant (regno) == NULL_RTX);
1002     }
1003 
1004   /* reg_equiv_constant only contains constants which are obviously
1005      not appropriate as destination.  So if we would need to replace
1006      the destination pseudo with a constant we are in real
1007      trouble.  */
1008   if (out != 0 && REG_P (out))
1009     {
1010       int regno = REGNO (out);
1011 
1012       gcc_assert (regno < FIRST_PSEUDO_REGISTER
1013 		  || reg_renumber[regno] >= 0
1014 		  || reg_equiv_constant (regno) == NULL_RTX);
1015     }
1016 
1017   /* If we have a read-write operand with an address side-effect,
1018      change either IN or OUT so the side-effect happens only once.  */
1019   if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1020     switch (GET_CODE (XEXP (in, 0)))
1021       {
1022       case POST_INC: case POST_DEC:   case POST_MODIFY:
1023 	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1024 	break;
1025 
1026       case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1027 	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1028 	break;
1029 
1030       default:
1031 	break;
1032       }
1033 
1034   /* If we are reloading a (SUBREG constant ...), really reload just the
1035      inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
1036      If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1037      a pseudo and hence will become a MEM) with M1 wider than M2 and the
1038      register is a pseudo, also reload the inside expression.
1039      For machines that extend byte loads, do this for any SUBREG of a pseudo
1040      where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1041      M2 is an integral mode that gets extended when loaded.
1042      Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1043      where either M1 is not valid for R or M2 is wider than a word but we
1044      only need one register to store an M2-sized quantity in R.
1045      (However, if OUT is nonzero, we need to reload the reg *and*
1046      the subreg, so do nothing here, and let the following statement handle it.)
1047 
1048      Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1049      we can't handle it here because CONST_INT does not indicate a mode.
1050 
1051      Similarly, we must reload the inside expression if we have a
1052      STRICT_LOW_PART (presumably, in == out in this case).
1053 
1054      Also reload the inner expression if it does not require a secondary
1055      reload but the SUBREG does.
1056 
1057      Finally, reload the inner expression if it is a register that is in
1058      the class whose registers cannot be referenced in a different size
1059      and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
1060      cannot reload just the inside since we might end up with the wrong
1061      register class.  But if it is inside a STRICT_LOW_PART, we have
1062      no choice, so we hope we do get the right register class there.  */
1063 
1064   scalar_int_mode inner_mode;
1065   if (in != 0 && GET_CODE (in) == SUBREG
1066       && (subreg_lowpart_p (in) || strict_low)
1067       && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
1068 					inmode, rclass)
1069       && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1070       && (CONSTANT_P (SUBREG_REG (in))
1071 	  || GET_CODE (SUBREG_REG (in)) == PLUS
1072 	  || strict_low
1073 	  || (((REG_P (SUBREG_REG (in))
1074 		&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1075 	       || MEM_P (SUBREG_REG (in)))
1076 	      && (paradoxical_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1077 		  || (known_le (GET_MODE_SIZE (inmode), UNITS_PER_WORD)
1078 		      && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (in)),
1079 						 &inner_mode)
1080 		      && GET_MODE_SIZE (inner_mode) <= UNITS_PER_WORD
1081 		      && paradoxical_subreg_p (inmode, inner_mode)
1082 		      && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
1083 		  || (WORD_REGISTER_OPERATIONS
1084 		      && partial_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1085 		      && (known_equal_after_align_down
1086 			  (GET_MODE_SIZE (inmode) - 1,
1087 			   GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1,
1088 			   UNITS_PER_WORD)))))
1089 	  || (REG_P (SUBREG_REG (in))
1090 	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1091 	      /* The case where out is nonzero
1092 		 is handled differently in the following statement.  */
1093 	      && (out == 0 || subreg_lowpart_p (in))
1094 	      && (complex_word_subreg_p (inmode, SUBREG_REG (in))
1095 		  || !targetm.hard_regno_mode_ok (subreg_regno (in), inmode)))
1096 	  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1097 	      && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1098 					  SUBREG_REG (in))
1099 		  == NO_REGS))
1100 	  || (REG_P (SUBREG_REG (in))
1101 	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1102 	      && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (in)),
1103 					 GET_MODE (SUBREG_REG (in)), inmode))))
1104     {
1105 #ifdef LIMIT_RELOAD_CLASS
1106       in_subreg_loc = inloc;
1107 #endif
1108       inloc = &SUBREG_REG (in);
1109       in = *inloc;
1110 
1111       if (!WORD_REGISTER_OPERATIONS
1112 	  && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1113 	  && MEM_P (in))
1114 	/* This is supposed to happen only for paradoxical subregs made by
1115 	   combine.c.  (SUBREG (MEM)) isn't supposed to occur other ways.  */
1116 	gcc_assert (known_le (GET_MODE_SIZE (GET_MODE (in)),
1117 			      GET_MODE_SIZE (inmode)));
1118 
1119       inmode = GET_MODE (in);
1120     }
1121 
1122   /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1123      where M1 is not valid for R if it was not handled by the code above.
1124 
1125      Similar issue for (SUBREG constant ...) if it was not handled by the
1126      code above.  This can happen if SUBREG_BYTE != 0.
1127 
1128      However, we must reload the inner reg *as well as* the subreg in
1129      that case.  */
1130 
1131   if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1132     {
1133       if (REG_P (SUBREG_REG (in)) && HARD_REGISTER_P (SUBREG_REG (in)))
1134 	subreg_in_class
1135 	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1136 			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
1137 						   GET_MODE (SUBREG_REG (in)),
1138 						   SUBREG_BYTE (in),
1139 						   GET_MODE (in)),
1140 			      REGNO (SUBREG_REG (in)));
1141 #if 1 // XXXMRG
1142       else if (REG_P (SUBREG_REG (in))
1143                || GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1144 #else
1145       else if (CONSTANT_P (SUBREG_REG (in))
1146                || GET_CODE (SUBREG_REG (in)) == PLUS)
1147 #endif
1148 	subreg_in_class = find_valid_class_1 (inmode,
1149 					      GET_MODE (SUBREG_REG (in)),
1150 					      rclass);
1151 
1152       /* This relies on the fact that emit_reload_insns outputs the
1153 	 instructions for input reloads of type RELOAD_OTHER in the same
1154 	 order as the reloads.  Thus if the outer reload is also of type
1155 	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1156 	 output before the outer reload.  */
1157       push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1158 		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1159       dont_remove_subreg = 1;
1160     }
1161 
1162   /* Similarly for paradoxical and problematical SUBREGs on the output.
1163      Note that there is no reason we need to worry about the previous value
1164      of SUBREG_REG (out); even if wider than out, storing in a subreg is
1165      entitled to clobber it all (except in the case of a word mode subreg
1166      or of a STRICT_LOW_PART; in the latter case the constraint should
1167      label it input-output.)  */
1168   if (out != 0 && GET_CODE (out) == SUBREG
1169       && (subreg_lowpart_p (out) || strict_low)
1170       && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (out)),
1171 					outmode, rclass)
1172       && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1173       && (CONSTANT_P (SUBREG_REG (out))
1174 	  || strict_low
1175 	  || (((REG_P (SUBREG_REG (out))
1176 		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1177 	       || MEM_P (SUBREG_REG (out)))
1178 	      && (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1179 		  || (WORD_REGISTER_OPERATIONS
1180 		      && partial_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1181 		      && (known_equal_after_align_down
1182 			  (GET_MODE_SIZE (outmode) - 1,
1183 			   GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1,
1184 			   UNITS_PER_WORD)))))
1185 	  || (REG_P (SUBREG_REG (out))
1186 	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1187 	      /* The case of a word mode subreg
1188 		 is handled differently in the following statement.  */
1189 	      && ! (known_le (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1190 		    && maybe_gt (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))),
1191 				 UNITS_PER_WORD))
1192 	      && !targetm.hard_regno_mode_ok (subreg_regno (out), outmode))
1193 	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1194 	      && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1195 					  SUBREG_REG (out))
1196 		  == NO_REGS))
1197 	  || (REG_P (SUBREG_REG (out))
1198 	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1199 	      && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1200 					 GET_MODE (SUBREG_REG (out)),
1201 					 outmode))))
1202     {
1203 #ifdef LIMIT_RELOAD_CLASS
1204       out_subreg_loc = outloc;
1205 #endif
1206       outloc = &SUBREG_REG (out);
1207       out = *outloc;
1208       gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1209 		  || known_le (GET_MODE_SIZE (GET_MODE (out)),
1210 			       GET_MODE_SIZE (outmode)));
1211       outmode = GET_MODE (out);
1212     }
1213 
1214   /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1215      where either M1 is not valid for R or M2 is wider than a word but we
1216      only need one register to store an M2-sized quantity in R.
1217 
1218      However, we must reload the inner reg *as well as* the subreg in
1219      that case and the inner reg is an in-out reload.  */
1220 
1221   if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1222     {
1223       enum reg_class in_out_class
1224 	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1225 			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
1226 						 GET_MODE (SUBREG_REG (out)),
1227 						 SUBREG_BYTE (out),
1228 						 GET_MODE (out)),
1229 			    REGNO (SUBREG_REG (out)));
1230 
1231       /* This relies on the fact that emit_reload_insns outputs the
1232 	 instructions for output reloads of type RELOAD_OTHER in reverse
1233 	 order of the reloads.  Thus if the outer reload is also of type
1234 	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1235 	 output after the outer reload.  */
1236       push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1237 		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1238 		   0, 0, opnum, RELOAD_OTHER);
1239       dont_remove_subreg = 1;
1240     }
1241 
1242   /* If IN appears in OUT, we can't share any input-only reload for IN.  */
1243   if (in != 0 && out != 0 && MEM_P (out)
1244       && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1245       && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1246     dont_share = 1;
1247 
1248   /* If IN is a SUBREG of a hard register, make a new REG.  This
1249      simplifies some of the cases below.  */
1250 
1251   if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1252       && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1253       && ! dont_remove_subreg)
1254     in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1255 
1256   /* Similarly for OUT.  */
1257   if (out != 0 && GET_CODE (out) == SUBREG
1258       && REG_P (SUBREG_REG (out))
1259       && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1260       && ! dont_remove_subreg)
1261     out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1262 
1263   /* Narrow down the class of register wanted if that is
1264      desirable on this machine for efficiency.  */
1265   {
1266     reg_class_t preferred_class = rclass;
1267 
1268     if (in != 0)
1269       preferred_class = targetm.preferred_reload_class (in, rclass);
1270 
1271     /* Output reloads may need analogous treatment, different in detail.  */
1272     if (out != 0)
1273       preferred_class
1274 	= targetm.preferred_output_reload_class (out, preferred_class);
1275 
1276     /* Discard what the target said if we cannot do it.  */
1277     if (preferred_class != NO_REGS
1278 	|| (optional && type == RELOAD_FOR_OUTPUT))
1279       rclass = (enum reg_class) preferred_class;
1280   }
1281 
1282   /* Make sure we use a class that can handle the actual pseudo
1283      inside any subreg.  For example, on the 386, QImode regs
1284      can appear within SImode subregs.  Although GENERAL_REGS
1285      can handle SImode, QImode needs a smaller class.  */
1286 #ifdef LIMIT_RELOAD_CLASS
1287   if (in_subreg_loc)
1288     rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1289   else if (in != 0 && GET_CODE (in) == SUBREG)
1290     rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1291 
1292   if (out_subreg_loc)
1293     rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1294   if (out != 0 && GET_CODE (out) == SUBREG)
1295     rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1296 #endif
1297 
1298   /* Verify that this class is at least possible for the mode that
1299      is specified.  */
1300   if (this_insn_is_asm)
1301     {
1302       machine_mode mode;
1303       if (paradoxical_subreg_p (inmode, outmode))
1304 	mode = inmode;
1305       else
1306 	mode = outmode;
1307       if (mode == VOIDmode)
1308 	{
1309 	  error_for_asm (this_insn, "cannot reload integer constant "
1310 			 "operand in %<asm%>");
1311 	  mode = word_mode;
1312 	  if (in != 0)
1313 	    inmode = word_mode;
1314 	  if (out != 0)
1315 	    outmode = word_mode;
1316 	}
1317       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1318 	if (targetm.hard_regno_mode_ok (i, mode)
1319 	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1320 	  break;
1321       if (i == FIRST_PSEUDO_REGISTER)
1322 	{
1323 	  error_for_asm (this_insn, "impossible register constraint "
1324 			 "in %<asm%>");
1325 	  /* Avoid further trouble with this insn.  */
1326 	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1327 	  /* We used to continue here setting class to ALL_REGS, but it triggers
1328 	     a sanity check on i386 for:
1329 	     void foo(long double d)
1330 	     {
1331 	       asm("" :: "a" (d));
1332 	     }
1333 	     Returning zero here ought to be safe as we take care in
1334 	     find_reloads to not process the reloads when the instruction was
1335 	     replaced by USE.  */
1336 
1337 	  return 0;
1338 	}
1339     }
1340 
1341   /* Optional output reloads are always OK even if we have no register class,
1342      since the function of these reloads is only to have spill_reg_store etc.
1343      set, so that the storing insn can be deleted later.  */
1344   gcc_assert (rclass != NO_REGS
1345 	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1346 
1347   i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1348 
1349   if (i == n_reloads)
1350     {
1351       /* See if we need a secondary reload register to move between CLASS
1352 	 and IN or CLASS and OUT.  Get the icode and push any required reloads
1353 	 needed for each of them if so.  */
1354 
1355       if (in != 0)
1356 	secondary_in_reload
1357 	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1358 				   &secondary_in_icode, NULL);
1359       if (out != 0 && GET_CODE (out) != SCRATCH)
1360 	secondary_out_reload
1361 	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1362 				   type, &secondary_out_icode, NULL);
1363 
1364       /* We found no existing reload suitable for re-use.
1365 	 So add an additional reload.  */
1366 
1367       if (subreg_in_class == NO_REGS
1368 	  && in != 0
1369 	  && (REG_P (in)
1370 	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1371 	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1372 	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1373       /* If a memory location is needed for the copy, make one.  */
1374       if (subreg_in_class != NO_REGS
1375 	  && targetm.secondary_memory_needed (inmode, subreg_in_class, rclass))
1376 	get_secondary_mem (in, inmode, opnum, type);
1377 
1378       i = n_reloads;
1379       rld[i].in = in;
1380       rld[i].out = out;
1381       rld[i].rclass = rclass;
1382       rld[i].inmode = inmode;
1383       rld[i].outmode = outmode;
1384       rld[i].reg_rtx = 0;
1385       rld[i].optional = optional;
1386       rld[i].inc = 0;
1387       rld[i].nocombine = 0;
1388       rld[i].in_reg = inloc ? *inloc : 0;
1389       rld[i].out_reg = outloc ? *outloc : 0;
1390       rld[i].opnum = opnum;
1391       rld[i].when_needed = type;
1392       rld[i].secondary_in_reload = secondary_in_reload;
1393       rld[i].secondary_out_reload = secondary_out_reload;
1394       rld[i].secondary_in_icode = secondary_in_icode;
1395       rld[i].secondary_out_icode = secondary_out_icode;
1396       rld[i].secondary_p = 0;
1397 
1398       n_reloads++;
1399 
1400       if (out != 0
1401           && (REG_P (out)
1402 	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1403 	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1404 	  && (targetm.secondary_memory_needed
1405 	      (outmode, rclass, REGNO_REG_CLASS (reg_or_subregno (out)))))
1406 	get_secondary_mem (out, outmode, opnum, type);
1407     }
1408   else
1409     {
1410       /* We are reusing an existing reload,
1411 	 but we may have additional information for it.
1412 	 For example, we may now have both IN and OUT
1413 	 while the old one may have just one of them.  */
1414 
1415       /* The modes can be different.  If they are, we want to reload in
1416 	 the larger mode, so that the value is valid for both modes.  */
1417       if (inmode != VOIDmode
1418 	  && partial_subreg_p (rld[i].inmode, inmode))
1419 	rld[i].inmode = inmode;
1420       if (outmode != VOIDmode
1421 	  && partial_subreg_p (rld[i].outmode, outmode))
1422 	rld[i].outmode = outmode;
1423       if (in != 0)
1424 	{
1425 	  rtx in_reg = inloc ? *inloc : 0;
1426 	  /* If we merge reloads for two distinct rtl expressions that
1427 	     are identical in content, there might be duplicate address
1428 	     reloads.  Remove the extra set now, so that if we later find
1429 	     that we can inherit this reload, we can get rid of the
1430 	     address reloads altogether.
1431 
1432 	     Do not do this if both reloads are optional since the result
1433 	     would be an optional reload which could potentially leave
1434 	     unresolved address replacements.
1435 
1436 	     It is not sufficient to call transfer_replacements since
1437 	     choose_reload_regs will remove the replacements for address
1438 	     reloads of inherited reloads, which results in the same
1439 	     problem.  */
1440 	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1441 	      && ! (rld[i].optional && optional))
1442 	    {
1443 	      /* We must keep the address reload with the lower operand
1444 		 number alive.  */
1445 	      if (opnum > rld[i].opnum)
1446 		{
1447 		  remove_address_replacements (in);
1448 		  in = rld[i].in;
1449 		  in_reg = rld[i].in_reg;
1450 		}
1451 	      else
1452 		remove_address_replacements (rld[i].in);
1453 	    }
1454 	  /* When emitting reloads we don't only look at the in- and outmode;
1455 	     we also look directly at the operands (in and out).  So we can't
1456 	     simply overwrite them with whatever we have found for this
1457 	     (to-be-merged) reload; we have to "merge" that too.
1458 	     Reusing another reload already verified that we deal with the
1459 	     same operands, just possibly in different modes.  So we
1460 	     overwrite the operands only when the new mode is larger.
1461 	     See also PR33613.  */
1462 	  if (!rld[i].in
1463 	      || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
1464 	    rld[i].in = in;
1465 	  if (!rld[i].in_reg
1466 	      || (in_reg
1467 		  && partial_subreg_p (GET_MODE (rld[i].in_reg),
1468 				       GET_MODE (in_reg))))
1469 	    rld[i].in_reg = in_reg;
1470 	}
1471       if (out != 0)
1472 	{
1473 	  if (!rld[i].out
1474 	      || (out
1475 		  && partial_subreg_p (GET_MODE (rld[i].out),
1476 				       GET_MODE (out))))
1477 	    rld[i].out = out;
1478 	  if (outloc
1479 	      && (!rld[i].out_reg
1480 		  || partial_subreg_p (GET_MODE (rld[i].out_reg),
1481 				       GET_MODE (*outloc))))
1482 	    rld[i].out_reg = *outloc;
1483 	}
1484       if (reg_class_subset_p (rclass, rld[i].rclass))
1485 	rld[i].rclass = rclass;
1486       rld[i].optional &= optional;
1487       if (MERGE_TO_OTHER (type, rld[i].when_needed,
1488 			  opnum, rld[i].opnum))
1489 	rld[i].when_needed = RELOAD_OTHER;
1490       rld[i].opnum = MIN (rld[i].opnum, opnum);
1491     }
1492 
1493   /* If the ostensible rtx being reloaded differs from the rtx found
1494      in the location to substitute, this reload is not safe to combine
1495      because we cannot reliably tell whether it appears in the insn.  */
1496 
1497   if (in != 0 && in != *inloc)
1498     rld[i].nocombine = 1;
1499 
1500 #if 0
1501   /* This was replaced by changes in find_reloads_address_1 and the new
1502      function inc_for_reload, which go with a new meaning of reload_inc.  */
1503 
1504   /* If this is an IN/OUT reload in an insn that sets the CC,
1505      it must be for an autoincrement.  It doesn't work to store
1506      the incremented value after the insn because that would clobber the CC.
1507      So we must do the increment of the value reloaded from,
1508      increment it, store it back, then decrement again.  */
1509   if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1510     {
1511       out = 0;
1512       rld[i].out = 0;
1513       rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1514       /* If we did not find a nonzero amount-to-increment-by,
1515 	 that contradicts the belief that IN is being incremented
1516 	 in an address in this insn.  */
1517       gcc_assert (rld[i].inc != 0);
1518     }
1519 #endif
1520 
1521   /* If we will replace IN and OUT with the reload-reg,
1522      record where they are located so that substitution need
1523      not do a tree walk.  */
1524 
1525   if (replace_reloads)
1526     {
1527       if (inloc != 0)
1528 	{
1529 	  struct replacement *r = &replacements[n_replacements++];
1530 	  r->what = i;
1531 	  r->where = inloc;
1532 	  r->mode = inmode;
1533 	}
1534       if (outloc != 0 && outloc != inloc)
1535 	{
1536 	  struct replacement *r = &replacements[n_replacements++];
1537 	  r->what = i;
1538 	  r->where = outloc;
1539 	  r->mode = outmode;
1540 	}
1541     }
1542 
1543   /* If this reload is just being introduced and it has both
1544      an incoming quantity and an outgoing quantity that are
1545      supposed to be made to match, see if either one of the two
1546      can serve as the place to reload into.
1547 
1548      If one of them is acceptable, set rld[i].reg_rtx
1549      to that one.  */
1550 
1551   if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1552     {
1553       rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1554 					  inmode, outmode,
1555 					  rld[i].rclass, i,
1556 					  earlyclobber_operand_p (out));
1557 
1558       /* If the outgoing register already contains the same value
1559 	 as the incoming one, we can dispense with loading it.
1560 	 The easiest way to tell the caller that is to give a phony
1561 	 value for the incoming operand (same as outgoing one).  */
1562       if (rld[i].reg_rtx == out
1563 	  && (REG_P (in) || CONSTANT_P (in))
1564 	  && find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1565 			     static_reload_reg_p, i, inmode) != 0)
1566 	rld[i].in = out;
1567     }
1568 
1569   /* If this is an input reload and the operand contains a register that
1570      dies in this insn and is used nowhere else, see if it is the right class
1571      to be used for this reload.  Use it if so.  (This occurs most commonly
1572      in the case of paradoxical SUBREGs and in-out reloads).  We cannot do
1573      this if it is also an output reload that mentions the register unless
1574      the output is a SUBREG that clobbers an entire register.
1575 
1576      Note that the operand might be one of the spill regs, if it is a
1577      pseudo reg and we are in a block where spilling has not taken place.
1578      But if there is no spilling in this block, that is OK.
1579      An explicitly used hard reg cannot be a spill reg.  */
1580 
1581   if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1582     {
1583       rtx note;
1584       int regno;
1585       machine_mode rel_mode = inmode;
1586 
1587       if (out && partial_subreg_p (rel_mode, outmode))
1588 	rel_mode = outmode;
1589 
1590       for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1591 	if (REG_NOTE_KIND (note) == REG_DEAD
1592 	    && REG_P (XEXP (note, 0))
1593 	    && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1594 	    && reg_mentioned_p (XEXP (note, 0), in)
1595 	    /* Check that a former pseudo is valid; see find_dummy_reload.  */
1596 	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1597 		|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1598 				    ORIGINAL_REGNO (XEXP (note, 0)))
1599 		    && REG_NREGS (XEXP (note, 0)) == 1))
1600 	    && ! refers_to_regno_for_reload_p (regno,
1601 					       end_hard_regno (rel_mode,
1602 							       regno),
1603 					       PATTERN (this_insn), inloc)
1604 	    && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1605 	    /* If this is also an output reload, IN cannot be used as
1606 	       the reload register if it is set in this insn unless IN
1607 	       is also OUT.  */
1608 	    && (out == 0 || in == out
1609 		|| ! hard_reg_set_here_p (regno,
1610 					  end_hard_regno (rel_mode, regno),
1611 					  PATTERN (this_insn)))
1612 	    /* ??? Why is this code so different from the previous?
1613 	       Is there any simple coherent way to describe the two together?
1614 	       What's going on here?  */
1615 	    && (in != out
1616 		|| (GET_CODE (in) == SUBREG
1617 		    && (known_equal_after_align_up
1618 			(GET_MODE_SIZE (GET_MODE (in)),
1619 			 GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))),
1620 			 UNITS_PER_WORD))))
1621 	    /* Make sure the operand fits in the reg that dies.  */
1622 	    && known_le (GET_MODE_SIZE (rel_mode),
1623 			 GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1624 	    && targetm.hard_regno_mode_ok (regno, inmode)
1625 	    && targetm.hard_regno_mode_ok (regno, outmode))
1626 	  {
1627 	    unsigned int offs;
1628 	    unsigned int nregs = MAX (hard_regno_nregs (regno, inmode),
1629 				      hard_regno_nregs (regno, outmode));
1630 
1631 	    for (offs = 0; offs < nregs; offs++)
1632 	      if (fixed_regs[regno + offs]
1633 		  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1634 					  regno + offs))
1635 		break;
1636 
1637 	    if (offs == nregs
1638 		&& (! (refers_to_regno_for_reload_p
1639 		       (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1640 		    || can_reload_into (in, regno, inmode)))
1641 	      {
1642 		rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1643 		break;
1644 	      }
1645 	  }
1646     }
1647 
1648   if (out)
1649     output_reloadnum = i;
1650 
1651   return i;
1652 }
1653 
1654 /* Record an additional place we must replace a value
1655    for which we have already recorded a reload.
1656    RELOADNUM is the value returned by push_reload
1657    when the reload was recorded.
1658    This is used in insn patterns that use match_dup.  */
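/* For instance, in a hypothetical pattern that repeats an operand with
   match_dup, such as

     (set (match_operand:SI 0 "register_operand" "=r")
          (plus:SI (match_dup 0)
                   (match_operand:SI 1 "general_operand" "g")))

   the value of operand 0 appears in two places in the insn body.  The
   reload itself is recorded only once; the extra location is recorded
   with push_replacement so that subst_reloads later rewrites both
   occurrences with the chosen reload register.  */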
1659 
1660 static void
1661 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1662 {
1663   if (replace_reloads)
1664     {
1665       struct replacement *r = &replacements[n_replacements++];
1666       r->what = reloadnum;
1667       r->where = loc;
1668       r->mode = mode;
1669     }
1670 }
1671 
1672 /* Duplicate any replacement we have recorded to apply at
1673    location ORIG_LOC to also be performed at DUP_LOC.
1674    This is used in insn patterns that use match_dup.  */
1675 
1676 static void
1677 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1678 {
1679   int i, n = n_replacements;
1680 
1681   for (i = 0; i < n; i++)
1682     {
1683       struct replacement *r = &replacements[i];
1684       if (r->where == orig_loc)
1685 	push_replacement (dup_loc, r->what, r->mode);
1686     }
1687 }
1688 
1689 /* Transfer all replacements that used to be in reload FROM to be in
1690    reload TO.  */
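/* For example, transfer_replacements (3, 7) re-tags every replacement
   recorded against reload 7 so that it now belongs to reload 3; the
   recorded locations and modes themselves are left untouched.  */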
1691 
1692 void
1693 transfer_replacements (int to, int from)
1694 {
1695   int i;
1696 
1697   for (i = 0; i < n_replacements; i++)
1698     if (replacements[i].what == from)
1699       replacements[i].what = to;
1700 }
1701 
1702 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1703    or a subpart of it.  If we have any replacements registered for IN_RTX,
1704    cancel the reloads that were supposed to load them.
1705    Return nonzero if we canceled any reloads.  */
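/* In outline: reload_flags[] collects two bits per reload.  Bit 0 is set
   when a recorded replacement for that reload lies inside IN_RTX; bit 1
   is set when some replacement for it survives elsewhere.  Only reloads
   whose flags end up as exactly 1 -- every replacement of theirs was
   inside IN_RTX -- are deallocated below, and their own address reloads
   are cancelled recursively.  */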
1706 int
1707 remove_address_replacements (rtx in_rtx)
1708 {
1709   int i, j;
1710   char reload_flags[MAX_RELOADS];
1711   int something_changed = 0;
1712 
1713   memset (reload_flags, 0, sizeof reload_flags);
1714   for (i = 0, j = 0; i < n_replacements; i++)
1715     {
1716       if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1717 	reload_flags[replacements[i].what] |= 1;
1718       else
1719 	{
1720 	  replacements[j++] = replacements[i];
1721 	  reload_flags[replacements[i].what] |= 2;
1722 	}
1723     }
1724   /* Note that the following store must be done before the recursive calls.  */
1725   n_replacements = j;
1726 
1727   for (i = n_reloads - 1; i >= 0; i--)
1728     {
1729       if (reload_flags[i] == 1)
1730 	{
1731 	  deallocate_reload_reg (i);
1732 	  remove_address_replacements (rld[i].in);
1733 	  rld[i].in = 0;
1734 	  something_changed = 1;
1735 	}
1736     }
1737   return something_changed;
1738 }
1739 
1740 /* If there is only one output reload, and it is not for an earlyclobber
1741    operand, try to combine it with a (logically unrelated) input reload
1742    to reduce the number of reload registers needed.
1743 
1744    This is safe if the input reload does not appear in
1745    the value being output-reloaded, because this implies
1746    it is not needed any more once the original insn completes.
1747 
1748    If that doesn't work, see if we can use any of the registers that
1749    die in this insn as a reload register.  We can if it is of the right
1750    class and does not appear in the value being output-reloaded.  */
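/* A hypothetical example of the case this function targets:

     (set (mem:SI (reg:SI sp)) (plus:SI (reg:SI 100) (const_int 1)))

   where pseudo 100 did not get a hard register and the target allows a
   memory destination here.  Without combining, one spill register is
   needed to load (reg 100) and another to hold the sum before it is
   stored.  Because the input (reg 100) does not appear in the output
   operand (the MEM), the two reloads can share a single register R:
   R is loaded from pseudo 100's stack slot, the insn computes into R,
   and R is then stored to the memory destination.  */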
1751 
1752 static void
1753 combine_reloads (void)
1754 {
1755   int i, regno;
1756   int output_reload = -1;
1757   int secondary_out = -1;
1758   rtx note;
1759 
1760   /* Find the output reload; return unless there is exactly one
1761      and that one is mandatory.  */
1762 
1763   for (i = 0; i < n_reloads; i++)
1764     if (rld[i].out != 0)
1765       {
1766 	if (output_reload >= 0)
1767 	  return;
1768 	output_reload = i;
1769       }
1770 
1771   if (output_reload < 0 || rld[output_reload].optional)
1772     return;
1773 
1774   /* An input-output reload isn't combinable.  */
1775 
1776   if (rld[output_reload].in != 0)
1777     return;
1778 
1779   /* If this reload is for an earlyclobber operand, we can't do anything.  */
1780   if (earlyclobber_operand_p (rld[output_reload].out))
1781     return;
1782 
1783   /* If there is a reload for part of the address of this operand, we would
1784      need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
1785      its life to the point where doing this combine would not lower the
1786      number of spill registers needed.  */
1787   for (i = 0; i < n_reloads; i++)
1788     if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1789 	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1790 	&& rld[i].opnum == rld[output_reload].opnum)
1791       return;
1792 
1793   /* Check each input reload; can we combine it?  */
1794 
1795   for (i = 0; i < n_reloads; i++)
1796     if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1797 	/* Life span of this reload must not extend past main insn.  */
1798 	&& rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1799 	&& rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1800 	&& rld[i].when_needed != RELOAD_OTHER
1801 	&& (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1802 	    == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1803 				       [(int) rld[output_reload].outmode])
1804 	&& known_eq (rld[i].inc, 0)
1805 	&& rld[i].reg_rtx == 0
1806 	/* Don't combine two reloads with different secondary
1807 	   memory locations.  */
1808 	&& (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1809 	    || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1810 	    || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1811 			    secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1812 	&& (targetm.small_register_classes_for_mode_p (VOIDmode)
1813 	    ? (rld[i].rclass == rld[output_reload].rclass)
1814 	    : (reg_class_subset_p (rld[i].rclass,
1815 				   rld[output_reload].rclass)
1816 	       || reg_class_subset_p (rld[output_reload].rclass,
1817 				      rld[i].rclass)))
1818 	&& (MATCHES (rld[i].in, rld[output_reload].out)
1819 	    /* Args reversed because the first arg seems to be
1820 	       the one that we imagine being modified
1821 	       while the second is the one that might be affected.  */
1822 	    || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1823 						      rld[i].in)
1824 		/* However, if the input is a register that appears inside
1825 		   the output, then we also can't share.
1826 		   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1827 		   If the same reload reg is used for both reg 69 and the
1828 		   result to be stored in memory, then that result
1829 		   will clobber the address of the memory ref.  */
1830 		&& ! (REG_P (rld[i].in)
1831 		      && reg_overlap_mentioned_for_reload_p (rld[i].in,
1832 							     rld[output_reload].out))))
1833 	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1834 					 rld[i].when_needed != RELOAD_FOR_INPUT)
1835 	&& (reg_class_size[(int) rld[i].rclass]
1836 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
1837 	/* We will allow making things slightly worse by combining an
1838 	   input and an output, but no worse than that.  */
1839 	&& (rld[i].when_needed == RELOAD_FOR_INPUT
1840 	    || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1841       {
1842 	int j;
1843 
1844 	/* We have found a reload to combine with!  */
1845 	rld[i].out = rld[output_reload].out;
1846 	rld[i].out_reg = rld[output_reload].out_reg;
1847 	rld[i].outmode = rld[output_reload].outmode;
1848 	/* Mark the old output reload as inoperative.  */
1849 	rld[output_reload].out = 0;
1850 	/* The combined reload is needed for the entire insn.  */
1851 	rld[i].when_needed = RELOAD_OTHER;
1852 	/* If the output reload had a secondary reload, copy it.  */
1853 	if (rld[output_reload].secondary_out_reload != -1)
1854 	  {
1855 	    rld[i].secondary_out_reload
1856 	      = rld[output_reload].secondary_out_reload;
1857 	    rld[i].secondary_out_icode
1858 	      = rld[output_reload].secondary_out_icode;
1859 	  }
1860 
1861 	/* Copy any secondary MEM.  */
1862 	if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1863 	  secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1864 	    = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1865 	/* If required, minimize the register class.  */
1866 	if (reg_class_subset_p (rld[output_reload].rclass,
1867 				rld[i].rclass))
1868 	  rld[i].rclass = rld[output_reload].rclass;
1869 
1870 	/* Transfer all replacements from the old reload to the combined.  */
1871 	for (j = 0; j < n_replacements; j++)
1872 	  if (replacements[j].what == output_reload)
1873 	    replacements[j].what = i;
1874 
1875 	return;
1876       }
1877 
1878   /* If this insn has only one operand that is modified or written (assumed
1879      to be the first), it must be the one corresponding to this reload.  It
1880      is safe to use anything that dies in this insn for that output provided
1881      that it does not occur in the output (we already know it isn't an
1882      earlyclobber).  If this is an asm insn, give up.  */
1883 
1884   if (INSN_CODE (this_insn) == -1)
1885     return;
1886 
1887   for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1888     if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1889 	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1890       return;
1891 
1892   /* See if some hard register that dies in this insn and is not used in
1893      the output is of the right class.  This only works if the register we pick
1894      up can fully hold our output reload.  */
1895   for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1896     if (REG_NOTE_KIND (note) == REG_DEAD
1897 	&& REG_P (XEXP (note, 0))
1898 	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1899 						rld[output_reload].out)
1900 	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1901 	&& targetm.hard_regno_mode_ok (regno, rld[output_reload].outmode)
1902 	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1903 			      regno)
1904 	&& (hard_regno_nregs (regno, rld[output_reload].outmode)
1905 	    <= REG_NREGS (XEXP (note, 0)))
1906 	/* Ensure that a secondary or tertiary reload for this output
1907 	   won't want this register.  */
1908 	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1909 	    || (!(TEST_HARD_REG_BIT
1910 		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1911 		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1912 		    || !(TEST_HARD_REG_BIT
1913 			 (reg_class_contents[(int) rld[secondary_out].rclass],
1914 			  regno)))))
1915 	&& !fixed_regs[regno]
1916 	/* Check that a former pseudo is valid; see find_dummy_reload.  */
1917 	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1918 	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1919 			       ORIGINAL_REGNO (XEXP (note, 0)))
1920 		&& REG_NREGS (XEXP (note, 0)) == 1)))
1921       {
1922 	rld[output_reload].reg_rtx
1923 	  = gen_rtx_REG (rld[output_reload].outmode, regno);
1924 	return;
1925       }
1926 }
1927 
1928 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1929    See if one of IN and OUT is a register that may be used;
1930    this is desirable since a spill-register won't be needed.
1931    If so, return the register rtx that proves acceptable.
1932 
1933    INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1934    RCLASS is the register class required for the reload.
1935 
1936    If FOR_REAL is >= 0, it is the number of the reload,
1937    and in some cases when it can be discovered that OUT doesn't need
1938    to be computed, clear out rld[FOR_REAL].out.
1939 
1940    If FOR_REAL is -1, this should not be done, because this call
1941    is just to see if a register can be found, not to find and install it.
1942 
1943    EARLYCLOBBER is nonzero if OUT is an earlyclobber operand.  This
1944    puts an additional constraint on being able to use IN for OUT since
1945    IN must not appear elsewhere in the insn (it is assumed that IN itself
1946    is safe from the earlyclobber).  */
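/* A hypothetical illustration: consider a two-address add whose pattern
   requires operands 0 and 1 to match,

     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 1)))

   where pseudo 100 got no hard register while pseudo 101 sits in a hard
   register of an acceptable class that dies in this insn.  The reload
   pushed for the mismatch has IN = (reg 101) and OUT = (reg 100).  OUT
   cannot serve as the reload register (it is not a hard register), but
   IN's hard register can: it dies here, is not set elsewhere in the insn
   and does not occur in OUT, so it is returned and no separate spill
   register is needed.  */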
1947 
1948 static rtx
1949 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1950 		   machine_mode inmode, machine_mode outmode,
1951 		   reg_class_t rclass, int for_real, int earlyclobber)
1952 {
1953   rtx in = real_in;
1954   rtx out = real_out;
1955   int in_offset = 0;
1956   int out_offset = 0;
1957   rtx value = 0;
1958 
1959   /* If operands exceed a word, we can't use either of them
1960      unless they have the same size.  */
1961   if (maybe_ne (GET_MODE_SIZE (outmode), GET_MODE_SIZE (inmode))
1962       && (maybe_gt (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1963 	  || maybe_gt (GET_MODE_SIZE (inmode), UNITS_PER_WORD)))
1964     return 0;
1965 
1966   /* Note that {in,out}_offset are needed only when 'in' or 'out'
1967      respectively refers to a hard register.  */
1968 
1969   /* Find the inside of any subregs.  */
1970   while (GET_CODE (out) == SUBREG)
1971     {
1972       if (REG_P (SUBREG_REG (out))
1973 	  && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1974 	out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1975 					   GET_MODE (SUBREG_REG (out)),
1976 					   SUBREG_BYTE (out),
1977 					   GET_MODE (out));
1978       out = SUBREG_REG (out);
1979     }
1980   while (GET_CODE (in) == SUBREG)
1981     {
1982       if (REG_P (SUBREG_REG (in))
1983 	  && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1984 	in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1985 					  GET_MODE (SUBREG_REG (in)),
1986 					  SUBREG_BYTE (in),
1987 					  GET_MODE (in));
1988       in = SUBREG_REG (in);
1989     }
1990 
1991   /* Narrow down the reg class, the same way push_reload will;
1992      otherwise we might find a dummy now, but push_reload won't.  */
1993   {
1994     reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1995     if (preferred_class != NO_REGS)
1996       rclass = (enum reg_class) preferred_class;
1997   }
1998 
1999   /* See if OUT will do.  */
2000   if (REG_P (out)
2001       && REGNO (out) < FIRST_PSEUDO_REGISTER)
2002     {
2003       unsigned int regno = REGNO (out) + out_offset;
2004       unsigned int nwords = hard_regno_nregs (regno, outmode);
2005       rtx saved_rtx;
2006 
2007       /* When we consider whether the insn uses OUT,
2008 	 ignore references within IN.  They don't prevent us
2009 	 from copying IN into OUT, because those refs would
2010 	 move into the insn that reloads IN.
2011 
2012 	 However, we only ignore IN in its role as this reload.
2013 	 If the insn uses IN elsewhere and it contains OUT,
2014 	 that counts.  We can't be sure it's the "same" operand
2015 	 so it might not go through this reload.
2016 
2017          We also need to avoid using OUT if it, or part of it, is a
2018          fixed register.  Modifying such registers, even transiently,
2019          may have undefined effects on the machine, such as modifying
2020          the stack pointer.  */
2021       saved_rtx = *inloc;
2022       *inloc = const0_rtx;
2023 
2024       if (regno < FIRST_PSEUDO_REGISTER
2025 	  && targetm.hard_regno_mode_ok (regno, outmode)
2026 	  && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2027 					     PATTERN (this_insn), outloc))
2028 	{
2029 	  unsigned int i;
2030 
2031 	  for (i = 0; i < nwords; i++)
2032 	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2033 				     regno + i)
2034 		|| fixed_regs[regno + i])
2035 	      break;
2036 
2037 	  if (i == nwords)
2038 	    {
2039 	      if (REG_P (real_out))
2040 		value = real_out;
2041 	      else
2042 		value = gen_rtx_REG (outmode, regno);
2043 	    }
2044 	}
2045 
2046       *inloc = saved_rtx;
2047     }
2048 
2049   /* Consider using IN if OUT was not acceptable
2050      or if OUT dies in this insn (like the quotient in a divmod insn).
2051      We can't use IN unless it dies in this insn,
2052      which means we must know accurately which hard regs are live.
2053      Also, the result can't go in IN if IN is used within OUT,
2054      or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
2055   if (hard_regs_live_known
2056       && REG_P (in)
2057       && REGNO (in) < FIRST_PSEUDO_REGISTER
2058       && (value == 0
2059 	  || find_reg_note (this_insn, REG_UNUSED, real_out))
2060       && find_reg_note (this_insn, REG_DEAD, real_in)
2061       && !fixed_regs[REGNO (in)]
2062       && targetm.hard_regno_mode_ok (REGNO (in),
2063 				     /* The only case where out and real_out
2064 					might have different modes is where
2065 					real_out is a subreg, and in that
2066 					case, out has a real mode.  */
2067 				     (GET_MODE (out) != VOIDmode
2068 				      ? GET_MODE (out) : outmode))
2069       && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2070 	  /* However only do this if we can be sure that this input
2071 	     operand doesn't correspond with an uninitialized pseudo.
2072 	     global can assign some hardreg to it that is the same as
2073 	     the one assigned to a different, also live pseudo (as it
2074 	     can ignore the conflict).  We must never introduce writes
2075 	     to such hardregs, as they would clobber the other live
2076 	     pseudo.  See PR 20973.  */
2077 	  || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2078 			     ORIGINAL_REGNO (in))
2079 	      /* Similarly, only do this if we can be sure that the death
2080 		 note is still valid.  global can assign some hardreg to
2081 		 the pseudo referenced in the note and simultaneously a
2082 		 subword of this hardreg to a different, also live pseudo,
2083 		 because only another subword of the hardreg is actually
2084 		 used in the insn.  This cannot happen if the pseudo has
2085 		 been assigned exactly one hardreg.  See PR 33732.  */
2086 	      && REG_NREGS (in) == 1)))
2087     {
2088       unsigned int regno = REGNO (in) + in_offset;
2089       unsigned int nwords = hard_regno_nregs (regno, inmode);
2090 
2091       if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2092 	  && ! hard_reg_set_here_p (regno, regno + nwords,
2093 				    PATTERN (this_insn))
2094 	  && (! earlyclobber
2095 	      || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2096 						 PATTERN (this_insn), inloc)))
2097 	{
2098 	  unsigned int i;
2099 
2100 	  for (i = 0; i < nwords; i++)
2101 	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2102 				     regno + i))
2103 	      break;
2104 
2105 	  if (i == nwords)
2106 	    {
2107 	      /* If we were going to use OUT as the reload reg
2108 		 and changed our mind, it means OUT is a dummy that
2109 		 dies here.  So don't bother copying value to it.  */
2110 	      if (for_real >= 0 && value == real_out)
2111 		rld[for_real].out = 0;
2112 	      if (REG_P (real_in))
2113 		value = real_in;
2114 	      else
2115 		value = gen_rtx_REG (inmode, regno);
2116 	    }
2117 	}
2118     }
2119 
2120   return value;
2121 }
2122 
2123 /* This page contains subroutines used mainly for determining
2124    whether the IN or an OUT of a reload can serve as the
2125    reload register.  */
2126 
2127 /* Return 1 if X is an operand of an insn that is being earlyclobbered.  */
2128 
2129 int
2130 earlyclobber_operand_p (rtx x)
2131 {
2132   int i;
2133 
2134   for (i = 0; i < n_earlyclobbers; i++)
2135     if (reload_earlyclobbers[i] == x)
2136       return 1;
2137 
2138   return 0;
2139 }
2140 
2141 /* Return 1 if expression X alters a hard reg in the range
2142    from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2143    either explicitly or in the guise of a pseudo-reg allocated to a register in that range.
2144    X should be the body of an instruction.  */
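/* For example, on a target where DImode occupies two word-sized
   registers, hard_reg_set_here_p (1, 2, body) returns 1 when BODY is

     (set (reg:DI 0) (const_int 0))

   because the DImode store to register 0 also overwrites register 1,
   which falls inside the requested range [1, 2).  */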
2145 
2146 static int
2147 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2148 {
2149   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2150     {
2151       rtx op0 = SET_DEST (x);
2152 
2153       while (GET_CODE (op0) == SUBREG)
2154 	op0 = SUBREG_REG (op0);
2155       if (REG_P (op0))
2156 	{
2157 	  unsigned int r = REGNO (op0);
2158 
2159 	  /* See if this reg overlaps range under consideration.  */
2160 	  if (r < end_regno
2161 	      && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2162 	    return 1;
2163 	}
2164     }
2165   else if (GET_CODE (x) == PARALLEL)
2166     {
2167       int i = XVECLEN (x, 0) - 1;
2168 
2169       for (; i >= 0; i--)
2170 	if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2171 	  return 1;
2172     }
2173 
2174   return 0;
2175 }
2176 
2177 /* Return 1 if ADDR is a valid memory address for mode MODE
2178    in address space AS, and check that each pseudo reg has the
2179    proper kind of hard reg.  */
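/* "Strict" here means that base and index registers must already be
   acceptable hard registers.  For example, (plus:SI (reg:SI 100)
   (const_int 4)) may be a perfectly good address before reload, but it
   is rejected here unless pseudo 100 has been assigned a hard register
   that is valid as a base register.  */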
2180 
2181 int
2182 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2183 				    rtx addr, addr_space_t as)
2184 {
2185 #ifdef GO_IF_LEGITIMATE_ADDRESS
2186   gcc_assert (ADDR_SPACE_GENERIC_P (as));
2187   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2188   return 0;
2189 
2190  win:
2191   return 1;
2192 #else
2193   return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2194 #endif
2195 }
2196 
2197 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2198    if they are the same hard reg, and has special hacks for
2199    autoincrement and autodecrement.
2200    This is specifically intended for find_reloads to use
2201    in determining whether two operands match.
2202    X is the operand whose number is the lower of the two.
2203 
2204    The value is 2 if Y contains a pre-increment that matches
2205    a non-incrementing address in X.  */
2206 
2207 /* ??? To be completely correct, we should arrange to pass
2208    for X the output operand and for Y the input operand.
2209    For now, we assume that the output operand has the lower number
2210    because that is natural in (SET output (... input ...)).  */
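/* Some examples of what the REG/SUBREG special case accepts: on a
   32-bit little-endian target, (reg:SI 1) and (subreg:SI (reg:DI 0) 4)
   refer to the same hard register and therefore match; and with
   REG_WORDS_BIG_ENDIAN, (reg:DI 0) and (reg:SI 1) are likewise treated
   as the same register, as the code below explains.  */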
2211 
2212 int
2213 operands_match_p (rtx x, rtx y)
2214 {
2215   int i;
2216   RTX_CODE code = GET_CODE (x);
2217   const char *fmt;
2218   int success_2;
2219 
2220   if (x == y)
2221     return 1;
2222   if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2223       && (REG_P (y) || (GET_CODE (y) == SUBREG
2224 				  && REG_P (SUBREG_REG (y)))))
2225     {
2226       int j;
2227 
2228       if (code == SUBREG)
2229 	{
2230 	  i = REGNO (SUBREG_REG (x));
2231 	  if (i >= FIRST_PSEUDO_REGISTER)
2232 	    goto slow;
2233 	  i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2234 				    GET_MODE (SUBREG_REG (x)),
2235 				    SUBREG_BYTE (x),
2236 				    GET_MODE (x));
2237 	}
2238       else
2239 	i = REGNO (x);
2240 
2241       if (GET_CODE (y) == SUBREG)
2242 	{
2243 	  j = REGNO (SUBREG_REG (y));
2244 	  if (j >= FIRST_PSEUDO_REGISTER)
2245 	    goto slow;
2246 	  j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2247 				    GET_MODE (SUBREG_REG (y)),
2248 				    SUBREG_BYTE (y),
2249 				    GET_MODE (y));
2250 	}
2251       else
2252 	j = REGNO (y);
2253 
2254       /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2255 	 multiple hard register group of scalar integer registers, so that
2256 	 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2257 	 register.  */
2258       scalar_int_mode xmode;
2259       if (REG_WORDS_BIG_ENDIAN
2260 	  && is_a <scalar_int_mode> (GET_MODE (x), &xmode)
2261 	  && GET_MODE_SIZE (xmode) > UNITS_PER_WORD
2262 	  && i < FIRST_PSEUDO_REGISTER)
2263 	i += hard_regno_nregs (i, xmode) - 1;
2264       scalar_int_mode ymode;
2265       if (REG_WORDS_BIG_ENDIAN
2266 	  && is_a <scalar_int_mode> (GET_MODE (y), &ymode)
2267 	  && GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2268 	  && j < FIRST_PSEUDO_REGISTER)
2269 	j += hard_regno_nregs (j, ymode) - 1;
2270 
2271       return i == j;
2272     }
2273   /* If two operands must match, because they are really a single
2274      operand of an assembler insn, then two postincrements are invalid
2275      because the assembler insn would increment only once.
2276      On the other hand, a postincrement matches ordinary indexing
2277      if the postincrement is the output operand.  */
2278   if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2279     return operands_match_p (XEXP (x, 0), y);
2280   /* Two preincrements are invalid
2281      because the assembler insn would increment only once.
2282      On the other hand, a preincrement matches ordinary indexing
2283      if the preincrement is the input operand.
2284      In this case, return 2, since some callers need to do special
2285      things when this happens.  */
2286   if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2287       || GET_CODE (y) == PRE_MODIFY)
2288     return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2289 
2290  slow:
2291 
2292   /* Now we have disposed of all the cases in which different rtx codes
2293      can match.  */
2294   if (code != GET_CODE (y))
2295     return 0;
2296 
2297   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2298   if (GET_MODE (x) != GET_MODE (y))
2299     return 0;
2300 
2301   /* MEMs referring to different address space are not equivalent.  */
2302   if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2303     return 0;
2304 
2305   switch (code)
2306     {
2307     CASE_CONST_UNIQUE:
2308       return 0;
2309 
2310     case LABEL_REF:
2311       return label_ref_label (x) == label_ref_label (y);
2312     case SYMBOL_REF:
2313       return XSTR (x, 0) == XSTR (y, 0);
2314 
2315     default:
2316       break;
2317     }
2318 
2319   /* Compare the elements.  If any pair of corresponding elements
2320      fails to match, return 0 for the whole thing.
2321 
2322   success_2 = 0;
2323   fmt = GET_RTX_FORMAT (code);
2324   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2325     {
2326       int val, j;
2327       switch (fmt[i])
2328 	{
2329 	case 'w':
2330 	  if (XWINT (x, i) != XWINT (y, i))
2331 	    return 0;
2332 	  break;
2333 
2334 	case 'i':
2335 	  if (XINT (x, i) != XINT (y, i))
2336 	    return 0;
2337 	  break;
2338 
2339 	case 'p':
2340 	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2341 	    return 0;
2342 	  break;
2343 
2344 	case 'e':
2345 	  val = operands_match_p (XEXP (x, i), XEXP (y, i));
2346 	  if (val == 0)
2347 	    return 0;
2348 	  /* If any subexpression returns 2,
2349 	     we should return 2 if we are successful.  */
2350 	  if (val == 2)
2351 	    success_2 = 1;
2352 	  break;
2353 
2354 	case '0':
2355 	  break;
2356 
2357 	case 'E':
2358 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2359 	    return 0;
2360 	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2361 	    {
2362 	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2363 	      if (val == 0)
2364 		return 0;
2365 	      if (val == 2)
2366 		success_2 = 1;
2367 	    }
2368 	  break;
2369 
2370 	  /* It is believed that rtx's at this level will never
2371 	     contain anything but integers and other rtx's,
2372 	     except within LABEL_REFs and SYMBOL_REFs.  */
2373 	default:
2374 	  gcc_unreachable ();
2375 	}
2376     }
2377   return 1 + success_2;
2378 }
2379 
2380 /* Describe the range of registers or memory referenced by X.
2381    If X is a register, set REG_FLAG and put the first register
2382    number into START and the last plus one into END.
2383    If X is a memory reference, put a base address into BASE
2384    and a range of integer offsets into START and END.
2385    If X is pushing on the stack, we can assume it causes no trouble,
2386    so we set the SAFE field.  */
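/* For instance, (mem:SI (plus:SI (reg:SI fp) (const_int -8))) decomposes
   into BASE = the frame-pointer rtx, START = -8, END = -4, while a push
   such as (mem:SI (pre_dec:SI (reg:SI sp))) yields START = -4, END = 4
   with the SAFE flag set because the base is the stack pointer.  */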
2387 
2388 static struct decomposition
2389 decompose (rtx x)
2390 {
2391   struct decomposition val;
2392   int all_const = 0, regno;
2393 
2394   memset (&val, 0, sizeof (val));
2395 
2396   switch (GET_CODE (x))
2397     {
2398     case MEM:
2399       {
2400 	rtx base = NULL_RTX, offset = 0;
2401 	rtx addr = XEXP (x, 0);
2402 
2403 	if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2404 	    || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2405 	  {
2406 	    val.base = XEXP (addr, 0);
2407 	    val.start = -GET_MODE_SIZE (GET_MODE (x));
2408 	    val.end = GET_MODE_SIZE (GET_MODE (x));
2409 	    val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2410 	    return val;
2411 	  }
2412 
2413 	if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2414 	  {
2415 	    if (GET_CODE (XEXP (addr, 1)) == PLUS
2416 		&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2417 		&& CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2418 	      {
2419 		val.base  = XEXP (addr, 0);
2420 		val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2421 		val.end   = INTVAL (XEXP (XEXP (addr, 1), 1));
2422 		val.safe  = REGNO (val.base) == STACK_POINTER_REGNUM;
2423 		return val;
2424 	      }
2425 	  }
2426 
2427 	if (GET_CODE (addr) == CONST)
2428 	  {
2429 	    addr = XEXP (addr, 0);
2430 	    all_const = 1;
2431 	  }
2432 	if (GET_CODE (addr) == PLUS)
2433 	  {
2434 	    if (CONSTANT_P (XEXP (addr, 0)))
2435 	      {
2436 		base = XEXP (addr, 1);
2437 		offset = XEXP (addr, 0);
2438 	      }
2439 	    else if (CONSTANT_P (XEXP (addr, 1)))
2440 	      {
2441 		base = XEXP (addr, 0);
2442 		offset = XEXP (addr, 1);
2443 	      }
2444 	  }
2445 
2446 	if (offset == 0)
2447 	  {
2448 	    base = addr;
2449 	    offset = const0_rtx;
2450 	  }
2451 	if (GET_CODE (offset) == CONST)
2452 	  offset = XEXP (offset, 0);
2453 	if (GET_CODE (offset) == PLUS)
2454 	  {
2455 	    if (CONST_INT_P (XEXP (offset, 0)))
2456 	      {
2457 		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2458 		offset = XEXP (offset, 0);
2459 	      }
2460 	    else if (CONST_INT_P (XEXP (offset, 1)))
2461 	      {
2462 		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2463 		offset = XEXP (offset, 1);
2464 	      }
2465 	    else
2466 	      {
2467 		base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2468 		offset = const0_rtx;
2469 	      }
2470 	  }
2471 	else if (!CONST_INT_P (offset))
2472 	  {
2473 	    base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2474 	    offset = const0_rtx;
2475 	  }
2476 
2477 	if (all_const && GET_CODE (base) == PLUS)
2478 	  base = gen_rtx_CONST (GET_MODE (base), base);
2479 
2480 	gcc_assert (CONST_INT_P (offset));
2481 
2482 	val.start = INTVAL (offset);
2483 	val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2484 	val.base = base;
2485       }
2486       break;
2487 
2488     case REG:
2489       val.reg_flag = 1;
2490       regno = true_regnum (x);
2491       if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2492 	{
2493 	  /* A pseudo with no hard reg.  */
2494 	  val.start = REGNO (x);
2495 	  val.end = val.start + 1;
2496 	}
2497       else
2498 	{
2499 	  /* A hard reg.  */
2500 	  val.start = regno;
2501 	  val.end = end_hard_regno (GET_MODE (x), regno);
2502 	}
2503       break;
2504 
2505     case SUBREG:
2506       if (!REG_P (SUBREG_REG (x)))
2507 	/* This could be more precise, but it's good enough.  */
2508 	return decompose (SUBREG_REG (x));
2509       regno = true_regnum (x);
2510       if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2511 	return decompose (SUBREG_REG (x));
2512 
2513       /* A hard reg.  */
2514       val.reg_flag = 1;
2515       val.start = regno;
2516       val.end = regno + subreg_nregs (x);
2517       break;
2518 
2519     case SCRATCH:
2520       /* This hasn't been assigned yet, so it can't conflict yet.  */
2521       val.safe = 1;
2522       break;
2523 
2524     default:
2525       gcc_assert (CONSTANT_P (x));
2526       val.safe = 1;
2527       break;
2528     }
2529   return val;
2530 }
2531 
2532 /* Return 1 if altering Y will not modify the value of X.
2533    Y is also described by YDATA, which should be decompose (Y).  */
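/* Continuing the example above: two frame-pointer slots such as
   (mem:SI (plus:SI (reg:SI fp) (const_int -16))) and
   (mem:SI (plus:SI (reg:SI fp) (const_int -8))) share a base, and their
   offset ranges [-16, -12) and [-8, -4) do not overlap, so each is
   immune to a store into the other.  */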
2534 
2535 static int
2536 immune_p (rtx x, rtx y, struct decomposition ydata)
2537 {
2538   struct decomposition xdata;
2539 
2540   if (ydata.reg_flag)
2541     /* In this case the decomposition structure contains register
2542        numbers rather than byte offsets.  */
2543     return !refers_to_regno_for_reload_p (ydata.start.to_constant (),
2544 					  ydata.end.to_constant (),
2545 					  x, (rtx *) 0);
2546   if (ydata.safe)
2547     return 1;
2548 
2549   gcc_assert (MEM_P (y));
2550   /* If Y is memory and X is not, Y can't affect X.  */
2551   if (!MEM_P (x))
2552     return 1;
2553 
2554   xdata = decompose (x);
2555 
2556   if (! rtx_equal_p (xdata.base, ydata.base))
2557     {
2558       /* If bases are distinct symbolic constants, there is no overlap.  */
2559       if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2560 	return 1;
2561       /* Constants and stack slots never overlap.  */
2562       if (CONSTANT_P (xdata.base)
2563 	  && (ydata.base == frame_pointer_rtx
2564 	      || ydata.base == hard_frame_pointer_rtx
2565 	      || ydata.base == stack_pointer_rtx))
2566 	return 1;
2567       if (CONSTANT_P (ydata.base)
2568 	  && (xdata.base == frame_pointer_rtx
2569 	      || xdata.base == hard_frame_pointer_rtx
2570 	      || xdata.base == stack_pointer_rtx))
2571 	return 1;
2572       /* If either base is variable, we don't know anything.  */
2573       return 0;
2574     }
2575 
2576   return known_ge (xdata.start, ydata.end) || known_ge (ydata.start, xdata.end);
2577 }
2578 
2579 /* Similar, but calls decompose.  */
2580 
2581 int
2582 safe_from_earlyclobber (rtx op, rtx clobber)
2583 {
2584   struct decomposition early_data;
2585 
2586   early_data = decompose (clobber);
2587   return immune_p (op, clobber, early_data);
2588 }
2589 
2590 /* Main entry point of this file: search the body of INSN
2591    for values that need reloading and record them with push_reload.
2592    REPLACE nonzero means record also where the values occur
2593    so that subst_reloads can be used.
2594 
2595    IND_LEVELS says how many levels of indirection are supported by this
2596    machine; a value of zero means that a memory reference is not a valid
2597    memory address.
2598 
2599    LIVE_KNOWN says we have valid information about which hard
2600    regs are live at each point in the program; this is true when
2601    we are called from global_alloc but false when stupid register
2602    allocation has been done.
2603 
2604    RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number whose
2605    entry is nonnegative if the reg has been commandeered for reloading into.
2606    It is copied into STATIC_RELOAD_REG_P and referenced from there
2607    by various subroutines.
2608 
2609    Return TRUE if some operands need to be changed, because of swapping
2610    commutative operands, reg_equiv_address substitution, or whatever.  */
2611 
2612 int
2613 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2614 	      short *reload_reg_p)
2615 {
2616   int insn_code_number;
2617   int i, j;
2618   int noperands;
2619   /* These start out as the constraints for the insn
2620      and they are chewed up as we consider alternatives.  */
2621   const char *constraints[MAX_RECOG_OPERANDS];
2622   /* These are the preferred classes for an operand, or NO_REGS if it isn't
2623      a register.  */
2624   enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2625   char pref_or_nothing[MAX_RECOG_OPERANDS];
2626   /* Nonzero for a MEM operand whose entire address needs a reload.
2627      May be -1 to indicate the entire address may or may not need a reload.  */
2628   int address_reloaded[MAX_RECOG_OPERANDS];
2629   /* Nonzero for an address operand that needs to be completely reloaded.
2630      May be -1 to indicate the entire operand may or may not need a reload.  */
2631   int address_operand_reloaded[MAX_RECOG_OPERANDS];
2632   /* Value of enum reload_type to use for operand.  */
2633   enum reload_type operand_type[MAX_RECOG_OPERANDS];
2634   /* Value of enum reload_type to use within address of operand.  */
2635   enum reload_type address_type[MAX_RECOG_OPERANDS];
2636   /* Save the usage of each operand.  */
2637   enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2638   int no_input_reloads = 0, no_output_reloads = 0;
2639   int n_alternatives;
2640   reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2641   char this_alternative_match_win[MAX_RECOG_OPERANDS];
2642   char this_alternative_win[MAX_RECOG_OPERANDS];
2643   char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2644   char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2645   int this_alternative_matches[MAX_RECOG_OPERANDS];
2646   reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2647   int this_alternative_number;
2648   int goal_alternative_number = 0;
2649   int operand_reloadnum[MAX_RECOG_OPERANDS];
2650   int goal_alternative_matches[MAX_RECOG_OPERANDS];
2651   int goal_alternative_matched[MAX_RECOG_OPERANDS];
2652   char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2653   char goal_alternative_win[MAX_RECOG_OPERANDS];
2654   char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2655   char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2656   int goal_alternative_swapped;
2657   int best;
2658   int commutative;
2659   char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2660   rtx substed_operand[MAX_RECOG_OPERANDS];
2661   rtx body = PATTERN (insn);
2662   rtx set = single_set (insn);
2663   int goal_earlyclobber = 0, this_earlyclobber;
2664   machine_mode operand_mode[MAX_RECOG_OPERANDS];
2665   int retval = 0;
2666 
2667   this_insn = insn;
2668   n_reloads = 0;
2669   n_replacements = 0;
2670   n_earlyclobbers = 0;
2671   replace_reloads = replace;
2672   hard_regs_live_known = live_known;
2673   static_reload_reg_p = reload_reg_p;
2674 
2675   /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2676      neither are insns that SET cc0.  Insns that use CC0 are not allowed
2677      to have any input reloads.  */
2678   if (JUMP_P (insn) || CALL_P (insn))
2679     no_output_reloads = 1;
2680 
2681   if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2682     no_input_reloads = 1;
2683   if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2684     no_output_reloads = 1;
2685 
2686   /* The eliminated forms of any secondary memory locations are per-insn, so
2687      clear them out here.  */
2688 
2689   if (secondary_memlocs_elim_used)
2690     {
2691       memset (secondary_memlocs_elim, 0,
2692 	      sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2693       secondary_memlocs_elim_used = 0;
2694     }
2695 
2696   /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2697      is cheap to move between them.  If it is not, there may not be an insn
2698      to do the copy, so we may need a reload.  */
2699   if (GET_CODE (body) == SET
2700       && REG_P (SET_DEST (body))
2701       && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2702       && REG_P (SET_SRC (body))
2703       && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2704       && register_move_cost (GET_MODE (SET_SRC (body)),
2705 			     REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2706 			     REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2707     return 0;
2708 
2709   extract_insn (insn);
2710 
2711   noperands = reload_n_operands = recog_data.n_operands;
2712   n_alternatives = recog_data.n_alternatives;
2713 
2714   /* Just return "no reloads" if insn has no operands with constraints.  */
2715   if (noperands == 0 || n_alternatives == 0)
2716     return 0;
2717 
2718   insn_code_number = INSN_CODE (insn);
2719   this_insn_is_asm = insn_code_number < 0;
2720 
2721   memcpy (operand_mode, recog_data.operand_mode,
2722 	  noperands * sizeof (machine_mode));
2723   memcpy (constraints, recog_data.constraints,
2724 	  noperands * sizeof (const char *));
2725 
2726   commutative = -1;
2727 
2728   /* If we will need to know, later, whether some pair of operands
2729      are the same, we must compare them now and save the result.
2730      Reloading the base and index registers will clobber them
2731      and afterward they will fail to match.  */
2732 
2733   for (i = 0; i < noperands; i++)
2734     {
2735       const char *p;
2736       int c;
2737       char *end;
2738 
2739       substed_operand[i] = recog_data.operand[i];
2740       p = constraints[i];
2741 
2742       modified[i] = RELOAD_READ;
2743 
2744       /* Scan this operand's constraint to see if it is an output operand,
2745 	 an in-out operand, is commutative, or should match another.  */
2746 
2747       while ((c = *p))
2748 	{
2749 	  p += CONSTRAINT_LEN (c, p);
2750 	  switch (c)
2751 	    {
2752 	    case '=':
2753 	      modified[i] = RELOAD_WRITE;
2754 	      break;
2755 	    case '+':
2756 	      modified[i] = RELOAD_READ_WRITE;
2757 	      break;
2758 	    case '%':
2759 	      {
2760 		/* The last operand should not be marked commutative.  */
2761 		gcc_assert (i != noperands - 1);
2762 
2763 		/* We currently only support one commutative pair of
2764 		   operands.  Some existing asm code currently uses more
2765 		   than one pair.  Previously, that would usually work,
2766 		   but sometimes it would crash the compiler.  We
2767 		   continue supporting that case as well as we can by
2768 		   silently ignoring all but the first pair.  In the
2769 		   future we may handle it correctly.  */
2770 		if (commutative < 0)
2771 		  commutative = i;
2772 		else
2773 		  gcc_assert (this_insn_is_asm);
2774 	      }
2775 	      break;
2776 	    /* Use of ISDIGIT is tempting here, but it may get expensive because
2777 	       of locale support we don't want.  */
2778 	    case '0': case '1': case '2': case '3': case '4':
2779 	    case '5': case '6': case '7': case '8': case '9':
2780 	      {
2781 		c = strtoul (p - 1, &end, 10);
2782 		p = end;
2783 
2784 		operands_match[c][i]
2785 		  = operands_match_p (recog_data.operand[c],
2786 				      recog_data.operand[i]);
2787 
2788 		/* An operand may not match itself.  */
2789 		gcc_assert (c != i);
2790 
2791 		/* If C can be commuted with C+1, and C might need to match I,
2792 		   then C+1 might also need to match I.  */
2793 		if (commutative >= 0)
2794 		  {
2795 		    if (c == commutative || c == commutative + 1)
2796 		      {
2797 			int other = c + (c == commutative ? 1 : -1);
2798 			operands_match[other][i]
2799 			  = operands_match_p (recog_data.operand[other],
2800 					      recog_data.operand[i]);
2801 		      }
2802 		    if (i == commutative || i == commutative + 1)
2803 		      {
2804 			int other = i + (i == commutative ? 1 : -1);
2805 			operands_match[c][other]
2806 			  = operands_match_p (recog_data.operand[c],
2807 					      recog_data.operand[other]);
2808 		      }
2809 		    /* Note that C is supposed to be less than I.
2810 		       No need to consider altering both C and I because in
2811 		       that case we would alter one into the other.  */
2812 		  }
2813 	      }
2814 	    }
2815 	}
2816     }
2817 
2818   /* Examine each operand that is a memory reference or memory address
2819      and reload parts of the addresses into index registers.
2820      Also here any references to pseudo regs that didn't get hard regs
2821      but are equivalent to constants get replaced in the insn itself
2822      with those constants.  Nobody will ever see them again.
2823 
2824      Finally, set up the preferred classes of each operand.  */
2825 
2826   for (i = 0; i < noperands; i++)
2827     {
2828       RTX_CODE code = GET_CODE (recog_data.operand[i]);
2829 
2830       address_reloaded[i] = 0;
2831       address_operand_reloaded[i] = 0;
2832       operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2833 			 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2834 			 : RELOAD_OTHER);
2835       address_type[i]
2836 	= (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2837 	   : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2838 	   : RELOAD_OTHER);
2839 
2840       if (*constraints[i] == 0)
2841 	/* Ignore things like match_operator operands.  */
2842 	;
2843       else if (insn_extra_address_constraint
2844 	       (lookup_constraint (constraints[i])))
2845 	{
2846 	  address_operand_reloaded[i]
2847 	    = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2848 				    recog_data.operand[i],
2849 				    recog_data.operand_loc[i],
2850 				    i, operand_type[i], ind_levels, insn);
2851 
2852 	  /* If we now have a simple operand where we used to have a
2853 	     PLUS or MULT, re-recognize and try again.  */
2854 	  if ((OBJECT_P (*recog_data.operand_loc[i])
2855 	       || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2856 	      && (GET_CODE (recog_data.operand[i]) == MULT
2857 		  || GET_CODE (recog_data.operand[i]) == PLUS))
2858 	    {
2859 	      INSN_CODE (insn) = -1;
2860 	      retval = find_reloads (insn, replace, ind_levels, live_known,
2861 				     reload_reg_p);
2862 	      return retval;
2863 	    }
2864 
2865 	  recog_data.operand[i] = *recog_data.operand_loc[i];
2866 	  substed_operand[i] = recog_data.operand[i];
2867 
2868 	  /* Address operands are reloaded in their existing mode,
2869 	     no matter what is specified in the machine description.  */
2870 	  operand_mode[i] = GET_MODE (recog_data.operand[i]);
2871 
2872 	  /* If the address is a single CONST_INT, pick the address mode
2873 	     instead; otherwise we will not know later in which mode the
2874 	     reload should be performed.  */
2875 	  if (operand_mode[i] == VOIDmode)
2876 	    operand_mode[i] = Pmode;
2877 
2878 	}
2879       else if (code == MEM)
2880 	{
2881 	  address_reloaded[i]
2882 	    = find_reloads_address (GET_MODE (recog_data.operand[i]),
2883 				    recog_data.operand_loc[i],
2884 				    XEXP (recog_data.operand[i], 0),
2885 				    &XEXP (recog_data.operand[i], 0),
2886 				    i, address_type[i], ind_levels, insn);
2887 	  recog_data.operand[i] = *recog_data.operand_loc[i];
2888 	  substed_operand[i] = recog_data.operand[i];
2889 	}
2890       else if (code == SUBREG)
2891 	{
2892 	  rtx reg = SUBREG_REG (recog_data.operand[i]);
2893 	  rtx op
2894 	    = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2895 				   ind_levels,
2896 				   set != 0
2897 				   && &SET_DEST (set) == recog_data.operand_loc[i],
2898 				   insn,
2899 				   &address_reloaded[i]);
2900 
2901 	  /* If we made a MEM to load (a part of) the stackslot of a pseudo
2902 	     that didn't get a hard register, emit a USE with a REG_EQUAL
2903 	     note in front so that we might inherit a previous, possibly
2904 	     wider reload.  */
2905 
2906 	  if (replace
2907 	      && MEM_P (op)
2908 	      && REG_P (reg)
2909 	      && known_ge (GET_MODE_SIZE (GET_MODE (reg)),
2910 			   GET_MODE_SIZE (GET_MODE (op)))
2911 	      && reg_equiv_constant (REGNO (reg)) == 0)
2912 	    set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2913 						   insn),
2914 				 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2915 
2916 	  substed_operand[i] = recog_data.operand[i] = op;
2917 	}
2918       else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2919 	/* We can get a PLUS as an "operand" as a result of register
2920 	   elimination.  See eliminate_regs and gen_reload.  We handle
2921 	   a unary operator by reloading the operand.  */
2922 	substed_operand[i] = recog_data.operand[i]
2923 	  = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2924 				 ind_levels, 0, insn,
2925 				 &address_reloaded[i]);
2926       else if (code == REG)
2927 	{
2928 	  /* This is equivalent to calling find_reloads_toplev.
2929 	     The code is duplicated for speed.
2930 	     When we find a pseudo always equivalent to a constant,
2931 	     we replace it by the constant.  We must be sure, however,
2932 	     that we don't try to replace it in the insn in which it
2933 	     is being set.  */
2934 	  int regno = REGNO (recog_data.operand[i]);
2935 	  if (reg_equiv_constant (regno) != 0
2936 	      && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2937 	    {
2938 	      /* Record the existing mode so that the check whether constants
2939 		 are allowed will work when operand_mode isn't specified.  */
2940 
2941 	      if (operand_mode[i] == VOIDmode)
2942 		operand_mode[i] = GET_MODE (recog_data.operand[i]);
2943 
2944 	      substed_operand[i] = recog_data.operand[i]
2945 		= reg_equiv_constant (regno);
2946 	    }
2947 	  if (reg_equiv_memory_loc (regno) != 0
2948 	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2949 	    /* We need not give a valid is_set_dest argument since the case
2950 	       of a constant equivalence was checked above.  */
2951 	    substed_operand[i] = recog_data.operand[i]
2952 	      = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2953 				     ind_levels, 0, insn,
2954 				     &address_reloaded[i]);
2955 	}
2956       /* If the operand is still a register (we didn't replace it with an
2957 	 equivalent), get the preferred class to reload it into.  */
2958       code = GET_CODE (recog_data.operand[i]);
2959       preferred_class[i]
2960 	= ((code == REG && REGNO (recog_data.operand[i])
2961 	    >= FIRST_PSEUDO_REGISTER)
2962 	   ? reg_preferred_class (REGNO (recog_data.operand[i]))
2963 	   : NO_REGS);
2964       pref_or_nothing[i]
2965 	= (code == REG
2966 	   && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2967 	   && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2968     }
2969 
2970   /* If this is simply a copy from operand 1 to operand 0, merge the
2971      preferred classes for the operands.  */
2972   if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2973       && recog_data.operand[1] == SET_SRC (set))
2974     {
2975       preferred_class[0] = preferred_class[1]
2976 	= reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2977       pref_or_nothing[0] |= pref_or_nothing[1];
2978       pref_or_nothing[1] |= pref_or_nothing[0];
2979     }
2980 
2981   /* Now see what we need for pseudo-regs that didn't get hard regs
2982      or got the wrong kind of hard reg.  For this, we must consider
2983      all the operands together against the register constraints.  */
2984 
2985   best = MAX_RECOG_OPERANDS * 2 + 600;
2986 
2987   goal_alternative_swapped = 0;
2988 
2989   /* The constraints are made of several alternatives.
2990      Each operand's constraint looks like foo,bar,... with commas
2991      separating the alternatives.  The first alternatives for all
2992      operands go together, the second alternatives go together, etc.
2993 
2994      First loop over alternatives.  */
2995 
2996   alternative_mask enabled = get_enabled_alternatives (insn);
2997   for (this_alternative_number = 0;
2998        this_alternative_number < n_alternatives;
2999        this_alternative_number++)
3000     {
3001       int swapped;
3002 
3003       if (!TEST_BIT (enabled, this_alternative_number))
3004 	{
3005 	  int i;
3006 
3007 	  for (i = 0; i < recog_data.n_operands; i++)
3008 	    constraints[i] = skip_alternative (constraints[i]);
3009 
3010 	  continue;
3011 	}
3012 
3013       /* If insn is commutative (it's safe to exchange a certain pair
3014 	 of operands) then we need to try each alternative twice, the
3015 	 second time matching those two operands as if we had
3016 	 exchanged them.  To do this, really exchange them in
3017 	 operands.  */
3018       for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3019 	{
3020 	  /* Loop over operands for one constraint alternative.  */
3021 	  /* LOSERS counts those that don't fit this alternative
3022 	     and would require loading.  */
3023 	  int losers = 0;
3024 	  /* BAD is set to 1 if some operand can't fit this alternative
3025 	     even after reloading.  */
3026 	  int bad = 0;
3027 	  /* REJECT is a count of how undesirable this alternative says it is
3028 	     if any reloading is required.  If the alternative matches exactly
3029 	     then REJECT is ignored, but otherwise it gets this much counted
3030 	     against it in addition to the reloading needed.  Each `?' counts
3031 	     three times as much here as a non-preferred register class, since
3032 	     we want the penalty for a bad class to count only 1/3 as much.  */
3033 	  int reject = 0;
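	  /* In the scan below, each `?' adds 6 to REJECT and `!' sets it
	     to 600; the total is folded into this alternative's cost as
	     losers * 6 + reject, so a single `?' weighs as much as one
	     additional reload.  */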
3034 
3035 	  if (swapped)
3036 	    {
3037 	      recog_data.operand[commutative] = substed_operand[commutative + 1];
3038 	      recog_data.operand[commutative + 1] = substed_operand[commutative];
3039 	      /* Swap the duplicates too.  */
3040 	      for (i = 0; i < recog_data.n_dups; i++)
3041 		if (recog_data.dup_num[i] == commutative
3042 		    || recog_data.dup_num[i] == commutative + 1)
3043 		  *recog_data.dup_loc[i]
3044 		    = recog_data.operand[(int) recog_data.dup_num[i]];
3045 
3046 	      std::swap (preferred_class[commutative],
3047 			 preferred_class[commutative + 1]);
3048 	      std::swap (pref_or_nothing[commutative],
3049 			 pref_or_nothing[commutative + 1]);
3050 	      std::swap (address_reloaded[commutative],
3051 			 address_reloaded[commutative + 1]);
3052 	    }
3053 
3054 	  this_earlyclobber = 0;
3055 
3056 	  for (i = 0; i < noperands; i++)
3057 	    {
3058 	      const char *p = constraints[i];
3059 	      char *end;
3060 	      int len;
3061 	      int win = 0;
3062 	      int did_match = 0;
3063 	      /* 0 => this operand can be reloaded somehow for this alternative.  */
3064 	      int badop = 1;
3065 	      /* Nonzero => this operand can be reloaded if the alternative allows regs.  */
3066 	      int winreg = 0;
3067 	      int c;
3068 	      int m;
3069 	      rtx operand = recog_data.operand[i];
3070 	      int offset = 0;
3071 	      /* Nonzero means this is a MEM that must be reloaded into a reg
3072 		 regardless of what the constraint says.  */
3073 	      int force_reload = 0;
3074 	      int offmemok = 0;
3075 	      /* Nonzero if a constant forced into memory would be OK for this
3076 		 operand.  */
3077 	      int constmemok = 0;
3078 	      int earlyclobber = 0;
3079 	      enum constraint_num cn;
3080 	      enum reg_class cl;
3081 
3082 	      /* If the predicate accepts a unary operator, it means that
3083 		 we need to reload the operand, but do not do this for
3084 		 match_operator and friends.  */
3085 	      if (UNARY_P (operand) && *p != 0)
3086 		operand = XEXP (operand, 0);
3087 
3088 	      /* If the operand is a SUBREG, extract
3089 		 the REG or MEM (or maybe even a constant) within.
3090 		 (Constants can occur as a result of reg_equiv_constant.)  */
3091 
3092 	      while (GET_CODE (operand) == SUBREG)
3093 		{
3094 		  /* Offset only matters when operand is a REG and it is a
3095 		     hard reg.  This is because it is passed to
3096 		     reg_fits_class_p only if it is a REG, and reg_fits_class_p
3097 		     returns 0 for all pseudos.  */
3098 		  if (REG_P (SUBREG_REG (operand))
3099 		      && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3100 		    {
3101 		      if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3102 						 GET_MODE (SUBREG_REG (operand)),
3103 						 SUBREG_BYTE (operand),
3104 						 GET_MODE (operand)) < 0)
3105 			force_reload = 1;
3106 		      offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3107 						     GET_MODE (SUBREG_REG (operand)),
3108 						     SUBREG_BYTE (operand),
3109 						     GET_MODE (operand));
3110 		    }
3111 		  operand = SUBREG_REG (operand);
3112 		  /* Force reload if this is a constant or PLUS or if there may
3113 		     be a problem accessing OPERAND in the outer mode.  */
3114 		  scalar_int_mode inner_mode;
3115 		  if (CONSTANT_P (operand)
3116 		      || GET_CODE (operand) == PLUS
3117 		      /* We must force a reload of paradoxical SUBREGs
3118 			 of a MEM because the alignment of the inner value
3119 			 may not be enough to do the outer reference.  On
3120 			 big-endian machines, it may also reference outside
3121 			 the object.
3122 
3123 			 On machines that extend byte operations, if we have a
3124 			 SUBREG where both the inner and outer modes are no wider
3125 			 than a word, the inner mode is narrower, integral, and
3126 			 gets extended when loaded from memory, combine.c has
3127 			 made assumptions about the behavior of the machine for
3128 			 such register accesses.  If the data is, in fact, in
3129 			 memory, we must always load using the size assumed to
3130 			 be in the register and let the insn do the
3131 			 different-sized accesses.
3132 
3133 			 This is doubly true if WORD_REGISTER_OPERATIONS.  In
3134 			 this case eliminate_regs has left non-paradoxical
3135 			 subregs for push_reload to see.  Make sure it does
3136 			 by forcing the reload.
3137 
3138 			 ??? When is it right at this stage to have a subreg
3139 			 of a mem that is _not_ to be handled specially?  IMO
3140 			 those should have been reduced to just a mem.  */
3141 		      || ((MEM_P (operand)
3142 			   || (REG_P (operand)
3143 			       && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3144 			  && (WORD_REGISTER_OPERATIONS
3145 			      || (((maybe_lt
3146 				    (GET_MODE_BITSIZE (GET_MODE (operand)),
3147 				     BIGGEST_ALIGNMENT))
3148 				   && (paradoxical_subreg_p
3149 				       (operand_mode[i], GET_MODE (operand)))))
3150 			      || BYTES_BIG_ENDIAN
3151 			      || (known_le (GET_MODE_SIZE (operand_mode[i]),
3152 					    UNITS_PER_WORD)
3153 				  && (is_a <scalar_int_mode>
3154 				      (GET_MODE (operand), &inner_mode))
3155 				  && (GET_MODE_SIZE (inner_mode)
3156 				      <= UNITS_PER_WORD)
3157 				  && paradoxical_subreg_p (operand_mode[i],
3158 							   inner_mode)
3159 				  && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3160 		      )
3161 		    force_reload = 1;
3162 		}
3163 
3164 	      this_alternative[i] = NO_REGS;
3165 	      this_alternative_win[i] = 0;
3166 	      this_alternative_match_win[i] = 0;
3167 	      this_alternative_offmemok[i] = 0;
3168 	      this_alternative_earlyclobber[i] = 0;
3169 	      this_alternative_matches[i] = -1;
3170 
3171 	      /* An empty constraint or empty alternative
3172 		 allows anything which matched the pattern.  */
3173 	      if (*p == 0 || *p == ',')
3174 		win = 1, badop = 0;
3175 
3176 	      /* Scan this alternative's specs for this operand;
3177 		 set WIN if the operand fits any letter in this alternative.
3178 		 Otherwise, clear BADOP if this operand could
3179 		 fit some letter after reloads,
3180 		 or set WINREG if this operand could fit after reloads
3181 		 provided the constraint allows some registers.  */
3182 
3183 	      do
3184 		switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3185 		  {
3186 		  case '\0':
3187 		    len = 0;
3188 		    break;
3189 		  case ',':
3190 		    c = '\0';
3191 		    break;
3192 
3193 		  case '?':
3194 		    reject += 6;
3195 		    break;
3196 
3197 		  case '!':
3198 		    reject = 600;
3199 		    break;
3200 
3201 		  case '#':
3202 		    /* Ignore rest of this alternative as far as
3203 		       reloading is concerned.  */
3204 		    do
3205 		      p++;
3206 		    while (*p && *p != ',');
3207 		    len = 0;
3208 		    break;
3209 
3210 		  case '0':  case '1':  case '2':  case '3':  case '4':
3211 		  case '5':  case '6':  case '7':  case '8':  case '9':
3212 		    m = strtoul (p, &end, 10);
3213 		    p = end;
3214 		    len = 0;
3215 
3216 		    this_alternative_matches[i] = m;
3217 		    /* We are supposed to match a previous operand.
3218 		       If we do, we win if that one did.
3219 		       If we do not, count both of the operands as losers.
3220 		       (This is too conservative, since most of the time
3221 		       only a single reload insn will be needed to make
3222 		       the two operands win.  As a result, this alternative
3223 		       may be rejected when it is actually desirable.)  */
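		    /* When SWAPPED, an index X in the commutative pair is
		       reflected to the other member of the pair as
		       2 * commutative + 1 - X; e.g. if COMMUTATIVE is 1,
		       operand 1 maps to 2 and operand 2 maps to 1.  */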
3224 		    if ((swapped && (m != commutative || i != commutative + 1))
3225 			/* If we are matching as if two operands were swapped,
3226 			   also pretend that operands_match had been computed
3227 			   with swapped.
3228 			   But if I is the second of those and C is the first,
3229 			   don't exchange them, because operands_match is valid
3230 			   only on one side of its diagonal.  */
3231 			? (operands_match
3232 			   [(m == commutative || m == commutative + 1)
3233 			    ? 2 * commutative + 1 - m : m]
3234 			   [(i == commutative || i == commutative + 1)
3235 			    ? 2 * commutative + 1 - i : i])
3236 			: operands_match[m][i])
3237 		      {
3238 			/* If we are matching a non-offsettable address where an
3239 			   offsettable address was expected, then we must reject
3240 			   this combination, because we can't reload it.  */
3241 			if (this_alternative_offmemok[m]
3242 			    && MEM_P (recog_data.operand[m])
3243 			    && this_alternative[m] == NO_REGS
3244 			    && ! this_alternative_win[m])
3245 			  bad = 1;
3246 
3247 			did_match = this_alternative_win[m];
3248 		      }
3249 		    else
3250 		      {
3251 			/* Operands don't match.  */
3252 			rtx value;
3253 			int loc1, loc2;
3254 			/* Retroactively mark the operand we had to match
3255 			   as a loser, if it wasn't already.  */
3256 			if (this_alternative_win[m])
3257 			  losers++;
3258 			this_alternative_win[m] = 0;
3259 			if (this_alternative[m] == NO_REGS)
3260 			  bad = 1;
3261 			/* But count the pair only once in the total badness of
3262 			   this alternative, if the pair can be a dummy reload.
3263 			   The pointers in operand_loc are not swapped; swap
3264 			   them by hand if necessary.  */
3265 			if (swapped && i == commutative)
3266 			  loc1 = commutative + 1;
3267 			else if (swapped && i == commutative + 1)
3268 			  loc1 = commutative;
3269 			else
3270 			  loc1 = i;
3271 			if (swapped && m == commutative)
3272 			  loc2 = commutative + 1;
3273 			else if (swapped && m == commutative + 1)
3274 			  loc2 = commutative;
3275 			else
3276 			  loc2 = m;
3277 			value
3278 			  = find_dummy_reload (recog_data.operand[i],
3279 					       recog_data.operand[m],
3280 					       recog_data.operand_loc[loc1],
3281 					       recog_data.operand_loc[loc2],
3282 					       operand_mode[i], operand_mode[m],
3283 					       this_alternative[m], -1,
3284 					       this_alternative_earlyclobber[m]);
3285 
3286 			if (value != 0)
3287 			  losers--;
3288 		      }
3289 		    /* This can be fixed with reloads if the operand
3290 		       we are supposed to match can be fixed with reloads.  */
3291 		    badop = 0;
3292 		    this_alternative[i] = this_alternative[m];
3293 
3294 		    /* If we have to reload this operand and some previous
3295 		       operand also had to match the same thing as this
3296 		       operand, we don't know how to do that.  So reject this
3297 		       alternative.  */
3298 		    if (! did_match || force_reload)
3299 		      for (j = 0; j < i; j++)
3300 			if (this_alternative_matches[j]
3301 			    == this_alternative_matches[i])
3302 			  {
3303 			    badop = 1;
3304 			    break;
3305 			  }
3306 		    break;
3307 
3308 		  case 'p':
3309 		    /* All necessary reloads for an address_operand
3310 		       were handled in find_reloads_address.  */
3311 		    this_alternative[i]
3312 		      = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3313 					ADDRESS, SCRATCH);
3314 		    win = 1;
3315 		    badop = 0;
3316 		    break;
3317 
3318 		  case TARGET_MEM_CONSTRAINT:
3319 		    if (force_reload)
3320 		      break;
3321 		    if (MEM_P (operand)
3322 			|| (REG_P (operand)
3323 			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3324 			    && reg_renumber[REGNO (operand)] < 0))
3325 		      win = 1;
3326 		    if (CONST_POOL_OK_P (operand_mode[i], operand))
3327 		      badop = 0;
3328 		    constmemok = 1;
3329 		    break;
3330 
3331 		  case '<':
3332 		    if (MEM_P (operand)
3333 			&& ! address_reloaded[i]
3334 			&& (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3335 			    || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3336 		      win = 1;
3337 		    break;
3338 
3339 		  case '>':
3340 		    if (MEM_P (operand)
3341 			&& ! address_reloaded[i]
3342 			&& (GET_CODE (XEXP (operand, 0)) == PRE_INC
3343 			    || GET_CODE (XEXP (operand, 0)) == POST_INC))
3344 		      win = 1;
3345 		    break;
3346 
3347 		    /* Memory operand whose address is not offsettable.  */
3348 		  case 'V':
3349 		    if (force_reload)
3350 		      break;
3351 		    if (MEM_P (operand)
3352 			&& ! (ind_levels ? offsettable_memref_p (operand)
3353 			      : offsettable_nonstrict_memref_p (operand))
3354 			/* Certain mem addresses will become offsettable
3355 			   after they themselves are reloaded.  This is important;
3356 			   we don't want our own handling of unoffsettables
3357 			   to override the handling of reg_equiv_address.  */
3358 			&& !(REG_P (XEXP (operand, 0))
3359 			     && (ind_levels == 0
3360 				 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3361 		      win = 1;
3362 		    break;
3363 
3364 		    /* Memory operand whose address is offsettable.  */
3365 		  case 'o':
3366 		    if (force_reload)
3367 		      break;
3368 		    if ((MEM_P (operand)
3369 			 /* If IND_LEVELS, find_reloads_address won't reload a
3370 			    pseudo that didn't get a hard reg, so we have to
3371 			    reject that case.  */
3372 			 && ((ind_levels ? offsettable_memref_p (operand)
3373 			      : offsettable_nonstrict_memref_p (operand))
3374 			     /* A reloaded address is offsettable because it is now
3375 				just a simple register indirect.  */
3376 			     || address_reloaded[i] == 1))
3377 			|| (REG_P (operand)
3378 			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3379 			    && reg_renumber[REGNO (operand)] < 0
3380 			    /* If reg_equiv_address is nonzero, we will be
3381 			       loading it into a register; hence it will be
3382 			       offsettable, but we cannot say that reg_equiv_mem
3383 			       is offsettable without checking.  */
3384 			    && ((reg_equiv_mem (REGNO (operand)) != 0
3385 				 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3386 				|| (reg_equiv_address (REGNO (operand)) != 0))))
3387 		      win = 1;
3388 		    if (CONST_POOL_OK_P (operand_mode[i], operand)
3389 			|| MEM_P (operand))
3390 		      badop = 0;
3391 		    constmemok = 1;
3392 		    offmemok = 1;
3393 		    break;
3394 
3395 		  case '&':
3396 		    /* Output operand that is stored before the need for the
3397 		       input operands (and their index registers) is over.  */
3398 		    earlyclobber = 1, this_earlyclobber = 1;
3399 		    break;
3400 
3401 		  case 'X':
3402 		    force_reload = 0;
3403 		    win = 1;
3404 		    break;
3405 
3406 		  case 'g':
3407 		    if (! force_reload
3408 			/* A PLUS is never a valid operand, but reload can make
3409 			   it from a register when eliminating registers.  */
3410 			&& GET_CODE (operand) != PLUS
3411 			/* A SCRATCH is not a valid operand.  */
3412 			&& GET_CODE (operand) != SCRATCH
3413 			&& (! CONSTANT_P (operand)
3414 			    || ! flag_pic
3415 			    || LEGITIMATE_PIC_OPERAND_P (operand))
3416 			&& (GENERAL_REGS == ALL_REGS
3417 			    || !REG_P (operand)
3418 			    || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3419 				&& reg_renumber[REGNO (operand)] < 0)))
3420 		      win = 1;
3421 		    cl = GENERAL_REGS;
3422 		    goto reg;
3423 
3424 		  default:
3425 		    cn = lookup_constraint (p);
3426 		    switch (get_constraint_type (cn))
3427 		      {
3428 		      case CT_REGISTER:
3429 			cl = reg_class_for_constraint (cn);
3430 			if (cl != NO_REGS)
3431 			  goto reg;
3432 			break;
3433 
3434 		      case CT_CONST_INT:
3435 			if (CONST_INT_P (operand)
3436 			    && (insn_const_int_ok_for_constraint
3437 				(INTVAL (operand), cn)))
3438 			  win = true;
3439 			break;
3440 
3441 		      case CT_MEMORY:
3442 			if (force_reload)
3443 			  break;
3444 			if (constraint_satisfied_p (operand, cn))
3445 			  win = 1;
3446 			/* If the address was already reloaded,
3447 			   we win as well.  */
3448 			else if (MEM_P (operand) && address_reloaded[i] == 1)
3449 			  win = 1;
3450 			/* Likewise if the address will be reloaded because
3451 			   reg_equiv_address is nonzero.  For reg_equiv_mem
3452 			   we have to check.  */
3453 			else if (REG_P (operand)
3454 				 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3455 				 && reg_renumber[REGNO (operand)] < 0
3456 				 && ((reg_equiv_mem (REGNO (operand)) != 0
3457 				      && (constraint_satisfied_p
3458 					  (reg_equiv_mem (REGNO (operand)),
3459 					   cn)))
3460 				     || (reg_equiv_address (REGNO (operand))
3461 					 != 0)))
3462 			  win = 1;
3463 
3464 			/* If we didn't already win, we can reload
3465 			   constants via force_const_mem, and other
3466 			   MEMs by reloading the address like for 'o'.  */
3467 			if (CONST_POOL_OK_P (operand_mode[i], operand)
3468 			    || MEM_P (operand))
3469 			  badop = 0;
3470 			constmemok = 1;
3471 			offmemok = 1;
3472 			break;
3473 
3474 		      case CT_SPECIAL_MEMORY:
3475 			if (force_reload)
3476 			  break;
3477 			if (constraint_satisfied_p (operand, cn))
3478 			  win = 1;
3479 			/* Likewise if the address will be reloaded because
3480 			   reg_equiv_address is nonzero.  For reg_equiv_mem
3481 			   we have to check.  */
3482 			else if (REG_P (operand)
3483 				 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3484 				 && reg_renumber[REGNO (operand)] < 0
3485 				 && reg_equiv_mem (REGNO (operand)) != 0
3486 				 && (constraint_satisfied_p
3487 				     (reg_equiv_mem (REGNO (operand)), cn)))
3488 			  win = 1;
3489 			break;
3490 
3491 		      case CT_ADDRESS:
3492 			if (constraint_satisfied_p (operand, cn))
3493 			  win = 1;
3494 
3495 			/* If we didn't already win, we can reload
3496 			   the address into a base register.  */
3497 			this_alternative[i]
3498 			  = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3499 					    ADDRESS, SCRATCH);
3500 			badop = 0;
3501 			break;
3502 
3503 		      case CT_FIXED_FORM:
3504 			if (constraint_satisfied_p (operand, cn))
3505 			  win = 1;
3506 			break;
3507 		      }
3508 		    break;
3509 
3510 		  reg:
3511 		    this_alternative[i]
3512 		      = reg_class_subunion[this_alternative[i]][cl];
3513 		    if (GET_MODE (operand) == BLKmode)
3514 		      break;
3515 		    winreg = 1;
3516 		    if (REG_P (operand)
3517 			&& reg_fits_class_p (operand, this_alternative[i],
3518 					     offset, GET_MODE (recog_data.operand[i])))
3519 		      win = 1;
3520 		    break;
3521 		  }
3522 	      while ((p += len), c);
3523 
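	      /* Commit the advanced constraint pointer only on the last pass
		 over this alternative (the swapped pass when the insn is
		 commutative), so that the next alternative's scan starts just
		 past the comma.  */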
3524 	      if (swapped == (commutative >= 0 ? 1 : 0))
3525 		constraints[i] = p;
3526 
3527 	      /* If this operand could be handled with a reg,
3528 		 and some reg is allowed, then this operand can be handled.  */
3529 	      if (winreg && this_alternative[i] != NO_REGS
3530 		  && (win || !class_only_fixed_regs[this_alternative[i]]))
3531 		badop = 0;
3532 
3533 	      /* Record which operands fit this alternative.  */
3534 	      this_alternative_earlyclobber[i] = earlyclobber;
3535 	      if (win && ! force_reload)
3536 		this_alternative_win[i] = 1;
3537 	      else if (did_match && ! force_reload)
3538 		this_alternative_match_win[i] = 1;
3539 	      else
3540 		{
3541 		  int const_to_mem = 0;
3542 
3543 		  this_alternative_offmemok[i] = offmemok;
3544 		  losers++;
3545 		  if (badop)
3546 		    bad = 1;
3547 		  /* Alternative loses if it has no regs for a reg operand.  */
3548 		  if (REG_P (operand)
3549 		      && this_alternative[i] == NO_REGS
3550 		      && this_alternative_matches[i] < 0)
3551 		    bad = 1;
3552 
3553 		  /* If this is a constant that is reloaded into the desired
3554 		     class by copying it to memory first, count that as another
3555 		     reload.  This is consistent with other code and is
3556 		     required to avoid choosing another alternative when
3557 		     the constant is moved into memory by this function on
3558 		     an early reload pass.  Note that the test here is
3559 		     precisely the same as in the code below that calls
3560 		     force_const_mem.  */
3561 		  if (CONST_POOL_OK_P (operand_mode[i], operand)
3562 		      && ((targetm.preferred_reload_class (operand,
3563 							   this_alternative[i])
3564 			   == NO_REGS)
3565 			  || no_input_reloads))
3566 		    {
3567 		      const_to_mem = 1;
3568 		      if (this_alternative[i] != NO_REGS)
3569 			losers++;
3570 		    }
3571 
3572 		  /* Alternative loses if it requires a type of reload not
3573 		     permitted for this insn.  We can always reload SCRATCH
3574 		     and objects with a REG_UNUSED note.  */
3575 		  if (GET_CODE (operand) != SCRATCH
3576 		      && modified[i] != RELOAD_READ && no_output_reloads
3577 		      && ! find_reg_note (insn, REG_UNUSED, operand))
3578 		    bad = 1;
3579 		  else if (modified[i] != RELOAD_WRITE && no_input_reloads
3580 			   && ! const_to_mem)
3581 		    bad = 1;
3582 
3583 		  /* If we can't reload this value at all, reject this
3584 		     alternative.  Note that we could also lose due to
3585 		     LIMIT_RELOAD_CLASS, but we don't check that
3586 		     here.  */
3587 
3588 		  if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3589 		    {
3590 		      if (targetm.preferred_reload_class (operand,
3591 							  this_alternative[i])
3592 			  == NO_REGS)
3593 			reject = 600;
3594 
3595 		      if (operand_type[i] == RELOAD_FOR_OUTPUT
3596 			  && (targetm.preferred_output_reload_class (operand,
3597 							    this_alternative[i])
3598 			      == NO_REGS))
3599 			reject = 600;
3600 		    }
3601 
3602 		  /* We prefer to reload pseudos over reloading other things,
3603 		     since such reloads may be able to be eliminated later.
3604 		     If we are reloading a SCRATCH, we won't be generating any
3605 		     insns, just using a register, so it is also preferred.
3606 		     So bump REJECT in other cases.  Don't do this when we
3607 		     are forcing a constant into memory and it will then win,
3608 		     since we don't want a different alternative to match in
3609 		     that case.  */
3610 		  if (! (REG_P (operand)
3611 			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3612 		      && GET_CODE (operand) != SCRATCH
3613 		      && ! (const_to_mem && constmemok))
3614 		    reject += 2;
3615 
3616 		  /* Input reloads can be inherited more often than output
3617 		     reloads can be removed, so penalize output reloads.  */
3618 		  if (operand_type[i] != RELOAD_FOR_INPUT
3619 		      && GET_CODE (operand) != SCRATCH)
3620 		    reject++;
3621 		}
3622 
3623 	      /* If this operand is a pseudo register that didn't get
3624 		 a hard reg and this alternative accepts some
3625 		 register, see if the class that we want is a subset
3626 		 of the preferred class for this register.  If not,
3627 		 but it intersects that class, use the preferred class
3628 		 instead.  If it does not intersect the preferred
3629 		 class, show that usage of this alternative should be
3630 		 discouraged; it will be discouraged more still if the
3631 		 register is `preferred or nothing'.  We do this
3632 		 because it increases the chance of reusing our spill
3633 		 register in a later insn and avoiding a pair of
3634 		 memory stores and loads.
3635 
3636 		 Don't bother with this if this alternative will
3637 		 accept this operand.
3638 
3639 		 Don't do this for a multiword operand, since it is
3640 		 only a small win and has the risk of requiring more
3641 		 spill registers, which could cause a large loss.
3642 
3643 		 Don't do this if the preferred class has only one
3644 		 register because we might otherwise exhaust the
3645 		 class.  */
3646 
3647 	      if (! win && ! did_match
3648 		  && this_alternative[i] != NO_REGS
3649 		  && known_le (GET_MODE_SIZE (operand_mode[i]), UNITS_PER_WORD)
3650 		  && reg_class_size [(int) preferred_class[i]] > 0
3651 		  && ! small_register_class_p (preferred_class[i]))
3652 		{
3653 		  if (! reg_class_subset_p (this_alternative[i],
3654 					    preferred_class[i]))
3655 		    {
3656 		      /* Since we don't have a way of forming the intersection,
3657 			 we just do something special if the preferred class
3658 			 is a subset of the class we have; that's the most
3659 			 common case anyway.  */
3660 		      if (reg_class_subset_p (preferred_class[i],
3661 					      this_alternative[i]))
3662 			this_alternative[i] = preferred_class[i];
3663 		      else
3664 			reject += (2 + 2 * pref_or_nothing[i]);
3665 		    }
3666 		}
3667 	    }
3668 
3669 	  /* Now see if any output operands that are marked "earlyclobber"
3670 	     in this alternative conflict with any input operands
3671 	     or any memory addresses.  */
3672 
3673 	  for (i = 0; i < noperands; i++)
3674 	    if (this_alternative_earlyclobber[i]
3675 		&& (this_alternative_win[i] || this_alternative_match_win[i]))
3676 	      {
3677 		struct decomposition early_data;
3678 
3679 		early_data = decompose (recog_data.operand[i]);
3680 
3681 		gcc_assert (modified[i] != RELOAD_READ);
3682 
3683 		if (this_alternative[i] == NO_REGS)
3684 		  {
3685 		    this_alternative_earlyclobber[i] = 0;
3686 		    gcc_assert (this_insn_is_asm);
3687 		    error_for_asm (this_insn,
3688 			      "%<&%> constraint used with no register class");
3689 		  }
3690 
3691 		for (j = 0; j < noperands; j++)
3692 		  /* Is this an input operand or a memory ref?  */
3693 		  if ((MEM_P (recog_data.operand[j])
3694 		       || modified[j] != RELOAD_WRITE)
3695 		      && j != i
3696 		      /* Ignore things like match_operator operands.  */
3697 		      && !recog_data.is_operator[j]
3698 		      /* Don't count an input operand that is constrained to match
3699 			 the early clobber operand.  */
3700 		      && ! (this_alternative_matches[j] == i
3701 			    && rtx_equal_p (recog_data.operand[i],
3702 					    recog_data.operand[j]))
3703 		      /* Is it altered by storing the earlyclobber operand?  */
3704 		      && !immune_p (recog_data.operand[j], recog_data.operand[i],
3705 				    early_data))
3706 		    {
3707 		      /* If the output is in a non-empty few-regs class,
3708 			 it's costly to reload it, so reload the input instead.  */
3709 		      if (small_register_class_p (this_alternative[i])
3710 			  && (REG_P (recog_data.operand[j])
3711 			      || GET_CODE (recog_data.operand[j]) == SUBREG))
3712 			{
3713 			  losers++;
3714 			  this_alternative_win[j] = 0;
3715 			  this_alternative_match_win[j] = 0;
3716 			}
3717 		      else
3718 			break;
3719 		    }
3720 		/* If an earlyclobber operand conflicts with something,
3721 		   it must be reloaded, so request this and count the cost.  */
3722 		if (j != noperands)
3723 		  {
3724 		    losers++;
3725 		    this_alternative_win[i] = 0;
3726 		    this_alternative_match_win[j] = 0;
3727 		    for (j = 0; j < noperands; j++)
3728 		      if (this_alternative_matches[j] == i
3729 			  && this_alternative_match_win[j])
3730 			{
3731 			  this_alternative_win[j] = 0;
3732 			  this_alternative_match_win[j] = 0;
3733 			  losers++;
3734 			}
3735 		  }
3736 	      }
3737 
3738 	  /* If one alternative accepts all the operands, no reload required,
3739 	     choose that alternative; don't consider the remaining ones.  */
3740 	  if (losers == 0)
3741 	    {
3742 	      /* Unswap these so that they are never swapped at `finish'.  */
3743 	      if (swapped)
3744 		{
3745 		  recog_data.operand[commutative] = substed_operand[commutative];
3746 		  recog_data.operand[commutative + 1]
3747 		    = substed_operand[commutative + 1];
3748 		}
3749 	      for (i = 0; i < noperands; i++)
3750 		{
3751 		  goal_alternative_win[i] = this_alternative_win[i];
3752 		  goal_alternative_match_win[i] = this_alternative_match_win[i];
3753 		  goal_alternative[i] = this_alternative[i];
3754 		  goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3755 		  goal_alternative_matches[i] = this_alternative_matches[i];
3756 		  goal_alternative_earlyclobber[i]
3757 		    = this_alternative_earlyclobber[i];
3758 		}
3759 	      goal_alternative_number = this_alternative_number;
3760 	      goal_alternative_swapped = swapped;
3761 	      goal_earlyclobber = this_earlyclobber;
3762 	      goto finish;
3763 	    }
3764 
3765 	  /* REJECT, set by the ! and ? constraint characters and when a register
3766 	     would be reloaded into a non-preferred class, discourages the use of
3767 	     this alternative for a reload goal.  REJECT is incremented by six
3768 	     for each ? and two for each non-preferred class.  */
3769 	  losers = losers * 6 + reject;
3770 
3771 	  /* If this alternative can be made to work by reloading,
3772 	     and it needs less reloading than the others checked so far,
3773 	     record it as the chosen goal for reloading.  */
3774 	  if (! bad)
3775 	    {
3776 	      if (best > losers)
3777 		{
3778 		  for (i = 0; i < noperands; i++)
3779 		    {
3780 		      goal_alternative[i] = this_alternative[i];
3781 		      goal_alternative_win[i] = this_alternative_win[i];
3782 		      goal_alternative_match_win[i]
3783 			= this_alternative_match_win[i];
3784 		      goal_alternative_offmemok[i]
3785 			= this_alternative_offmemok[i];
3786 		      goal_alternative_matches[i] = this_alternative_matches[i];
3787 		      goal_alternative_earlyclobber[i]
3788 			= this_alternative_earlyclobber[i];
3789 		    }
3790 		  goal_alternative_swapped = swapped;
3791 		  best = losers;
3792 		  goal_alternative_number = this_alternative_number;
3793 		  goal_earlyclobber = this_earlyclobber;
3794 		}
3795 	    }
3796 
3797 	  if (swapped)
3798 	    {
3799 	      /* If the commutative operands have been swapped, swap
3800 		 them back in order to check the next alternative.  */
3801 	      recog_data.operand[commutative] = substed_operand[commutative];
3802 	      recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3803 	      /* Unswap the duplicates too.  */
3804 	      for (i = 0; i < recog_data.n_dups; i++)
3805 		if (recog_data.dup_num[i] == commutative
3806 		    || recog_data.dup_num[i] == commutative + 1)
3807 		  *recog_data.dup_loc[i]
3808 		    = recog_data.operand[(int) recog_data.dup_num[i]];
3809 
3810 	      /* Unswap the operand related information as well.  */
3811 	      std::swap (preferred_class[commutative],
3812 			 preferred_class[commutative + 1]);
3813 	      std::swap (pref_or_nothing[commutative],
3814 			 pref_or_nothing[commutative + 1]);
3815 	      std::swap (address_reloaded[commutative],
3816 			 address_reloaded[commutative + 1]);
3817 	    }
3818 	}
3819     }
3820 
3821   /* The operands don't meet the constraints.
3822      goal_alternative describes the alternative
3823      that we could reach by reloading the fewest operands.
3824      Reload so as to fit it.  */
3825 
3826   if (best == MAX_RECOG_OPERANDS * 2 + 600)
3827     {
3828       /* No alternative works with reloads??  */
3829       if (insn_code_number >= 0)
3830 	fatal_insn ("unable to generate reloads for:", insn);
3831       error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3832       /* Avoid further trouble with this insn.  */
3833       PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3834       n_reloads = 0;
3835       return 0;
3836     }
3837 
3838   /* Jump to `finish' from above if all operands are valid already.
3839      In that case, goal_alternative_win is all 1.  */
3840  finish:
3841 
3842   /* Right now, for any pair of operands I and J that are required to match,
3843      with I < J,
3844      goal_alternative_matches[J] is I.
3845      Set up goal_alternative_matched as the inverse function:
3846      goal_alternative_matched[I] = J.  */
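  /* For example, if operand 2 must match operand 0, then
     goal_alternative_matches[2] is 0; if operand 2 still needs a reload,
     the loop below sets goal_alternative_matched[0] to 2.  */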
3847 
3848   for (i = 0; i < noperands; i++)
3849     goal_alternative_matched[i] = -1;
3850 
3851   for (i = 0; i < noperands; i++)
3852     if (! goal_alternative_win[i]
3853 	&& goal_alternative_matches[i] >= 0)
3854       goal_alternative_matched[goal_alternative_matches[i]] = i;
3855 
3856   for (i = 0; i < noperands; i++)
3857     goal_alternative_win[i] |= goal_alternative_match_win[i];
3858 
3859   /* If the best alternative is with operands 1 and 2 swapped,
3860      consider them swapped before reporting the reloads.  Update the
3861      operand numbers of any reloads already pushed.  */
3862 
3863   if (goal_alternative_swapped)
3864     {
3865       std::swap (substed_operand[commutative],
3866 		 substed_operand[commutative + 1]);
3867       std::swap (recog_data.operand[commutative],
3868 		 recog_data.operand[commutative + 1]);
3869       std::swap (*recog_data.operand_loc[commutative],
3870 		 *recog_data.operand_loc[commutative + 1]);
3871 
3872       for (i = 0; i < recog_data.n_dups; i++)
3873 	if (recog_data.dup_num[i] == commutative
3874 	    || recog_data.dup_num[i] == commutative + 1)
3875 	  *recog_data.dup_loc[i]
3876 	    = recog_data.operand[(int) recog_data.dup_num[i]];
3877 
3878       for (i = 0; i < n_reloads; i++)
3879 	{
3880 	  if (rld[i].opnum == commutative)
3881 	    rld[i].opnum = commutative + 1;
3882 	  else if (rld[i].opnum == commutative + 1)
3883 	    rld[i].opnum = commutative;
3884 	}
3885     }
3886 
3887   for (i = 0; i < noperands; i++)
3888     {
3889       operand_reloadnum[i] = -1;
3890 
3891       /* If this is an earlyclobber operand, we need to widen the scope.
3892 	 The reload must remain valid from the start of the insn being
3893 	 reloaded until after the operand is stored into its destination.
3894 	 We approximate this with RELOAD_OTHER even though we know that we
3895 	 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3896 
3897 	 One special case that is worth checking is when we have an
3898 	 output that is earlyclobber but isn't used past the insn (typically
3899 	 a SCRATCH).  In this case, we need only have the reload live
3900 	 through the insn itself, but not for any of our input or output
3901 	 reloads.
3902 	 But we must not accidentally narrow the scope of an existing
3903 	 RELOAD_OTHER reload - leave these alone.
3904 
3905 	 In any case, anything needed to address this operand can remain
3906 	 however it was previously categorized.  */
3907 
3908       if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3909 	operand_type[i]
3910 	  = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3911 	     ? RELOAD_FOR_INSN : RELOAD_OTHER);
3912     }
3913 
3914   /* Any constants that aren't allowed and can't be reloaded
3915      into registers are here changed into memory references.  */
3916   for (i = 0; i < noperands; i++)
3917     if (! goal_alternative_win[i])
3918       {
3919 	rtx op = recog_data.operand[i];
3920 	rtx subreg = NULL_RTX;
3921 	rtx plus = NULL_RTX;
3922 	machine_mode mode = operand_mode[i];
3923 
3924 	/* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3925 	   push_reload so we have to let them pass here.  */
3926 	if (GET_CODE (op) == SUBREG)
3927 	  {
3928 	    subreg = op;
3929 	    op = SUBREG_REG (op);
3930 	    mode = GET_MODE (op);
3931 	  }
3932 
3933 	if (GET_CODE (op) == PLUS)
3934 	  {
3935 	    plus = op;
3936 	    op = XEXP (op, 1);
3937 	  }
3938 
3939 	if (CONST_POOL_OK_P (mode, op)
3940 	    && ((targetm.preferred_reload_class (op, goal_alternative[i])
3941 		 == NO_REGS)
3942 		|| no_input_reloads))
3943 	  {
3944 	    int this_address_reloaded;
3945 	    rtx tem = force_const_mem (mode, op);
3946 
3947 	    /* If we stripped a SUBREG or a PLUS above, add it back.  */
3948 	    if (plus != NULL_RTX)
3949 	      tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3950 
3951 	    if (subreg != NULL_RTX)
3952 	      tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3953 
3954 	    this_address_reloaded = 0;
3955 	    substed_operand[i] = recog_data.operand[i]
3956 	      = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3957 				     0, insn, &this_address_reloaded);
3958 
3959 	    /* If the alternative accepts constant pool refs directly
3960 	       there will be no reload needed at all.  */
3961 	    if (plus == NULL_RTX
3962 		&& subreg == NULL_RTX
3963 		&& alternative_allows_const_pool_ref (this_address_reloaded != 1
3964 						      ? substed_operand[i]
3965 						      : NULL,
3966 						      recog_data.constraints[i],
3967 						      goal_alternative_number))
3968 	      goal_alternative_win[i] = 1;
3969 	  }
3970       }
3971 
3972   /* Record the values of the earlyclobber operands for the caller.  */
3973   if (goal_earlyclobber)
3974     for (i = 0; i < noperands; i++)
3975       if (goal_alternative_earlyclobber[i])
3976 	reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3977 
3978   /* Now record reloads for all the operands that need them.  */
3979   for (i = 0; i < noperands; i++)
3980     if (! goal_alternative_win[i])
3981       {
3982 	/* Operands that match previous ones have already been handled.  */
3983 	if (goal_alternative_matches[i] >= 0)
3984 	  ;
3985 	/* Handle an operand with a nonoffsettable address
3986 	   appearing where an offsettable address will do
3987 	   by reloading the address into a base register.
3988 
3989 	   ??? We can also do this when the operand is a register and
3990 	   reg_equiv_mem is not offsettable, but this is a bit tricky,
3991 	   so we don't bother with it.  It may not be worth doing.  */
3992 	else if (goal_alternative_matched[i] == -1
3993 		 && goal_alternative_offmemok[i]
3994 		 && MEM_P (recog_data.operand[i]))
3995 	  {
3996 	    /* If the address to be reloaded is a VOIDmode constant,
3997 	       use the default address mode as the mode of the reload register,
3998 	       as would have been done by find_reloads_address.  */
3999 	    addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4000 	    machine_mode address_mode;
4001 
4002 	    address_mode = get_address_mode (recog_data.operand[i]);
4003 	    operand_reloadnum[i]
4004 	      = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4005 			     &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4006 			     base_reg_class (VOIDmode, as, MEM, SCRATCH),
4007 			     address_mode,
4008 			     VOIDmode, 0, 0, i, RELOAD_OTHER);
4009 	    rld[operand_reloadnum[i]].inc
4010 	      = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4011 
4012 	    /* If this operand is an output, we will have made any
4013 	       reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4014 	       now we are treating part of the operand as an input, so
4015 	       we must change these to RELOAD_FOR_OTHER_ADDRESS.  */
4016 
4017 	    if (modified[i] == RELOAD_WRITE)
4018 	      {
4019 		for (j = 0; j < n_reloads; j++)
4020 		  {
4021 		    if (rld[j].opnum == i)
4022 		      {
4023 			if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4024 			  rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4025 			else if (rld[j].when_needed
4026 				 == RELOAD_FOR_OUTADDR_ADDRESS)
4027 			  rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4028 		      }
4029 		  }
4030 	      }
4031 	  }
4032 	else if (goal_alternative_matched[i] == -1)
4033 	  {
4034 	    operand_reloadnum[i]
4035 	      = push_reload ((modified[i] != RELOAD_WRITE
4036 			      ? recog_data.operand[i] : 0),
4037 			     (modified[i] != RELOAD_READ
4038 			      ? recog_data.operand[i] : 0),
4039 			     (modified[i] != RELOAD_WRITE
4040 			      ? recog_data.operand_loc[i] : 0),
4041 			     (modified[i] != RELOAD_READ
4042 			      ? recog_data.operand_loc[i] : 0),
4043 			     (enum reg_class) goal_alternative[i],
4044 			     (modified[i] == RELOAD_WRITE
4045 			      ? VOIDmode : operand_mode[i]),
4046 			     (modified[i] == RELOAD_READ
4047 			      ? VOIDmode : operand_mode[i]),
4048 			     (insn_code_number < 0 ? 0
4049 			      : insn_data[insn_code_number].operand[i].strict_low),
4050 			     0, i, operand_type[i]);
4051 	  }
4052 	/* In a matching pair of operands, one must be input only
4053 	   and the other must be output only.
4054 	   Pass the input operand as IN and the other as OUT.  */
4055 	else if (modified[i] == RELOAD_READ
4056 		 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4057 	  {
4058 	    operand_reloadnum[i]
4059 	      = push_reload (recog_data.operand[i],
4060 			     recog_data.operand[goal_alternative_matched[i]],
4061 			     recog_data.operand_loc[i],
4062 			     recog_data.operand_loc[goal_alternative_matched[i]],
4063 			     (enum reg_class) goal_alternative[i],
4064 			     operand_mode[i],
4065 			     operand_mode[goal_alternative_matched[i]],
4066 			     0, 0, i, RELOAD_OTHER);
4067 	    operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4068 	  }
4069 	else if (modified[i] == RELOAD_WRITE
4070 		 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4071 	  {
4072 	    operand_reloadnum[goal_alternative_matched[i]]
4073 	      = push_reload (recog_data.operand[goal_alternative_matched[i]],
4074 			     recog_data.operand[i],
4075 			     recog_data.operand_loc[goal_alternative_matched[i]],
4076 			     recog_data.operand_loc[i],
4077 			     (enum reg_class) goal_alternative[i],
4078 			     operand_mode[goal_alternative_matched[i]],
4079 			     operand_mode[i],
4080 			     0, 0, i, RELOAD_OTHER);
4081 	    operand_reloadnum[i] = output_reloadnum;
4082 	  }
4083 	else
4084 	  {
4085 	    gcc_assert (insn_code_number < 0);
4086 	    error_for_asm (insn, "inconsistent operand constraints "
4087 			   "in an %<asm%>");
4088 	    /* Avoid further trouble with this insn.  */
4089 	    PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4090 	    n_reloads = 0;
4091 	    return 0;
4092 	  }
4093       }
4094     else if (goal_alternative_matched[i] < 0
4095 	     && goal_alternative_matches[i] < 0
4096 	     && address_operand_reloaded[i] != 1
4097 	     && optimize)
4098       {
4099 	/* For each non-matching operand that's a MEM or a pseudo-register
4100 	   that didn't get a hard register, make an optional reload.
4101 	   This may get done even if the insn needs no reloads otherwise.  */
4102 
4103 	rtx operand = recog_data.operand[i];
4104 
4105 	while (GET_CODE (operand) == SUBREG)
4106 	  operand = SUBREG_REG (operand);
4107 	if ((MEM_P (operand)
4108 	     || (REG_P (operand)
4109 		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4110 	    /* If this is only for an output, the optional reload would not
4111 	       actually cause us to use a register now, just note that
4112 	       something is stored here.  */
4113 	    && (goal_alternative[i] != NO_REGS
4114 		|| modified[i] == RELOAD_WRITE)
4115 	    && ! no_input_reloads
4116 	    /* An optional output reload might make it possible to delete
4117 	       INSN later.  We must not make in-out reloads on insns for
4118 	       which output reloads are not permitted.
4119 	       If this is an asm, we can't delete it; we must not even call
4120 	       push_reload for an optional output reload in this case,
4121 	       because we can't be sure that the constraint allows a register,
4122 	       and push_reload verifies the constraints for asms.  */
4123 	    && (modified[i] == RELOAD_READ
4124 		|| (! no_output_reloads && ! this_insn_is_asm)))
4125 	  operand_reloadnum[i]
4126 	    = push_reload ((modified[i] != RELOAD_WRITE
4127 			    ? recog_data.operand[i] : 0),
4128 			   (modified[i] != RELOAD_READ
4129 			    ? recog_data.operand[i] : 0),
4130 			   (modified[i] != RELOAD_WRITE
4131 			    ? recog_data.operand_loc[i] : 0),
4132 			   (modified[i] != RELOAD_READ
4133 			    ? recog_data.operand_loc[i] : 0),
4134 			   (enum reg_class) goal_alternative[i],
4135 			   (modified[i] == RELOAD_WRITE
4136 			    ? VOIDmode : operand_mode[i]),
4137 			   (modified[i] == RELOAD_READ
4138 			    ? VOIDmode : operand_mode[i]),
4139 			   (insn_code_number < 0 ? 0
4140 			    : insn_data[insn_code_number].operand[i].strict_low),
4141 			   1, i, operand_type[i]);
4142 	/* If a memory reference remains (either as a MEM or a pseudo that
4143 	   did not get a hard register), yet we can't make an optional
4144 	   reload, check if this is actually a pseudo register reference;
4145 	   we then need to emit a USE and/or a CLOBBER so that reload
4146 	   inheritance will do the right thing.  */
4147 	else if (replace
4148 		 && (MEM_P (operand)
4149 		     || (REG_P (operand)
4150 			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4151 			 && reg_renumber [REGNO (operand)] < 0)))
4152 	  {
4153 	    operand = *recog_data.operand_loc[i];
4154 
4155 	    while (GET_CODE (operand) == SUBREG)
4156 	      operand = SUBREG_REG (operand);
4157 	    if (REG_P (operand))
4158 	      {
4159 		if (modified[i] != RELOAD_WRITE)
4160 		  /* We mark the USE with QImode so that we recognize
4161 		     it as one that can be safely deleted at the end
4162 		     of reload.  */
4163 		  PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4164 					      insn), QImode);
4165 		if (modified[i] != RELOAD_READ)
4166 		  emit_insn_after (gen_clobber (operand), insn);
4167 	      }
4168 	  }
4169       }
4170     else if (goal_alternative_matches[i] >= 0
4171 	     && goal_alternative_win[goal_alternative_matches[i]]
4172 	     && modified[i] == RELOAD_READ
4173 	     && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4174 	     && ! no_input_reloads && ! no_output_reloads
4175 	     && optimize)
4176       {
4177 	/* Similarly, make an optional reload for a pair of matching
4178 	   objects that are in MEM or a pseudo that didn't get a hard reg.  */
4179 
4180 	rtx operand = recog_data.operand[i];
4181 
4182 	while (GET_CODE (operand) == SUBREG)
4183 	  operand = SUBREG_REG (operand);
4184 	if ((MEM_P (operand)
4185 	     || (REG_P (operand)
4186 		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4187 	    && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4188 	  operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4189 	    = push_reload (recog_data.operand[goal_alternative_matches[i]],
4190 			   recog_data.operand[i],
4191 			   recog_data.operand_loc[goal_alternative_matches[i]],
4192 			   recog_data.operand_loc[i],
4193 			   (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4194 			   operand_mode[goal_alternative_matches[i]],
4195 			   operand_mode[i],
4196 			   0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4197       }
4198 
4199   /* Perform whatever substitutions on the operands we are supposed
4200      to make due to commutativity or replacement of registers
4201      with equivalent constants or memory slots.  */
4202 
4203   for (i = 0; i < noperands; i++)
4204     {
4205       /* We only do this on the last pass through reload, because it is
4206 	 possible for some data (like reg_equiv_address) to be changed during
4207 	 later passes.  Moreover, we lose the opportunity to get a useful
4208 	 reload_{in,out}_reg when we do these replacements.  */
4209 
4210       if (replace)
4211 	{
4212 	  rtx substitution = substed_operand[i];
4213 
4214 	  *recog_data.operand_loc[i] = substitution;
4215 
4216 	  /* If we're replacing an operand with a LABEL_REF, we need to
4217 	     make sure that there's a REG_LABEL_OPERAND note attached to
4218 	     this instruction.  */
4219 	  if (GET_CODE (substitution) == LABEL_REF
4220 	      && !find_reg_note (insn, REG_LABEL_OPERAND,
4221 				 label_ref_label (substitution))
4222 	      /* For a JUMP_P, if it was a branch target it must have
4223 		 already been recorded as such.  */
4224 	      && (!JUMP_P (insn)
4225 		  || !label_is_jump_target_p (label_ref_label (substitution),
4226 					      insn)))
4227 	    {
4228 	      add_reg_note (insn, REG_LABEL_OPERAND,
4229 			    label_ref_label (substitution));
4230 	      if (LABEL_P (label_ref_label (substitution)))
4231 		++LABEL_NUSES (label_ref_label (substitution));
4232 	    }
4233 
4234 	}
4235       else
4236 	retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4237     }
4238 
4239   /* If this insn pattern contains any MATCH_DUP's, make sure that
4240      they will be substituted if the operands they match are substituted.
4241      Also do now any substitutions we already did on the operands.
4242 
4243      Don't do this if we aren't making replacements because we might be
4244      propagating things allocated by frame pointer elimination into places
4245      it doesn't expect.  */
4246 
4247   if (insn_code_number >= 0 && replace)
4248     for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4249       {
4250 	int opno = recog_data.dup_num[i];
4251 	*recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4252 	dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4253       }
4254 
4255 #if 0
4256   /* This loses because reloading of prior insns can invalidate the equivalence
4257      (or at least find_equiv_reg isn't smart enough to find it any more),
4258      causing this insn to need more reload regs than it needed before.
4259      It may be too late to make the reload regs available.
4260      Now this optimization is done safely in choose_reload_regs.  */
4261 
4262   /* For each reload of a reg into some other class of reg,
4263      search for an existing equivalent reg (same value now) in the right class.
4264      We can use it as long as we don't need to change its contents.  */
4265   for (i = 0; i < n_reloads; i++)
4266     if (rld[i].reg_rtx == 0
4267 	&& rld[i].in != 0
4268 	&& REG_P (rld[i].in)
4269 	&& rld[i].out == 0)
4270       {
4271 	rld[i].reg_rtx
4272 	  = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4273 			    static_reload_reg_p, 0, rld[i].inmode);
4274 	/* Prevent generation of insn to load the value
4275 	   because the one we found already has the value.  */
4276 	if (rld[i].reg_rtx)
4277 	  rld[i].in = rld[i].reg_rtx;
4278       }
4279 #endif
4280 
4281   /* If we detected an error and replaced the asm instruction with a USE,
4282      forget about the reloads.  */
4283   if (GET_CODE (PATTERN (insn)) == USE
4284       && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4285     n_reloads = 0;
4286 
4287   /* Perhaps an output reload can be combined with another
4288      to reduce needs by one.  */
4289   if (!goal_earlyclobber)
4290     combine_reloads ();
4291 
4292   /* If we have a pair of reloads for parts of an address, they are reloading
4293      the same object, the operands themselves were not reloaded, and they
4294      are for two operands that are supposed to match, merge the reloads and
4295      change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS.  */
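  /* For example, if operands 1 and 2 are required to match, neither was
     reloaded itself, and each got a RELOAD_FOR_INPADDR_ADDRESS reload for
     the same address, the second of those reloads is discarded and the
     surviving one becomes RELOAD_FOR_OPADDR_ADDR.  */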
4296 
4297   for (i = 0; i < n_reloads; i++)
4298     {
4299       int k;
4300 
4301       for (j = i + 1; j < n_reloads; j++)
4302 	if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4303 	     || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4304 	     || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4305 	     || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4306 	    && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4307 		|| rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4308 		|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4309 		|| rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4310 	    && rtx_equal_p (rld[i].in, rld[j].in)
4311 	    && (operand_reloadnum[rld[i].opnum] < 0
4312 		|| rld[operand_reloadnum[rld[i].opnum]].optional)
4313 	    && (operand_reloadnum[rld[j].opnum] < 0
4314 		|| rld[operand_reloadnum[rld[j].opnum]].optional)
4315 	    && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4316 		|| (goal_alternative_matches[rld[j].opnum]
4317 		    == rld[i].opnum)))
4318 	  {
4319 	    for (k = 0; k < n_replacements; k++)
4320 	      if (replacements[k].what == j)
4321 		replacements[k].what = i;
4322 
4323 	    if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4324 		|| rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4325 	      rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4326 	    else
4327 	      rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4328 	    rld[j].in = 0;
4329 	  }
4330     }
4331 
4332   /* Scan all the reloads and update their type.
4333      If a reload is for the address of an operand and we didn't reload
4334      that operand, change the type.  Similarly, change the operand number
4335      of a reload when two operands match.  If a reload is optional, treat it
4336      as though the operand isn't reloaded.
4337 
4338      ??? This latter case is somewhat odd because if we do the optional
4339      reload, it means the object is hanging around.  Thus we need only
4340      do the address reload if the optional reload was NOT done.
4341 
4342      Change secondary reloads to be the address type of their operand, not
4343      the normal type.
4344 
4345      If an operand's reload is now RELOAD_OTHER, change any
4346      RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4347      RELOAD_FOR_OTHER_ADDRESS.  */
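  /* For example, if an operand was not reloaded itself (or only has an
     optional reload) but its address needed a RELOAD_FOR_INPUT_ADDRESS
     reload, that address reload becomes RELOAD_FOR_OPERAND_ADDRESS below,
     and a RELOAD_FOR_INPADDR_ADDRESS reload for it becomes
     RELOAD_FOR_OPADDR_ADDR.  */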
4348 
4349   for (i = 0; i < n_reloads; i++)
4350     {
4351       if (rld[i].secondary_p
4352 	  && rld[i].when_needed == operand_type[rld[i].opnum])
4353 	rld[i].when_needed = address_type[rld[i].opnum];
4354 
4355       if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4356 	   || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4357 	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4358 	   || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4359 	  && (operand_reloadnum[rld[i].opnum] < 0
4360 	      || rld[operand_reloadnum[rld[i].opnum]].optional))
4361 	{
4362 	  /* If we have a secondary reload to go along with this reload,
4363 	     change its type to RELOAD_FOR_OPADDR_ADDR.  */
4364 
4365 	  if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4366 	       || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4367 	      && rld[i].secondary_in_reload != -1)
4368 	    {
4369 	      int secondary_in_reload = rld[i].secondary_in_reload;
4370 
4371 	      rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4372 
4373 	      /* If there's a tertiary reload we have to change it also.  */
4374 	      if (secondary_in_reload > 0
4375 		  && rld[secondary_in_reload].secondary_in_reload != -1)
4376 		rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4377 		  = RELOAD_FOR_OPADDR_ADDR;
4378 	    }
4379 
4380 	  if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4381 	       || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4382 	      && rld[i].secondary_out_reload != -1)
4383 	    {
4384 	      int secondary_out_reload = rld[i].secondary_out_reload;
4385 
4386 	      rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4387 
4388 	      /* If there's a tertiary reload we have to change it also.  */
4389 	      if (secondary_out_reload
4390 		  && rld[secondary_out_reload].secondary_out_reload != -1)
4391 		rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4392 		  = RELOAD_FOR_OPADDR_ADDR;
4393 	    }
4394 
4395 	  if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4396 	      || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4397 	    rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4398 	  else
4399 	    rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4400 	}
4401 
4402       if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4403 	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4404 	  && operand_reloadnum[rld[i].opnum] >= 0
4405 	  && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4406 	      == RELOAD_OTHER))
4407 	rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4408 
4409       if (goal_alternative_matches[rld[i].opnum] >= 0)
4410 	rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4411     }
4412 
4413   /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4414      If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4415      reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4416 
4417      choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4418      conflict with RELOAD_FOR_OPERAND_ADDRESS reloads.  This is true for a
4419      single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4420      However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4421      then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4422      RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4423      This is complicated by the fact that a single operand can have more
4424      than one RELOAD_FOR_OPERAND_ADDRESS reload.  It is very difficult to fix
4425      choose_reload_regs without affecting code quality, and cases that
4426      actually fail are extremely rare, so it turns out to be better to fix
4427      the problem here by not generating cases that choose_reload_regs will
4428      fail for.  */
4429   /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4430      RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4431      a single operand.
4432      We can reduce the register pressure by exploiting that a
4433      RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4434      does not conflict with any of them, if it is only used for the first of
4435      the RELOAD_FOR_X_ADDRESS reloads.  */
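  /* Concretely: when there is more than one RELOAD_FOR_OPERAND_ADDRESS
     reload, a RELOAD_FOR_OPADDR_ADDR reload is promoted to
     RELOAD_FOR_OPERAND_ADDRESS below unless it precedes the first such
     reload and is not used by any of the later ones.  */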
4436   {
4437     int first_op_addr_num = -2;
4438     int first_inpaddr_num[MAX_RECOG_OPERANDS];
4439     int first_outpaddr_num[MAX_RECOG_OPERANDS];
4440     int need_change = 0;
4441     /* We use first_op_addr_num and the contents of the above arrays
4442        first as flags: -2 means no instance encountered, -1 means exactly
4443        one instance encountered.
4444        If more than one instance has been encountered, we store the reload
4445        number of the first reload of the kind in question; reload numbers
4446        are known to be non-negative.  */
4447     for (i = 0; i < noperands; i++)
4448       first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4449     for (i = n_reloads - 1; i >= 0; i--)
4450       {
4451 	switch (rld[i].when_needed)
4452 	  {
4453 	  case RELOAD_FOR_OPERAND_ADDRESS:
4454 	    if (++first_op_addr_num >= 0)
4455 	      {
4456 		first_op_addr_num = i;
4457 		need_change = 1;
4458 	      }
4459 	    break;
4460 	  case RELOAD_FOR_INPUT_ADDRESS:
4461 	    if (++first_inpaddr_num[rld[i].opnum] >= 0)
4462 	      {
4463 		first_inpaddr_num[rld[i].opnum] = i;
4464 		need_change = 1;
4465 	      }
4466 	    break;
4467 	  case RELOAD_FOR_OUTPUT_ADDRESS:
4468 	    if (++first_outpaddr_num[rld[i].opnum] >= 0)
4469 	      {
4470 		first_outpaddr_num[rld[i].opnum] = i;
4471 		need_change = 1;
4472 	      }
4473 	    break;
4474 	  default:
4475 	    break;
4476 	  }
4477       }
4478 
4479     if (need_change)
4480       {
4481 	for (i = 0; i < n_reloads; i++)
4482 	  {
4483 	    int first_num;
4484 	    enum reload_type type;
4485 
4486 	    switch (rld[i].when_needed)
4487 	      {
4488 	      case RELOAD_FOR_OPADDR_ADDR:
4489 		first_num = first_op_addr_num;
4490 		type = RELOAD_FOR_OPERAND_ADDRESS;
4491 		break;
4492 	      case RELOAD_FOR_INPADDR_ADDRESS:
4493 		first_num = first_inpaddr_num[rld[i].opnum];
4494 		type = RELOAD_FOR_INPUT_ADDRESS;
4495 		break;
4496 	      case RELOAD_FOR_OUTADDR_ADDRESS:
4497 		first_num = first_outpaddr_num[rld[i].opnum];
4498 		type = RELOAD_FOR_OUTPUT_ADDRESS;
4499 		break;
4500 	      default:
4501 		continue;
4502 	      }
4503 	    if (first_num < 0)
4504 	      continue;
4505 	    else if (i > first_num)
4506 	      rld[i].when_needed = type;
4507 	    else
4508 	      {
4509 		/* Check if the only TYPE reload that uses reload I is
4510 		   reload FIRST_NUM.  */
4511 		for (j = n_reloads - 1; j > first_num; j--)
4512 		  {
4513 		    if (rld[j].when_needed == type
4514 			&& (rld[i].secondary_p
4515 			    ? rld[j].secondary_in_reload == i
4516 			    : reg_mentioned_p (rld[i].in, rld[j].in)))
4517 		      {
4518 			rld[i].when_needed = type;
4519 			break;
4520 		      }
4521 		  }
4522 	      }
4523 	  }
4524       }
4525   }
4526 
4527   /* See if we have any reloads that are now allowed to be merged
4528      because we've changed when the reload is needed to
4529      RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS.  Only
4530      check for the most common cases.  */
4531 
4532   for (i = 0; i < n_reloads; i++)
4533     if (rld[i].in != 0 && rld[i].out == 0
4534 	&& (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4535 	    || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4536 	    || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4537       for (j = 0; j < n_reloads; j++)
4538 	if (i != j && rld[j].in != 0 && rld[j].out == 0
4539 	    && rld[j].when_needed == rld[i].when_needed
4540 	    && MATCHES (rld[i].in, rld[j].in)
4541 	    && rld[i].rclass == rld[j].rclass
4542 	    && !rld[i].nocombine && !rld[j].nocombine
4543 	    && rld[i].reg_rtx == rld[j].reg_rtx)
4544 	  {
4545 	    rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4546 	    transfer_replacements (i, j);
4547 	    rld[j].in = 0;
4548 	  }
4549 
4550   /* If we made any reloads for addresses, see if they violate a
4551      "no input reloads" requirement for this insn.  But loads that we
4552      do after the insn (such as for output addresses) are fine.  */
4553   if (HAVE_cc0 && no_input_reloads)
4554     for (i = 0; i < n_reloads; i++)
4555       gcc_assert (rld[i].in == 0
4556 		  || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4557 		  || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4558 
4559   /* Compute reload_mode and reload_nregs.  */
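  /* The reload register must be wide enough for both directions: e.g. a
     reload whose input is QImode but whose output is SImode gets SImode
     here, and its register count follows from that mode and its class.  */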
4560   for (i = 0; i < n_reloads; i++)
4561     {
4562       rld[i].mode = rld[i].inmode;
4563       if (rld[i].mode == VOIDmode
4564 	  || partial_subreg_p (rld[i].mode, rld[i].outmode))
4565 	rld[i].mode = rld[i].outmode;
4566 
4567       rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4568     }
4569 
4570   /* Special case a simple move with an input reload and a
4571      destination of a hard reg: if the hard reg is ok, use it.  */
4572   for (i = 0; i < n_reloads; i++)
4573     if (rld[i].when_needed == RELOAD_FOR_INPUT
4574 	&& GET_CODE (PATTERN (insn)) == SET
4575 	&& REG_P (SET_DEST (PATTERN (insn)))
4576 	&& (SET_SRC (PATTERN (insn)) == rld[i].in
4577 	    || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4578 	&& !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4579       {
4580 	rtx dest = SET_DEST (PATTERN (insn));
4581 	unsigned int regno = REGNO (dest);
4582 
4583 	if (regno < FIRST_PSEUDO_REGISTER
4584 	    && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4585 	    && targetm.hard_regno_mode_ok (regno, rld[i].mode))
4586 	  {
4587 	    int nr = hard_regno_nregs (regno, rld[i].mode);
4588 	    int ok = 1, nri;
4589 
4590 	    for (nri = 1; nri < nr; nri ++)
4591 	      if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4592 		{
4593 		  ok = 0;
4594 		  break;
4595 		}
4596 
4597 	    if (ok)
4598 	      rld[i].reg_rtx = dest;
4599 	  }
4600       }
4601 
4602   return retval;
4603 }
4604 
4605 /* Return true if alternative number ALTNUM in constraint-string
4606    CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4607    MEM gives the reference if its address hasn't been fully reloaded,
4608    otherwise it is NULL.  */
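/* For example, with the constraint string "r,o" and ALTNUM 1, the scan
   below skips past "r," and finds the memory constraint 'o'; if MEM is
   null (the address was fully reloaded), the alternative is taken to
   accept the constant-pool reference.  */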
4609 
4610 static bool
4611 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4612 				   const char *constraint, int altnum)
4613 {
4614   int c;
4615 
4616   /* Skip alternatives before the one requested.  */
4617   while (altnum > 0)
4618     {
4619       while (*constraint++ != ',')
4620 	;
4621       altnum--;
4622     }
4623   /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4624      If one of them is present, this alternative accepts the result of
4625      passing a constant-pool reference through find_reloads_toplev.
4626 
4627      The same is true of extra memory constraints if the address
4628      was reloaded into a register.  However, the target may elect
4629      to disallow the original constant address, forcing it to be
4630      reloaded into a register instead.  */
4631   for (; (c = *constraint) && c != ',' && c != '#';
4632        constraint += CONSTRAINT_LEN (c, constraint))
4633     {
4634       enum constraint_num cn = lookup_constraint (constraint);
4635       if (insn_extra_memory_constraint (cn)
4636 	  && (mem == NULL || constraint_satisfied_p (mem, cn)))
4637 	return true;
4638     }
4639   return false;
4640 }
4641 
4642 /* Scan X for memory references and scan the addresses for reloading.
4643    Also checks for references to "constant" regs that we want to eliminate
4644    and replaces them with the values they stand for.
4645    We may alter X destructively if it contains a reference to such.
4646    If X is just a constant reg, we return the equivalent value
4647    instead of X.
4648 
4649    IND_LEVELS says how many levels of indirect addressing this machine
4650    supports.
4651 
4652    OPNUM and TYPE identify the purpose of the reload.
4653 
4654    IS_SET_DEST is true if X is the destination of a SET, which is not
4655    appropriate to be replaced by a constant.
4656 
4657    INSN, if nonzero, is the insn in which we do the reload.  It is used
4658    to determine if we may generate output reloads, and where to put USEs
4659    for pseudos that we have to replace with stack slots.
4660 
4661    ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4662    result of find_reloads_address.  */
4663 
4664 static rtx
4665 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4666 		     int ind_levels, int is_set_dest, rtx_insn *insn,
4667 		     int *address_reloaded)
4668 {
4669   RTX_CODE code = GET_CODE (x);
4670 
4671   const char *fmt = GET_RTX_FORMAT (code);
4672   int i;
4673   int copied;
4674 
4675   if (code == REG)
4676     {
4677       /* This code is duplicated for speed in find_reloads.  */
4678       int regno = REGNO (x);
4679       if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4680 	x = reg_equiv_constant (regno);
4681 #if 0
4682       /*  This creates (subreg (mem...)) which would cause an unnecessary
4683 	  reload of the mem.  */
4684       else if (reg_equiv_mem (regno) != 0)
4685 	x = reg_equiv_mem (regno);
4686 #endif
4687       else if (reg_equiv_memory_loc (regno)
4688 	       && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4689 	{
4690 	  rtx mem = make_memloc (x, regno);
4691 	  if (reg_equiv_address (regno)
4692 	      || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4693 	    {
4694 	      /* If this is not a toplevel operand, find_reloads doesn't see
4695 		 this substitution.  We have to emit a USE of the pseudo so
4696 		 that delete_output_reload can see it.  */
4697 	      if (replace_reloads && recog_data.operand[opnum] != x)
4698 		/* We mark the USE with QImode so that we recognize it
4699 		   as one that can be safely deleted at the end of
4700 		   reload.  */
4701 		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4702 			  QImode);
4703 	      x = mem;
4704 	      i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4705 					opnum, type, ind_levels, insn);
4706 	      if (!rtx_equal_p (x, mem))
4707 		push_reg_equiv_alt_mem (regno, x);
4708 	      if (address_reloaded)
4709 		*address_reloaded = i;
4710 	    }
4711 	}
4712       return x;
4713     }
4714   if (code == MEM)
4715     {
4716       rtx tem = x;
4717 
4718       i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4719 				opnum, type, ind_levels, insn);
4720       if (address_reloaded)
4721 	*address_reloaded = i;
4722 
4723       return tem;
4724     }
4725 
4726   if (code == SUBREG && REG_P (SUBREG_REG (x)))
4727     {
4728       /* Check for SUBREG containing a REG that's equivalent to a
4729 	 constant.  If the constant has a known value, truncate it
4730 	 right now.  Similarly if we are extracting a single-word of a
4731 	 multi-word constant.  If the constant is symbolic, allow it
4732 	 to be substituted normally.  push_reload will strip the
4733 	 subreg later.  The constant must not be VOIDmode, because we
4734 	 will lose the mode of the register (this should never happen
4735 	 because one of the cases above should handle it).  */
4736 
4737       int regno = REGNO (SUBREG_REG (x));
4738       rtx tem;
4739 
4740       if (regno >= FIRST_PSEUDO_REGISTER
4741 	  && reg_renumber[regno] < 0
4742 	  && reg_equiv_constant (regno) != 0)
4743 	{
4744 	  tem =
4745 	    simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4746 				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4747 	  gcc_assert (tem);
4748 	  if (CONSTANT_P (tem)
4749 	      && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4750 	    {
4751 	      tem = force_const_mem (GET_MODE (x), tem);
4752 	      i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4753 					&XEXP (tem, 0), opnum, type,
4754 					ind_levels, insn);
4755 	      if (address_reloaded)
4756 		*address_reloaded = i;
4757 	    }
4758 	  return tem;
4759 	}
4760 
4761       /* If the subreg contains a reg that will be converted to a mem,
4762 	 attempt to convert the whole subreg to a (narrower or wider)
4763 	 memory reference instead.  If this succeeds, we're done --
4764 	 otherwise fall through to check whether the inner reg still
4765 	 needs address reloads anyway.  */
4766 
4767       if (regno >= FIRST_PSEUDO_REGISTER
4768 	  && reg_equiv_memory_loc (regno) != 0)
4769 	{
4770 	  tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4771 					     insn, address_reloaded);
4772 	  if (tem)
4773 	    return tem;
4774 	}
4775     }
4776 
4777   for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4778     {
4779       if (fmt[i] == 'e')
4780 	{
4781 	  rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4782 					      ind_levels, is_set_dest, insn,
4783 					      address_reloaded);
4784 	  /* If we have replaced a reg with its equivalent memory loc -
4785 	     which can still be handled here, e.g. if it's in a paradoxical
4786 	     subreg - we must make the change in a copy, rather than using
4787 	     a destructive change.  This way, find_reloads can still elect
4788 	     not to do the change.  */
4789 	  if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4790 	    {
4791 	      x = shallow_copy_rtx (x);
4792 	      copied = 1;
4793 	    }
4794 	  XEXP (x, i) = new_part;
4795 	}
4796     }
4797   return x;
4798 }
4799 
4800 /* Return a mem ref for the memory equivalent of reg REGNO.
4801    This mem ref is not shared with anything.  */
4802 
4803 static rtx
4804 make_memloc (rtx ad, int regno)
4805 {
4806   /* We must rerun eliminate_regs, in case the elimination
4807      offsets have changed.  */
4808   rtx tem
4809     = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4810 	    0);
4811 
4812   /* If TEM might contain a pseudo, we must copy it to avoid
4813      modifying it when we do the substitution for the reload.  */
4814   if (rtx_varies_p (tem, 0))
4815     tem = copy_rtx (tem);
4816 
4817   tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4818   tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4819 
4820   /* Copy the result if it's still the same as the equivalence, to avoid
4821      modifying it when we do the substitution for the reload.  */
4822   if (tem == reg_equiv_memory_loc (regno))
4823     tem = copy_rtx (tem);
4824   return tem;
4825 }
4826 
4827 /* Returns true if AD could be turned into a valid memory reference
4828    to mode MODE in address space AS by reloading the part pointed to
4829    by PART into a register.  */
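/* For instance, if AD is (plus (reg fp) (const_int 100000)) and PART
   points to the base register, the register is temporarily replaced by a
   fresh pseudo register rtx and the resulting address is tested for
   legitimacy; AD is restored before returning.  */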
4830 
4831 static int
4832 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4833 				   addr_space_t as, rtx *part)
4834 {
4835   int retv;
4836   rtx tem = *part;
4837   rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4838 
4839   *part = reg;
4840   retv = memory_address_addr_space_p (mode, ad, as);
4841   *part = tem;
4842 
4843   return retv;
4844 }
4845 
4846 /* Record all reloads needed for handling memory address AD
4847    which appears in *LOC in a memory reference to mode MODE
4848    which itself is found in location  *MEMREFLOC.
4849    Note that we take shortcuts assuming that no multi-reg machine mode
4850    occurs as part of an address.
4851 
4852    OPNUM and TYPE specify the purpose of this reload.
4853 
4854    IND_LEVELS says how many levels of indirect addressing this machine
4855    supports.
4856 
4857    INSN, if nonzero, is the insn in which we do the reload.  It is used
4858    to determine if we may generate output reloads, and where to put USEs
4859    for pseudos that we have to replace with stack slots.
4860 
4861    Value is one if this address is reloaded or replaced as a whole; it is
4862    zero if the top level of this address was not reloaded or replaced, and
4863    it is -1 if it may or may not have been reloaded or replaced.
4864 
4865    Note that there is no verification that the address will be valid after
4866    this routine does its work.  Instead, we rely on the fact that the address
4867    was valid when reload started.  So we need only undo things that reload
4868    could have broken.  These are wrong register types, pseudos not allocated
4869    to a hard register, and frame pointer elimination.  */
4870 
4871 static int
4872 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4873 		      rtx *loc, int opnum, enum reload_type type,
4874 		      int ind_levels, rtx_insn *insn)
4875 {
4876   addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4877 			     : ADDR_SPACE_GENERIC;
4878   int regno;
4879   int removed_and = 0;
4880   int op_index;
4881   rtx tem;
4882 
4883   /* If the address is a register, see if it is a legitimate address and
4884      reload if not.  We first handle the cases where we need not reload
4885      or where we must reload in a non-standard way.  */
4886 
4887   if (REG_P (ad))
4888     {
4889       regno = REGNO (ad);
4890 
4891       if (reg_equiv_constant (regno) != 0)
4892 	{
4893 	  find_reloads_address_part (reg_equiv_constant (regno), loc,
4894 				     base_reg_class (mode, as, MEM, SCRATCH),
4895 				     GET_MODE (ad), opnum, type, ind_levels);
4896 	  return 1;
4897 	}
4898 
4899       tem = reg_equiv_memory_loc (regno);
4900       if (tem != 0)
4901 	{
4902 	  if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4903 	    {
4904 	      tem = make_memloc (ad, regno);
4905 	      if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4906 							XEXP (tem, 0),
4907 							MEM_ADDR_SPACE (tem)))
4908 		{
4909 		  rtx orig = tem;
4910 
4911 		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4912 					&XEXP (tem, 0), opnum,
4913 					ADDR_TYPE (type), ind_levels, insn);
4914 		  if (!rtx_equal_p (tem, orig))
4915 		    push_reg_equiv_alt_mem (regno, tem);
4916 		}
4917 	      /* We can avoid a reload if the register's equivalent memory
4918 		 expression is valid as an indirect memory address.
4919 		 But not all addresses are valid in a mem used as an indirect
4920 		 address: only reg or reg+constant.  */
4921 
4922 	      if (ind_levels > 0
4923 		  && strict_memory_address_addr_space_p (mode, tem, as)
4924 		  && (REG_P (XEXP (tem, 0))
4925 		      || (GET_CODE (XEXP (tem, 0)) == PLUS
4926 			  && REG_P (XEXP (XEXP (tem, 0), 0))
4927 			  && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4928 		{
4929 		  /* If TEM is not the same as what we'll be replacing the
4930 		     pseudo with after reload, put a USE in front of INSN
4931 		     in the final reload pass.  */
4932 		  if (replace_reloads
4933 		      && num_not_at_initial_offset
4934 		      && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4935 		    {
4936 		      *loc = tem;
4937 		      /* We mark the USE with QImode so that we
4938 			 recognize it as one that can be safely
4939 			 deleted at the end of reload.  */
4940 		      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4941 						  insn), QImode);
4942 
4943 		      /* This doesn't really count as replacing the address
4944 			 as a whole, since it is still a memory access.  */
4945 		    }
4946 		  return 0;
4947 		}
4948 	      ad = tem;
4949 	    }
4950 	}
4951 
4952       /* The only remaining case where we can avoid a reload is if this is a
4953 	 hard register that is valid as a base register and which is not the
4954 	 subject of a CLOBBER in this insn.  */
4955 
4956       else if (regno < FIRST_PSEUDO_REGISTER
4957 	       && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4958 	       && ! regno_clobbered_p (regno, this_insn, mode, 0))
4959 	return 0;
4960 
4961       /* If we do not have one of the cases above, we must do the reload.  */
4962       push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4963 		   base_reg_class (mode, as, MEM, SCRATCH),
4964 		   GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4965       return 1;
4966     }
4967 
4968   if (strict_memory_address_addr_space_p (mode, ad, as))
4969     {
4970       /* The address appears valid, so reloads are not needed.
4971 	 But the address may contain an eliminable register.
4972 	 This can happen because a machine with indirect addressing
4973 	 may consider a pseudo register by itself a valid address even when
4974 	 it has failed to get a hard reg.
4975 	 So do a tree-walk to find and eliminate all such regs.  */
4976 
4977       /* But first quickly dispose of a common case.  */
4978       if (GET_CODE (ad) == PLUS
4979 	  && CONST_INT_P (XEXP (ad, 1))
4980 	  && REG_P (XEXP (ad, 0))
4981 	  && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4982 	return 0;
4983 
4984       subst_reg_equivs_changed = 0;
4985       *loc = subst_reg_equivs (ad, insn);
4986 
4987       if (! subst_reg_equivs_changed)
4988 	return 0;
4989 
4990       /* Check result for validity after substitution.  */
4991       if (strict_memory_address_addr_space_p (mode, ad, as))
4992 	return 0;
4993     }
4994 
4995 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4996   do
4997     {
4998       if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4999 	{
5000 	  LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5001 				     ind_levels, win);
5002 	}
5003       break;
5004     win:
5005       *memrefloc = copy_rtx (*memrefloc);
5006       XEXP (*memrefloc, 0) = ad;
5007       move_replacements (&ad, &XEXP (*memrefloc, 0));
5008       return -1;
5009     }
5010   while (0);
5011 #endif
5012 
5013   /* The address is not valid.  We have to figure out why.  First see if
5014      we have an outer AND and remove it if so.  Then analyze what's inside.  */
5015 
5016   if (GET_CODE (ad) == AND)
5017     {
5018       removed_and = 1;
5019       loc = &XEXP (ad, 0);
5020       ad = *loc;
5021     }
5022 
5023   /* One possibility for why the address is invalid is that it is itself
5024      a MEM.  This can happen when the frame pointer is being eliminated, a
5025      pseudo is not allocated to a hard register, and the offset between the
5026      frame and stack pointers is not its initial value.  In that case the
5027      pseudo will have been replaced by a MEM referring to the
5028      stack pointer.  */
5029   if (MEM_P (ad))
5030     {
5031       /* First ensure that the address in this MEM is valid.  Then, unless
5032 	 indirect addresses are valid, reload the MEM into a register.  */
5033       tem = ad;
5034       find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5035 			    opnum, ADDR_TYPE (type),
5036 			    ind_levels == 0 ? 0 : ind_levels - 1, insn);
5037 
5038       /* If tem was changed, then we must create a new memory reference to
5039 	 hold it and store it back into memrefloc.  */
5040       if (tem != ad && memrefloc)
5041 	{
5042 	  *memrefloc = copy_rtx (*memrefloc);
5043 	  copy_replacements (tem, XEXP (*memrefloc, 0));
5044 	  loc = &XEXP (*memrefloc, 0);
5045 	  if (removed_and)
5046 	    loc = &XEXP (*loc, 0);
5047 	}
5048 
5049       /* Check cases similar to those for indirect addresses above, except
5050 	 that we can allow pseudos and a MEM since they should have been
5051 	 taken care of above.  */
5052 
5053       if (ind_levels == 0
5054 	  || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5055 	  || MEM_P (XEXP (tem, 0))
5056 	  || ! (REG_P (XEXP (tem, 0))
5057 		|| (GET_CODE (XEXP (tem, 0)) == PLUS
5058 		    && REG_P (XEXP (XEXP (tem, 0), 0))
5059 		    && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5060 	{
5061 	  /* Must use TEM here, not AD, since it is the one that will
5062 	     have any subexpressions reloaded, if needed.  */
5063 	  push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5064 		       base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5065 		       VOIDmode, 0,
5066 		       0, opnum, type);
5067 	  return ! removed_and;
5068 	}
5069       else
5070 	return 0;
5071     }
5072 
5073   /* If we have address of a stack slot but it's not valid because the
5074      displacement is too large, compute the sum in a register.
5075      Handle all base registers here, not just fp/ap/sp, because on some
5076      targets (namely SH) we can also get too large displacements from
5077      big-endian corrections.  */
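  /* For example, an eliminated frame-pointer reference such as
     (plus (reg sp) (const_int 70000)) lands here on a target whose
     displacement field cannot encode that offset: depending on
     double_reg_address_ok we either reload the displacement into an index
     register or reload the whole sum into a base register.  */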
5078   else if (GET_CODE (ad) == PLUS
5079 	   && REG_P (XEXP (ad, 0))
5080 	   && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5081 	   && CONST_INT_P (XEXP (ad, 1))
5082 	   && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5083 				    CONST_INT)
5084 	       /* Similarly, if we were to reload the base register and the
5085 		  mem+offset address is still invalid, then we want to reload
5086 		  the whole address, not just the base register.  */
5087 	       || ! maybe_memory_address_addr_space_p
5088 		     (mode, ad, as, &(XEXP (ad, 0)))))
5089 
5090     {
5091       /* Unshare the MEM rtx so we can safely alter it.  */
5092       if (memrefloc)
5093 	{
5094 	  *memrefloc = copy_rtx (*memrefloc);
5095 	  loc = &XEXP (*memrefloc, 0);
5096 	  if (removed_and)
5097 	    loc = &XEXP (*loc, 0);
5098 	}
5099 
5100       if (double_reg_address_ok[mode]
5101 	  && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5102 				  PLUS, CONST_INT))
5103 	{
5104 	  /* Unshare the sum as well.  */
5105 	  *loc = ad = copy_rtx (ad);
5106 
5107 	  /* Reload the displacement into an index reg.
5108 	     We assume the frame pointer or arg pointer is a base reg.  */
5109 	  find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5110 				     INDEX_REG_CLASS, GET_MODE (ad), opnum,
5111 				     type, ind_levels);
5112 	  return 0;
5113 	}
5114       else
5115 	{
5116 	  /* If the sum of two regs is not necessarily valid,
5117 	     reload the sum into a base reg.
5118 	     That will at least work.  */
5119 	  find_reloads_address_part (ad, loc,
5120 				     base_reg_class (mode, as, MEM, SCRATCH),
5121 				     GET_MODE (ad), opnum, type, ind_levels);
5122 	}
5123       return ! removed_and;
5124     }
5125 
5126   /* If we have an indexed stack slot, there are three possible reasons why
5127      it might be invalid: The index might need to be reloaded, the address
5128      might have been made by frame pointer elimination and hence have a
5129      constant out of range, or both reasons might apply.
5130 
5131      We can easily check for an index needing reload, but even if that is the
5132      case, we might also have an invalid constant.  To avoid making the
5133      conservative assumption and requiring two reloads, we see if this address
5134      is valid when not interpreted strictly.  If it is, the only problem is
5135      that the index needs a reload and find_reloads_address_1 will take care
5136      of it.
5137 
5138      Handle all base registers here, not just fp/ap/sp, because on some
5139      targets (namely SPARC) we can also get invalid addresses from preventive
5140      subreg big-endian corrections made by find_reloads_toplev.  We
5141      can also get expressions involving LO_SUM (rather than PLUS) from
5142      find_reloads_subreg_address.
5143 
5144      If we decide to do something, it must be that `double_reg_address_ok'
5145      is true.  We generate a reload of the base register + constant and
5146      rework the sum so that the reload register will be added to the index.
5147      This is safe because we know the address isn't shared.
5148 
5149      We check for the base register as both the first and second operand of
5150      the innermost PLUS and/or LO_SUM.  */
5151 
5152   for (op_index = 0; op_index < 2; ++op_index)
5153     {
5154       rtx operand, addend;
5155       enum rtx_code inner_code;
5156 
5157       if (GET_CODE (ad) != PLUS)
5158 	  continue;
5159 
5160       inner_code = GET_CODE (XEXP (ad, 0));
5161       if (!(GET_CODE (ad) == PLUS
5162 	    && CONST_INT_P (XEXP (ad, 1))
5163 	    && (inner_code == PLUS || inner_code == LO_SUM)))
5164 	continue;
5165 
5166       operand = XEXP (XEXP (ad, 0), op_index);
5167       if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5168 	continue;
5169 
5170       addend = XEXP (XEXP (ad, 0), 1 - op_index);
5171 
5172       if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5173 				GET_CODE (addend))
5174 	   || operand == frame_pointer_rtx
5175 	   || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5176 	       && operand == hard_frame_pointer_rtx)
5177 	   || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5178 	       && operand == arg_pointer_rtx)
5179 	   || operand == stack_pointer_rtx)
5180 	  && ! maybe_memory_address_addr_space_p
5181 		(mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5182 	{
5183 	  rtx offset_reg;
5184 	  enum reg_class cls;
5185 
5186 	  offset_reg = plus_constant (GET_MODE (ad), operand,
5187 				      INTVAL (XEXP (ad, 1)));
5188 
5189 	  /* Form the adjusted address.  */
5190 	  if (GET_CODE (XEXP (ad, 0)) == PLUS)
5191 	    ad = gen_rtx_PLUS (GET_MODE (ad),
5192 			       op_index == 0 ? offset_reg : addend,
5193 			       op_index == 0 ? addend : offset_reg);
5194 	  else
5195 	    ad = gen_rtx_LO_SUM (GET_MODE (ad),
5196 				 op_index == 0 ? offset_reg : addend,
5197 				 op_index == 0 ? addend : offset_reg);
5198 	  *loc = ad;
5199 
5200 	  cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5201 	  find_reloads_address_part (XEXP (ad, op_index),
5202 				     &XEXP (ad, op_index), cls,
5203 				     GET_MODE (ad), opnum, type, ind_levels);
5204 	  find_reloads_address_1 (mode, as,
5205 				  XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5206 				  GET_CODE (XEXP (ad, op_index)),
5207 				  &XEXP (ad, 1 - op_index), opnum,
5208 				  type, 0, insn);
5209 
5210 	  return 0;
5211 	}
5212     }
5213 
5214   /* See if address becomes valid when an eliminable register
5215      in a sum is replaced.  */
5216 
5217   tem = ad;
5218   if (GET_CODE (ad) == PLUS)
5219     tem = subst_indexed_address (ad);
5220   if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5221     {
5222       /* Ok, we win that way.  Replace any additional eliminable
5223 	 registers.  */
5224 
5225       subst_reg_equivs_changed = 0;
5226       tem = subst_reg_equivs (tem, insn);
5227 
5228       /* Make sure that didn't make the address invalid again.  */
5229 
5230       if (! subst_reg_equivs_changed
5231 	  || strict_memory_address_addr_space_p (mode, tem, as))
5232 	{
5233 	  *loc = tem;
5234 	  return 0;
5235 	}
5236     }
5237 
5238   /* If constants aren't valid addresses, reload the constant address
5239      into a register.  */
5240   if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5241     {
5242       machine_mode address_mode = GET_MODE (ad);
5243       if (address_mode == VOIDmode)
5244 	address_mode = targetm.addr_space.address_mode (as);
5245 
5246       /* If AD is an address in the constant pool, the MEM rtx may be shared.
5247 	 Unshare it so we can safely alter it.  */
5248       if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5249 	  && CONSTANT_POOL_ADDRESS_P (ad))
5250 	{
5251 	  *memrefloc = copy_rtx (*memrefloc);
5252 	  loc = &XEXP (*memrefloc, 0);
5253 	  if (removed_and)
5254 	    loc = &XEXP (*loc, 0);
5255 	}
5256 
5257       find_reloads_address_part (ad, loc,
5258 				 base_reg_class (mode, as, MEM, SCRATCH),
5259 				 address_mode, opnum, type, ind_levels);
5260       return ! removed_and;
5261     }
5262 
5263   return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5264 				 opnum, type, ind_levels, insn);
5265 }
5266 
5267 /* Find all pseudo regs appearing in AD
5268    that are eliminable in favor of equivalent values
5269    and do not have hard regs; replace them by their equivalents.
5270    INSN, if nonzero, is the insn in which we do the reload.  We put USEs in
5271    front of it for pseudos that we have to replace with stack slots.  */
5272 
5273 static rtx
5274 subst_reg_equivs (rtx ad, rtx_insn *insn)
5275 {
5276   RTX_CODE code = GET_CODE (ad);
5277   int i;
5278   const char *fmt;
5279 
5280   switch (code)
5281     {
5282     case HIGH:
5283     case CONST:
5284     CASE_CONST_ANY:
5285     case SYMBOL_REF:
5286     case LABEL_REF:
5287     case PC:
5288     case CC0:
5289       return ad;
5290 
5291     case REG:
5292       {
5293 	int regno = REGNO (ad);
5294 
5295 	if (reg_equiv_constant (regno) != 0)
5296 	  {
5297 	    subst_reg_equivs_changed = 1;
5298 	    return reg_equiv_constant (regno);
5299 	  }
5300 	if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5301 	  {
5302 	    rtx mem = make_memloc (ad, regno);
5303 	    if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5304 	      {
5305 		subst_reg_equivs_changed = 1;
5306 		/* We mark the USE with QImode so that we recognize it
5307 		   as one that can be safely deleted at the end of
5308 		   reload.  */
5309 		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5310 			  QImode);
5311 		return mem;
5312 	      }
5313 	  }
5314       }
5315       return ad;
5316 
5317     case PLUS:
5318       /* Quickly dispose of a common case.  */
5319       if (XEXP (ad, 0) == frame_pointer_rtx
5320 	  && CONST_INT_P (XEXP (ad, 1)))
5321 	return ad;
5322       break;
5323 
5324     default:
5325       break;
5326     }
5327 
5328   fmt = GET_RTX_FORMAT (code);
5329   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5330     if (fmt[i] == 'e')
5331       XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5332   return ad;
5333 }
5334 
5335 /* Compute the sum of X and Y, making canonicalizations assumed in an
5336    address, namely: sum constant integers, surround the sum of two
5337    constants with a CONST, put the constant as the second operand, and
5338    group the constant on the outermost sum.
5339 
5340    This routine assumes both inputs are already in canonical form.  */
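/* For example, form_sum (SImode, (plus (reg 65) (const_int 4)),
   (const_int 12)) yields (plus (reg 65) (const_int 16)), while the sum of
   two symbolic constants is wrapped in a CONST.  */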
5341 
5342 rtx
5343 form_sum (machine_mode mode, rtx x, rtx y)
5344 {
5345   rtx tem;
5346 
5347   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5348   gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5349 
5350   if (CONST_INT_P (x))
5351     return plus_constant (mode, y, INTVAL (x));
5352   else if (CONST_INT_P (y))
5353     return plus_constant (mode, x, INTVAL (y));
5354   else if (CONSTANT_P (x))
5355     tem = x, x = y, y = tem;
5356 
5357   if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5358     return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5359 
5360   /* Note that if the operands of Y are specified in the opposite
5361      order in the recursive calls below, infinite recursion will occur.  */
5362   if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5363     return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5364 
5365   /* If both constant, encapsulate sum.  Otherwise, just form sum.  A
5366      constant will have been placed second.  */
5367   if (CONSTANT_P (x) && CONSTANT_P (y))
5368     {
5369       if (GET_CODE (x) == CONST)
5370 	x = XEXP (x, 0);
5371       if (GET_CODE (y) == CONST)
5372 	y = XEXP (y, 0);
5373 
5374       return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5375     }
5376 
5377   return gen_rtx_PLUS (mode, x, y);
5378 }
5379 
5380 /* If ADDR is a sum containing a pseudo register that should be
5381    replaced with a constant (from reg_equiv_constant),
5382    return the result of doing so, and also apply the associative
5383    law so that the result is more likely to be a valid address.
5384    (But it is not guaranteed to be one.)
5385 
5386    Note that at most one register is replaced, even if more are
5387    replaceable.  Also, we try to put the result into a canonical form
5388    so it is more likely to be a valid address.
5389 
5390    In all other cases, return ADDR.  */
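/* For example, if pseudo 66 did not get a hard register and is equivalent
   to (const_int 8), then (plus (plus (reg 65) (reg 66)) (const_int 4))
   becomes (plus (reg 65) (const_int 12)).  */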
5391 
5392 static rtx
5393 subst_indexed_address (rtx addr)
5394 {
5395   rtx op0 = 0, op1 = 0, op2 = 0;
5396   rtx tem;
5397   int regno;
5398 
5399   if (GET_CODE (addr) == PLUS)
5400     {
5401       /* Try to find a register to replace.  */
5402       op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5403       if (REG_P (op0)
5404 	  && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5405 	  && reg_renumber[regno] < 0
5406 	  && reg_equiv_constant (regno) != 0)
5407 	op0 = reg_equiv_constant (regno);
5408       else if (REG_P (op1)
5409 	       && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5410 	       && reg_renumber[regno] < 0
5411 	       && reg_equiv_constant (regno) != 0)
5412 	op1 = reg_equiv_constant (regno);
5413       else if (GET_CODE (op0) == PLUS
5414 	       && (tem = subst_indexed_address (op0)) != op0)
5415 	op0 = tem;
5416       else if (GET_CODE (op1) == PLUS
5417 	       && (tem = subst_indexed_address (op1)) != op1)
5418 	op1 = tem;
5419       else
5420 	return addr;
5421 
5422       /* Pick out up to three things to add.  */
5423       if (GET_CODE (op1) == PLUS)
5424 	op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5425       else if (GET_CODE (op0) == PLUS)
5426 	op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5427 
5428       /* Compute the sum.  */
5429       if (op2 != 0)
5430 	op1 = form_sum (GET_MODE (addr), op1, op2);
5431       if (op1 != 0)
5432 	op0 = form_sum (GET_MODE (addr), op0, op1);
5433 
5434       return op0;
5435     }
5436   return addr;
5437 }
5438 
5439 /* Update the REG_INC notes for an insn.  It updates all REG_INC
5440    notes for the instruction which refer to REGNO the to refer
5441    notes for the instruction which refer to REGNO, changing them
5442    to refer to the reload number.
5443    INSN is the insn for which any REG_INC notes need updating.
5444 
5445    REGNO is the register number which has been reloaded.
5446 
5447    RELOADNUM is the reload number.  */
5448 
5449 static void
5450 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5451 		       int reloadnum ATTRIBUTE_UNUSED)
5452 {
5453   if (!AUTO_INC_DEC)
5454     return;
5455 
5456   for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5457     if (REG_NOTE_KIND (link) == REG_INC
5458         && (int) REGNO (XEXP (link, 0)) == regno)
5459       push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5460 }
5461 
5462 /* Record the pseudo registers we must reload into hard registers in a
5463    subexpression of a would-be memory address, X referring to a value
5464    in mode MODE.  (This function is not called if the address we find
5465    is strictly valid.)
5466 
5467    CONTEXT = 1 means we are considering regs as index regs,
5468    = 0 means we are considering them as base regs.
5469    OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5470    or an autoinc code.
5471    If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5472    is the code of the index part of the address.  Otherwise, pass SCRATCH
5473    for this argument.
5474    OPNUM and TYPE specify the purpose of any reloads made.
5475 
5476    IND_LEVELS says how many levels of indirect addressing are
5477    supported at this point in the address.
5478 
5479    INSN, if nonzero, is the insn in which we do the reload.  It is used
5480    to determine if we may generate output reloads.
5481 
5482    We return nonzero if X, as a whole, is reloaded or replaced.  */
5483 
5484 /* Note that we take shortcuts assuming that no multi-reg machine mode
5485    occurs as part of an address.
5486    Also, this is not fully machine-customizable; it works for machines
5487    such as VAXen and 68000's and 32000's, but other possible machines
5488    could have addressing modes that this does not handle right.
5489    If you add push_reload calls here, you need to make sure gen_reload
5490    handles those cases gracefully.  */
5491 
5492 static int
5493 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5494 			rtx x, int context,
5495 			enum rtx_code outer_code, enum rtx_code index_code,
5496 			rtx *loc, int opnum, enum reload_type type,
5497 			int ind_levels, rtx_insn *insn)
5498 {
5499 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX)	\
5500   ((CONTEXT) == 0							\
5501    ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX)		\
5502    : REGNO_OK_FOR_INDEX_P (REGNO))
5503 
5504   enum reg_class context_reg_class;
5505   RTX_CODE code = GET_CODE (x);
5506   bool reloaded_inner_of_autoinc = false;
5507 
5508   if (context == 1)
5509     context_reg_class = INDEX_REG_CLASS;
5510   else
5511     context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5512 
5513   switch (code)
5514     {
5515     case PLUS:
5516       {
5517 	rtx orig_op0 = XEXP (x, 0);
5518 	rtx orig_op1 = XEXP (x, 1);
5519 	RTX_CODE code0 = GET_CODE (orig_op0);
5520 	RTX_CODE code1 = GET_CODE (orig_op1);
5521 	rtx op0 = orig_op0;
5522 	rtx op1 = orig_op1;
5523 
5524 	if (GET_CODE (op0) == SUBREG)
5525 	  {
5526 	    op0 = SUBREG_REG (op0);
5527 	    code0 = GET_CODE (op0);
5528 	    if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5529 	      op0 = gen_rtx_REG (word_mode,
5530 				 (REGNO (op0) +
5531 				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5532 						       GET_MODE (SUBREG_REG (orig_op0)),
5533 						       SUBREG_BYTE (orig_op0),
5534 						       GET_MODE (orig_op0))));
5535 	  }
5536 
5537 	if (GET_CODE (op1) == SUBREG)
5538 	  {
5539 	    op1 = SUBREG_REG (op1);
5540 	    code1 = GET_CODE (op1);
5541 	    if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5542 	      /* ??? Why is this given op1's mode and above for
5543 		 ??? op0 SUBREGs we use word_mode?  */
5544 	      op1 = gen_rtx_REG (GET_MODE (op1),
5545 				 (REGNO (op1) +
5546 				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5547 						       GET_MODE (SUBREG_REG (orig_op1)),
5548 						       SUBREG_BYTE (orig_op1),
5549 						       GET_MODE (orig_op1))));
5550 	  }
5551 	/* A PLUS in the index register may be created only as a result of
5552 	   register rematerialization for an expression like &localvar*4.  Reload it.
5553 	   It may be possible to combine the displacement on the outer level,
5554 	   but it is probably not worthwhile to do so.  */
5555 	if (context == 1)
5556 	  {
5557 	    find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5558 				  opnum, ADDR_TYPE (type), ind_levels, insn);
5559 	    push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5560 			 context_reg_class,
5561 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5562 	    return 1;
5563 	  }
5564 
5565 	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5566 	    || code0 == ZERO_EXTEND || code1 == MEM)
5567 	  {
5568 	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5569 				    &XEXP (x, 0), opnum, type, ind_levels,
5570 				    insn);
5571 	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5572 				    &XEXP (x, 1), opnum, type, ind_levels,
5573 				    insn);
5574 	  }
5575 
5576 	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5577 		 || code1 == ZERO_EXTEND || code0 == MEM)
5578 	  {
5579 	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5580 				    &XEXP (x, 0), opnum, type, ind_levels,
5581 				    insn);
5582 	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5583 				    &XEXP (x, 1), opnum, type, ind_levels,
5584 				    insn);
5585 	  }
5586 
5587 	else if (code0 == CONST_INT || code0 == CONST
5588 		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5589 	  find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5590 				  &XEXP (x, 1), opnum, type, ind_levels,
5591 				  insn);
5592 
5593 	else if (code1 == CONST_INT || code1 == CONST
5594 		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5595 	  find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5596 				  &XEXP (x, 0), opnum, type, ind_levels,
5597 				  insn);
5598 
5599 	else if (code0 == REG && code1 == REG)
5600 	  {
5601 	    if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5602 		&& regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5603 	      return 0;
5604 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5605 		     && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5606 	      return 0;
5607 	    else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5608 	      find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5609 				      &XEXP (x, 1), opnum, type, ind_levels,
5610 				      insn);
5611 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5612 	      find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5613 				      &XEXP (x, 0), opnum, type, ind_levels,
5614 				      insn);
5615 	    else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5616 	      find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5617 				      &XEXP (x, 0), opnum, type, ind_levels,
5618 				      insn);
5619 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5620 	      find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5621 				      &XEXP (x, 1), opnum, type, ind_levels,
5622 				      insn);
5623 	    else
5624 	      {
5625 		find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5626 					&XEXP (x, 0), opnum, type, ind_levels,
5627 					insn);
5628 		find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5629 					&XEXP (x, 1), opnum, type, ind_levels,
5630 					insn);
5631 	      }
5632 	  }
5633 
5634 	else if (code0 == REG)
5635 	  {
5636 	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5637 				    &XEXP (x, 0), opnum, type, ind_levels,
5638 				    insn);
5639 	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5640 				    &XEXP (x, 1), opnum, type, ind_levels,
5641 				    insn);
5642 	  }
5643 
5644 	else if (code1 == REG)
5645 	  {
5646 	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5647 				    &XEXP (x, 1), opnum, type, ind_levels,
5648 				    insn);
5649 	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5650 				    &XEXP (x, 0), opnum, type, ind_levels,
5651 				    insn);
5652 	  }
5653       }
5654 
5655       return 0;
5656 
5657     case POST_MODIFY:
5658     case PRE_MODIFY:
5659       {
5660 	rtx op0 = XEXP (x, 0);
5661 	rtx op1 = XEXP (x, 1);
5662 	enum rtx_code index_code;
5663 	int regno;
5664 	int reloadnum;
5665 
5666 	if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5667 	  return 0;
5668 
5669 	/* Currently, we only support {PRE,POST}_MODIFY constructs
5670 	   where a base register is {inc,dec}remented by the contents
5671 	   of another register or by a constant value.  Thus, these
5672 	   operands must match.  */
5673 	gcc_assert (op0 == XEXP (op1, 0));
5674 
5675 	/* Require index register (or constant).  Let's just handle the
5676 	   register case in the meantime... If the target allows
5677 	   auto-modify by a constant then we could try replacing a pseudo
5678 	   register with its equivalent constant where applicable.
5679 
5680 	   We also handle the case where the register was eliminated
5681 	   resulting in a PLUS subexpression.
5682 
5683 	   If we later decide to reload the whole PRE_MODIFY or
5684 	   POST_MODIFY, inc_for_reload might clobber the reload register
5685 	   before reading the index.  The index register might therefore
5686 	   need to live longer than a TYPE reload normally would, so be
5687 	   conservative and class it as RELOAD_OTHER.  */
5688 	if ((REG_P (XEXP (op1, 1))
5689 	     && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5690 	    || GET_CODE (XEXP (op1, 1)) == PLUS)
5691 	  find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5692 				  &XEXP (op1, 1), opnum, RELOAD_OTHER,
5693 				  ind_levels, insn);
5694 
5695 	gcc_assert (REG_P (XEXP (op1, 0)));
5696 
5697 	regno = REGNO (XEXP (op1, 0));
5698 	index_code = GET_CODE (XEXP (op1, 1));
5699 
5700 	/* A register that is incremented cannot be constant!  */
5701 	gcc_assert (regno < FIRST_PSEUDO_REGISTER
5702 		    || reg_equiv_constant (regno) == 0);
5703 
5704 	/* Handle a register that is equivalent to a memory location
5705 	    which cannot be addressed directly.  */
5706 	if (reg_equiv_memory_loc (regno) != 0
5707 	    && (reg_equiv_address (regno) != 0
5708 		|| num_not_at_initial_offset))
5709 	  {
5710 	    rtx tem = make_memloc (XEXP (x, 0), regno);
5711 
5712 	    if (reg_equiv_address (regno)
5713 		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5714 	      {
5715 		rtx orig = tem;
5716 
5717 		/* First reload the memory location's address.
5718 		    We can't use ADDR_TYPE (type) here, because we need to
5719 		    write back the value after reading it, hence we actually
5720 		    need two registers.  */
5721 		find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5722 				      &XEXP (tem, 0), opnum,
5723 				      RELOAD_OTHER,
5724 				      ind_levels, insn);
5725 
5726 		if (!rtx_equal_p (tem, orig))
5727 		  push_reg_equiv_alt_mem (regno, tem);
5728 
5729 		/* Then reload the memory location into a base
5730 		   register.  */
5731 		reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5732 					 &XEXP (op1, 0),
5733 					 base_reg_class (mode, as,
5734 							 code, index_code),
5735 					 GET_MODE (x), GET_MODE (x), 0,
5736 					 0, opnum, RELOAD_OTHER);
5737 
5738 		update_auto_inc_notes (this_insn, regno, reloadnum);
5739 		return 0;
5740 	      }
5741 	  }
5742 
5743 	if (reg_renumber[regno] >= 0)
5744 	  regno = reg_renumber[regno];
5745 
5746 	/* We require a base register here...  */
5747 	if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5748 	  {
5749 	    reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5750 				     &XEXP (op1, 0), &XEXP (x, 0),
5751 				     base_reg_class (mode, as,
5752 						     code, index_code),
5753 				     GET_MODE (x), GET_MODE (x), 0, 0,
5754 				     opnum, RELOAD_OTHER);
5755 
5756 	    update_auto_inc_notes (this_insn, regno, reloadnum);
5757 	    return 0;
5758 	  }
5759       }
5760       return 0;
5761 
5762     case POST_INC:
5763     case POST_DEC:
5764     case PRE_INC:
5765     case PRE_DEC:
5766       if (REG_P (XEXP (x, 0)))
5767 	{
5768 	  int regno = REGNO (XEXP (x, 0));
5769 	  int value = 0;
5770 	  rtx x_orig = x;
5771 
5772 	  /* A register that is incremented cannot be constant!  */
5773 	  gcc_assert (regno < FIRST_PSEUDO_REGISTER
5774 		      || reg_equiv_constant (regno) == 0);
5775 
5776 	  /* Handle a register that is equivalent to a memory location
5777 	     which cannot be addressed directly.  */
5778 	  if (reg_equiv_memory_loc (regno) != 0
5779 	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5780 	    {
5781 	      rtx tem = make_memloc (XEXP (x, 0), regno);
5782 	      if (reg_equiv_address (regno)
5783 		  || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5784 		{
5785 		  rtx orig = tem;
5786 
5787 		  /* First reload the memory location's address.
5788 		     We can't use ADDR_TYPE (type) here, because we need to
5789 		     write back the value after reading it, hence we actually
5790 		     need two registers.  */
5791 		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5792 					&XEXP (tem, 0), opnum, type,
5793 					ind_levels, insn);
5794 		  reloaded_inner_of_autoinc = true;
5795 		  if (!rtx_equal_p (tem, orig))
5796 		    push_reg_equiv_alt_mem (regno, tem);
5797 		  /* Put this inside a new increment-expression.  */
5798 		  x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5799 		  /* Proceed to reload that, as if it contained a register.  */
5800 		}
5801 	    }
5802 
5803 	  /* If we have a hard register that is ok in this inc/dec context,
5804 	     don't make a reload.  If the register isn't suitable for
5805 	     autoinc/dec, we can reload it.  But if a register that we have
5806 	     verified here as acceptable is nevertheless not "valid" in the
5807 	     surrounding context, it must be that no autoincrement is "valid"
5808 	     at all.  If one was generated anyway, this must be a special
5809 	     context where it is allowed
5810 	     (for example, a "push" instruction).
5811 	     We can't improve this address, so leave it alone.  */
5812 
5813 	  /* Otherwise, reload the autoincrement into a suitable hard reg
5814 	     and record how much to increment by.  */
5815 
5816 	  if (reg_renumber[regno] >= 0)
5817 	    regno = reg_renumber[regno];
5818 	  if (regno >= FIRST_PSEUDO_REGISTER
5819 	      || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5820 				      index_code))
5821 	    {
5822 	      int reloadnum;
5823 
5824 	      /* If we can output the register afterwards, do so; this
5825 		 saves the extra update.
5826 		 We can do so if we have an INSN - i.e. neither a JUMP_INSN
5827 		 nor a CALL_INSN - and it does not set CC0.
5828 		 But don't do this if we cannot directly address the
5829 		 memory location, since this will make it harder to
5830 		 reuse address reloads and will increase register pressure.
5831 		 Also don't do this if we can probably update x directly.  */
5832 	      rtx equiv = (MEM_P (XEXP (x, 0))
5833 			   ? XEXP (x, 0)
5834 			   : reg_equiv_mem (regno));
5835 	      enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5836 	      if (insn && NONJUMP_INSN_P (insn)
5837 #if HAVE_cc0
5838 		  && ! sets_cc0_p (PATTERN (insn))
5839 #endif
5840 		  && (regno < FIRST_PSEUDO_REGISTER
5841 		      || (equiv
5842 			  && memory_operand (equiv, GET_MODE (equiv))
5843 			  && ! (icode != CODE_FOR_nothing
5844 				&& insn_operand_matches (icode, 0, equiv)
5845 				&& insn_operand_matches (icode, 1, equiv))))
5846 		  /* Using RELOAD_OTHER means we emit this and the reload we
5847 		     made earlier in the wrong order.  */
5848 		  && !reloaded_inner_of_autoinc)
5849 		{
5850 		  /* We use the original pseudo for loc, so that
5851 		     emit_reload_insns() knows which pseudo this
5852 		     reload refers to and updates the pseudo rtx, not
5853 		     its equivalent memory location, as well as the
5854 		     corresponding entry in reg_last_reload_reg.  */
5855 		  loc = &XEXP (x_orig, 0);
5856 		  x = XEXP (x, 0);
5857 		  reloadnum
5858 		    = push_reload (x, x, loc, loc,
5859 				   context_reg_class,
5860 				   GET_MODE (x), GET_MODE (x), 0, 0,
5861 				   opnum, RELOAD_OTHER);
5862 		}
5863 	      else
5864 		{
5865 		  reloadnum
5866 		    = push_reload (x, x, loc, (rtx*) 0,
5867 				   context_reg_class,
5868 				   GET_MODE (x), GET_MODE (x), 0, 0,
5869 				   opnum, type);
5870 		  rld[reloadnum].inc
5871 		    = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5872 
5873 		  value = 1;
5874 		}
5875 
5876 	      update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5877 				     reloadnum);
5878 	    }
5879 	  return value;
5880 	}
5881       return 0;
5882 
5883     case TRUNCATE:
5884     case SIGN_EXTEND:
5885     case ZERO_EXTEND:
5886       /* Look for parts to reload in the inner expression and reload them
5887 	 too, in addition to this operation.  Reloading all inner parts in
5888 	 addition to this one shouldn't be necessary, but at this point,
5889 	 we don't know if we can possibly omit any part that *can* be
5890 	 reloaded.  Targets that are better off reloading just either part
5891 	 (or perhaps even a different part of an outer expression) should
5892 	 define LEGITIMIZE_RELOAD_ADDRESS.  */
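      /* For instance, if the address contains (zero_extend:DI (reg:SI N))
	 and that pseudo did not get a hard register, the inner register is
	 reloaded first and then the whole extension is reloaded into a
	 register of the appropriate address class.  */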
5893       find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5894 			      context, code, SCRATCH, &XEXP (x, 0), opnum,
5895 			      type, ind_levels, insn);
5896       push_reload (x, NULL_RTX, loc, (rtx*) 0,
5897 		   context_reg_class,
5898 		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5899       return 1;
5900 
5901     case MEM:
5902       /* This is probably the result of a substitution, by eliminate_regs, of
5903 	 an equivalent address for a pseudo that was not allocated to a hard
5904 	 register.  Verify that the specified address is valid and reload it
5905 	 into a register.
5906 
5907 	 Since we know we are going to reload this item, don't decrement for
5908 	 the indirection level.
5909 
5910 	 Note that this is actually conservative:  it would be slightly more
5911 	 efficient to use the value of SPILL_INDIRECT_LEVELS from
5912 	 reload1.c here.  */
5913 
5914       find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5915 			    opnum, ADDR_TYPE (type), ind_levels, insn);
5916       push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5917 		   context_reg_class,
5918 		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5919       return 1;
5920 
5921     case REG:
5922       {
5923 	int regno = REGNO (x);
5924 
5925 	if (reg_equiv_constant (regno) != 0)
5926 	  {
5927 	    find_reloads_address_part (reg_equiv_constant (regno), loc,
5928 				       context_reg_class,
5929 				       GET_MODE (x), opnum, type, ind_levels);
5930 	    return 1;
5931 	  }
5932 
5933 #if 0 /* This might interfere with the code in reload1.c that deletes a
5934 	 prior output reload feeding this insn.  */
5935 	if (reg_equiv_mem (regno) != 0)
5936 	  {
5937 	    push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5938 			 context_reg_class,
5939 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5940 	    return 1;
5941 	  }
5942 #endif
5943 
5944 	if (reg_equiv_memory_loc (regno)
5945 	    && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5946 	  {
5947 	    rtx tem = make_memloc (x, regno);
5948 	    if (reg_equiv_address (regno) != 0
5949 		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5950 	      {
5951 		x = tem;
5952 		find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5953 				      &XEXP (x, 0), opnum, ADDR_TYPE (type),
5954 				      ind_levels, insn);
5955 		if (!rtx_equal_p (x, tem))
5956 		  push_reg_equiv_alt_mem (regno, x);
5957 	      }
5958 	  }
5959 
5960 	if (reg_renumber[regno] >= 0)
5961 	  regno = reg_renumber[regno];
5962 
5963 	if (regno >= FIRST_PSEUDO_REGISTER
5964 	    || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5965 				    index_code))
5966 	  {
5967 	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
5968 			 context_reg_class,
5969 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5970 	    return 1;
5971 	  }
5972 
5973 	/* If a register appearing in an address is the subject of a CLOBBER
5974 	   in this insn, reload it into some other register to be safe.
5975 	   The CLOBBER is supposed to make the register unavailable
5976 	   from before this insn to after it.  */
5977 	if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5978 	  {
5979 	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
5980 			 context_reg_class,
5981 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5982 	    return 1;
5983 	  }
5984       }
5985       return 0;
5986 
5987     case SUBREG:
5988       if (REG_P (SUBREG_REG (x)))
5989 	{
5990 	  /* If this is a SUBREG of a hard register and the resulting register
5991 	     is of the wrong class, reload the whole SUBREG.  This avoids
5992 	     needless copies if SUBREG_REG is multi-word.  */
5993 	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5994 	    {
5995 	      int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5996 
5997 	      if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5998 				       index_code))
5999 		{
6000 		  push_reload (x, NULL_RTX, loc, (rtx*) 0,
6001 			       context_reg_class,
6002 			       GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6003 		  return 1;
6004 		}
6005 	    }
6006 	  /* If this is a SUBREG of a pseudo-register, and the pseudo's mode
6007 	     needs more registers than the class has, reload the whole SUBREG.  */
6008 	  else
6009 	    {
6010 	      enum reg_class rclass = context_reg_class;
6011 	      if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6012 		  > reg_class_size[(int) rclass])
6013 		{
6014 		  /* If the inner register will be replaced by a memory
6015 		     reference, we can do this only if we can replace the
6016 		     whole subreg by a (narrower) memory reference.  If
6017 		     this is not possible, fall through and reload just
6018 		     the inner register (including address reloads).  */
6019 		  if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6020 		    {
6021 		      rtx tem = find_reloads_subreg_address (x, opnum,
6022 							     ADDR_TYPE (type),
6023 							     ind_levels, insn,
6024 							     NULL);
6025 		      if (tem)
6026 			{
6027 			  push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6028 				       GET_MODE (tem), VOIDmode, 0, 0,
6029 				       opnum, type);
6030 			  return 1;
6031 			}
6032 		    }
6033 		  else
6034 		    {
6035 		      push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6036 				   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6037 		      return 1;
6038 		    }
6039 		}
6040 	    }
6041 	}
6042       break;
6043 
6044     default:
6045       break;
6046     }
6047 
6048   {
6049     const char *fmt = GET_RTX_FORMAT (code);
6050     int i;
6051 
6052     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6053       {
6054 	if (fmt[i] == 'e')
6055 	  /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6056 	     we get here.  */
6057 	  find_reloads_address_1 (mode, as, XEXP (x, i), context,
6058 				  code, SCRATCH, &XEXP (x, i),
6059 				  opnum, type, ind_levels, insn);
6060       }
6061   }
6062 
6063 #undef REG_OK_FOR_CONTEXT
6064   return 0;
6065 }
6066 
6067 /* X, which is found at *LOC, is a part of an address that needs to be
6068    reloaded into a register of class RCLASS.  If X is a constant, or if
6069    X is a PLUS that contains a constant, check that the constant is a
6070    legitimate operand and that we are supposed to be able to load
6071    it into the register.
6072 
6073    If not, force the constant into memory and reload the MEM instead.
6074 
6075    MODE is the mode to use, in case X is an integer constant.
6076 
6077    OPNUM and TYPE describe the purpose of any reloads made.
6078 
6079    IND_LEVELS says how many levels of indirect addressing this machine
6080    supports.  */
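/* For instance, if X is (plus (reg 100) (const_int C)) and the target
   cannot load C directly into a register of RCLASS, the constant is forced
   into the constant pool and X becomes (plus (reg 100) (mem ...)) before
   the final push_reload below.  */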
6081 
6082 static void
6083 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6084 			   machine_mode mode, int opnum,
6085 			   enum reload_type type, int ind_levels)
6086 {
6087   if (CONSTANT_P (x)
6088       && (!targetm.legitimate_constant_p (mode, x)
6089 	  || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6090     {
6091       x = force_const_mem (mode, x);
6092       find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6093 			    opnum, type, ind_levels, 0);
6094     }
6095 
6096   else if (GET_CODE (x) == PLUS
6097 	   && CONSTANT_P (XEXP (x, 1))
6098 	   && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6099 	       || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6100 		   == NO_REGS))
6101     {
6102       rtx tem;
6103 
6104       tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6105       x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6106       find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6107 			    opnum, type, ind_levels, 0);
6108     }
6109 
6110   push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6111 	       mode, VOIDmode, 0, 0, opnum, type);
6112 }
6113 
6114 /* X, a subreg of a pseudo, is a part of an address that needs to be
6115    reloaded, and the pseudo is equivalent to a memory location.
6116 
6117    Attempt to replace the whole subreg by a (possibly narrower or wider)
6118    memory reference.  If this is possible, return this new memory
6119    reference, and push all required address reloads.  Otherwise,
6120    return NULL.
6121 
6122    OPNUM and TYPE identify the purpose of the reload.
6123 
6124    IND_LEVELS says how many levels of indirect addressing are
6125    supported at this point in the address.
6126 
6127    INSN, if nonzero, is the insn in which we do the reload.  It is used
6128    to determine where to put USEs for pseudos that we have to replace with
6129    stack slots.  */
6130 
6131 static rtx
6132 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6133 			     int ind_levels, rtx_insn *insn,
6134 			     int *address_reloaded)
6135 {
6136   machine_mode outer_mode = GET_MODE (x);
6137   machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6138   int regno = REGNO (SUBREG_REG (x));
6139   int reloaded = 0;
6140   rtx tem, orig;
6141   poly_int64 offset;
6142 
6143   gcc_assert (reg_equiv_memory_loc (regno) != 0);
6144 
6145   /* We cannot replace the subreg with a modified memory reference if:
6146 
6147      - we have a paradoxical subreg that implicitly acts as a zero or
6148        sign extension operation due to LOAD_EXTEND_OP;
6149 
6150      - we have a subreg that is implicitly supposed to act on the full
6151        register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6152 
6153      - the address of the equivalent memory location is mode-dependent;  or
6154 
6155      - we have a paradoxical subreg and the resulting memory is not
6156        sufficiently aligned to allow access in the wider mode.
6157 
6158     In addition, we choose not to perform the replacement for *any*
6159     paradoxical subreg, even if it were possible in principle.  This
6160     is to avoid generating wider memory references than necessary.
6161 
6162     This corresponds to how previous versions of reload used to handle
6163     paradoxical subregs where no address reload was required.  */
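  /* As an example of the non-paradoxical case handled below: if (reg:SI P)
     is equivalent to (mem:SI ADDR), then (subreg:HI (reg:SI P) 2) can
     usually be rewritten by simplify_subreg as a narrower MEM at byte
     offset 2 within that location, leaving only its address to reload.  */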
6164 
6165   if (paradoxical_subreg_p (x))
6166     return NULL;
6167 
6168   if (WORD_REGISTER_OPERATIONS
6169       && partial_subreg_p (outer_mode, inner_mode)
6170       && known_equal_after_align_down (GET_MODE_SIZE (outer_mode) - 1,
6171 				       GET_MODE_SIZE (inner_mode) - 1,
6172 				       UNITS_PER_WORD))
6173     return NULL;
6174 
6175   /* Since we don't attempt to handle paradoxical subregs, we can just
6176      call into simplify_subreg, which will handle all remaining checks
6177      for us.  */
6178   orig = make_memloc (SUBREG_REG (x), regno);
6179   offset = SUBREG_BYTE (x);
6180   tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6181   if (!tem || !MEM_P (tem))
6182     return NULL;
6183 
6184   /* Now push all required address reloads, if any.  */
6185   reloaded = find_reloads_address (GET_MODE (tem), &tem,
6186 				   XEXP (tem, 0), &XEXP (tem, 0),
6187 				   opnum, type, ind_levels, insn);
6188   /* ??? Do we need to handle nonzero offsets somehow?  */
6189   if (known_eq (offset, 0) && !rtx_equal_p (tem, orig))
6190     push_reg_equiv_alt_mem (regno, tem);
6191 
6192   /* For some processors an address may be valid in the original mode but
6193      not in a smaller mode.  For example, ARM accepts a scaled index register
6194      in SImode but not in HImode.  Note that this is only a problem if the
6195      address in reg_equiv_mem is already invalid in the new mode; other
6196      cases would be fixed by find_reloads_address as usual.
6197 
6198      ??? We attempt to handle such cases here by doing an additional reload
6199      of the full address after the usual processing by find_reloads_address.
6200      Note that this may not work in the general case, but it seems to cover
6201      the cases where this situation currently occurs.  A more general fix
6202      might be to reload the *value* instead of the address, but this would
6203      not be expected by the callers of this routine as-is.
6204 
6205      If find_reloads_address already completely replaced the address, there
6206      is nothing further to do.  */
6207   if (reloaded == 0
6208       && reg_equiv_mem (regno) != 0
6209       && !strict_memory_address_addr_space_p
6210 		(GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6211 		 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6212     {
6213       push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6214 		   base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6215 				   MEM, SCRATCH),
6216 		   GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6217       reloaded = 1;
6218     }
6219 
6220   /* If this is not a toplevel operand, find_reloads doesn't see this
6221      substitution.  We have to emit a USE of the pseudo so that
6222      delete_output_reload can see it.  */
6223   if (replace_reloads && recog_data.operand[opnum] != x)
6224     /* We mark the USE with QImode so that we recognize it as one that
6225        can be safely deleted at the end of reload.  */
6226     PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6227 	      QImode);
6228 
6229   if (address_reloaded)
6230     *address_reloaded = reloaded;
6231 
6232   return tem;
6233 }
6234 
6235 /* Substitute into the current INSN the registers into which we have reloaded
6236    the things that need reloading.  The array `replacements'
6237    contains the locations of all pointers that must be changed
6238    and says what to replace them with.
6239 
6240    The replacements are applied in place within INSN.  */
6241 
6242 void
6243 subst_reloads (rtx_insn *insn)
6244 {
6245   int i;
6246 
6247   for (i = 0; i < n_replacements; i++)
6248     {
6249       struct replacement *r = &replacements[i];
6250       rtx reloadreg = rld[r->what].reg_rtx;
6251       if (reloadreg)
6252 	{
6253 #ifdef DEBUG_RELOAD
6254 	  /* This checking takes a very long time on some platforms
6255 	     causing the gcc.c-torture/compile/limits-fnargs.c test
6256 	     to time out during testing.  See PR 31850.
6257 
6258 	     Internal consistency test.  Check that we don't modify
6259 	     anything in the equivalence arrays.  Whenever something from
6260 	     those arrays needs to be reloaded, it must be unshared before
6261 	     being substituted into; the equivalence must not be modified.
6262 	     Otherwise, if the equivalence is used after that, it will
6263 	     have been modified, and the thing substituted (probably a
6264 	     register) is likely overwritten and not a usable equivalence.  */
6265 	  int check_regno;
6266 
6267 	  for (check_regno = 0; check_regno < max_regno; check_regno++)
6268 	    {
6269 #define CHECK_MODF(ARRAY)						\
6270 	      gcc_assert (!(*reg_equivs)[check_regno].ARRAY		\
6271 			  || !loc_mentioned_in_p (r->where,		\
6272 						  (*reg_equivs)[check_regno].ARRAY))
6273 
6274 	      CHECK_MODF (constant);
6275 	      CHECK_MODF (memory_loc);
6276 	      CHECK_MODF (address);
6277 	      CHECK_MODF (mem);
6278 #undef CHECK_MODF
6279 	    }
6280 #endif /* DEBUG_RELOAD */
6281 
6282 	  /* If we're replacing a LABEL_REF with a register, there must
6283 	     already be an indication (to e.g. flow) which label this
6284 	     register refers to.  */
6285 	  gcc_assert (GET_CODE (*r->where) != LABEL_REF
6286 		      || !JUMP_P (insn)
6287 		      || find_reg_note (insn,
6288 					REG_LABEL_OPERAND,
6289 					XEXP (*r->where, 0))
6290 		      || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6291 
6292 	  /* Encapsulate RELOADREG so its machine mode matches what
6293 	     used to be there.  Note that gen_lowpart_common will
6294 	     do the wrong thing if RELOADREG is multi-word.  RELOADREG
6295 	     will always be a REG here.  */
6296 	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6297 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6298 
6299 	  *r->where = reloadreg;
6300 	}
6301       /* If reload got no reg and isn't optional, something's wrong.  */
6302       else
6303 	gcc_assert (rld[r->what].optional);
6304     }
6305 }
6306 
6307 /* Make a copy of any replacements being done into X and move those
6308    copies to locations in Y, a copy of X.  */
6309 
6310 void
6311 copy_replacements (rtx x, rtx y)
6312 {
6313   copy_replacements_1 (&x, &y, n_replacements);
6314 }
6315 
6316 static void
6317 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6318 {
6319   int i, j;
6320   rtx x, y;
6321   struct replacement *r;
6322   enum rtx_code code;
6323   const char *fmt;
6324 
6325   for (j = 0; j < orig_replacements; j++)
6326     if (replacements[j].where == px)
6327       {
6328 	r = &replacements[n_replacements++];
6329 	r->where = py;
6330 	r->what = replacements[j].what;
6331 	r->mode = replacements[j].mode;
6332       }
6333 
6334   x = *px;
6335   y = *py;
6336   code = GET_CODE (x);
6337   fmt = GET_RTX_FORMAT (code);
6338 
6339   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6340     {
6341       if (fmt[i] == 'e')
6342 	copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6343       else if (fmt[i] == 'E')
6344 	for (j = XVECLEN (x, i); --j >= 0; )
6345 	  copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6346 			       orig_replacements);
6347     }
6348 }
6349 
6350 /* Change any replacements being done to *X to be done to *Y.  */
6351 
6352 void
6353 move_replacements (rtx *x, rtx *y)
6354 {
6355   int i;
6356 
6357   for (i = 0; i < n_replacements; i++)
6358     if (replacements[i].where == x)
6359       replacements[i].where = y;
6360 }
6361 
6362 /* If LOC was scheduled to be replaced by something, return the replacement.
6363    Otherwise, return *LOC.  */
6364 
6365 rtx
6366 find_replacement (rtx *loc)
6367 {
6368   struct replacement *r;
6369 
6370   for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6371     {
6372       rtx reloadreg = rld[r->what].reg_rtx;
6373 
6374       if (reloadreg && r->where == loc)
6375 	{
6376 	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6377 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6378 
6379 	  return reloadreg;
6380 	}
6381       else if (reloadreg && GET_CODE (*loc) == SUBREG
6382 	       && r->where == &SUBREG_REG (*loc))
6383 	{
6384 	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6385 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6386 
6387 	  return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6388 				      GET_MODE (SUBREG_REG (*loc)),
6389 				      SUBREG_BYTE (*loc));
6390 	}
6391     }
6392 
6393   /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6394      what's inside and make a new rtl if so.  */
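  /* For example, if *LOC is (plus (reg A) (const_int 4)) and the slot
     holding (reg A) is scheduled to be replaced by reload register (reg C),
     this builds and returns a fresh (plus (reg C) (const_int 4)) rather
     than modifying *LOC in place.  */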
6395   if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6396       || GET_CODE (*loc) == MULT)
6397     {
6398       rtx x = find_replacement (&XEXP (*loc, 0));
6399       rtx y = find_replacement (&XEXP (*loc, 1));
6400 
6401       if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6402 	return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6403     }
6404 
6405   return *loc;
6406 }
6407 
6408 /* Return nonzero if register in range [REGNO, ENDREGNO)
6409    appears either explicitly or implicitly in X
6410    other than being stored into (except for earlyclobber operands).
6411 
6412    References contained within the substructure at LOC do not count.
6413    LOC may be zero, meaning don't ignore anything.
6414 
6415    This is similar to refers_to_regno_p in rtlanal.c except that we
6416    look at equivalences for pseudos that didn't get hard registers.  */
6417 
6418 static int
6419 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6420 			      rtx x, rtx *loc)
6421 {
6422   int i;
6423   unsigned int r;
6424   RTX_CODE code;
6425   const char *fmt;
6426 
6427   if (x == 0)
6428     return 0;
6429 
6430  repeat:
6431   code = GET_CODE (x);
6432 
6433   switch (code)
6434     {
6435     case REG:
6436       r = REGNO (x);
6437 
6438       /* If this is a pseudo, a hard register must not have been allocated.
6439 	 X must therefore either be a constant or be in memory.  */
6440       if (r >= FIRST_PSEUDO_REGISTER)
6441 	{
6442 	  if (reg_equiv_memory_loc (r))
6443 	    return refers_to_regno_for_reload_p (regno, endregno,
6444 						 reg_equiv_memory_loc (r),
6445 						 (rtx*) 0);
6446 
6447 	  gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6448 	  return 0;
6449 	}
6450 
6451       return endregno > r && regno < END_REGNO (x);
6452 
6453     case SUBREG:
6454       /* If this is a SUBREG of a hard reg, we can see exactly which
6455 	 registers are being modified.  Otherwise, handle normally.  */
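      /* E.g. (subreg:SI (reg:DI 4) 4) touches only the second hard register
	 of the pair on a typical target where SImode occupies one register,
	 so a query for hard registers outside that range does not conflict.  */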
6456       if (REG_P (SUBREG_REG (x))
6457 	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6458 	{
6459 	  unsigned int inner_regno = subreg_regno (x);
6460 	  unsigned int inner_endregno
6461 	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6462 			     ? subreg_nregs (x) : 1);
6463 
6464 	  return endregno > inner_regno && regno < inner_endregno;
6465 	}
6466       break;
6467 
6468     case CLOBBER:
6469     case SET:
6470       if (&SET_DEST (x) != loc
6471 	  /* Note setting a SUBREG counts as referring to the REG it is in for
6472 	     a pseudo but not for hard registers since we can
6473 	     treat each word individually.  */
6474 	  && ((GET_CODE (SET_DEST (x)) == SUBREG
6475 	       && loc != &SUBREG_REG (SET_DEST (x))
6476 	       && REG_P (SUBREG_REG (SET_DEST (x)))
6477 	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6478 	       && refers_to_regno_for_reload_p (regno, endregno,
6479 						SUBREG_REG (SET_DEST (x)),
6480 						loc))
6481 	      /* If the output is an earlyclobber operand, this is
6482 		 a conflict.  */
6483 	      || ((!REG_P (SET_DEST (x))
6484 		   || earlyclobber_operand_p (SET_DEST (x)))
6485 		  && refers_to_regno_for_reload_p (regno, endregno,
6486 						   SET_DEST (x), loc))))
6487 	return 1;
6488 
6489       if (code == CLOBBER || loc == &SET_SRC (x))
6490 	return 0;
6491       x = SET_SRC (x);
6492       goto repeat;
6493 
6494     default:
6495       break;
6496     }
6497 
6498   /* X does not match, so try its subexpressions.  */
6499 
6500   fmt = GET_RTX_FORMAT (code);
6501   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6502     {
6503       if (fmt[i] == 'e' && loc != &XEXP (x, i))
6504 	{
6505 	  if (i == 0)
6506 	    {
6507 	      x = XEXP (x, 0);
6508 	      goto repeat;
6509 	    }
6510 	  else
6511 	    if (refers_to_regno_for_reload_p (regno, endregno,
6512 					      XEXP (x, i), loc))
6513 	      return 1;
6514 	}
6515       else if (fmt[i] == 'E')
6516 	{
6517 	  int j;
6518 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6519 	    if (loc != &XVECEXP (x, i, j)
6520 		&& refers_to_regno_for_reload_p (regno, endregno,
6521 						 XVECEXP (x, i, j), loc))
6522 	      return 1;
6523 	}
6524     }
6525   return 0;
6526 }
6527 
6528 /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
6529    we check if any register number in X conflicts with the relevant register
6530    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
6531    contains a MEM (we don't bother checking for memory addresses that can't
6532    conflict because we expect this to be a rare case).
6533 
6534    This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6535    that we look at equivalences for pseudos that didn't get hard registers.  */
6536 
6537 int
6538 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6539 {
6540   int regno, endregno;
6541 
6542   /* Overly conservative.  */
6543   if (GET_CODE (x) == STRICT_LOW_PART
6544       || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6545     x = XEXP (x, 0);
6546 
6547   /* If either argument is a constant, then modifying X cannot affect IN.  */
6548   if (CONSTANT_P (x) || CONSTANT_P (in))
6549     return 0;
6550   else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6551     return refers_to_mem_for_reload_p (in);
6552   else if (GET_CODE (x) == SUBREG)
6553     {
6554       regno = REGNO (SUBREG_REG (x));
6555       if (regno < FIRST_PSEUDO_REGISTER)
6556 	regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6557 				      GET_MODE (SUBREG_REG (x)),
6558 				      SUBREG_BYTE (x),
6559 				      GET_MODE (x));
6560       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6561 			  ? subreg_nregs (x) : 1);
6562 
6563       return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6564     }
6565   else if (REG_P (x))
6566     {
6567       regno = REGNO (x);
6568 
6569       /* If this is a pseudo, it must not have been assigned a hard register.
6570 	 Therefore, it must either be in memory or be a constant.  */
6571 
6572       if (regno >= FIRST_PSEUDO_REGISTER)
6573 	{
6574 	  if (reg_equiv_memory_loc (regno))
6575 	    return refers_to_mem_for_reload_p (in);
6576 	  gcc_assert (reg_equiv_constant (regno));
6577 	  return 0;
6578 	}
6579 
6580       endregno = END_REGNO (x);
6581 
6582       return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6583     }
6584   else if (MEM_P (x))
6585     return refers_to_mem_for_reload_p (in);
6586   else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6587 	   || GET_CODE (x) == CC0)
6588     return reg_mentioned_p (x, in);
6589   else
6590     {
6591       gcc_assert (GET_CODE (x) == PLUS);
6592 
6593       /* We actually want to know if X is mentioned somewhere inside IN.
6594 	 We must not say that (plus (sp) (const_int 124)) is in
6595 	 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6596 	 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6597 	 into a RELOAD_OTHER on behalf of another RELOAD_OTHER.  */
6598       while (MEM_P (in))
6599 	in = XEXP (in, 0);
6600       if (REG_P (in))
6601 	return 0;
6602       else if (GET_CODE (in) == PLUS)
6603 	return (rtx_equal_p (x, in)
6604 		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6605 		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6606       else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6607 		   || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6608     }
6609 
6610   gcc_unreachable ();
6611 }
6612 
6613 /* Return nonzero if anything in X contains a MEM.  A pseudo register
6614    that is equivalent to a memory location counts as well.  */
6615 
6616 static int
6617 refers_to_mem_for_reload_p (rtx x)
6618 {
6619   const char *fmt;
6620   int i;
6621 
6622   if (MEM_P (x))
6623     return 1;
6624 
6625   if (REG_P (x))
6626     return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6627 	    && reg_equiv_memory_loc (REGNO (x)));
6628 
6629   fmt = GET_RTX_FORMAT (GET_CODE (x));
6630   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6631     if (fmt[i] == 'e'
6632 	&& (MEM_P (XEXP (x, i))
6633 	    || refers_to_mem_for_reload_p (XEXP (x, i))))
6634       return 1;
6635 
6636   return 0;
6637 }
6638 
6639 /* Check the insns before INSN to see if there is a suitable register
6640    containing the same value as GOAL.
6641    If OTHER is -1, look for a register in class RCLASS.
6642    Otherwise, just see if register number OTHER shares GOAL's value.
6643 
6644    Return an rtx for the register found, or zero if none is found.
6645 
6646    If RELOAD_REG_P is (short *)1,
6647    we reject any hard reg that appears in reload_reg_rtx
6648    because such a hard reg is also needed coming into this insn.
6649 
6650    If RELOAD_REG_P is any other nonzero value,
6651    it is a vector indexed by hard reg number
6652    and we reject any hard reg whose element in the vector is nonnegative
6653    as well as any that appears in reload_reg_rtx.
6654 
6655    If GOAL is zero, then GOALREG is a register number; we look
6656    for an equivalent for that register.
6657 
6658    MODE is the machine mode of the value we want an equivalence for.
6659    If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6660 
6661    This function is used by jump.c as well as in the reload pass.
6662 
6663    If GOAL is the sum of the stack pointer and a constant, we treat it
6664    as if it were a constant except that sp is required to be unchanging.  */
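/* A rough illustration: if GOAL is (reg:SI 58) and an earlier insn did
   (set (reg:SI 2) (reg:SI 58)), hard register 2 may be returned, provided
   it belongs to RCLASS (or equals OTHER) and, among the other checks below,
   neither register is altered between that insn and INSN.  */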
6665 
6666 rtx
6667 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6668 		short *reload_reg_p, int goalreg, machine_mode mode)
6669 {
6670   rtx_insn *p = insn;
6671   rtx goaltry, valtry, value;
6672   rtx_insn *where;
6673   rtx pat;
6674   int regno = -1;
6675   int valueno;
6676   int goal_mem = 0;
6677   int goal_const = 0;
6678   int goal_mem_addr_varies = 0;
6679   int need_stable_sp = 0;
6680   int nregs;
6681   int valuenregs;
6682   int num = 0;
6683 
6684   if (goal == 0)
6685     regno = goalreg;
6686   else if (REG_P (goal))
6687     regno = REGNO (goal);
6688   else if (MEM_P (goal))
6689     {
6690       enum rtx_code code = GET_CODE (XEXP (goal, 0));
6691       if (MEM_VOLATILE_P (goal))
6692 	return 0;
6693       if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6694 	return 0;
6695       /* An address with side effects must be reexecuted.  */
6696       switch (code)
6697 	{
6698 	case POST_INC:
6699 	case PRE_INC:
6700 	case POST_DEC:
6701 	case PRE_DEC:
6702 	case POST_MODIFY:
6703 	case PRE_MODIFY:
6704 	  return 0;
6705 	default:
6706 	  break;
6707 	}
6708       goal_mem = 1;
6709     }
6710   else if (CONSTANT_P (goal))
6711     goal_const = 1;
6712   else if (GET_CODE (goal) == PLUS
6713 	   && XEXP (goal, 0) == stack_pointer_rtx
6714 	   && CONSTANT_P (XEXP (goal, 1)))
6715     goal_const = need_stable_sp = 1;
6716   else if (GET_CODE (goal) == PLUS
6717 	   && XEXP (goal, 0) == frame_pointer_rtx
6718 	   && CONSTANT_P (XEXP (goal, 1)))
6719     goal_const = 1;
6720   else
6721     return 0;
6722 
6723   num = 0;
6724   /* Scan insns back from INSN, looking for one that copies
6725      a value into or out of GOAL.
6726      Stop and give up if we reach a label.  */
6727 
6728   while (1)
6729     {
6730       p = PREV_INSN (p);
6731       if (p && DEBUG_INSN_P (p))
6732 	continue;
6733       num++;
6734       if (p == 0 || LABEL_P (p)
6735 	  || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6736 	return 0;
6737 
6738       /* Don't reuse register contents from before a setjmp-type
6739 	 function call; on the second return (from the longjmp) it
6740 	 might have been clobbered by a later reuse.  It doesn't
6741 	 seem worthwhile to check whether it is actually
6742 	 reused, even if that information would be readily available;
6743 	 just don't reuse it across the setjmp call.  */
6744       if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6745 	return 0;
6746 
6747       if (NONJUMP_INSN_P (p)
6748 	  /* If we don't want spill regs ...  */
6749 	  && (! (reload_reg_p != 0
6750 		 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6751 	      /* ... then ignore insns introduced by reload; they aren't
6752 		 useful and can cause results in reload_as_needed to be
6753 		 different from what they were when calculating the need for
6754 		 spills.  If we notice an input-reload insn here, we will
6755 		 reject it below, but it might hide a usable equivalent.
6756 		 That makes bad code.  It may even fail: perhaps no reg was
6757 		 spilled for this insn because it was assumed we would find
6758 		 that equivalent.  */
6759 	      || INSN_UID (p) < reload_first_uid))
6760 	{
6761 	  rtx tem;
6762 	  pat = single_set (p);
6763 
6764 	  /* First check for something that sets some reg equal to GOAL.  */
6765 	  if (pat != 0
6766 	      && ((regno >= 0
6767 		   && true_regnum (SET_SRC (pat)) == regno
6768 		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6769 		  ||
6770 		  (regno >= 0
6771 		   && true_regnum (SET_DEST (pat)) == regno
6772 		   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6773 		  ||
6774 		  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6775 		   /* When looking for stack pointer + const,
6776 		      make sure we don't use a stack adjust.  */
6777 		   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6778 		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6779 		  || (goal_mem
6780 		      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6781 		      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6782 		  || (goal_mem
6783 		      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6784 		      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6785 		  /* If we are looking for a constant,
6786 		     and something equivalent to that constant was copied
6787 		     into a reg, we can use that reg.  */
6788 		  || (goal_const && REG_NOTES (p) != 0
6789 		      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6790 		      && ((rtx_equal_p (XEXP (tem, 0), goal)
6791 			   && (valueno
6792 			       = true_regnum (valtry = SET_DEST (pat))) >= 0)
6793 			  || (REG_P (SET_DEST (pat))
6794 			      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6795 			      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6796 			      && CONST_INT_P (goal)
6797 			      && (goaltry = operand_subword (XEXP (tem, 0), 0,
6798 							     0, VOIDmode)) != 0
6799 			      && rtx_equal_p (goal, goaltry)
6800 			      && (valtry
6801 				  = operand_subword (SET_DEST (pat), 0, 0,
6802 						     VOIDmode))
6803 			      && (valueno = true_regnum (valtry)) >= 0)))
6804 		  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6805 							  NULL_RTX))
6806 		      && REG_P (SET_DEST (pat))
6807 		      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6808 		      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6809 		      && CONST_INT_P (goal)
6810 		      && (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6811 						     VOIDmode)) != 0
6812 		      && rtx_equal_p (goal, goaltry)
6813 		      && (valtry
6814 			  = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6815 		      && (valueno = true_regnum (valtry)) >= 0)))
6816 	    {
6817 	      if (other >= 0)
6818 		{
6819 		  if (valueno != other)
6820 		    continue;
6821 		}
6822 	      else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6823 		continue;
6824 	      else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6825 					  mode, valueno))
6826 		continue;
6827 	      value = valtry;
6828 	      where = p;
6829 	      break;
6830 	    }
6831 	}
6832     }
6833 
6834   /* We found a previous insn copying GOAL into a suitable other reg VALUE
6835      (or copying VALUE into GOAL, if GOAL is also a register).
6836      Now verify that VALUE is really valid.  */
6837 
6838   /* VALUENO is the register number of VALUE; a hard register.  */
6839 
6840   /* Don't try to re-use something that is killed in this insn.  We want
6841      to be able to trust REG_UNUSED notes.  */
6842   if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6843     return 0;
6844 
6845   /* If we propose to get the value from the stack pointer or if GOAL is
6846      a MEM based on the stack pointer, we need a stable SP.  */
6847   if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6848       || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6849 							  goal)))
6850     need_stable_sp = 1;
6851 
6852   /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
6853   if (GET_MODE (value) != mode)
6854     return 0;
6855 
6856   /* Reject VALUE if it was loaded from GOAL
6857      and is also a register that appears in the address of GOAL.  */
6858 
6859   if (goal_mem && value == SET_DEST (single_set (where))
6860       && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6861 				       goal, (rtx*) 0))
6862     return 0;
6863 
6864   /* Reject registers that overlap GOAL.  */
6865 
6866   if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6867     nregs = hard_regno_nregs (regno, mode);
6868   else
6869     nregs = 1;
6870   valuenregs = hard_regno_nregs (valueno, mode);
6871 
6872   if (!goal_mem && !goal_const
6873       && regno + nregs > valueno && regno < valueno + valuenregs)
6874     return 0;
6875 
6876   /* Reject VALUE if it is one of the regs reserved for reloads.
6877      Reload1 knows how to reuse them anyway, and it would get
6878      confused if we allocated one without its knowledge.
6879      (Now that insns introduced by reload are ignored above,
6880      this case shouldn't happen, but I'm not positive.)  */
6881 
6882   if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6883     {
6884       int i;
6885       for (i = 0; i < valuenregs; ++i)
6886 	if (reload_reg_p[valueno + i] >= 0)
6887 	  return 0;
6888     }
6889 
6890   /* Reject VALUE if it is a register being used for an input reload
6891      even if it is not one of those reserved.  */
6892 
6893   if (reload_reg_p != 0)
6894     {
6895       int i;
6896       for (i = 0; i < n_reloads; i++)
6897 	if (rld[i].reg_rtx != 0
6898 	    && rld[i].in
6899 	    && (int) REGNO (rld[i].reg_rtx) < valueno + valuenregs
6900 	    && (int) END_REGNO (rld[i].reg_rtx) > valueno)
6901 	  return 0;
6902     }
6903 
6904   if (goal_mem)
6905     /* We must treat the frame pointer as varying here,
6906        since it can vary, e.g. in a nonlocal goto as generated by expand_goto.  */
6907     goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6908 
6909   /* Now verify that the values of GOAL and VALUE remain unaltered
6910      until INSN is reached.  */
6911 
6912   p = insn;
6913   while (1)
6914     {
6915       p = PREV_INSN (p);
6916       if (p == where)
6917 	return value;
6918 
6919       /* Don't trust the conversion past a function call
6920 	 if either of the two is in a call-clobbered register, or memory.  */
6921       if (CALL_P (p))
6922 	{
6923 	  int i;
6924 
6925 	  if (goal_mem || need_stable_sp)
6926 	    return 0;
6927 
6928 	  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6929 	    for (i = 0; i < nregs; ++i)
6930 	      if (call_used_regs[regno + i]
6931 		  || targetm.hard_regno_call_part_clobbered (regno + i, mode))
6932 		return 0;
6933 
6934 	  if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6935 	    for (i = 0; i < valuenregs; ++i)
6936 	      if (call_used_regs[valueno + i]
6937 		  || targetm.hard_regno_call_part_clobbered (valueno + i,
6938 							     mode))
6939 		return 0;
6940 	}
6941 
6942       if (INSN_P (p))
6943 	{
6944 	  pat = PATTERN (p);
6945 
6946 	  /* Watch out for unspec_volatile, and volatile asms.  */
6947 	  if (volatile_insn_p (pat))
6948 	    return 0;
6949 
6950 	  /* If this insn P stores in either GOAL or VALUE, return 0.
6951 	     If GOAL is a memory ref and this insn writes memory, return 0.
6952 	     If GOAL is a memory ref and its address is not constant,
6953 	     and this insn P changes a register used in GOAL, return 0.  */
6954 
6955 	  if (GET_CODE (pat) == COND_EXEC)
6956 	    pat = COND_EXEC_CODE (pat);
6957 	  if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6958 	    {
6959 	      rtx dest = SET_DEST (pat);
6960 	      while (GET_CODE (dest) == SUBREG
6961 		     || GET_CODE (dest) == ZERO_EXTRACT
6962 		     || GET_CODE (dest) == STRICT_LOW_PART)
6963 		dest = XEXP (dest, 0);
6964 	      if (REG_P (dest))
6965 		{
6966 		  int xregno = REGNO (dest);
6967 		  int end_xregno = END_REGNO (dest);
6968 		  if (xregno < regno + nregs && end_xregno > regno)
6969 		    return 0;
6970 		  if (xregno < valueno + valuenregs
6971 		      && end_xregno > valueno)
6972 		    return 0;
6973 		  if (goal_mem_addr_varies
6974 		      && reg_overlap_mentioned_for_reload_p (dest, goal))
6975 		    return 0;
6976 		  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6977 		    return 0;
6978 		}
6979 	      else if (goal_mem && MEM_P (dest)
6980 		       && ! push_operand (dest, GET_MODE (dest)))
6981 		return 0;
6982 	      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6983 		       && reg_equiv_memory_loc (regno) != 0)
6984 		return 0;
6985 	      else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6986 		return 0;
6987 	    }
6988 	  else if (GET_CODE (pat) == PARALLEL)
6989 	    {
6990 	      int i;
6991 	      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6992 		{
6993 		  rtx v1 = XVECEXP (pat, 0, i);
6994 		  if (GET_CODE (v1) == COND_EXEC)
6995 		    v1 = COND_EXEC_CODE (v1);
6996 		  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6997 		    {
6998 		      rtx dest = SET_DEST (v1);
6999 		      while (GET_CODE (dest) == SUBREG
7000 			     || GET_CODE (dest) == ZERO_EXTRACT
7001 			     || GET_CODE (dest) == STRICT_LOW_PART)
7002 			dest = XEXP (dest, 0);
7003 		      if (REG_P (dest))
7004 			{
7005 			  int xregno = REGNO (dest);
7006 			  int end_xregno = END_REGNO (dest);
7007 			  if (xregno < regno + nregs
7008 			      && end_xregno > regno)
7009 			    return 0;
7010 			  if (xregno < valueno + valuenregs
7011 			      && end_xregno > valueno)
7012 			    return 0;
7013 			  if (goal_mem_addr_varies
7014 			      && reg_overlap_mentioned_for_reload_p (dest,
7015 								     goal))
7016 			    return 0;
7017 			  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7018 			    return 0;
7019 			}
7020 		      else if (goal_mem && MEM_P (dest)
7021 			       && ! push_operand (dest, GET_MODE (dest)))
7022 			return 0;
7023 		      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7024 			       && reg_equiv_memory_loc (regno) != 0)
7025 			return 0;
7026 		      else if (need_stable_sp
7027 			       && push_operand (dest, GET_MODE (dest)))
7028 			return 0;
7029 		    }
7030 		}
7031 	    }
7032 
7033 	  if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7034 	    {
7035 	      rtx link;
7036 
7037 	      for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7038 		   link = XEXP (link, 1))
7039 		{
7040 		  pat = XEXP (link, 0);
7041 		  if (GET_CODE (pat) == CLOBBER)
7042 		    {
7043 		      rtx dest = SET_DEST (pat);
7044 
7045 		      if (REG_P (dest))
7046 			{
7047 			  int xregno = REGNO (dest);
7048 			  int end_xregno = END_REGNO (dest);
7049 
7050 			  if (xregno < regno + nregs
7051 			      && end_xregno > regno)
7052 			    return 0;
7053 			  else if (xregno < valueno + valuenregs
7054 				   && end_xregno > valueno)
7055 			    return 0;
7056 			  else if (goal_mem_addr_varies
7057 				   && reg_overlap_mentioned_for_reload_p (dest,
7058 								     goal))
7059 			    return 0;
7060 			}
7061 
7062 		      else if (goal_mem && MEM_P (dest)
7063 			       && ! push_operand (dest, GET_MODE (dest)))
7064 			return 0;
7065 		      else if (need_stable_sp
7066 			       && push_operand (dest, GET_MODE (dest)))
7067 			return 0;
7068 		    }
7069 		}
7070 	    }
7071 
7072 #if AUTO_INC_DEC
7073 	  /* If this insn auto-increments or auto-decrements
7074 	     either regno or valueno, return 0 now.
7075 	     If GOAL is a memory ref and its address is not constant,
7076 	     and this insn P increments a register used in GOAL, return 0.  */
7077 	  {
7078 	    rtx link;
7079 
7080 	    for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7081 	      if (REG_NOTE_KIND (link) == REG_INC
7082 		  && REG_P (XEXP (link, 0)))
7083 		{
7084 		  int incno = REGNO (XEXP (link, 0));
7085 		  if (incno < regno + nregs && incno >= regno)
7086 		    return 0;
7087 		  if (incno < valueno + valuenregs && incno >= valueno)
7088 		    return 0;
7089 		  if (goal_mem_addr_varies
7090 		      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7091 							     goal))
7092 		    return 0;
7093 		}
7094 	  }
7095 #endif
7096 	}
7097     }
7098 }
7099 
7100 /* Find a place where INCED appears in an increment or decrement operator
7101    within X, and return the amount INCED is incremented or decremented by.
7102    The value is always positive.  */
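/* For instance, for (mem:SI (post_inc (reg R))) with INCED == (reg R) the
   amount is GET_MODE_SIZE (SImode), and for an address
   (pre_modify (reg R) (plus (reg R) (const_int -8))) it is 8.  */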
7103 
7104 static poly_int64
7105 find_inc_amount (rtx x, rtx inced)
7106 {
7107   enum rtx_code code = GET_CODE (x);
7108   const char *fmt;
7109   int i;
7110 
7111   if (code == MEM)
7112     {
7113       rtx addr = XEXP (x, 0);
7114       if ((GET_CODE (addr) == PRE_DEC
7115 	   || GET_CODE (addr) == POST_DEC
7116 	   || GET_CODE (addr) == PRE_INC
7117 	   || GET_CODE (addr) == POST_INC)
7118 	  && XEXP (addr, 0) == inced)
7119 	return GET_MODE_SIZE (GET_MODE (x));
7120       else if ((GET_CODE (addr) == PRE_MODIFY
7121 		|| GET_CODE (addr) == POST_MODIFY)
7122 	       && GET_CODE (XEXP (addr, 1)) == PLUS
7123 	       && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7124 	       && XEXP (addr, 0) == inced
7125 	       && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7126 	{
7127 	  i = INTVAL (XEXP (XEXP (addr, 1), 1));
7128 	  return i < 0 ? -i : i;
7129 	}
7130     }
7131 
7132   fmt = GET_RTX_FORMAT (code);
7133   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7134     {
7135       if (fmt[i] == 'e')
7136 	{
7137 	  poly_int64 tem = find_inc_amount (XEXP (x, i), inced);
7138 	  if (maybe_ne (tem, 0))
7139 	    return tem;
7140 	}
7141       if (fmt[i] == 'E')
7142 	{
7143 	  int j;
7144 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7145 	    {
7146 	      poly_int64 tem = find_inc_amount (XVECEXP (x, i, j), inced);
7147 	      if (maybe_ne (tem, 0))
7148 		return tem;
7149 	    }
7150 	}
7151     }
7152 
7153   return 0;
7154 }
7155 
7156 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7157    REG_INC note in insn INSN.  REGNO must refer to a hard register.  */
7158 
7159 static int
7160 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7161 			   rtx insn)
7162 {
7163   rtx link;
7164 
7165   if (!AUTO_INC_DEC)
7166     return 0;
7167 
7168   gcc_assert (insn);
7169 
7170   if (! INSN_P (insn))
7171     return 0;
7172 
7173   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7174     if (REG_NOTE_KIND (link) == REG_INC)
7175       {
7176 	unsigned int test = (int) REGNO (XEXP (link, 0));
7177 	if (test >= regno && test < endregno)
7178 	  return 1;
7179       }
7180   return 0;
7181 }
7182 
7183 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7184    If SETS is 1, also consider SETs.  If SETS is 2, enable checking
7185    REG_INC.  REGNO must refer to a hard register.  */
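/* For example, for an INSN whose pattern is
   (parallel [(set ...) (clobber (reg:SI 3))]), this returns 1 for REGNO 3,
   and also for REGNO 2 if MODE makes reg 2 span hard registers 2 and 3.  */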
7186 
7187 int
7188 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7189 		   int sets)
7190 {
7191   /* regno must be a hard register.  */
7192   gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7193 
7194   unsigned int endregno = end_hard_regno (mode, regno);
7195 
7196   if ((GET_CODE (PATTERN (insn)) == CLOBBER
7197        || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7198       && REG_P (XEXP (PATTERN (insn), 0)))
7199     {
7200       unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7201 
7202       return test >= regno && test < endregno;
7203     }
7204 
7205   if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7206     return 1;
7207 
7208   if (GET_CODE (PATTERN (insn)) == PARALLEL)
7209     {
7210       int i = XVECLEN (PATTERN (insn), 0) - 1;
7211 
7212       for (; i >= 0; i--)
7213 	{
7214 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7215 	  if ((GET_CODE (elt) == CLOBBER
7216 	       || (sets == 1 && GET_CODE (elt) == SET))
7217 	      && REG_P (XEXP (elt, 0)))
7218 	    {
7219 	      unsigned int test = REGNO (XEXP (elt, 0));
7220 
7221 	      if (test >= regno && test < endregno)
7222 		return 1;
7223 	    }
7224 	  if (sets == 2
7225 	      && reg_inc_found_and_valid_p (regno, endregno, elt))
7226 	    return 1;
7227 	}
7228     }
7229 
7230   return 0;
7231 }
7232 
7233 /* Find the low part, with mode MODE, of a hard regno RELOADREG.  */
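/* For instance, with RELOADREG == (reg:DI 10) and MODE == SImode, this
   returns (reg:SI 10), or (reg:SI 11) when REG_WORDS_BIG_ENDIAN holds
   (assuming SImode needs one hard register and DImode two).  */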
7234 rtx
7235 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7236 {
7237   int regno;
7238 
7239   if (GET_MODE (reloadreg) == mode)
7240     return reloadreg;
7241 
7242   regno = REGNO (reloadreg);
7243 
7244   if (REG_WORDS_BIG_ENDIAN)
7245     regno += ((int) REG_NREGS (reloadreg)
7246 	      - (int) hard_regno_nregs (regno, mode));
7247 
7248   return gen_rtx_REG (mode, regno);
7249 }
7250 
7251 static const char *const reload_when_needed_name[] =
7252 {
7253   "RELOAD_FOR_INPUT",
7254   "RELOAD_FOR_OUTPUT",
7255   "RELOAD_FOR_INSN",
7256   "RELOAD_FOR_INPUT_ADDRESS",
7257   "RELOAD_FOR_INPADDR_ADDRESS",
7258   "RELOAD_FOR_OUTPUT_ADDRESS",
7259   "RELOAD_FOR_OUTADDR_ADDRESS",
7260   "RELOAD_FOR_OPERAND_ADDRESS",
7261   "RELOAD_FOR_OPADDR_ADDR",
7262   "RELOAD_OTHER",
7263   "RELOAD_FOR_OTHER_ADDRESS"
7264 };
7265 
7266 /* These functions are used to print the variables set by `find_reloads'.  */
7267 
7268 DEBUG_FUNCTION void
7269 debug_reload_to_stream (FILE *f)
7270 {
7271   int r;
7272   const char *prefix;
7273 
7274   if (! f)
7275     f = stderr;
7276   for (r = 0; r < n_reloads; r++)
7277     {
7278       fprintf (f, "Reload %d: ", r);
7279 
7280       if (rld[r].in != 0)
7281 	{
7282 	  fprintf (f, "reload_in (%s) = ",
7283 		   GET_MODE_NAME (rld[r].inmode));
7284 	  print_inline_rtx (f, rld[r].in, 24);
7285 	  fprintf (f, "\n\t");
7286 	}
7287 
7288       if (rld[r].out != 0)
7289 	{
7290 	  fprintf (f, "reload_out (%s) = ",
7291 		   GET_MODE_NAME (rld[r].outmode));
7292 	  print_inline_rtx (f, rld[r].out, 24);
7293 	  fprintf (f, "\n\t");
7294 	}
7295 
7296       fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7297 
7298       fprintf (f, "%s (opnum = %d)",
7299 	       reload_when_needed_name[(int) rld[r].when_needed],
7300 	       rld[r].opnum);
7301 
7302       if (rld[r].optional)
7303 	fprintf (f, ", optional");
7304 
7305       if (rld[r].nongroup)
7306 	fprintf (f, ", nongroup");
7307 
7308       if (maybe_ne (rld[r].inc, 0))
7309 	{
7310 	  fprintf (f, ", inc by ");
7311 	  print_dec (rld[r].inc, f, SIGNED);
7312 	}
7313 
7314       if (rld[r].nocombine)
7315 	fprintf (f, ", can't combine");
7316 
7317       if (rld[r].secondary_p)
7318 	fprintf (f, ", secondary_reload_p");
7319 
7320       if (rld[r].in_reg != 0)
7321 	{
7322 	  fprintf (f, "\n\treload_in_reg: ");
7323 	  print_inline_rtx (f, rld[r].in_reg, 24);
7324 	}
7325 
7326       if (rld[r].out_reg != 0)
7327 	{
7328 	  fprintf (f, "\n\treload_out_reg: ");
7329 	  print_inline_rtx (f, rld[r].out_reg, 24);
7330 	}
7331 
7332       if (rld[r].reg_rtx != 0)
7333 	{
7334 	  fprintf (f, "\n\treload_reg_rtx: ");
7335 	  print_inline_rtx (f, rld[r].reg_rtx, 24);
7336 	}
7337 
7338       prefix = "\n\t";
7339       if (rld[r].secondary_in_reload != -1)
7340 	{
7341 	  fprintf (f, "%ssecondary_in_reload = %d",
7342 		   prefix, rld[r].secondary_in_reload);
7343 	  prefix = ", ";
7344 	}
7345 
7346       if (rld[r].secondary_out_reload != -1)
7347 	fprintf (f, "%ssecondary_out_reload = %d\n",
7348 		 prefix, rld[r].secondary_out_reload);
7349 
7350       prefix = "\n\t";
7351       if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7352 	{
7353 	  fprintf (f, "%ssecondary_in_icode = %s", prefix,
7354 		   insn_data[rld[r].secondary_in_icode].name);
7355 	  prefix = ", ";
7356 	}
7357 
7358       if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7359 	fprintf (f, "%ssecondary_out_icode = %s", prefix,
7360 		 insn_data[rld[r].secondary_out_icode].name);
7361 
7362       fprintf (f, "\n");
7363     }
7364 }
7365 
7366 DEBUG_FUNCTION void
7367 debug_reload (void)
7368 {
7369   debug_reload_to_stream (stderr);
7370 }
7371