xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/reload.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2    Copyright (C) 1987-2015 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains subroutines used only from the file reload1.c.
21    It knows how to scan one insn for operands and values
22    that need to be copied into registers to make valid code.
23    It also finds other operands and values which are valid
24    but for which equivalent values in registers exist and
25    ought to be used instead.
26 
27    Before processing the first insn of the function, call `init_reload'.
28    init_reload actually has to be called earlier anyway.
29 
30    To scan an insn, call `find_reloads'.  This does two things:
31    1. sets up tables describing which values must be reloaded
32    for this insn, and what kind of hard regs they must be reloaded into;
33    2. optionally record the locations where those values appear in
34    the data, so they can be replaced properly later.
35    This is done only if the second arg to `find_reloads' is nonzero.
36 
37    The third arg to `find_reloads' specifies the number of levels
38    of indirect addressing supported by the machine.  If it is zero,
39    indirect addressing is not valid.  If it is one, (MEM (REG n))
40    is valid even if (REG n) did not get a hard register; if it is two,
41    (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42    hard register, and similarly for higher values.
43 
44    Then you must choose the hard regs to reload those pseudo regs into,
45    and generate appropriate load insns before this insn and perhaps
46    also store insns after this insn.  Set up the array `reload_reg_rtx'
47    to contain the REG rtx's for the registers you used.  In some
48    cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49    for certain reloads.  Then that tells you which register to use,
50    so you do not need to allocate one.  But you still do need to add extra
51    instructions to copy the value into and out of that register.
52 
53    Finally you must call `subst_reloads' to substitute the reload reg rtx's
54    into the locations already recorded.
55 
56 NOTE SIDE EFFECTS:
57 
58    find_reloads can alter the operands of the instruction it is called on.
59 
60    1. Two operands of any sort may be interchanged, if they are in a
61    commutative instruction.
62    This happens only if find_reloads thinks the instruction will compile
63    better that way.
64 
65    2. Pseudo-registers that are equivalent to constants are replaced
66    with those constants if they are not in hard registers.
67 
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71 
72 Using a reload register for several reloads in one insn:
73 
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77 
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81 
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload.  */
85 
86 #define REG_OK_STRICT
87 
88 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow.  */
89 #undef DEBUG_RELOAD
90 
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "tm.h"
95 #include "rtl-error.h"
96 #include "tm_p.h"
97 #include "insn-config.h"
98 #include "symtab.h"
99 #include "hashtab.h"
100 #include "hash-set.h"
101 #include "vec.h"
102 #include "machmode.h"
103 #include "hard-reg-set.h"
104 #include "input.h"
105 #include "function.h"
106 #include "rtl.h"
107 #include "flags.h"
108 #include "statistics.h"
109 #include "double-int.h"
110 #include "real.h"
111 #include "fixed-value.h"
112 #include "alias.h"
113 #include "wide-int.h"
114 #include "inchash.h"
115 #include "tree.h"
116 #include "expmed.h"
117 #include "dojump.h"
118 #include "explow.h"
119 #include "calls.h"
120 #include "emit-rtl.h"
121 #include "varasm.h"
122 #include "stmt.h"
123 #include "expr.h"
124 #include "insn-codes.h"
125 #include "optabs.h"
126 #include "recog.h"
127 #include "dominance.h"
128 #include "cfg.h"
129 #include "predict.h"
130 #include "basic-block.h"
131 #include "df.h"
132 #include "reload.h"
133 #include "regs.h"
134 #include "addresses.h"
135 #include "params.h"
136 #include "target.h"
137 #include "ira.h"
138 
139 /* True if X is a constant that can be forced into the constant pool.
140    MODE is the mode of the operand, or VOIDmode if not known.  */
141 #define CONST_POOL_OK_P(MODE, X)		\
142   ((MODE) != VOIDmode				\
143    && CONSTANT_P (X)				\
144    && GET_CODE (X) != HIGH			\
145    && !targetm.cannot_force_const_mem (MODE, X))
146 
147 /* True if C is a non-empty register class that has too few registers
148    to be safely used as a reload target class.  */
149 
150 static inline bool
151 small_register_class_p (reg_class_t rclass)
152 {
153   return (reg_class_size [(int) rclass] == 1
154 	  || (reg_class_size [(int) rclass] >= 1
155 	      && targetm.class_likely_spilled_p (rclass)));
156 }
157 
158 
159 /* All reloads of the current insn are recorded here.  See reload.h for
160    comments.  */
161 int n_reloads;
162 struct reload rld[MAX_RELOADS];
163 
164 /* All the "earlyclobber" operands of the current insn
165    are recorded here.  */
166 int n_earlyclobbers;
167 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
168 
169 int reload_n_operands;
170 
171 /* Replacing reloads.
172 
173    If `replace_reloads' is nonzero, then as each reload is recorded
174    an entry is made for it in the table `replacements'.
175    Then later `subst_reloads' can look through that table and
176    perform all the replacements needed.  */
177 
178 /* Nonzero means record the places to replace.  */
179 static int replace_reloads;
180 
181 /* Each replacement is recorded with a structure like this.  */
182 struct replacement
183 {
184   rtx *where;			/* Location to store in */
185   int what;			/* which reload this is for */
186   machine_mode mode;	/* mode it must have */
187 };
188 
189 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
190 
191 /* Number of replacements currently recorded.  */
192 static int n_replacements;
193 
194 /* Used to track what is modified by an operand.  */
195 struct decomposition
196 {
197   int reg_flag;		/* Nonzero if referencing a register.  */
198   int safe;		/* Nonzero if this can't conflict with anything.  */
199   rtx base;		/* Base address for MEM.  */
200   HOST_WIDE_INT start;	/* Starting offset or register number.  */
201   HOST_WIDE_INT end;	/* Ending offset or register number.  */
202 };
203 
204 #ifdef SECONDARY_MEMORY_NEEDED
205 
206 /* Save MEMs needed to copy from one class of registers to another.  One MEM
207    is used per mode, but normally only one or two modes are ever used.
208 
209    We keep two versions, before and after register elimination.  The one
210    after register elimination is record separately for each operand.  This
211    is done in case the address is not valid to be sure that we separately
212    reload each.  */
213 
214 static rtx secondary_memlocs[NUM_MACHINE_MODES];
215 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
216 static int secondary_memlocs_elim_used = 0;
217 #endif
218 
219 /* The instruction we are doing reloads for;
220    so we can test whether a register dies in it.  */
221 static rtx_insn *this_insn;
222 
223 /* Nonzero if this instruction is a user-specified asm with operands.  */
224 static int this_insn_is_asm;
225 
226 /* If hard_regs_live_known is nonzero,
227    we can tell which hard regs are currently live,
228    at least enough to succeed in choosing dummy reloads.  */
229 static int hard_regs_live_known;
230 
231 /* Indexed by hard reg number,
232    element is nonnegative if hard reg has been spilled.
233    This vector is passed to `find_reloads' as an argument
234    and is not changed here.  */
235 static short *static_reload_reg_p;
236 
237 /* Set to 1 in subst_reg_equivs if it changes anything.  */
238 static int subst_reg_equivs_changed;
239 
240 /* On return from push_reload, holds the reload-number for the OUT
241    operand, which can be different for that from the input operand.  */
242 static int output_reloadnum;
243 
244   /* Compare two RTX's.  */
245 #define MATCHES(x, y) \
246  (x == y || (x != 0 && (REG_P (x)				\
247 			? REG_P (y) && REGNO (x) == REGNO (y)	\
248 			: rtx_equal_p (x, y) && ! side_effects_p (x))))
249 
250   /* Indicates if two reloads purposes are for similar enough things that we
251      can merge their reloads.  */
252 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
253   ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
254    || ((when1) == (when2) && (op1) == (op2))		\
255    || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
256    || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
257        && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
258    || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
259        && (when2) == RELOAD_FOR_OTHER_ADDRESS))
260 
261   /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
262 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
263   ((when1) != (when2)					\
264    || ! ((op1) == (op2)					\
265 	 || (when1) == RELOAD_FOR_INPUT			\
266 	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
267 	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
268 
269   /* If we are going to reload an address, compute the reload type to
270      use.  */
271 #define ADDR_TYPE(type)					\
272   ((type) == RELOAD_FOR_INPUT_ADDRESS			\
273    ? RELOAD_FOR_INPADDR_ADDRESS				\
274    : ((type) == RELOAD_FOR_OUTPUT_ADDRESS		\
275       ? RELOAD_FOR_OUTADDR_ADDRESS			\
276       : (type)))
277 
278 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
279 				  machine_mode, enum reload_type,
280 				  enum insn_code *, secondary_reload_info *);
281 static enum reg_class find_valid_class (machine_mode, machine_mode,
282 					int, unsigned int);
283 static void push_replacement (rtx *, int, machine_mode);
284 static void dup_replacements (rtx *, rtx *);
285 static void combine_reloads (void);
286 static int find_reusable_reload (rtx *, rtx, enum reg_class,
287 				 enum reload_type, int, int);
288 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
289 			      machine_mode, reg_class_t, int, int);
290 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
291 static struct decomposition decompose (rtx);
292 static int immune_p (rtx, rtx, struct decomposition);
293 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
294 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
295 				rtx_insn *, int *);
296 static rtx make_memloc (rtx, int);
297 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
298 					      addr_space_t, rtx *);
299 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
300 				 int, enum reload_type, int, rtx_insn *);
301 static rtx subst_reg_equivs (rtx, rtx_insn *);
302 static rtx subst_indexed_address (rtx);
303 static void update_auto_inc_notes (rtx_insn *, int, int);
304 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
305 				   enum rtx_code, enum rtx_code, rtx *,
306 				   int, enum reload_type,int, rtx_insn *);
307 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
308 				       machine_mode, int,
309 				       enum reload_type, int);
310 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
311 					int, rtx_insn *, int *);
312 static void copy_replacements_1 (rtx *, rtx *, int);
313 static int find_inc_amount (rtx, rtx);
314 static int refers_to_mem_for_reload_p (rtx);
315 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
316 					 rtx, rtx *);
317 
318 /* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
319    list yet.  */
320 
321 static void
322 push_reg_equiv_alt_mem (int regno, rtx mem)
323 {
324   rtx it;
325 
326   for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
327     if (rtx_equal_p (XEXP (it, 0), mem))
328       return;
329 
330   reg_equiv_alt_mem_list (regno)
331     = alloc_EXPR_LIST (REG_EQUIV, mem,
332 		       reg_equiv_alt_mem_list (regno));
333 }
334 
335 /* Determine if any secondary reloads are needed for loading (if IN_P is
336    nonzero) or storing (if IN_P is zero) X to or from a reload register of
337    register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
338    are needed, push them.
339 
340    Return the reload number of the secondary reload we made, or -1 if
341    we didn't need one.  *PICODE is set to the insn_code to use if we do
342    need a secondary reload.  */
343 
344 static int
345 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
346 		       enum reg_class reload_class,
347 		       machine_mode reload_mode, enum reload_type type,
348 		       enum insn_code *picode, secondary_reload_info *prev_sri)
349 {
350   enum reg_class rclass = NO_REGS;
351   enum reg_class scratch_class;
352   machine_mode mode = reload_mode;
353   enum insn_code icode = CODE_FOR_nothing;
354   enum insn_code t_icode = CODE_FOR_nothing;
355   enum reload_type secondary_type;
356   int s_reload, t_reload = -1;
357   const char *scratch_constraint;
358   secondary_reload_info sri;
359 
360   if (type == RELOAD_FOR_INPUT_ADDRESS
361       || type == RELOAD_FOR_OUTPUT_ADDRESS
362       || type == RELOAD_FOR_INPADDR_ADDRESS
363       || type == RELOAD_FOR_OUTADDR_ADDRESS)
364     secondary_type = type;
365   else
366     secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
367 
368   *picode = CODE_FOR_nothing;
369 
370   /* If X is a paradoxical SUBREG, use the inner value to determine both the
371      mode and object being reloaded.  */
372   if (paradoxical_subreg_p (x))
373     {
374       x = SUBREG_REG (x);
375       reload_mode = GET_MODE (x);
376     }
377 
378   /* If X is a pseudo-register that has an equivalent MEM (actually, if it
379      is still a pseudo-register by now, it *must* have an equivalent MEM
380      but we don't want to assume that), use that equivalent when seeing if
381      a secondary reload is needed since whether or not a reload is needed
382      might be sensitive to the form of the MEM.  */
383 
384   if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
385       && reg_equiv_mem (REGNO (x)))
386     x = reg_equiv_mem (REGNO (x));
387 
388   sri.icode = CODE_FOR_nothing;
389   sri.prev_sri = prev_sri;
390   rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
391 						      reload_mode, &sri);
392   icode = (enum insn_code) sri.icode;
393 
394   /* If we don't need any secondary registers, done.  */
395   if (rclass == NO_REGS && icode == CODE_FOR_nothing)
396     return -1;
397 
398   if (rclass != NO_REGS)
399     t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
400 				      reload_mode, type, &t_icode, &sri);
401 
402   /* If we will be using an insn, the secondary reload is for a
403      scratch register.  */
404 
405   if (icode != CODE_FOR_nothing)
406     {
407       /* If IN_P is nonzero, the reload register will be the output in
408 	 operand 0.  If IN_P is zero, the reload register will be the input
409 	 in operand 1.  Outputs should have an initial "=", which we must
410 	 skip.  */
411 
412       /* ??? It would be useful to be able to handle only two, or more than
413 	 three, operands, but for now we can only handle the case of having
414 	 exactly three: output, input and one temp/scratch.  */
415       gcc_assert (insn_data[(int) icode].n_operands == 3);
416 
417       /* ??? We currently have no way to represent a reload that needs
418 	 an icode to reload from an intermediate tertiary reload register.
419 	 We should probably have a new field in struct reload to tag a
420 	 chain of scratch operand reloads onto.   */
421       gcc_assert (rclass == NO_REGS);
422 
423       scratch_constraint = insn_data[(int) icode].operand[2].constraint;
424       gcc_assert (*scratch_constraint == '=');
425       scratch_constraint++;
426       if (*scratch_constraint == '&')
427 	scratch_constraint++;
428       scratch_class = (reg_class_for_constraint
429 		       (lookup_constraint (scratch_constraint)));
430 
431       rclass = scratch_class;
432       mode = insn_data[(int) icode].operand[2].mode;
433     }
434 
435   /* This case isn't valid, so fail.  Reload is allowed to use the same
436      register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
437      in the case of a secondary register, we actually need two different
438      registers for correct code.  We fail here to prevent the possibility of
439      silently generating incorrect code later.
440 
441      The convention is that secondary input reloads are valid only if the
442      secondary_class is different from class.  If you have such a case, you
443      can not use secondary reloads, you must work around the problem some
444      other way.
445 
446      Allow this when a reload_in/out pattern is being used.  I.e. assume
447      that the generated code handles this case.  */
448 
449   gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
450 	      || t_icode != CODE_FOR_nothing);
451 
452   /* See if we can reuse an existing secondary reload.  */
453   for (s_reload = 0; s_reload < n_reloads; s_reload++)
454     if (rld[s_reload].secondary_p
455 	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
456 	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
457 	&& ((in_p && rld[s_reload].inmode == mode)
458 	    || (! in_p && rld[s_reload].outmode == mode))
459 	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
460 	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
461 	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
462 	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
463 	&& (small_register_class_p (rclass)
464 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
465 	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
466 			     opnum, rld[s_reload].opnum))
467       {
468 	if (in_p)
469 	  rld[s_reload].inmode = mode;
470 	if (! in_p)
471 	  rld[s_reload].outmode = mode;
472 
473 	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
474 	  rld[s_reload].rclass = rclass;
475 
476 	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
477 	rld[s_reload].optional &= optional;
478 	rld[s_reload].secondary_p = 1;
479 	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
480 			    opnum, rld[s_reload].opnum))
481 	  rld[s_reload].when_needed = RELOAD_OTHER;
482 
483 	break;
484       }
485 
486   if (s_reload == n_reloads)
487     {
488 #ifdef SECONDARY_MEMORY_NEEDED
489       /* If we need a memory location to copy between the two reload regs,
490 	 set it up now.  Note that we do the input case before making
491 	 the reload and the output case after.  This is due to the
492 	 way reloads are output.  */
493 
494       if (in_p && icode == CODE_FOR_nothing
495 	  && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
496 	{
497 	  get_secondary_mem (x, reload_mode, opnum, type);
498 
499 	  /* We may have just added new reloads.  Make sure we add
500 	     the new reload at the end.  */
501 	  s_reload = n_reloads;
502 	}
503 #endif
504 
505       /* We need to make a new secondary reload for this register class.  */
506       rld[s_reload].in = rld[s_reload].out = 0;
507       rld[s_reload].rclass = rclass;
508 
509       rld[s_reload].inmode = in_p ? mode : VOIDmode;
510       rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
511       rld[s_reload].reg_rtx = 0;
512       rld[s_reload].optional = optional;
513       rld[s_reload].inc = 0;
514       /* Maybe we could combine these, but it seems too tricky.  */
515       rld[s_reload].nocombine = 1;
516       rld[s_reload].in_reg = 0;
517       rld[s_reload].out_reg = 0;
518       rld[s_reload].opnum = opnum;
519       rld[s_reload].when_needed = secondary_type;
520       rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
521       rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
522       rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
523       rld[s_reload].secondary_out_icode
524 	= ! in_p ? t_icode : CODE_FOR_nothing;
525       rld[s_reload].secondary_p = 1;
526 
527       n_reloads++;
528 
529 #ifdef SECONDARY_MEMORY_NEEDED
530       if (! in_p && icode == CODE_FOR_nothing
531 	  && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
532 	get_secondary_mem (x, mode, opnum, type);
533 #endif
534     }
535 
536   *picode = icode;
537   return s_reload;
538 }
539 
540 /* If a secondary reload is needed, return its class.  If both an intermediate
541    register and a scratch register is needed, we return the class of the
542    intermediate register.  */
543 reg_class_t
544 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
545 			rtx x)
546 {
547   enum insn_code icode;
548   secondary_reload_info sri;
549 
550   sri.icode = CODE_FOR_nothing;
551   sri.prev_sri = NULL;
552   rclass
553     = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
554   icode = (enum insn_code) sri.icode;
555 
556   /* If there are no secondary reloads at all, we return NO_REGS.
557      If an intermediate register is needed, we return its class.  */
558   if (icode == CODE_FOR_nothing || rclass != NO_REGS)
559     return rclass;
560 
561   /* No intermediate register is needed, but we have a special reload
562      pattern, which we assume for now needs a scratch register.  */
563   return scratch_reload_class (icode);
564 }
565 
566 /* ICODE is the insn_code of a reload pattern.  Check that it has exactly
567    three operands, verify that operand 2 is an output operand, and return
568    its register class.
569    ??? We'd like to be able to handle any pattern with at least 2 operands,
570    for zero or more scratch registers, but that needs more infrastructure.  */
571 enum reg_class
572 scratch_reload_class (enum insn_code icode)
573 {
574   const char *scratch_constraint;
575   enum reg_class rclass;
576 
577   gcc_assert (insn_data[(int) icode].n_operands == 3);
578   scratch_constraint = insn_data[(int) icode].operand[2].constraint;
579   gcc_assert (*scratch_constraint == '=');
580   scratch_constraint++;
581   if (*scratch_constraint == '&')
582     scratch_constraint++;
583   rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
584   gcc_assert (rclass != NO_REGS);
585   return rclass;
586 }
587 
588 #ifdef SECONDARY_MEMORY_NEEDED
589 
590 /* Return a memory location that will be used to copy X in mode MODE.
591    If we haven't already made a location for this mode in this insn,
592    call find_reloads_address on the location being returned.  */
593 
594 rtx
595 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
596 		   int opnum, enum reload_type type)
597 {
598   rtx loc;
599   int mem_valid;
600 
601   /* By default, if MODE is narrower than a word, widen it to a word.
602      This is required because most machines that require these memory
603      locations do not support short load and stores from all registers
604      (e.g., FP registers).  */
605 
606 #ifdef SECONDARY_MEMORY_NEEDED_MODE
607   mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
608 #else
609   if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
610     mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
611 #endif
612 
613   /* If we already have made a MEM for this operand in MODE, return it.  */
614   if (secondary_memlocs_elim[(int) mode][opnum] != 0)
615     return secondary_memlocs_elim[(int) mode][opnum];
616 
617   /* If this is the first time we've tried to get a MEM for this mode,
618      allocate a new one.  `something_changed' in reload will get set
619      by noticing that the frame size has changed.  */
620 
621   if (secondary_memlocs[(int) mode] == 0)
622     {
623 #ifdef SECONDARY_MEMORY_NEEDED_RTX
624       secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
625 #else
626       secondary_memlocs[(int) mode]
627 	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
628 #endif
629     }
630 
631   /* Get a version of the address doing any eliminations needed.  If that
632      didn't give us a new MEM, make a new one if it isn't valid.  */
633 
634   loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
635   mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
636 						  MEM_ADDR_SPACE (loc));
637 
638   if (! mem_valid && loc == secondary_memlocs[(int) mode])
639     loc = copy_rtx (loc);
640 
641   /* The only time the call below will do anything is if the stack
642      offset is too large.  In that case IND_LEVELS doesn't matter, so we
643      can just pass a zero.  Adjust the type to be the address of the
644      corresponding object.  If the address was valid, save the eliminated
645      address.  If it wasn't valid, we need to make a reload each time, so
646      don't save it.  */
647 
648   if (! mem_valid)
649     {
650       type =  (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
651 	       : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
652 	       : RELOAD_OTHER);
653 
654       find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
655 			    opnum, type, 0, 0);
656     }
657 
658   secondary_memlocs_elim[(int) mode][opnum] = loc;
659   if (secondary_memlocs_elim_used <= (int)mode)
660     secondary_memlocs_elim_used = (int)mode + 1;
661   return loc;
662 }
663 
664 /* Clear any secondary memory locations we've made.  */
665 
666 void
667 clear_secondary_mem (void)
668 {
669   memset (secondary_memlocs, 0, sizeof secondary_memlocs);
670 }
671 #endif /* SECONDARY_MEMORY_NEEDED */
672 
673 
674 /* Find the largest class which has at least one register valid in
675    mode INNER, and which for every such register, that register number
676    plus N is also valid in OUTER (if in range) and is cheap to move
677    into REGNO.  Such a class must exist.  */
678 
679 static enum reg_class
680 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
681 		  machine_mode inner ATTRIBUTE_UNUSED, int n,
682 		  unsigned int dest_regno ATTRIBUTE_UNUSED)
683 {
684   int best_cost = -1;
685   int rclass;
686   int regno;
687   enum reg_class best_class = NO_REGS;
688   enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
689   unsigned int best_size = 0;
690   int cost;
691 
692   for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
693     {
694       int bad = 0;
695       int good = 0;
696       for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
697 	if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
698 	  {
699 	    if (HARD_REGNO_MODE_OK (regno, inner))
700 	      {
701 		good = 1;
702 		if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
703 		    && ! HARD_REGNO_MODE_OK (regno + n, outer))
704 		  bad = 1;
705 	      }
706 	  }
707 
708       if (bad || !good)
709 	continue;
710       cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
711 
712       if ((reg_class_size[rclass] > best_size
713 	   && (best_cost < 0 || best_cost >= cost))
714 	  || best_cost > cost)
715 	{
716 	  best_class = (enum reg_class) rclass;
717 	  best_size = reg_class_size[rclass];
718 	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
719 					  dest_class);
720 	}
721     }
722 
723   gcc_assert (best_size != 0);
724 
725   return best_class;
726 }
727 
728 /* We are trying to reload a subreg of something that is not a register.
729    Find the largest class which contains only registers valid in
730    mode MODE.  OUTER is the mode of the subreg, DEST_CLASS the class in
731    which we would eventually like to obtain the object.  */
732 
733 static enum reg_class
734 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
735 		    machine_mode mode ATTRIBUTE_UNUSED,
736 		    enum reg_class dest_class ATTRIBUTE_UNUSED)
737 {
738   int best_cost = -1;
739   int rclass;
740   int regno;
741   enum reg_class best_class = NO_REGS;
742   unsigned int best_size = 0;
743   int cost;
744 
745   for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
746     {
747       int bad = 0;
748       for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
749 	{
750 	  if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
751 	      && !HARD_REGNO_MODE_OK (regno, mode))
752 	    bad = 1;
753 	}
754 
755       if (bad)
756 	continue;
757 
758       cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
759 
760       if ((reg_class_size[rclass] > best_size
761 	   && (best_cost < 0 || best_cost >= cost))
762 	  || best_cost > cost)
763 	{
764 	  best_class = (enum reg_class) rclass;
765 	  best_size = reg_class_size[rclass];
766 	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
767 					  dest_class);
768 	}
769     }
770 
771   gcc_assert (best_size != 0);
772 
773 #ifdef LIMIT_RELOAD_CLASS
774   best_class = LIMIT_RELOAD_CLASS (mode, best_class);
775 #endif
776   return best_class;
777 }
778 
779 /* Return the number of a previously made reload that can be combined with
780    a new one, or n_reloads if none of the existing reloads can be used.
781    OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
782    push_reload, they determine the kind of the new reload that we try to
783    combine.  P_IN points to the corresponding value of IN, which can be
784    modified by this function.
785    DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */
786 
787 static int
788 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
789 		      enum reload_type type, int opnum, int dont_share)
790 {
791   rtx in = *p_in;
792   int i;
793   /* We can't merge two reloads if the output of either one is
794      earlyclobbered.  */
795 
796   if (earlyclobber_operand_p (out))
797     return n_reloads;
798 
799   /* We can use an existing reload if the class is right
800      and at least one of IN and OUT is a match
801      and the other is at worst neutral.
802      (A zero compared against anything is neutral.)
803 
804      For targets with small register classes, don't use existing reloads
805      unless they are for the same thing since that can cause us to need
806      more reload registers than we otherwise would.  */
807 
808   for (i = 0; i < n_reloads; i++)
809     if ((reg_class_subset_p (rclass, rld[i].rclass)
810 	 || reg_class_subset_p (rld[i].rclass, rclass))
811 	/* If the existing reload has a register, it must fit our class.  */
812 	&& (rld[i].reg_rtx == 0
813 	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
814 				  true_regnum (rld[i].reg_rtx)))
815 	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
816 	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
817 	    || (out != 0 && MATCHES (rld[i].out, out)
818 		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
819 	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
820 	&& (small_register_class_p (rclass)
821 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
822 	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
823       return i;
824 
825   /* Reloading a plain reg for input can match a reload to postincrement
826      that reg, since the postincrement's value is the right value.
827      Likewise, it can match a preincrement reload, since we regard
828      the preincrementation as happening before any ref in this insn
829      to that register.  */
830   for (i = 0; i < n_reloads; i++)
831     if ((reg_class_subset_p (rclass, rld[i].rclass)
832 	 || reg_class_subset_p (rld[i].rclass, rclass))
833 	/* If the existing reload has a register, it must fit our
834 	   class.  */
835 	&& (rld[i].reg_rtx == 0
836 	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
837 				  true_regnum (rld[i].reg_rtx)))
838 	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
839 	&& ((REG_P (in)
840 	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
841 	     && MATCHES (XEXP (rld[i].in, 0), in))
842 	    || (REG_P (rld[i].in)
843 		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
844 		&& MATCHES (XEXP (in, 0), rld[i].in)))
845 	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
846 	&& (small_register_class_p (rclass)
847 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
848 	&& MERGABLE_RELOADS (type, rld[i].when_needed,
849 			     opnum, rld[i].opnum))
850       {
851 	/* Make sure reload_in ultimately has the increment,
852 	   not the plain register.  */
853 	if (REG_P (in))
854 	  *p_in = rld[i].in;
855 	return i;
856       }
857   return n_reloads;
858 }
859 
860 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
861    expression.  MODE is the mode that X will be used in.  OUTPUT is true if
862    the function is invoked for the output part of an enclosing reload.  */
863 
864 static bool
865 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
866 {
867   rtx inner;
868   int regno;
869 
870   /* Only SUBREGs are problematical.  */
871   if (GET_CODE (x) != SUBREG)
872     return false;
873 
874   inner = SUBREG_REG (x);
875 
876   /* If INNER is a constant or PLUS, then INNER will need reloading.  */
877   if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
878     return true;
879 
880   /* If INNER is not a register, then INNER will not need reloading.  */
881   if (!REG_P (inner))
882     return false;
883 
884   regno = REGNO (inner);
885 
886   /* If INNER is not a hard register, then INNER will not need reloading
887      unless it's a mode dependent memory reference.  */
888   if (regno >= FIRST_PSEUDO_REGISTER)
889     return !output
890 	   && reg_equiv_mem (regno) != 0
891 	   && mode_dependent_address_p (XEXP (reg_equiv_mem (regno), 0),
892 					MEM_ADDR_SPACE (reg_equiv_mem (regno)));
893 
894   /* If INNER is not ok for MODE, then INNER will need reloading.  */
895   if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
896     return true;
897 
898   /* If this is for an output, and the outer part is a word or smaller,
899      INNER is larger than a word and the number of registers in INNER is
900      not the same as the number of words in INNER, then INNER will need
901      reloading (with an in-out reload).  */
902   return (output
903 	  && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
904 	  && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
905 	  && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
906 	      != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
907 }
908 
909 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
910    requiring an extra reload register.  The caller has already found that
911    IN contains some reference to REGNO, so check that we can produce the
912    new value in a single step.  E.g. if we have
913    (set (reg r13) (plus (reg r13) (const int 1))), and there is an
914    instruction that adds one to a register, this should succeed.
915    However, if we have something like
916    (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
917    needs to be loaded into a register first, we need a separate reload
918    register.
919    Such PLUS reloads are generated by find_reload_address_part.
920    The out-of-range PLUS expressions are usually introduced in the instruction
921    patterns by register elimination and substituting pseudos without a home
922    by their function-invariant equivalences.  */
923 static int
924 can_reload_into (rtx in, int regno, machine_mode mode)
925 {
926   rtx dst;
927   rtx_insn *test_insn;
928   int r = 0;
929   struct recog_data_d save_recog_data;
930 
931   /* For matching constraints, we often get notional input reloads where
932      we want to use the original register as the reload register.  I.e.
933      technically this is a non-optional input-output reload, but IN is
934      already a valid register, and has been chosen as the reload register.
935      Speed this up, since it trivially works.  */
936   if (REG_P (in))
937     return 1;
938 
939   /* To test MEMs properly, we'd have to take into account all the reloads
940      that are already scheduled, which can become quite complicated.
941      And since we've already handled address reloads for this MEM, it
942      should always succeed anyway.  */
943   if (MEM_P (in))
944     return 1;
945 
946   /* If we can make a simple SET insn that does the job, everything should
947      be fine.  */
948   dst =  gen_rtx_REG (mode, regno);
949   test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
950   save_recog_data = recog_data;
951   if (recog_memoized (test_insn) >= 0)
952     {
953       extract_insn (test_insn);
954       r = constrain_operands (1, get_enabled_alternatives (test_insn));
955     }
956   recog_data = save_recog_data;
957   return r;
958 }
959 
960 /* Record one reload that needs to be performed.
961    IN is an rtx saying where the data are to be found before this instruction.
962    OUT says where they must be stored after the instruction.
963    (IN is zero for data not read, and OUT is zero for data not written.)
964    INLOC and OUTLOC point to the places in the instructions where
965    IN and OUT were found.
966    If IN and OUT are both nonzero, it means the same register must be used
967    to reload both IN and OUT.
968 
969    RCLASS is a register class required for the reloaded data.
970    INMODE is the machine mode that the instruction requires
971    for the reg that replaces IN and OUTMODE is likewise for OUT.
972 
973    If IN is zero, then OUT's location and mode should be passed as
974    INLOC and INMODE.
975 
976    STRICT_LOW is the 1 if there is a containing STRICT_LOW_PART rtx.
977 
978    OPTIONAL nonzero means this reload does not need to be performed:
979    it can be discarded if that is more convenient.
980 
981    OPNUM and TYPE say what the purpose of this reload is.
982 
983    The return value is the reload-number for this reload.
984 
985    If both IN and OUT are nonzero, in some rare cases we might
986    want to make two separate reloads.  (Actually we never do this now.)
987    Therefore, the reload-number for OUT is stored in
988    output_reloadnum when we return; the return value applies to IN.
989    Usually (presently always), when IN and OUT are nonzero,
990    the two reload-numbers are equal, but the caller should be careful to
991    distinguish them.  */
992 
993 int
994 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
995 	     enum reg_class rclass, machine_mode inmode,
996 	     machine_mode outmode, int strict_low, int optional,
997 	     int opnum, enum reload_type type)
998 {
999   int i;
1000   int dont_share = 0;
1001   int dont_remove_subreg = 0;
1002 #ifdef LIMIT_RELOAD_CLASS
1003   rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
1004 #endif
1005   int secondary_in_reload = -1, secondary_out_reload = -1;
1006   enum insn_code secondary_in_icode = CODE_FOR_nothing;
1007   enum insn_code secondary_out_icode = CODE_FOR_nothing;
1008   enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
1009   subreg_in_class = NO_REGS;
1010 
1011   /* INMODE and/or OUTMODE could be VOIDmode if no mode
1012      has been specified for the operand.  In that case,
1013      use the operand's mode as the mode to reload.  */
1014   if (inmode == VOIDmode && in != 0)
1015     inmode = GET_MODE (in);
1016   if (outmode == VOIDmode && out != 0)
1017     outmode = GET_MODE (out);
1018 
1019   /* If find_reloads and friends until now missed to replace a pseudo
1020      with a constant of reg_equiv_constant something went wrong
1021      beforehand.
1022      Note that it can't simply be done here if we missed it earlier
1023      since the constant might need to be pushed into the literal pool
1024      and the resulting memref would probably need further
1025      reloading.  */
1026   if (in != 0 && REG_P (in))
1027     {
1028       int regno = REGNO (in);
1029 
1030       gcc_assert (regno < FIRST_PSEUDO_REGISTER
1031 		  || reg_renumber[regno] >= 0
1032 		  || reg_equiv_constant (regno) == NULL_RTX);
1033     }
1034 
1035   /* reg_equiv_constant only contains constants which are obviously
1036      not appropriate as destination.  So if we would need to replace
1037      the destination pseudo with a constant we are in real
1038      trouble.  */
1039   if (out != 0 && REG_P (out))
1040     {
1041       int regno = REGNO (out);
1042 
1043       gcc_assert (regno < FIRST_PSEUDO_REGISTER
1044 		  || reg_renumber[regno] >= 0
1045 		  || reg_equiv_constant (regno) == NULL_RTX);
1046     }
1047 
1048   /* If we have a read-write operand with an address side-effect,
1049      change either IN or OUT so the side-effect happens only once.  */
1050   if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1051     switch (GET_CODE (XEXP (in, 0)))
1052       {
1053       case POST_INC: case POST_DEC:   case POST_MODIFY:
1054 	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1055 	break;
1056 
1057       case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1058 	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1059 	break;
1060 
1061       default:
1062 	break;
1063       }
1064 
1065   /* If we are reloading a (SUBREG constant ...), really reload just the
1066      inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
1067      If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1068      a pseudo and hence will become a MEM) with M1 wider than M2 and the
1069      register is a pseudo, also reload the inside expression.
1070      For machines that extend byte loads, do this for any SUBREG of a pseudo
1071      where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1072      M2 is an integral mode that gets extended when loaded.
1073      Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1074      where either M1 is not valid for R or M2 is wider than a word but we
1075      only need one register to store an M2-sized quantity in R.
1076      (However, if OUT is nonzero, we need to reload the reg *and*
1077      the subreg, so do nothing here, and let following statement handle it.)
1078 
1079      Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1080      we can't handle it here because CONST_INT does not indicate a mode.
1081 
1082      Similarly, we must reload the inside expression if we have a
1083      STRICT_LOW_PART (presumably, in == out in this case).
1084 
1085      Also reload the inner expression if it does not require a secondary
1086      reload but the SUBREG does.
1087 
1088      Finally, reload the inner expression if it is a register that is in
1089      the class whose registers cannot be referenced in a different size
1090      and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
1091      cannot reload just the inside since we might end up with the wrong
1092      register class.  But if it is inside a STRICT_LOW_PART, we have
1093      no choice, so we hope we do get the right register class there.  */
1094 
1095   if (in != 0 && GET_CODE (in) == SUBREG
1096       && (subreg_lowpart_p (in) || strict_low)
1097 #ifdef CANNOT_CHANGE_MODE_CLASS
1098       && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1099 #endif
1100       && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1101       && (CONSTANT_P (SUBREG_REG (in))
1102 	  || GET_CODE (SUBREG_REG (in)) == PLUS
1103 	  || strict_low
1104 	  || (((REG_P (SUBREG_REG (in))
1105 		&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1106 	       || MEM_P (SUBREG_REG (in)))
1107 	      && ((GET_MODE_PRECISION (inmode)
1108 		   > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1109 #ifdef LOAD_EXTEND_OP
1110 		  || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1111 		      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1112 			  <= UNITS_PER_WORD)
1113 		      && (GET_MODE_PRECISION (inmode)
1114 			  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1115 		      && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1116 		      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1117 #endif
1118 #ifdef WORD_REGISTER_OPERATIONS
1119 		  || ((GET_MODE_PRECISION (inmode)
1120 		       < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1121 		      && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1122 			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1123 			   / UNITS_PER_WORD)))
1124 #endif
1125 		  ))
1126 	  || (REG_P (SUBREG_REG (in))
1127 	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1128 	      /* The case where out is nonzero
1129 		 is handled differently in the following statement.  */
1130 	      && (out == 0 || subreg_lowpart_p (in))
1131 	      && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1132 		   && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1133 		       > UNITS_PER_WORD)
1134 		   && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1135 			/ UNITS_PER_WORD)
1136 		       != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1137 						[GET_MODE (SUBREG_REG (in))]))
1138 		  || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1139 	  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1140 	      && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1141 					  SUBREG_REG (in))
1142 		  == NO_REGS))
1143 #ifdef CANNOT_CHANGE_MODE_CLASS
1144 	  || (REG_P (SUBREG_REG (in))
1145 	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1146 	      && REG_CANNOT_CHANGE_MODE_P
1147 	      (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1148 #endif
1149 	  ))
1150     {
1151 #ifdef LIMIT_RELOAD_CLASS
1152       in_subreg_loc = inloc;
1153 #endif
1154       inloc = &SUBREG_REG (in);
1155       in = *inloc;
1156 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1157       if (MEM_P (in))
1158 	/* This is supposed to happen only for paradoxical subregs made by
1159 	   combine.c.  (SUBREG (MEM)) isn't supposed to occur other ways.  */
1160 	gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1161 #endif
1162       inmode = GET_MODE (in);
1163     }
1164 
1165   /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1166      where M1 is not valid for R if it was not handled by the code above.
1167 
1168      Similar issue for (SUBREG constant ...) if it was not handled by the
1169      code above.  This can happen if SUBREG_BYTE != 0.
1170 
1171      However, we must reload the inner reg *as well as* the subreg in
1172      that case.  */
1173 
1174   if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1175     {
1176       if (REG_P (SUBREG_REG (in)) && HARD_REGISTER_P (SUBREG_REG (in)))
1177 	subreg_in_class
1178 	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1179 			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
1180 						   GET_MODE (SUBREG_REG (in)),
1181 						   SUBREG_BYTE (in),
1182 						   GET_MODE (in)),
1183 			      REGNO (SUBREG_REG (in)));
1184       else if (REG_P (SUBREG_REG (in))
1185                || GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1186 	subreg_in_class = find_valid_class_1 (inmode,
1187 					      GET_MODE (SUBREG_REG (in)),
1188 					      rclass);
1189 
1190       /* This relies on the fact that emit_reload_insns outputs the
1191 	 instructions for input reloads of type RELOAD_OTHER in the same
1192 	 order as the reloads.  Thus if the outer reload is also of type
1193 	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1194 	 output before the outer reload.  */
1195       push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1196 		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1197       dont_remove_subreg = 1;
1198     }
1199 
1200   /* Similarly for paradoxical and problematical SUBREGs on the output.
1201      Note that there is no reason we need worry about the previous value
1202      of SUBREG_REG (out); even if wider than out, storing in a subreg is
1203      entitled to clobber it all (except in the case of a word mode subreg
1204      or of a STRICT_LOW_PART, in that latter case the constraint should
1205      label it input-output.)  */
1206   if (out != 0 && GET_CODE (out) == SUBREG
1207       && (subreg_lowpart_p (out) || strict_low)
1208 #ifdef CANNOT_CHANGE_MODE_CLASS
1209       && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1210 #endif
1211       && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1212       && (CONSTANT_P (SUBREG_REG (out))
1213 	  || strict_low
1214 	  || (((REG_P (SUBREG_REG (out))
1215 		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1216 	       || MEM_P (SUBREG_REG (out)))
1217 	      && ((GET_MODE_PRECISION (outmode)
1218 		   > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1219 #ifdef WORD_REGISTER_OPERATIONS
1220 		  || ((GET_MODE_PRECISION (outmode)
1221 		       < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1222 		      && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1223 			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1224 			   / UNITS_PER_WORD)))
1225 #endif
1226 		  ))
1227 	  || (REG_P (SUBREG_REG (out))
1228 	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1229 	      /* The case of a word mode subreg
1230 		 is handled differently in the following statement.  */
1231 	      && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1232 		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1233 		        > UNITS_PER_WORD))
1234 	      && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1235 	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1236 	      && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1237 					  SUBREG_REG (out))
1238 		  == NO_REGS))
1239 #ifdef CANNOT_CHANGE_MODE_CLASS
1240 	  || (REG_P (SUBREG_REG (out))
1241 	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1242 	      && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1243 					   GET_MODE (SUBREG_REG (out)),
1244 					   outmode))
1245 #endif
1246 	  ))
1247     {
1248 #ifdef LIMIT_RELOAD_CLASS
1249       out_subreg_loc = outloc;
1250 #endif
1251       outloc = &SUBREG_REG (out);
1252       out = *outloc;
1253 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1254       gcc_assert (!MEM_P (out)
1255 		  || GET_MODE_SIZE (GET_MODE (out))
1256 		     <= GET_MODE_SIZE (outmode));
1257 #endif
1258       outmode = GET_MODE (out);
1259     }
1260 
1261   /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1262      where either M1 is not valid for R or M2 is wider than a word but we
1263      only need one register to store an M2-sized quantity in R.
1264 
1265      However, we must reload the inner reg *as well as* the subreg in
1266      that case and the inner reg is an in-out reload.  */
1267 
1268   if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1269     {
1270       enum reg_class in_out_class
1271 	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1272 			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
1273 						 GET_MODE (SUBREG_REG (out)),
1274 						 SUBREG_BYTE (out),
1275 						 GET_MODE (out)),
1276 			    REGNO (SUBREG_REG (out)));
1277 
1278       /* This relies on the fact that emit_reload_insns outputs the
1279 	 instructions for output reloads of type RELOAD_OTHER in reverse
1280 	 order of the reloads.  Thus if the outer reload is also of type
1281 	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1282 	 output after the outer reload.  */
1283       push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1284 		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1285 		   0, 0, opnum, RELOAD_OTHER);
1286       dont_remove_subreg = 1;
1287     }
1288 
1289   /* If IN appears in OUT, we can't share any input-only reload for IN.  */
1290   if (in != 0 && out != 0 && MEM_P (out)
1291       && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1292       && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1293     dont_share = 1;
1294 
1295   /* If IN is a SUBREG of a hard register, make a new REG.  This
1296      simplifies some of the cases below.  */
1297 
1298   if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1299       && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1300       && ! dont_remove_subreg)
1301     in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1302 
1303   /* Similarly for OUT.  */
1304   if (out != 0 && GET_CODE (out) == SUBREG
1305       && REG_P (SUBREG_REG (out))
1306       && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1307       && ! dont_remove_subreg)
1308     out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1309 
1310   /* Narrow down the class of register wanted if that is
1311      desirable on this machine for efficiency.  */
1312   {
1313     reg_class_t preferred_class = rclass;
1314 
1315     if (in != 0)
1316       preferred_class = targetm.preferred_reload_class (in, rclass);
1317 
1318     /* Output reloads may need analogous treatment, different in detail.  */
1319     if (out != 0)
1320       preferred_class
1321 	= targetm.preferred_output_reload_class (out, preferred_class);
1322 
1323     /* Discard what the target said if we cannot do it.  */
1324     if (preferred_class != NO_REGS
1325 	|| (optional && type == RELOAD_FOR_OUTPUT))
1326       rclass = (enum reg_class) preferred_class;
1327   }
1328 
1329   /* Make sure we use a class that can handle the actual pseudo
1330      inside any subreg.  For example, on the 386, QImode regs
1331      can appear within SImode subregs.  Although GENERAL_REGS
1332      can handle SImode, QImode needs a smaller class.  */
1333 #ifdef LIMIT_RELOAD_CLASS
1334   if (in_subreg_loc)
1335     rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1336   else if (in != 0 && GET_CODE (in) == SUBREG)
1337     rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1338 
1339   if (out_subreg_loc)
1340     rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1341   if (out != 0 && GET_CODE (out) == SUBREG)
1342     rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1343 #endif
1344 
1345   /* Verify that this class is at least possible for the mode that
1346      is specified.  */
1347   if (this_insn_is_asm)
1348     {
1349       machine_mode mode;
1350       if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1351 	mode = inmode;
1352       else
1353 	mode = outmode;
1354       if (mode == VOIDmode)
1355 	{
1356 	  error_for_asm (this_insn, "cannot reload integer constant "
1357 			 "operand in %<asm%>");
1358 	  mode = word_mode;
1359 	  if (in != 0)
1360 	    inmode = word_mode;
1361 	  if (out != 0)
1362 	    outmode = word_mode;
1363 	}
1364       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1365 	if (HARD_REGNO_MODE_OK (i, mode)
1366 	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1367 	  break;
1368       if (i == FIRST_PSEUDO_REGISTER)
1369 	{
1370 	  error_for_asm (this_insn, "impossible register constraint "
1371 			 "in %<asm%>");
1372 	  /* Avoid further trouble with this insn.  */
1373 	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1374 	  /* We used to continue here setting class to ALL_REGS, but it triggers
1375 	     sanity check on i386 for:
1376 	     void foo(long double d)
1377 	     {
1378 	       asm("" :: "a" (d));
1379 	     }
1380 	     Returning zero here ought to be safe as we take care in
1381 	     find_reloads to not process the reloads when instruction was
1382 	     replaced by USE.  */
1383 
1384 	  return 0;
1385 	}
1386     }
1387 
1388   /* Optional output reloads are always OK even if we have no register class,
1389      since the function of these reloads is only to have spill_reg_store etc.
1390      set, so that the storing insn can be deleted later.  */
1391   gcc_assert (rclass != NO_REGS
1392 	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1393 
1394   i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1395 
1396   if (i == n_reloads)
1397     {
1398       /* See if we need a secondary reload register to move between CLASS
1399 	 and IN or CLASS and OUT.  Get the icode and push any required reloads
1400 	 needed for each of them if so.  */
1401 
1402       if (in != 0)
1403 	secondary_in_reload
1404 	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1405 				   &secondary_in_icode, NULL);
1406       if (out != 0 && GET_CODE (out) != SCRATCH)
1407 	secondary_out_reload
1408 	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1409 				   type, &secondary_out_icode, NULL);
1410 
1411       /* We found no existing reload suitable for re-use.
1412 	 So add an additional reload.  */
1413 
1414 #ifdef SECONDARY_MEMORY_NEEDED
1415       if (subreg_in_class == NO_REGS
1416 	  && in != 0
1417 	  && (REG_P (in)
1418 	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1419 	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1420 	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1421       /* If a memory location is needed for the copy, make one.  */
1422       if (subreg_in_class != NO_REGS
1423 	  && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1424 	get_secondary_mem (in, inmode, opnum, type);
1425 #endif
1426 
1427       i = n_reloads;
1428       rld[i].in = in;
1429       rld[i].out = out;
1430       rld[i].rclass = rclass;
1431       rld[i].inmode = inmode;
1432       rld[i].outmode = outmode;
1433       rld[i].reg_rtx = 0;
1434       rld[i].optional = optional;
1435       rld[i].inc = 0;
1436       rld[i].nocombine = 0;
1437       rld[i].in_reg = inloc ? *inloc : 0;
1438       rld[i].out_reg = outloc ? *outloc : 0;
1439       rld[i].opnum = opnum;
1440       rld[i].when_needed = type;
1441       rld[i].secondary_in_reload = secondary_in_reload;
1442       rld[i].secondary_out_reload = secondary_out_reload;
1443       rld[i].secondary_in_icode = secondary_in_icode;
1444       rld[i].secondary_out_icode = secondary_out_icode;
1445       rld[i].secondary_p = 0;
1446 
1447       n_reloads++;
1448 
1449 #ifdef SECONDARY_MEMORY_NEEDED
1450       if (out != 0
1451           && (REG_P (out)
1452 	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1453 	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1454 	  && SECONDARY_MEMORY_NEEDED (rclass,
1455 				      REGNO_REG_CLASS (reg_or_subregno (out)),
1456 				      outmode))
1457 	get_secondary_mem (out, outmode, opnum, type);
1458 #endif
1459     }
1460   else
1461     {
1462       /* We are reusing an existing reload,
1463 	 but we may have additional information for it.
1464 	 For example, we may now have both IN and OUT
1465 	 while the old one may have just one of them.  */
1466 
1467       /* The modes can be different.  If they are, we want to reload in
1468 	 the larger mode, so that the value is valid for both modes.  */
1469       if (inmode != VOIDmode
1470 	  && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1471 	rld[i].inmode = inmode;
1472       if (outmode != VOIDmode
1473 	  && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1474 	rld[i].outmode = outmode;
1475       if (in != 0)
1476 	{
1477 	  rtx in_reg = inloc ? *inloc : 0;
1478 	  /* If we merge reloads for two distinct rtl expressions that
1479 	     are identical in content, there might be duplicate address
1480 	     reloads.  Remove the extra set now, so that if we later find
1481 	     that we can inherit this reload, we can get rid of the
1482 	     address reloads altogether.
1483 
1484 	     Do not do this if both reloads are optional since the result
1485 	     would be an optional reload which could potentially leave
1486 	     unresolved address replacements.
1487 
1488 	     It is not sufficient to call transfer_replacements since
1489 	     choose_reload_regs will remove the replacements for address
1490 	     reloads of inherited reloads which results in the same
1491 	     problem.  */
1492 	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1493 	      && ! (rld[i].optional && optional))
1494 	    {
1495 	      /* We must keep the address reload with the lower operand
1496 		 number alive.  */
1497 	      if (opnum > rld[i].opnum)
1498 		{
1499 		  remove_address_replacements (in);
1500 		  in = rld[i].in;
1501 		  in_reg = rld[i].in_reg;
1502 		}
1503 	      else
1504 		remove_address_replacements (rld[i].in);
1505 	    }
1506 	  /* When emitting reloads we don't necessarily look at the in-
1507 	     and outmode, but also directly at the operands (in and out).
1508 	     So we can't simply overwrite them with whatever we have found
1509 	     for this (to-be-merged) reload, we have to "merge" that too.
1510 	     Reusing another reload already verified that we deal with the
1511 	     same operands, just possibly in different modes.  So we
1512 	     overwrite the operands only when the new mode is larger.
1513 	     See also PR33613.  */
1514 	  if (!rld[i].in
1515 	      || GET_MODE_SIZE (GET_MODE (in))
1516 	           > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1517 	    rld[i].in = in;
1518 	  if (!rld[i].in_reg
1519 	      || (in_reg
1520 		  && GET_MODE_SIZE (GET_MODE (in_reg))
1521 	             > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1522 	    rld[i].in_reg = in_reg;
1523 	}
1524       if (out != 0)
1525 	{
1526 	  if (!rld[i].out
1527 	      || (out
1528 		  && GET_MODE_SIZE (GET_MODE (out))
1529 	             > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1530 	    rld[i].out = out;
1531 	  if (outloc
1532 	      && (!rld[i].out_reg
1533 		  || GET_MODE_SIZE (GET_MODE (*outloc))
1534 		     > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1535 	    rld[i].out_reg = *outloc;
1536 	}
1537       if (reg_class_subset_p (rclass, rld[i].rclass))
1538 	rld[i].rclass = rclass;
1539       rld[i].optional &= optional;
1540       if (MERGE_TO_OTHER (type, rld[i].when_needed,
1541 			  opnum, rld[i].opnum))
1542 	rld[i].when_needed = RELOAD_OTHER;
1543       rld[i].opnum = MIN (rld[i].opnum, opnum);
1544     }
1545 
1546   /* If the ostensible rtx being reloaded differs from the rtx found
1547      in the location to substitute, this reload is not safe to combine
1548      because we cannot reliably tell whether it appears in the insn.  */
1549 
1550   if (in != 0 && in != *inloc)
1551     rld[i].nocombine = 1;
1552 
1553 #if 0
1554   /* This was replaced by changes in find_reloads_address_1 and the new
1555      function inc_for_reload, which go with a new meaning of reload_inc.  */
1556 
1557   /* If this is an IN/OUT reload in an insn that sets the CC,
1558      it must be for an autoincrement.  It doesn't work to store
1559      the incremented value after the insn because that would clobber the CC.
1560      So we must do the increment of the value reloaded from,
1561      increment it, store it back, then decrement again.  */
1562   if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1563     {
1564       out = 0;
1565       rld[i].out = 0;
1566       rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1567       /* If we did not find a nonzero amount-to-increment-by,
1568 	 that contradicts the belief that IN is being incremented
1569 	 in an address in this insn.  */
1570       gcc_assert (rld[i].inc != 0);
1571     }
1572 #endif
1573 
1574   /* If we will replace IN and OUT with the reload-reg,
1575      record where they are located so that substitution need
1576      not do a tree walk.  */
1577 
1578   if (replace_reloads)
1579     {
1580       if (inloc != 0)
1581 	{
1582 	  struct replacement *r = &replacements[n_replacements++];
1583 	  r->what = i;
1584 	  r->where = inloc;
1585 	  r->mode = inmode;
1586 	}
1587       if (outloc != 0 && outloc != inloc)
1588 	{
1589 	  struct replacement *r = &replacements[n_replacements++];
1590 	  r->what = i;
1591 	  r->where = outloc;
1592 	  r->mode = outmode;
1593 	}
1594     }
1595 
1596   /* If this reload is just being introduced and it has both
1597      an incoming quantity and an outgoing quantity that are
1598      supposed to be made to match, see if either one of the two
1599      can serve as the place to reload into.
1600 
1601      If one of them is acceptable, set rld[i].reg_rtx
1602      to that one.  */
1603 
1604   if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1605     {
1606       rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1607 					  inmode, outmode,
1608 					  rld[i].rclass, i,
1609 					  earlyclobber_operand_p (out));
1610 
1611       /* If the outgoing register already contains the same value
1612 	 as the incoming one, we can dispense with loading it.
1613 	 The easiest way to tell the caller that is to give a phony
1614 	 value for the incoming operand (same as outgoing one).  */
1615       if (rld[i].reg_rtx == out
1616 	  && (REG_P (in) || CONSTANT_P (in))
1617 	  && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1618 				  static_reload_reg_p, i, inmode))
1619 	rld[i].in = out;
1620     }
1621 
1622   /* If this is an input reload and the operand contains a register that
1623      dies in this insn and is used nowhere else, see if it is the right class
1624      to be used for this reload.  Use it if so.  (This occurs most commonly
1625      in the case of paradoxical SUBREGs and in-out reloads).  We cannot do
1626      this if it is also an output reload that mentions the register unless
1627      the output is a SUBREG that clobbers an entire register.
1628 
1629      Note that the operand might be one of the spill regs, if it is a
1630      pseudo reg and we are in a block where spilling has not taken place.
1631      But if there is no spilling in this block, that is OK.
1632      An explicitly used hard reg cannot be a spill reg.  */
1633 
1634   if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1635     {
1636       rtx note;
1637       int regno;
1638       machine_mode rel_mode = inmode;
1639 
1640       if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1641 	rel_mode = outmode;
1642 
1643       for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1644 	if (REG_NOTE_KIND (note) == REG_DEAD
1645 	    && REG_P (XEXP (note, 0))
1646 	    && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1647 	    && reg_mentioned_p (XEXP (note, 0), in)
1648 	    /* Check that a former pseudo is valid; see find_dummy_reload.  */
1649 	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1650 		|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1651 				    ORIGINAL_REGNO (XEXP (note, 0)))
1652 		    && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1653 	    && ! refers_to_regno_for_reload_p (regno,
1654 					       end_hard_regno (rel_mode,
1655 							       regno),
1656 					       PATTERN (this_insn), inloc)
1657 	    && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1658 	    /* If this is also an output reload, IN cannot be used as
1659 	       the reload register if it is set in this insn unless IN
1660 	       is also OUT.  */
1661 	    && (out == 0 || in == out
1662 		|| ! hard_reg_set_here_p (regno,
1663 					  end_hard_regno (rel_mode, regno),
1664 					  PATTERN (this_insn)))
1665 	    /* ??? Why is this code so different from the previous?
1666 	       Is there any simple coherent way to describe the two together?
1667 	       What's going on here.  */
1668 	    && (in != out
1669 		|| (GET_CODE (in) == SUBREG
1670 		    && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1671 			 / UNITS_PER_WORD)
1672 			== ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1673 			     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1674 	    /* Make sure the operand fits in the reg that dies.  */
1675 	    && (GET_MODE_SIZE (rel_mode)
1676 		<= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1677 	    && HARD_REGNO_MODE_OK (regno, inmode)
1678 	    && HARD_REGNO_MODE_OK (regno, outmode))
1679 	  {
1680 	    unsigned int offs;
1681 	    unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1682 				      hard_regno_nregs[regno][outmode]);
1683 
1684 	    for (offs = 0; offs < nregs; offs++)
1685 	      if (fixed_regs[regno + offs]
1686 		  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1687 					  regno + offs))
1688 		break;
1689 
1690 	    if (offs == nregs
1691 		&& (! (refers_to_regno_for_reload_p
1692 		       (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1693 		    || can_reload_into (in, regno, inmode)))
1694 	      {
1695 		rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1696 		break;
1697 	      }
1698 	  }
1699     }
1700 
1701   if (out)
1702     output_reloadnum = i;
1703 
1704   return i;
1705 }
1706 
1707 /* Record an additional place we must replace a value
1708    for which we have already recorded a reload.
1709    RELOADNUM is the value returned by push_reload
1710    when the reload was recorded.
1711    This is used in insn patterns that use match_dup.  */
1712 
1713 static void
1714 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1715 {
1716   if (replace_reloads)
1717     {
1718       struct replacement *r = &replacements[n_replacements++];
1719       r->what = reloadnum;
1720       r->where = loc;
1721       r->mode = mode;
1722     }
1723 }
1724 
1725 /* Duplicate any replacement we have recorded to apply at
1726    location ORIG_LOC to also be performed at DUP_LOC.
1727    This is used in insn patterns that use match_dup.  */
1728 
1729 static void
1730 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1731 {
1732   int i, n = n_replacements;
1733 
1734   for (i = 0; i < n; i++)
1735     {
1736       struct replacement *r = &replacements[i];
1737       if (r->where == orig_loc)
1738 	push_replacement (dup_loc, r->what, r->mode);
1739     }
1740 }
1741 
1742 /* Transfer all replacements that used to be in reload FROM to be in
1743    reload TO.  */
1744 
1745 void
1746 transfer_replacements (int to, int from)
1747 {
1748   int i;
1749 
1750   for (i = 0; i < n_replacements; i++)
1751     if (replacements[i].what == from)
1752       replacements[i].what = to;
1753 }
1754 
1755 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1756    or a subpart of it.  If we have any replacements registered for IN_RTX,
1757    cancel the reloads that were supposed to load them.
1758    Return nonzero if we canceled any reloads.  */
1759 int
1760 remove_address_replacements (rtx in_rtx)
1761 {
1762   int i, j;
1763   char reload_flags[MAX_RELOADS];
1764   int something_changed = 0;
1765 
1766   memset (reload_flags, 0, sizeof reload_flags);
1767   for (i = 0, j = 0; i < n_replacements; i++)
1768     {
1769       if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1770 	reload_flags[replacements[i].what] |= 1;
1771       else
1772 	{
1773 	  replacements[j++] = replacements[i];
1774 	  reload_flags[replacements[i].what] |= 2;
1775 	}
1776     }
1777   /* Note that the following store must be done before the recursive calls.  */
1778   n_replacements = j;
1779 
1780   for (i = n_reloads - 1; i >= 0; i--)
1781     {
1782       if (reload_flags[i] == 1)
1783 	{
1784 	  deallocate_reload_reg (i);
1785 	  remove_address_replacements (rld[i].in);
1786 	  rld[i].in = 0;
1787 	  something_changed = 1;
1788 	}
1789     }
1790   return something_changed;
1791 }
1792 
1793 /* If there is only one output reload, and it is not for an earlyclobber
1794    operand, try to combine it with a (logically unrelated) input reload
1795    to reduce the number of reload registers needed.
1796 
1797    This is safe if the input reload does not appear in
1798    the value being output-reloaded, because this implies
1799    it is not needed any more once the original insn completes.
1800 
1801    If that doesn't work, see we can use any of the registers that
1802    die in this insn as a reload register.  We can if it is of the right
1803    class and does not appear in the value being output-reloaded.  */
1804 
1805 static void
1806 combine_reloads (void)
1807 {
1808   int i, regno;
1809   int output_reload = -1;
1810   int secondary_out = -1;
1811   rtx note;
1812 
1813   /* Find the output reload; return unless there is exactly one
1814      and that one is mandatory.  */
1815 
1816   for (i = 0; i < n_reloads; i++)
1817     if (rld[i].out != 0)
1818       {
1819 	if (output_reload >= 0)
1820 	  return;
1821 	output_reload = i;
1822       }
1823 
1824   if (output_reload < 0 || rld[output_reload].optional)
1825     return;
1826 
1827   /* An input-output reload isn't combinable.  */
1828 
1829   if (rld[output_reload].in != 0)
1830     return;
1831 
1832   /* If this reload is for an earlyclobber operand, we can't do anything.  */
1833   if (earlyclobber_operand_p (rld[output_reload].out))
1834     return;
1835 
1836   /* If there is a reload for part of the address of this operand, we would
1837      need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
1838      its life to the point where doing this combine would not lower the
1839      number of spill registers needed.  */
1840   for (i = 0; i < n_reloads; i++)
1841     if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1842 	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1843 	&& rld[i].opnum == rld[output_reload].opnum)
1844       return;
1845 
1846   /* Check each input reload; can we combine it?  */
1847 
1848   for (i = 0; i < n_reloads; i++)
1849     if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1850 	/* Life span of this reload must not extend past main insn.  */
1851 	&& rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1852 	&& rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1853 	&& rld[i].when_needed != RELOAD_OTHER
1854 	&& (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1855 	    == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1856 				       [(int) rld[output_reload].outmode])
1857 	&& rld[i].inc == 0
1858 	&& rld[i].reg_rtx == 0
1859 #ifdef SECONDARY_MEMORY_NEEDED
1860 	/* Don't combine two reloads with different secondary
1861 	   memory locations.  */
1862 	&& (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1863 	    || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1864 	    || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1865 			    secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1866 #endif
1867 	&& (targetm.small_register_classes_for_mode_p (VOIDmode)
1868 	    ? (rld[i].rclass == rld[output_reload].rclass)
1869 	    : (reg_class_subset_p (rld[i].rclass,
1870 				   rld[output_reload].rclass)
1871 	       || reg_class_subset_p (rld[output_reload].rclass,
1872 				      rld[i].rclass)))
1873 	&& (MATCHES (rld[i].in, rld[output_reload].out)
1874 	    /* Args reversed because the first arg seems to be
1875 	       the one that we imagine being modified
1876 	       while the second is the one that might be affected.  */
1877 	    || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1878 						      rld[i].in)
1879 		/* However, if the input is a register that appears inside
1880 		   the output, then we also can't share.
1881 		   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1882 		   If the same reload reg is used for both reg 69 and the
1883 		   result to be stored in memory, then that result
1884 		   will clobber the address of the memory ref.  */
1885 		&& ! (REG_P (rld[i].in)
1886 		      && reg_overlap_mentioned_for_reload_p (rld[i].in,
1887 							     rld[output_reload].out))))
1888 	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1889 					 rld[i].when_needed != RELOAD_FOR_INPUT)
1890 	&& (reg_class_size[(int) rld[i].rclass]
1891 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
1892 	/* We will allow making things slightly worse by combining an
1893 	   input and an output, but no worse than that.  */
1894 	&& (rld[i].when_needed == RELOAD_FOR_INPUT
1895 	    || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1896       {
1897 	int j;
1898 
1899 	/* We have found a reload to combine with!  */
1900 	rld[i].out = rld[output_reload].out;
1901 	rld[i].out_reg = rld[output_reload].out_reg;
1902 	rld[i].outmode = rld[output_reload].outmode;
1903 	/* Mark the old output reload as inoperative.  */
1904 	rld[output_reload].out = 0;
1905 	/* The combined reload is needed for the entire insn.  */
1906 	rld[i].when_needed = RELOAD_OTHER;
1907 	/* If the output reload had a secondary reload, copy it.  */
1908 	if (rld[output_reload].secondary_out_reload != -1)
1909 	  {
1910 	    rld[i].secondary_out_reload
1911 	      = rld[output_reload].secondary_out_reload;
1912 	    rld[i].secondary_out_icode
1913 	      = rld[output_reload].secondary_out_icode;
1914 	  }
1915 
1916 #ifdef SECONDARY_MEMORY_NEEDED
1917 	/* Copy any secondary MEM.  */
1918 	if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1919 	  secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1920 	    = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1921 #endif
1922 	/* If required, minimize the register class.  */
1923 	if (reg_class_subset_p (rld[output_reload].rclass,
1924 				rld[i].rclass))
1925 	  rld[i].rclass = rld[output_reload].rclass;
1926 
1927 	/* Transfer all replacements from the old reload to the combined.  */
1928 	for (j = 0; j < n_replacements; j++)
1929 	  if (replacements[j].what == output_reload)
1930 	    replacements[j].what = i;
1931 
1932 	return;
1933       }
1934 
1935   /* If this insn has only one operand that is modified or written (assumed
1936      to be the first),  it must be the one corresponding to this reload.  It
1937      is safe to use anything that dies in this insn for that output provided
1938      that it does not occur in the output (we already know it isn't an
1939      earlyclobber.  If this is an asm insn, give up.  */
1940 
1941   if (INSN_CODE (this_insn) == -1)
1942     return;
1943 
1944   for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1945     if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1946 	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1947       return;
1948 
1949   /* See if some hard register that dies in this insn and is not used in
1950      the output is the right class.  Only works if the register we pick
1951      up can fully hold our output reload.  */
1952   for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1953     if (REG_NOTE_KIND (note) == REG_DEAD
1954 	&& REG_P (XEXP (note, 0))
1955 	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1956 						rld[output_reload].out)
1957 	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1958 	&& HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1959 	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1960 			      regno)
1961 	&& (hard_regno_nregs[regno][rld[output_reload].outmode]
1962 	    <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1963 	/* Ensure that a secondary or tertiary reload for this output
1964 	   won't want this register.  */
1965 	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1966 	    || (!(TEST_HARD_REG_BIT
1967 		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1968 		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1969 		    || !(TEST_HARD_REG_BIT
1970 			 (reg_class_contents[(int) rld[secondary_out].rclass],
1971 			  regno)))))
1972 	&& !fixed_regs[regno]
1973 	/* Check that a former pseudo is valid; see find_dummy_reload.  */
1974 	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1975 	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1976 			       ORIGINAL_REGNO (XEXP (note, 0)))
1977 		&& hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1978       {
1979 	rld[output_reload].reg_rtx
1980 	  = gen_rtx_REG (rld[output_reload].outmode, regno);
1981 	return;
1982       }
1983 }
1984 
1985 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1986    See if one of IN and OUT is a register that may be used;
1987    this is desirable since a spill-register won't be needed.
1988    If so, return the register rtx that proves acceptable.
1989 
1990    INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1991    RCLASS is the register class required for the reload.
1992 
1993    If FOR_REAL is >= 0, it is the number of the reload,
1994    and in some cases when it can be discovered that OUT doesn't need
1995    to be computed, clear out rld[FOR_REAL].out.
1996 
1997    If FOR_REAL is -1, this should not be done, because this call
1998    is just to see if a register can be found, not to find and install it.
1999 
2000    EARLYCLOBBER is nonzero if OUT is an earlyclobber operand.  This
2001    puts an additional constraint on being able to use IN for OUT since
2002    IN must not appear elsewhere in the insn (it is assumed that IN itself
2003    is safe from the earlyclobber).  */
2004 
2005 static rtx
2006 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
2007 		   machine_mode inmode, machine_mode outmode,
2008 		   reg_class_t rclass, int for_real, int earlyclobber)
2009 {
2010   rtx in = real_in;
2011   rtx out = real_out;
2012   int in_offset = 0;
2013   int out_offset = 0;
2014   rtx value = 0;
2015 
2016   /* If operands exceed a word, we can't use either of them
2017      unless they have the same size.  */
2018   if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
2019       && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
2020 	  || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
2021     return 0;
2022 
2023   /* Note that {in,out}_offset are needed only when 'in' or 'out'
2024      respectively refers to a hard register.  */
2025 
2026   /* Find the inside of any subregs.  */
2027   while (GET_CODE (out) == SUBREG)
2028     {
2029       if (REG_P (SUBREG_REG (out))
2030 	  && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
2031 	out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
2032 					   GET_MODE (SUBREG_REG (out)),
2033 					   SUBREG_BYTE (out),
2034 					   GET_MODE (out));
2035       out = SUBREG_REG (out);
2036     }
2037   while (GET_CODE (in) == SUBREG)
2038     {
2039       if (REG_P (SUBREG_REG (in))
2040 	  && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
2041 	in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2042 					  GET_MODE (SUBREG_REG (in)),
2043 					  SUBREG_BYTE (in),
2044 					  GET_MODE (in));
2045       in = SUBREG_REG (in);
2046     }
2047 
2048   /* Narrow down the reg class, the same way push_reload will;
2049      otherwise we might find a dummy now, but push_reload won't.  */
2050   {
2051     reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2052     if (preferred_class != NO_REGS)
2053       rclass = (enum reg_class) preferred_class;
2054   }
2055 
2056   /* See if OUT will do.  */
2057   if (REG_P (out)
2058       && REGNO (out) < FIRST_PSEUDO_REGISTER)
2059     {
2060       unsigned int regno = REGNO (out) + out_offset;
2061       unsigned int nwords = hard_regno_nregs[regno][outmode];
2062       rtx saved_rtx;
2063 
2064       /* When we consider whether the insn uses OUT,
2065 	 ignore references within IN.  They don't prevent us
2066 	 from copying IN into OUT, because those refs would
2067 	 move into the insn that reloads IN.
2068 
2069 	 However, we only ignore IN in its role as this reload.
2070 	 If the insn uses IN elsewhere and it contains OUT,
2071 	 that counts.  We can't be sure it's the "same" operand
2072 	 so it might not go through this reload.
2073 
2074          We also need to avoid using OUT if it, or part of it, is a
2075          fixed register.  Modifying such registers, even transiently,
2076          may have undefined effects on the machine, such as modifying
2077          the stack pointer.  */
2078       saved_rtx = *inloc;
2079       *inloc = const0_rtx;
2080 
2081       if (regno < FIRST_PSEUDO_REGISTER
2082 	  && HARD_REGNO_MODE_OK (regno, outmode)
2083 	  && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2084 					     PATTERN (this_insn), outloc))
2085 	{
2086 	  unsigned int i;
2087 
2088 	  for (i = 0; i < nwords; i++)
2089 	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2090 				     regno + i)
2091 		|| fixed_regs[regno + i])
2092 	      break;
2093 
2094 	  if (i == nwords)
2095 	    {
2096 	      if (REG_P (real_out))
2097 		value = real_out;
2098 	      else
2099 		value = gen_rtx_REG (outmode, regno);
2100 	    }
2101 	}
2102 
2103       *inloc = saved_rtx;
2104     }
2105 
2106   /* Consider using IN if OUT was not acceptable
2107      or if OUT dies in this insn (like the quotient in a divmod insn).
2108      We can't use IN unless it is dies in this insn,
2109      which means we must know accurately which hard regs are live.
2110      Also, the result can't go in IN if IN is used within OUT,
2111      or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
2112   if (hard_regs_live_known
2113       && REG_P (in)
2114       && REGNO (in) < FIRST_PSEUDO_REGISTER
2115       && (value == 0
2116 	  || find_reg_note (this_insn, REG_UNUSED, real_out))
2117       && find_reg_note (this_insn, REG_DEAD, real_in)
2118       && !fixed_regs[REGNO (in)]
2119       && HARD_REGNO_MODE_OK (REGNO (in),
2120 			     /* The only case where out and real_out might
2121 				have different modes is where real_out
2122 				is a subreg, and in that case, out
2123 				has a real mode.  */
2124 			     (GET_MODE (out) != VOIDmode
2125 			      ? GET_MODE (out) : outmode))
2126       && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2127 	  /* However only do this if we can be sure that this input
2128 	     operand doesn't correspond with an uninitialized pseudo.
2129 	     global can assign some hardreg to it that is the same as
2130 	     the one assigned to a different, also live pseudo (as it
2131 	     can ignore the conflict).  We must never introduce writes
2132 	     to such hardregs, as they would clobber the other live
2133 	     pseudo.  See PR 20973.  */
2134 	  || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2135 			     ORIGINAL_REGNO (in))
2136 	      /* Similarly, only do this if we can be sure that the death
2137 		 note is still valid.  global can assign some hardreg to
2138 		 the pseudo referenced in the note and simultaneously a
2139 		 subword of this hardreg to a different, also live pseudo,
2140 		 because only another subword of the hardreg is actually
2141 		 used in the insn.  This cannot happen if the pseudo has
2142 		 been assigned exactly one hardreg.  See PR 33732.  */
2143 	      && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2144     {
2145       unsigned int regno = REGNO (in) + in_offset;
2146       unsigned int nwords = hard_regno_nregs[regno][inmode];
2147 
2148       if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2149 	  && ! hard_reg_set_here_p (regno, regno + nwords,
2150 				    PATTERN (this_insn))
2151 	  && (! earlyclobber
2152 	      || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2153 						 PATTERN (this_insn), inloc)))
2154 	{
2155 	  unsigned int i;
2156 
2157 	  for (i = 0; i < nwords; i++)
2158 	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2159 				     regno + i))
2160 	      break;
2161 
2162 	  if (i == nwords)
2163 	    {
2164 	      /* If we were going to use OUT as the reload reg
2165 		 and changed our mind, it means OUT is a dummy that
2166 		 dies here.  So don't bother copying value to it.  */
2167 	      if (for_real >= 0 && value == real_out)
2168 		rld[for_real].out = 0;
2169 	      if (REG_P (real_in))
2170 		value = real_in;
2171 	      else
2172 		value = gen_rtx_REG (inmode, regno);
2173 	    }
2174 	}
2175     }
2176 
2177   return value;
2178 }
2179 
2180 /* This page contains subroutines used mainly for determining
2181    whether the IN or an OUT of a reload can serve as the
2182    reload register.  */
2183 
2184 /* Return 1 if X is an operand of an insn that is being earlyclobbered.  */
2185 
2186 int
2187 earlyclobber_operand_p (rtx x)
2188 {
2189   int i;
2190 
2191   for (i = 0; i < n_earlyclobbers; i++)
2192     if (reload_earlyclobbers[i] == x)
2193       return 1;
2194 
2195   return 0;
2196 }
2197 
2198 /* Return 1 if expression X alters a hard reg in the range
2199    from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2200    either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2201    X should be the body of an instruction.  */
2202 
2203 static int
2204 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2205 {
2206   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2207     {
2208       rtx op0 = SET_DEST (x);
2209 
2210       while (GET_CODE (op0) == SUBREG)
2211 	op0 = SUBREG_REG (op0);
2212       if (REG_P (op0))
2213 	{
2214 	  unsigned int r = REGNO (op0);
2215 
2216 	  /* See if this reg overlaps range under consideration.  */
2217 	  if (r < end_regno
2218 	      && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2219 	    return 1;
2220 	}
2221     }
2222   else if (GET_CODE (x) == PARALLEL)
2223     {
2224       int i = XVECLEN (x, 0) - 1;
2225 
2226       for (; i >= 0; i--)
2227 	if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2228 	  return 1;
2229     }
2230 
2231   return 0;
2232 }
2233 
2234 /* Return 1 if ADDR is a valid memory address for mode MODE
2235    in address space AS, and check that each pseudo reg has the
2236    proper kind of hard reg.  */
2237 
2238 int
2239 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2240 				    rtx addr, addr_space_t as)
2241 {
2242 #ifdef GO_IF_LEGITIMATE_ADDRESS
2243   gcc_assert (ADDR_SPACE_GENERIC_P (as));
2244   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2245   return 0;
2246 
2247  win:
2248   return 1;
2249 #else
2250   return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2251 #endif
2252 }
2253 
2254 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2255    if they are the same hard reg, and has special hacks for
2256    autoincrement and autodecrement.
2257    This is specifically intended for find_reloads to use
2258    in determining whether two operands match.
2259    X is the operand whose number is the lower of the two.
2260 
2261    The value is 2 if Y contains a pre-increment that matches
2262    a non-incrementing address in X.  */
2263 
2264 /* ??? To be completely correct, we should arrange to pass
2265    for X the output operand and for Y the input operand.
2266    For now, we assume that the output operand has the lower number
2267    because that is natural in (SET output (... input ...)).  */
2268 
2269 int
2270 operands_match_p (rtx x, rtx y)
2271 {
2272   int i;
2273   RTX_CODE code = GET_CODE (x);
2274   const char *fmt;
2275   int success_2;
2276 
2277   if (x == y)
2278     return 1;
2279   if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2280       && (REG_P (y) || (GET_CODE (y) == SUBREG
2281 				  && REG_P (SUBREG_REG (y)))))
2282     {
2283       int j;
2284 
2285       if (code == SUBREG)
2286 	{
2287 	  i = REGNO (SUBREG_REG (x));
2288 	  if (i >= FIRST_PSEUDO_REGISTER)
2289 	    goto slow;
2290 	  i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2291 				    GET_MODE (SUBREG_REG (x)),
2292 				    SUBREG_BYTE (x),
2293 				    GET_MODE (x));
2294 	}
2295       else
2296 	i = REGNO (x);
2297 
2298       if (GET_CODE (y) == SUBREG)
2299 	{
2300 	  j = REGNO (SUBREG_REG (y));
2301 	  if (j >= FIRST_PSEUDO_REGISTER)
2302 	    goto slow;
2303 	  j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2304 				    GET_MODE (SUBREG_REG (y)),
2305 				    SUBREG_BYTE (y),
2306 				    GET_MODE (y));
2307 	}
2308       else
2309 	j = REGNO (y);
2310 
2311       /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2312 	 multiple hard register group of scalar integer registers, so that
2313 	 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2314 	 register.  */
2315       if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2316 	  && SCALAR_INT_MODE_P (GET_MODE (x))
2317 	  && i < FIRST_PSEUDO_REGISTER)
2318 	i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2319       if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2320 	  && SCALAR_INT_MODE_P (GET_MODE (y))
2321 	  && j < FIRST_PSEUDO_REGISTER)
2322 	j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2323 
2324       return i == j;
2325     }
2326   /* If two operands must match, because they are really a single
2327      operand of an assembler insn, then two postincrements are invalid
2328      because the assembler insn would increment only once.
2329      On the other hand, a postincrement matches ordinary indexing
2330      if the postincrement is the output operand.  */
2331   if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2332     return operands_match_p (XEXP (x, 0), y);
2333   /* Two preincrements are invalid
2334      because the assembler insn would increment only once.
2335      On the other hand, a preincrement matches ordinary indexing
2336      if the preincrement is the input operand.
2337      In this case, return 2, since some callers need to do special
2338      things when this happens.  */
2339   if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2340       || GET_CODE (y) == PRE_MODIFY)
2341     return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2342 
2343  slow:
2344 
2345   /* Now we have disposed of all the cases in which different rtx codes
2346      can match.  */
2347   if (code != GET_CODE (y))
2348     return 0;
2349 
2350   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2351   if (GET_MODE (x) != GET_MODE (y))
2352     return 0;
2353 
2354   /* MEMs referring to different address space are not equivalent.  */
2355   if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2356     return 0;
2357 
2358   switch (code)
2359     {
2360     CASE_CONST_UNIQUE:
2361       return 0;
2362 
2363     case LABEL_REF:
2364       return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
2365     case SYMBOL_REF:
2366       return XSTR (x, 0) == XSTR (y, 0);
2367 
2368     default:
2369       break;
2370     }
2371 
2372   /* Compare the elements.  If any pair of corresponding elements
2373      fail to match, return 0 for the whole things.  */
2374 
2375   success_2 = 0;
2376   fmt = GET_RTX_FORMAT (code);
2377   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2378     {
2379       int val, j;
2380       switch (fmt[i])
2381 	{
2382 	case 'w':
2383 	  if (XWINT (x, i) != XWINT (y, i))
2384 	    return 0;
2385 	  break;
2386 
2387 	case 'i':
2388 	  if (XINT (x, i) != XINT (y, i))
2389 	    return 0;
2390 	  break;
2391 
2392 	case 'e':
2393 	  val = operands_match_p (XEXP (x, i), XEXP (y, i));
2394 	  if (val == 0)
2395 	    return 0;
2396 	  /* If any subexpression returns 2,
2397 	     we should return 2 if we are successful.  */
2398 	  if (val == 2)
2399 	    success_2 = 1;
2400 	  break;
2401 
2402 	case '0':
2403 	  break;
2404 
2405 	case 'E':
2406 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2407 	    return 0;
2408 	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2409 	    {
2410 	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2411 	      if (val == 0)
2412 		return 0;
2413 	      if (val == 2)
2414 		success_2 = 1;
2415 	    }
2416 	  break;
2417 
2418 	  /* It is believed that rtx's at this level will never
2419 	     contain anything but integers and other rtx's,
2420 	     except for within LABEL_REFs and SYMBOL_REFs.  */
2421 	default:
2422 	  gcc_unreachable ();
2423 	}
2424     }
2425   return 1 + success_2;
2426 }
2427 
2428 /* Describe the range of registers or memory referenced by X.
2429    If X is a register, set REG_FLAG and put the first register
2430    number into START and the last plus one into END.
2431    If X is a memory reference, put a base address into BASE
2432    and a range of integer offsets into START and END.
2433    If X is pushing on the stack, we can assume it causes no trouble,
2434    so we set the SAFE field.  */
2435 
2436 static struct decomposition
2437 decompose (rtx x)
2438 {
2439   struct decomposition val;
2440   int all_const = 0;
2441 
2442   memset (&val, 0, sizeof (val));
2443 
2444   switch (GET_CODE (x))
2445     {
2446     case MEM:
2447       {
2448 	rtx base = NULL_RTX, offset = 0;
2449 	rtx addr = XEXP (x, 0);
2450 
2451 	if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2452 	    || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2453 	  {
2454 	    val.base = XEXP (addr, 0);
2455 	    val.start = -GET_MODE_SIZE (GET_MODE (x));
2456 	    val.end = GET_MODE_SIZE (GET_MODE (x));
2457 	    val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2458 	    return val;
2459 	  }
2460 
2461 	if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2462 	  {
2463 	    if (GET_CODE (XEXP (addr, 1)) == PLUS
2464 		&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2465 		&& CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2466 	      {
2467 		val.base  = XEXP (addr, 0);
2468 		val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2469 		val.end   = INTVAL (XEXP (XEXP (addr, 1), 1));
2470 		val.safe  = REGNO (val.base) == STACK_POINTER_REGNUM;
2471 		return val;
2472 	      }
2473 	  }
2474 
2475 	if (GET_CODE (addr) == CONST)
2476 	  {
2477 	    addr = XEXP (addr, 0);
2478 	    all_const = 1;
2479 	  }
2480 	if (GET_CODE (addr) == PLUS)
2481 	  {
2482 	    if (CONSTANT_P (XEXP (addr, 0)))
2483 	      {
2484 		base = XEXP (addr, 1);
2485 		offset = XEXP (addr, 0);
2486 	      }
2487 	    else if (CONSTANT_P (XEXP (addr, 1)))
2488 	      {
2489 		base = XEXP (addr, 0);
2490 		offset = XEXP (addr, 1);
2491 	      }
2492 	  }
2493 
2494 	if (offset == 0)
2495 	  {
2496 	    base = addr;
2497 	    offset = const0_rtx;
2498 	  }
2499 	if (GET_CODE (offset) == CONST)
2500 	  offset = XEXP (offset, 0);
2501 	if (GET_CODE (offset) == PLUS)
2502 	  {
2503 	    if (CONST_INT_P (XEXP (offset, 0)))
2504 	      {
2505 		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2506 		offset = XEXP (offset, 0);
2507 	      }
2508 	    else if (CONST_INT_P (XEXP (offset, 1)))
2509 	      {
2510 		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2511 		offset = XEXP (offset, 1);
2512 	      }
2513 	    else
2514 	      {
2515 		base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2516 		offset = const0_rtx;
2517 	      }
2518 	  }
2519 	else if (!CONST_INT_P (offset))
2520 	  {
2521 	    base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2522 	    offset = const0_rtx;
2523 	  }
2524 
2525 	if (all_const && GET_CODE (base) == PLUS)
2526 	  base = gen_rtx_CONST (GET_MODE (base), base);
2527 
2528 	gcc_assert (CONST_INT_P (offset));
2529 
2530 	val.start = INTVAL (offset);
2531 	val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2532 	val.base = base;
2533       }
2534       break;
2535 
2536     case REG:
2537       val.reg_flag = 1;
2538       val.start = true_regnum (x);
2539       if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2540 	{
2541 	  /* A pseudo with no hard reg.  */
2542 	  val.start = REGNO (x);
2543 	  val.end = val.start + 1;
2544 	}
2545       else
2546 	/* A hard reg.  */
2547 	val.end = end_hard_regno (GET_MODE (x), val.start);
2548       break;
2549 
2550     case SUBREG:
2551       if (!REG_P (SUBREG_REG (x)))
2552 	/* This could be more precise, but it's good enough.  */
2553 	return decompose (SUBREG_REG (x));
2554       val.reg_flag = 1;
2555       val.start = true_regnum (x);
2556       if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2557 	return decompose (SUBREG_REG (x));
2558       else
2559 	/* A hard reg.  */
2560 	val.end = val.start + subreg_nregs (x);
2561       break;
2562 
2563     case SCRATCH:
2564       /* This hasn't been assigned yet, so it can't conflict yet.  */
2565       val.safe = 1;
2566       break;
2567 
2568     default:
2569       gcc_assert (CONSTANT_P (x));
2570       val.safe = 1;
2571       break;
2572     }
2573   return val;
2574 }
2575 
2576 /* Return 1 if altering Y will not modify the value of X.
2577    Y is also described by YDATA, which should be decompose (Y).  */
2578 
2579 static int
2580 immune_p (rtx x, rtx y, struct decomposition ydata)
2581 {
2582   struct decomposition xdata;
2583 
2584   if (ydata.reg_flag)
2585     return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2586   if (ydata.safe)
2587     return 1;
2588 
2589   gcc_assert (MEM_P (y));
2590   /* If Y is memory and X is not, Y can't affect X.  */
2591   if (!MEM_P (x))
2592     return 1;
2593 
2594   xdata = decompose (x);
2595 
2596   if (! rtx_equal_p (xdata.base, ydata.base))
2597     {
2598       /* If bases are distinct symbolic constants, there is no overlap.  */
2599       if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2600 	return 1;
2601       /* Constants and stack slots never overlap.  */
2602       if (CONSTANT_P (xdata.base)
2603 	  && (ydata.base == frame_pointer_rtx
2604 	      || ydata.base == hard_frame_pointer_rtx
2605 	      || ydata.base == stack_pointer_rtx))
2606 	return 1;
2607       if (CONSTANT_P (ydata.base)
2608 	  && (xdata.base == frame_pointer_rtx
2609 	      || xdata.base == hard_frame_pointer_rtx
2610 	      || xdata.base == stack_pointer_rtx))
2611 	return 1;
2612       /* If either base is variable, we don't know anything.  */
2613       return 0;
2614     }
2615 
2616   return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2617 }
2618 
2619 /* Similar, but calls decompose.  */
2620 
2621 int
2622 safe_from_earlyclobber (rtx op, rtx clobber)
2623 {
2624   struct decomposition early_data;
2625 
2626   early_data = decompose (clobber);
2627   return immune_p (op, clobber, early_data);
2628 }
2629 
2630 /* Main entry point of this file: search the body of INSN
2631    for values that need reloading and record them with push_reload.
2632    REPLACE nonzero means record also where the values occur
2633    so that subst_reloads can be used.
2634 
2635    IND_LEVELS says how many levels of indirection are supported by this
2636    machine; a value of zero means that a memory reference is not a valid
2637    memory address.
2638 
2639    LIVE_KNOWN says we have valid information about which hard
2640    regs are live at each point in the program; this is true when
2641    we are called from global_alloc but false when stupid register
2642    allocation has been done.
2643 
2644    RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2645    which is nonnegative if the reg has been commandeered for reloading into.
2646    It is copied into STATIC_RELOAD_REG_P and referenced from there
2647    by various subroutines.
2648 
2649    Return TRUE if some operands need to be changed, because of swapping
2650    commutative operands, reg_equiv_address substitution, or whatever.  */
2651 
2652 int
2653 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2654 	      short *reload_reg_p)
2655 {
2656   int insn_code_number;
2657   int i, j;
2658   int noperands;
2659   /* These start out as the constraints for the insn
2660      and they are chewed up as we consider alternatives.  */
2661   const char *constraints[MAX_RECOG_OPERANDS];
2662   /* These are the preferred classes for an operand, or NO_REGS if it isn't
2663      a register.  */
2664   enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2665   char pref_or_nothing[MAX_RECOG_OPERANDS];
2666   /* Nonzero for a MEM operand whose entire address needs a reload.
2667      May be -1 to indicate the entire address may or may not need a reload.  */
2668   int address_reloaded[MAX_RECOG_OPERANDS];
2669   /* Nonzero for an address operand that needs to be completely reloaded.
2670      May be -1 to indicate the entire operand may or may not need a reload.  */
2671   int address_operand_reloaded[MAX_RECOG_OPERANDS];
2672   /* Value of enum reload_type to use for operand.  */
2673   enum reload_type operand_type[MAX_RECOG_OPERANDS];
2674   /* Value of enum reload_type to use within address of operand.  */
2675   enum reload_type address_type[MAX_RECOG_OPERANDS];
2676   /* Save the usage of each operand.  */
2677   enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2678   int no_input_reloads = 0, no_output_reloads = 0;
2679   int n_alternatives;
2680   reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2681   char this_alternative_match_win[MAX_RECOG_OPERANDS];
2682   char this_alternative_win[MAX_RECOG_OPERANDS];
2683   char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2684   char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2685   int this_alternative_matches[MAX_RECOG_OPERANDS];
2686   reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2687   int this_alternative_number;
2688   int goal_alternative_number = 0;
2689   int operand_reloadnum[MAX_RECOG_OPERANDS];
2690   int goal_alternative_matches[MAX_RECOG_OPERANDS];
2691   int goal_alternative_matched[MAX_RECOG_OPERANDS];
2692   char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2693   char goal_alternative_win[MAX_RECOG_OPERANDS];
2694   char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2695   char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2696   int goal_alternative_swapped;
2697   int best;
2698   int commutative;
2699   char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2700   rtx substed_operand[MAX_RECOG_OPERANDS];
2701   rtx body = PATTERN (insn);
2702   rtx set = single_set (insn);
2703   int goal_earlyclobber = 0, this_earlyclobber;
2704   machine_mode operand_mode[MAX_RECOG_OPERANDS];
2705   int retval = 0;
2706 
2707   this_insn = insn;
2708   n_reloads = 0;
2709   n_replacements = 0;
2710   n_earlyclobbers = 0;
2711   replace_reloads = replace;
2712   hard_regs_live_known = live_known;
2713   static_reload_reg_p = reload_reg_p;
2714 
2715   /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2716      neither are insns that SET cc0.  Insns that use CC0 are not allowed
2717      to have any input reloads.  */
2718   if (JUMP_P (insn) || CALL_P (insn))
2719     no_output_reloads = 1;
2720 
2721 #ifdef HAVE_cc0
2722   if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2723     no_input_reloads = 1;
2724   if (reg_set_p (cc0_rtx, PATTERN (insn)))
2725     no_output_reloads = 1;
2726 #endif
2727 
2728 #ifdef SECONDARY_MEMORY_NEEDED
2729   /* The eliminated forms of any secondary memory locations are per-insn, so
2730      clear them out here.  */
2731 
2732   if (secondary_memlocs_elim_used)
2733     {
2734       memset (secondary_memlocs_elim, 0,
2735 	      sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2736       secondary_memlocs_elim_used = 0;
2737     }
2738 #endif
2739 
2740   /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2741      is cheap to move between them.  If it is not, there may not be an insn
2742      to do the copy, so we may need a reload.  */
2743   if (GET_CODE (body) == SET
2744       && REG_P (SET_DEST (body))
2745       && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2746       && REG_P (SET_SRC (body))
2747       && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2748       && register_move_cost (GET_MODE (SET_SRC (body)),
2749 			     REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2750 			     REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2751     return 0;
2752 
2753   extract_insn (insn);
2754 
2755   noperands = reload_n_operands = recog_data.n_operands;
2756   n_alternatives = recog_data.n_alternatives;
2757 
2758   /* Just return "no reloads" if insn has no operands with constraints.  */
2759   if (noperands == 0 || n_alternatives == 0)
2760     return 0;
2761 
2762   insn_code_number = INSN_CODE (insn);
2763   this_insn_is_asm = insn_code_number < 0;
2764 
2765   memcpy (operand_mode, recog_data.operand_mode,
2766 	  noperands * sizeof (machine_mode));
2767   memcpy (constraints, recog_data.constraints,
2768 	  noperands * sizeof (const char *));
2769 
2770   commutative = -1;
2771 
2772   /* If we will need to know, later, whether some pair of operands
2773      are the same, we must compare them now and save the result.
2774      Reloading the base and index registers will clobber them
2775      and afterward they will fail to match.  */
2776 
2777   for (i = 0; i < noperands; i++)
2778     {
2779       const char *p;
2780       int c;
2781       char *end;
2782 
2783       substed_operand[i] = recog_data.operand[i];
2784       p = constraints[i];
2785 
2786       modified[i] = RELOAD_READ;
2787 
2788       /* Scan this operand's constraint to see if it is an output operand,
2789 	 an in-out operand, is commutative, or should match another.  */
2790 
2791       while ((c = *p))
2792 	{
2793 	  p += CONSTRAINT_LEN (c, p);
2794 	  switch (c)
2795 	    {
2796 	    case '=':
2797 	      modified[i] = RELOAD_WRITE;
2798 	      break;
2799 	    case '+':
2800 	      modified[i] = RELOAD_READ_WRITE;
2801 	      break;
2802 	    case '%':
2803 	      {
2804 		/* The last operand should not be marked commutative.  */
2805 		gcc_assert (i != noperands - 1);
2806 
2807 		/* We currently only support one commutative pair of
2808 		   operands.  Some existing asm code currently uses more
2809 		   than one pair.  Previously, that would usually work,
2810 		   but sometimes it would crash the compiler.  We
2811 		   continue supporting that case as well as we can by
2812 		   silently ignoring all but the first pair.  In the
2813 		   future we may handle it correctly.  */
2814 		if (commutative < 0)
2815 		  commutative = i;
2816 		else
2817 		  gcc_assert (this_insn_is_asm);
2818 	      }
2819 	      break;
2820 	    /* Use of ISDIGIT is tempting here, but it may get expensive because
2821 	       of locale support we don't want.  */
2822 	    case '0': case '1': case '2': case '3': case '4':
2823 	    case '5': case '6': case '7': case '8': case '9':
2824 	      {
2825 		c = strtoul (p - 1, &end, 10);
2826 		p = end;
2827 
2828 		operands_match[c][i]
2829 		  = operands_match_p (recog_data.operand[c],
2830 				      recog_data.operand[i]);
2831 
2832 		/* An operand may not match itself.  */
2833 		gcc_assert (c != i);
2834 
2835 		/* If C can be commuted with C+1, and C might need to match I,
2836 		   then C+1 might also need to match I.  */
2837 		if (commutative >= 0)
2838 		  {
2839 		    if (c == commutative || c == commutative + 1)
2840 		      {
2841 			int other = c + (c == commutative ? 1 : -1);
2842 			operands_match[other][i]
2843 			  = operands_match_p (recog_data.operand[other],
2844 					      recog_data.operand[i]);
2845 		      }
2846 		    if (i == commutative || i == commutative + 1)
2847 		      {
2848 			int other = i + (i == commutative ? 1 : -1);
2849 			operands_match[c][other]
2850 			  = operands_match_p (recog_data.operand[c],
2851 					      recog_data.operand[other]);
2852 		      }
2853 		    /* Note that C is supposed to be less than I.
2854 		       No need to consider altering both C and I because in
2855 		       that case we would alter one into the other.  */
2856 		  }
2857 	      }
2858 	    }
2859 	}
2860     }
2861 
2862   /* Examine each operand that is a memory reference or memory address
2863      and reload parts of the addresses into index registers.
2864      Also here any references to pseudo regs that didn't get hard regs
2865      but are equivalent to constants get replaced in the insn itself
2866      with those constants.  Nobody will ever see them again.
2867 
2868      Finally, set up the preferred classes of each operand.  */
2869 
2870   for (i = 0; i < noperands; i++)
2871     {
2872       RTX_CODE code = GET_CODE (recog_data.operand[i]);
2873 
2874       address_reloaded[i] = 0;
2875       address_operand_reloaded[i] = 0;
2876       operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2877 			 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2878 			 : RELOAD_OTHER);
2879       address_type[i]
2880 	= (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2881 	   : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2882 	   : RELOAD_OTHER);
2883 
2884       if (*constraints[i] == 0)
2885 	/* Ignore things like match_operator operands.  */
2886 	;
2887       else if (insn_extra_address_constraint
2888 	       (lookup_constraint (constraints[i])))
2889 	{
2890 	  address_operand_reloaded[i]
2891 	    = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2892 				    recog_data.operand[i],
2893 				    recog_data.operand_loc[i],
2894 				    i, operand_type[i], ind_levels, insn);
2895 
2896 	  /* If we now have a simple operand where we used to have a
2897 	     PLUS or MULT, re-recognize and try again.  */
2898 	  if ((OBJECT_P (*recog_data.operand_loc[i])
2899 	       || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2900 	      && (GET_CODE (recog_data.operand[i]) == MULT
2901 		  || GET_CODE (recog_data.operand[i]) == PLUS))
2902 	    {
2903 	      INSN_CODE (insn) = -1;
2904 	      retval = find_reloads (insn, replace, ind_levels, live_known,
2905 				     reload_reg_p);
2906 	      return retval;
2907 	    }
2908 
2909 	  recog_data.operand[i] = *recog_data.operand_loc[i];
2910 	  substed_operand[i] = recog_data.operand[i];
2911 
2912 	  /* Address operands are reloaded in their existing mode,
2913 	     no matter what is specified in the machine description.  */
2914 	  operand_mode[i] = GET_MODE (recog_data.operand[i]);
2915 
2916 	  /* If the address is a single CONST_INT pick address mode
2917 	     instead otherwise we will later not know in which mode
2918 	     the reload should be performed.  */
2919 	  if (operand_mode[i] == VOIDmode)
2920 	    operand_mode[i] = Pmode;
2921 
2922 	}
2923       else if (code == MEM)
2924 	{
2925 	  address_reloaded[i]
2926 	    = find_reloads_address (GET_MODE (recog_data.operand[i]),
2927 				    recog_data.operand_loc[i],
2928 				    XEXP (recog_data.operand[i], 0),
2929 				    &XEXP (recog_data.operand[i], 0),
2930 				    i, address_type[i], ind_levels, insn);
2931 	  recog_data.operand[i] = *recog_data.operand_loc[i];
2932 	  substed_operand[i] = recog_data.operand[i];
2933 	}
2934       else if (code == SUBREG)
2935 	{
2936 	  rtx reg = SUBREG_REG (recog_data.operand[i]);
2937 	  rtx op
2938 	    = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2939 				   ind_levels,
2940 				   set != 0
2941 				   && &SET_DEST (set) == recog_data.operand_loc[i],
2942 				   insn,
2943 				   &address_reloaded[i]);
2944 
2945 	  /* If we made a MEM to load (a part of) the stackslot of a pseudo
2946 	     that didn't get a hard register, emit a USE with a REG_EQUAL
2947 	     note in front so that we might inherit a previous, possibly
2948 	     wider reload.  */
2949 
2950 	  if (replace
2951 	      && MEM_P (op)
2952 	      && REG_P (reg)
2953 	      && (GET_MODE_SIZE (GET_MODE (reg))
2954 		  >= GET_MODE_SIZE (GET_MODE (op)))
2955 	      && reg_equiv_constant (REGNO (reg)) == 0)
2956 	    set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2957 						   insn),
2958 				 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2959 
2960 	  substed_operand[i] = recog_data.operand[i] = op;
2961 	}
2962       else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2963 	/* We can get a PLUS as an "operand" as a result of register
2964 	   elimination.  See eliminate_regs and gen_reload.  We handle
2965 	   a unary operator by reloading the operand.  */
2966 	substed_operand[i] = recog_data.operand[i]
2967 	  = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2968 				 ind_levels, 0, insn,
2969 				 &address_reloaded[i]);
2970       else if (code == REG)
2971 	{
2972 	  /* This is equivalent to calling find_reloads_toplev.
2973 	     The code is duplicated for speed.
2974 	     When we find a pseudo always equivalent to a constant,
2975 	     we replace it by the constant.  We must be sure, however,
2976 	     that we don't try to replace it in the insn in which it
2977 	     is being set.  */
2978 	  int regno = REGNO (recog_data.operand[i]);
2979 	  if (reg_equiv_constant (regno) != 0
2980 	      && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2981 	    {
2982 	      /* Record the existing mode so that the check if constants are
2983 		 allowed will work when operand_mode isn't specified.  */
2984 
2985 	      if (operand_mode[i] == VOIDmode)
2986 		operand_mode[i] = GET_MODE (recog_data.operand[i]);
2987 
2988 	      substed_operand[i] = recog_data.operand[i]
2989 		= reg_equiv_constant (regno);
2990 	    }
2991 	  if (reg_equiv_memory_loc (regno) != 0
2992 	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2993 	    /* We need not give a valid is_set_dest argument since the case
2994 	       of a constant equivalence was checked above.  */
2995 	    substed_operand[i] = recog_data.operand[i]
2996 	      = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2997 				     ind_levels, 0, insn,
2998 				     &address_reloaded[i]);
2999 	}
3000       /* If the operand is still a register (we didn't replace it with an
3001 	 equivalent), get the preferred class to reload it into.  */
3002       code = GET_CODE (recog_data.operand[i]);
3003       preferred_class[i]
3004 	= ((code == REG && REGNO (recog_data.operand[i])
3005 	    >= FIRST_PSEUDO_REGISTER)
3006 	   ? reg_preferred_class (REGNO (recog_data.operand[i]))
3007 	   : NO_REGS);
3008       pref_or_nothing[i]
3009 	= (code == REG
3010 	   && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
3011 	   && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
3012     }
3013 
3014   /* If this is simply a copy from operand 1 to operand 0, merge the
3015      preferred classes for the operands.  */
3016   if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
3017       && recog_data.operand[1] == SET_SRC (set))
3018     {
3019       preferred_class[0] = preferred_class[1]
3020 	= reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
3021       pref_or_nothing[0] |= pref_or_nothing[1];
3022       pref_or_nothing[1] |= pref_or_nothing[0];
3023     }
3024 
3025   /* Now see what we need for pseudo-regs that didn't get hard regs
3026      or got the wrong kind of hard reg.  For this, we must consider
3027      all the operands together against the register constraints.  */
3028 
3029   best = MAX_RECOG_OPERANDS * 2 + 600;
3030 
3031   goal_alternative_swapped = 0;
3032 
3033   /* The constraints are made of several alternatives.
3034      Each operand's constraint looks like foo,bar,... with commas
3035      separating the alternatives.  The first alternatives for all
3036      operands go together, the second alternatives go together, etc.
3037 
3038      First loop over alternatives.  */
3039 
3040   alternative_mask enabled = get_enabled_alternatives (insn);
3041   for (this_alternative_number = 0;
3042        this_alternative_number < n_alternatives;
3043        this_alternative_number++)
3044     {
3045       int swapped;
3046 
3047       if (!TEST_BIT (enabled, this_alternative_number))
3048 	{
3049 	  int i;
3050 
3051 	  for (i = 0; i < recog_data.n_operands; i++)
3052 	    constraints[i] = skip_alternative (constraints[i]);
3053 
3054 	  continue;
3055 	}
3056 
3057       /* If insn is commutative (it's safe to exchange a certain pair
3058 	 of operands) then we need to try each alternative twice, the
3059 	 second time matching those two operands as if we had
3060 	 exchanged them.  To do this, really exchange them in
3061 	 operands.  */
3062       for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3063 	{
3064 	  /* Loop over operands for one constraint alternative.  */
3065 	  /* LOSERS counts those that don't fit this alternative
3066 	     and would require loading.  */
3067 	  int losers = 0;
3068 	  /* BAD is set to 1 if it some operand can't fit this alternative
3069 	     even after reloading.  */
3070 	  int bad = 0;
3071 	  /* REJECT is a count of how undesirable this alternative says it is
3072 	     if any reloading is required.  If the alternative matches exactly
3073 	     then REJECT is ignored, but otherwise it gets this much
3074 	     counted against it in addition to the reloading needed.  Each
3075 	     ? counts three times here since we want the disparaging caused by
3076 	     a bad register class to only count 1/3 as much.  */
3077 	  int reject = 0;
3078 
3079 	  if (swapped)
3080 	    {
3081 	      enum reg_class tclass;
3082 	      int t;
3083 
3084 	      recog_data.operand[commutative] = substed_operand[commutative + 1];
3085 	      recog_data.operand[commutative + 1] = substed_operand[commutative];
3086 	      /* Swap the duplicates too.  */
3087 	      for (i = 0; i < recog_data.n_dups; i++)
3088 		if (recog_data.dup_num[i] == commutative
3089 		    || recog_data.dup_num[i] == commutative + 1)
3090 		  *recog_data.dup_loc[i]
3091 		    = recog_data.operand[(int) recog_data.dup_num[i]];
3092 
3093 	      tclass = preferred_class[commutative];
3094 	      preferred_class[commutative] = preferred_class[commutative + 1];
3095 	      preferred_class[commutative + 1] = tclass;
3096 
3097 	      t = pref_or_nothing[commutative];
3098 	      pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3099 	      pref_or_nothing[commutative + 1] = t;
3100 
3101 	      t = address_reloaded[commutative];
3102 	      address_reloaded[commutative] = address_reloaded[commutative + 1];
3103 	      address_reloaded[commutative + 1] = t;
3104 	    }
3105 
3106 	  this_earlyclobber = 0;
3107 
3108 	  for (i = 0; i < noperands; i++)
3109 	    {
3110 	      const char *p = constraints[i];
3111 	      char *end;
3112 	      int len;
3113 	      int win = 0;
3114 	      int did_match = 0;
3115 	      /* 0 => this operand can be reloaded somehow for this alternative.  */
3116 	      int badop = 1;
3117 	      /* 0 => this operand can be reloaded if the alternative allows regs.  */
3118 	      int winreg = 0;
3119 	      int c;
3120 	      int m;
3121 	      rtx operand = recog_data.operand[i];
3122 	      int offset = 0;
3123 	      /* Nonzero means this is a MEM that must be reloaded into a reg
3124 		 regardless of what the constraint says.  */
3125 	      int force_reload = 0;
3126 	      int offmemok = 0;
3127 	      /* Nonzero if a constant forced into memory would be OK for this
3128 		 operand.  */
3129 	      int constmemok = 0;
3130 	      int earlyclobber = 0;
3131 	      enum constraint_num cn;
3132 	      enum reg_class cl;
3133 
3134 	      /* If the predicate accepts a unary operator, it means that
3135 		 we need to reload the operand, but do not do this for
3136 		 match_operator and friends.  */
3137 	      if (UNARY_P (operand) && *p != 0)
3138 		operand = XEXP (operand, 0);
3139 
3140 	      /* If the operand is a SUBREG, extract
3141 		 the REG or MEM (or maybe even a constant) within.
3142 		 (Constants can occur as a result of reg_equiv_constant.)  */
3143 
3144 	      while (GET_CODE (operand) == SUBREG)
3145 		{
3146 		  /* Offset only matters when operand is a REG and
3147 		     it is a hard reg.  This is because it is passed
3148 		     to reg_fits_class_p if it is a REG and all pseudos
3149 		     return 0 from that function.  */
3150 		  if (REG_P (SUBREG_REG (operand))
3151 		      && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3152 		    {
3153 		      if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3154 						 GET_MODE (SUBREG_REG (operand)),
3155 						 SUBREG_BYTE (operand),
3156 						 GET_MODE (operand)) < 0)
3157 			force_reload = 1;
3158 		      offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3159 						     GET_MODE (SUBREG_REG (operand)),
3160 						     SUBREG_BYTE (operand),
3161 						     GET_MODE (operand));
3162 		    }
3163 		  operand = SUBREG_REG (operand);
3164 		  /* Force reload if this is a constant or PLUS or if there may
3165 		     be a problem accessing OPERAND in the outer mode.  */
3166 		  if (CONSTANT_P (operand)
3167 		      || GET_CODE (operand) == PLUS
3168 		      /* We must force a reload of paradoxical SUBREGs
3169 			 of a MEM because the alignment of the inner value
3170 			 may not be enough to do the outer reference.  On
3171 			 big-endian machines, it may also reference outside
3172 			 the object.
3173 
3174 			 On machines that extend byte operations and we have a
3175 			 SUBREG where both the inner and outer modes are no wider
3176 			 than a word and the inner mode is narrower, is integral,
3177 			 and gets extended when loaded from memory, combine.c has
3178 			 made assumptions about the behavior of the machine in such
3179 			 register access.  If the data is, in fact, in memory we
3180 			 must always load using the size assumed to be in the
3181 			 register and let the insn do the different-sized
3182 			 accesses.
3183 
3184 			 This is doubly true if WORD_REGISTER_OPERATIONS.  In
3185 			 this case eliminate_regs has left non-paradoxical
3186 			 subregs for push_reload to see.  Make sure it does
3187 			 by forcing the reload.
3188 
3189 			 ??? When is it right at this stage to have a subreg
3190 			 of a mem that is _not_ to be handled specially?  IMO
3191 			 those should have been reduced to just a mem.  */
3192 		      || ((MEM_P (operand)
3193 			   || (REG_P (operand)
3194 			       && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3195 #ifndef WORD_REGISTER_OPERATIONS
3196 			  && (((GET_MODE_BITSIZE (GET_MODE (operand))
3197 				< BIGGEST_ALIGNMENT)
3198 			       && (GET_MODE_SIZE (operand_mode[i])
3199 				   > GET_MODE_SIZE (GET_MODE (operand))))
3200 			      || BYTES_BIG_ENDIAN
3201 #ifdef LOAD_EXTEND_OP
3202 			      || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3203 				  && (GET_MODE_SIZE (GET_MODE (operand))
3204 				      <= UNITS_PER_WORD)
3205 				  && (GET_MODE_SIZE (operand_mode[i])
3206 				      > GET_MODE_SIZE (GET_MODE (operand)))
3207 				  && INTEGRAL_MODE_P (GET_MODE (operand))
3208 				  && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3209 #endif
3210 			      )
3211 #endif
3212 			  )
3213 		      )
3214 		    force_reload = 1;
3215 		}
3216 
3217 	      this_alternative[i] = NO_REGS;
3218 	      this_alternative_win[i] = 0;
3219 	      this_alternative_match_win[i] = 0;
3220 	      this_alternative_offmemok[i] = 0;
3221 	      this_alternative_earlyclobber[i] = 0;
3222 	      this_alternative_matches[i] = -1;
3223 
3224 	      /* An empty constraint or empty alternative
3225 		 allows anything which matched the pattern.  */
3226 	      if (*p == 0 || *p == ',')
3227 		win = 1, badop = 0;
3228 
3229 	      /* Scan this alternative's specs for this operand;
3230 		 set WIN if the operand fits any letter in this alternative.
3231 		 Otherwise, clear BADOP if this operand could
3232 		 fit some letter after reloads,
3233 		 or set WINREG if this operand could fit after reloads
3234 		 provided the constraint allows some registers.  */
3235 
3236 	      do
3237 		switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3238 		  {
3239 		  case '\0':
3240 		    len = 0;
3241 		    break;
3242 		  case ',':
3243 		    c = '\0';
3244 		    break;
3245 
3246 		  case '?':
3247 		    reject += 6;
3248 		    break;
3249 
3250 		  case '!':
3251 		    reject = 600;
3252 		    break;
3253 
3254 		  case '#':
3255 		    /* Ignore rest of this alternative as far as
3256 		       reloading is concerned.  */
3257 		    do
3258 		      p++;
3259 		    while (*p && *p != ',');
3260 		    len = 0;
3261 		    break;
3262 
3263 		  case '0':  case '1':  case '2':  case '3':  case '4':
3264 		  case '5':  case '6':  case '7':  case '8':  case '9':
3265 		    m = strtoul (p, &end, 10);
3266 		    p = end;
3267 		    len = 0;
3268 
3269 		    this_alternative_matches[i] = m;
3270 		    /* We are supposed to match a previous operand.
3271 		       If we do, we win if that one did.
3272 		       If we do not, count both of the operands as losers.
3273 		       (This is too conservative, since most of the time
3274 		       only a single reload insn will be needed to make
3275 		       the two operands win.  As a result, this alternative
3276 		       may be rejected when it is actually desirable.)  */
3277 		    if ((swapped && (m != commutative || i != commutative + 1))
3278 			/* If we are matching as if two operands were swapped,
3279 			   also pretend that operands_match had been computed
3280 			   with swapped.
3281 			   But if I is the second of those and C is the first,
3282 			   don't exchange them, because operands_match is valid
3283 			   only on one side of its diagonal.  */
3284 			? (operands_match
3285 			   [(m == commutative || m == commutative + 1)
3286 			    ? 2 * commutative + 1 - m : m]
3287 			   [(i == commutative || i == commutative + 1)
3288 			    ? 2 * commutative + 1 - i : i])
3289 			: operands_match[m][i])
3290 		      {
3291 			/* If we are matching a non-offsettable address where an
3292 			   offsettable address was expected, then we must reject
3293 			   this combination, because we can't reload it.  */
3294 			if (this_alternative_offmemok[m]
3295 			    && MEM_P (recog_data.operand[m])
3296 			    && this_alternative[m] == NO_REGS
3297 			    && ! this_alternative_win[m])
3298 			  bad = 1;
3299 
3300 			did_match = this_alternative_win[m];
3301 		      }
3302 		    else
3303 		      {
3304 			/* Operands don't match.  */
3305 			rtx value;
3306 			int loc1, loc2;
3307 			/* Retroactively mark the operand we had to match
3308 			   as a loser, if it wasn't already.  */
3309 			if (this_alternative_win[m])
3310 			  losers++;
3311 			this_alternative_win[m] = 0;
3312 			if (this_alternative[m] == NO_REGS)
3313 			  bad = 1;
3314 			/* But count the pair only once in the total badness of
3315 			   this alternative, if the pair can be a dummy reload.
3316 			   The pointers in operand_loc are not swapped; swap
3317 			   them by hand if necessary.  */
3318 			if (swapped && i == commutative)
3319 			  loc1 = commutative + 1;
3320 			else if (swapped && i == commutative + 1)
3321 			  loc1 = commutative;
3322 			else
3323 			  loc1 = i;
3324 			if (swapped && m == commutative)
3325 			  loc2 = commutative + 1;
3326 			else if (swapped && m == commutative + 1)
3327 			  loc2 = commutative;
3328 			else
3329 			  loc2 = m;
3330 			value
3331 			  = find_dummy_reload (recog_data.operand[i],
3332 					       recog_data.operand[m],
3333 					       recog_data.operand_loc[loc1],
3334 					       recog_data.operand_loc[loc2],
3335 					       operand_mode[i], operand_mode[m],
3336 					       this_alternative[m], -1,
3337 					       this_alternative_earlyclobber[m]);
3338 
3339 			if (value != 0)
3340 			  losers--;
3341 		      }
3342 		    /* This can be fixed with reloads if the operand
3343 		       we are supposed to match can be fixed with reloads.  */
3344 		    badop = 0;
3345 		    this_alternative[i] = this_alternative[m];
3346 
3347 		    /* If we have to reload this operand and some previous
3348 		       operand also had to match the same thing as this
3349 		       operand, we don't know how to do that.  So reject this
3350 		       alternative.  */
3351 		    if (! did_match || force_reload)
3352 		      for (j = 0; j < i; j++)
3353 			if (this_alternative_matches[j]
3354 			    == this_alternative_matches[i])
3355 			  {
3356 			    badop = 1;
3357 			    break;
3358 			  }
3359 		    break;
3360 
3361 		  case 'p':
3362 		    /* All necessary reloads for an address_operand
3363 		       were handled in find_reloads_address.  */
3364 		    this_alternative[i]
3365 		      = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3366 					ADDRESS, SCRATCH);
3367 		    win = 1;
3368 		    badop = 0;
3369 		    break;
3370 
3371 		  case TARGET_MEM_CONSTRAINT:
3372 		    if (force_reload)
3373 		      break;
3374 		    if (MEM_P (operand)
3375 			|| (REG_P (operand)
3376 			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3377 			    && reg_renumber[REGNO (operand)] < 0))
3378 		      win = 1;
3379 		    if (CONST_POOL_OK_P (operand_mode[i], operand))
3380 		      badop = 0;
3381 		    constmemok = 1;
3382 		    break;
3383 
3384 		  case '<':
3385 		    if (MEM_P (operand)
3386 			&& ! address_reloaded[i]
3387 			&& (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3388 			    || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3389 		      win = 1;
3390 		    break;
3391 
3392 		  case '>':
3393 		    if (MEM_P (operand)
3394 			&& ! address_reloaded[i]
3395 			&& (GET_CODE (XEXP (operand, 0)) == PRE_INC
3396 			    || GET_CODE (XEXP (operand, 0)) == POST_INC))
3397 		      win = 1;
3398 		    break;
3399 
3400 		    /* Memory operand whose address is not offsettable.  */
3401 		  case 'V':
3402 		    if (force_reload)
3403 		      break;
3404 		    if (MEM_P (operand)
3405 			&& ! (ind_levels ? offsettable_memref_p (operand)
3406 			      : offsettable_nonstrict_memref_p (operand))
3407 			/* Certain mem addresses will become offsettable
3408 			   after they themselves are reloaded.  This is important;
3409 			   we don't want our own handling of unoffsettables
3410 			   to override the handling of reg_equiv_address.  */
3411 			&& !(REG_P (XEXP (operand, 0))
3412 			     && (ind_levels == 0
3413 				 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3414 		      win = 1;
3415 		    break;
3416 
3417 		    /* Memory operand whose address is offsettable.  */
3418 		  case 'o':
3419 		    if (force_reload)
3420 		      break;
3421 		    if ((MEM_P (operand)
3422 			 /* If IND_LEVELS, find_reloads_address won't reload a
3423 			    pseudo that didn't get a hard reg, so we have to
3424 			    reject that case.  */
3425 			 && ((ind_levels ? offsettable_memref_p (operand)
3426 			      : offsettable_nonstrict_memref_p (operand))
3427 			     /* A reloaded address is offsettable because it is now
3428 				just a simple register indirect.  */
3429 			     || address_reloaded[i] == 1))
3430 			|| (REG_P (operand)
3431 			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3432 			    && reg_renumber[REGNO (operand)] < 0
3433 			    /* If reg_equiv_address is nonzero, we will be
3434 			       loading it into a register; hence it will be
3435 			       offsettable, but we cannot say that reg_equiv_mem
3436 			       is offsettable without checking.  */
3437 			    && ((reg_equiv_mem (REGNO (operand)) != 0
3438 				 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3439 				|| (reg_equiv_address (REGNO (operand)) != 0))))
3440 		      win = 1;
3441 		    if (CONST_POOL_OK_P (operand_mode[i], operand)
3442 			|| MEM_P (operand))
3443 		      badop = 0;
3444 		    constmemok = 1;
3445 		    offmemok = 1;
3446 		    break;
3447 
3448 		  case '&':
3449 		    /* Output operand that is stored before the need for the
3450 		       input operands (and their index registers) is over.  */
3451 		    earlyclobber = 1, this_earlyclobber = 1;
3452 		    break;
3453 
3454 		  case 'X':
3455 		    force_reload = 0;
3456 		    win = 1;
3457 		    break;
3458 
3459 		  case 'g':
3460 		    if (! force_reload
3461 			/* A PLUS is never a valid operand, but reload can make
3462 			   it from a register when eliminating registers.  */
3463 			&& GET_CODE (operand) != PLUS
3464 			/* A SCRATCH is not a valid operand.  */
3465 			&& GET_CODE (operand) != SCRATCH
3466 			&& (! CONSTANT_P (operand)
3467 			    || ! flag_pic
3468 			    || LEGITIMATE_PIC_OPERAND_P (operand))
3469 			&& (GENERAL_REGS == ALL_REGS
3470 			    || !REG_P (operand)
3471 			    || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3472 				&& reg_renumber[REGNO (operand)] < 0)))
3473 		      win = 1;
3474 		    cl = GENERAL_REGS;
3475 		    goto reg;
3476 
3477 		  default:
3478 		    cn = lookup_constraint (p);
3479 		    switch (get_constraint_type (cn))
3480 		      {
3481 		      case CT_REGISTER:
3482 			cl = reg_class_for_constraint (cn);
3483 			if (cl != NO_REGS)
3484 			  goto reg;
3485 			break;
3486 
3487 		      case CT_CONST_INT:
3488 			if (CONST_INT_P (operand)
3489 			    && (insn_const_int_ok_for_constraint
3490 				(INTVAL (operand), cn)))
3491 			  win = true;
3492 			break;
3493 
3494 		      case CT_MEMORY:
3495 			if (force_reload)
3496 			  break;
3497 			if (constraint_satisfied_p (operand, cn))
3498 			  win = 1;
3499 			/* If the address was already reloaded,
3500 			   we win as well.  */
3501 			else if (MEM_P (operand) && address_reloaded[i] == 1)
3502 			  win = 1;
3503 			/* Likewise if the address will be reloaded because
3504 			   reg_equiv_address is nonzero.  For reg_equiv_mem
3505 			   we have to check.  */
3506 			else if (REG_P (operand)
3507 				 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3508 				 && reg_renumber[REGNO (operand)] < 0
3509 				 && ((reg_equiv_mem (REGNO (operand)) != 0
3510 				      && (constraint_satisfied_p
3511 					  (reg_equiv_mem (REGNO (operand)),
3512 					   cn)))
3513 				     || (reg_equiv_address (REGNO (operand))
3514 					 != 0)))
3515 			  win = 1;
3516 
3517 			/* If we didn't already win, we can reload
3518 			   constants via force_const_mem, and other
3519 			   MEMs by reloading the address like for 'o'.  */
3520 			if (CONST_POOL_OK_P (operand_mode[i], operand)
3521 			    || MEM_P (operand))
3522 			  badop = 0;
3523 			constmemok = 1;
3524 			offmemok = 1;
3525 			break;
3526 
3527 		      case CT_ADDRESS:
3528 			if (constraint_satisfied_p (operand, cn))
3529 			  win = 1;
3530 
3531 			/* If we didn't already win, we can reload
3532 			   the address into a base register.  */
3533 			this_alternative[i]
3534 			  = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3535 					    ADDRESS, SCRATCH);
3536 			badop = 0;
3537 			break;
3538 
3539 		      case CT_FIXED_FORM:
3540 			if (constraint_satisfied_p (operand, cn))
3541 			  win = 1;
3542 			break;
3543 		      }
3544 		    break;
3545 
3546 		  reg:
3547 		    this_alternative[i]
3548 		      = reg_class_subunion[this_alternative[i]][cl];
3549 		    if (GET_MODE (operand) == BLKmode)
3550 		      break;
3551 		    winreg = 1;
3552 		    if (REG_P (operand)
3553 			&& reg_fits_class_p (operand, this_alternative[i],
3554   			             offset, GET_MODE (recog_data.operand[i])))
3555 		      win = 1;
3556 		    break;
3557 		  }
3558 	      while ((p += len), c);
3559 
3560 	      if (swapped == (commutative >= 0 ? 1 : 0))
3561 		constraints[i] = p;
3562 
3563 	      /* If this operand could be handled with a reg,
3564 		 and some reg is allowed, then this operand can be handled.  */
3565 	      if (winreg && this_alternative[i] != NO_REGS
3566 		  && (win || !class_only_fixed_regs[this_alternative[i]]))
3567 		badop = 0;
3568 
3569 	      /* Record which operands fit this alternative.  */
3570 	      this_alternative_earlyclobber[i] = earlyclobber;
3571 	      if (win && ! force_reload)
3572 		this_alternative_win[i] = 1;
3573 	      else if (did_match && ! force_reload)
3574 		this_alternative_match_win[i] = 1;
3575 	      else
3576 		{
3577 		  int const_to_mem = 0;
3578 
3579 		  this_alternative_offmemok[i] = offmemok;
3580 		  losers++;
3581 		  if (badop)
3582 		    bad = 1;
3583 		  /* Alternative loses if it has no regs for a reg operand.  */
3584 		  if (REG_P (operand)
3585 		      && this_alternative[i] == NO_REGS
3586 		      && this_alternative_matches[i] < 0)
3587 		    bad = 1;
3588 
3589 		  /* If this is a constant that is reloaded into the desired
3590 		     class by copying it to memory first, count that as another
3591 		     reload.  This is consistent with other code and is
3592 		     required to avoid choosing another alternative when
3593 		     the constant is moved into memory by this function on
3594 		     an early reload pass.  Note that the test here is
3595 		     precisely the same as in the code below that calls
3596 		     force_const_mem.  */
3597 		  if (CONST_POOL_OK_P (operand_mode[i], operand)
3598 		      && ((targetm.preferred_reload_class (operand,
3599 							   this_alternative[i])
3600 			   == NO_REGS)
3601 			  || no_input_reloads))
3602 		    {
3603 		      const_to_mem = 1;
3604 		      if (this_alternative[i] != NO_REGS)
3605 			losers++;
3606 		    }
3607 
3608 		  /* Alternative loses if it requires a type of reload not
3609 		     permitted for this insn.  We can always reload SCRATCH
3610 		     and objects with a REG_UNUSED note.  */
3611 		  if (GET_CODE (operand) != SCRATCH
3612 		      && modified[i] != RELOAD_READ && no_output_reloads
3613 		      && ! find_reg_note (insn, REG_UNUSED, operand))
3614 		    bad = 1;
3615 		  else if (modified[i] != RELOAD_WRITE && no_input_reloads
3616 			   && ! const_to_mem)
3617 		    bad = 1;
3618 
3619 		  /* If we can't reload this value at all, reject this
3620 		     alternative.  Note that we could also lose due to
3621 		     LIMIT_RELOAD_CLASS, but we don't check that
3622 		     here.  */
3623 
3624 		  if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3625 		    {
3626 		      if (targetm.preferred_reload_class (operand,
3627 							  this_alternative[i])
3628 			  == NO_REGS)
3629 			reject = 600;
3630 
3631 		      if (operand_type[i] == RELOAD_FOR_OUTPUT
3632 			  && (targetm.preferred_output_reload_class (operand,
3633 							    this_alternative[i])
3634 			      == NO_REGS))
3635 			reject = 600;
3636 		    }
3637 
3638 		  /* We prefer to reload pseudos over reloading other things,
3639 		     since such reloads may be able to be eliminated later.
3640 		     If we are reloading a SCRATCH, we won't be generating any
3641 		     insns, just using a register, so it is also preferred.
3642 		     So bump REJECT in other cases.  Don't do this in the
3643 		     case where we are forcing a constant into memory and
3644 		     it will then win since we don't want to have a different
3645 		     alternative match then.  */
3646 		  if (! (REG_P (operand)
3647 			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3648 		      && GET_CODE (operand) != SCRATCH
3649 		      && ! (const_to_mem && constmemok))
3650 		    reject += 2;
3651 
3652 		  /* Input reloads can be inherited more often than output
3653 		     reloads can be removed, so penalize output reloads.  */
3654 		  if (operand_type[i] != RELOAD_FOR_INPUT
3655 		      && GET_CODE (operand) != SCRATCH)
3656 		    reject++;
3657 		}
3658 
3659 	      /* If this operand is a pseudo register that didn't get
3660 		 a hard reg and this alternative accepts some
3661 		 register, see if the class that we want is a subset
3662 		 of the preferred class for this register.  If not,
3663 		 but it intersects that class, use the preferred class
3664 		 instead.  If it does not intersect the preferred
3665 		 class, show that usage of this alternative should be
3666 		 discouraged; it will be discouraged more still if the
3667 		 register is `preferred or nothing'.  We do this
3668 		 because it increases the chance of reusing our spill
3669 		 register in a later insn and avoiding a pair of
3670 		 memory stores and loads.
3671 
3672 		 Don't bother with this if this alternative will
3673 		 accept this operand.
3674 
3675 		 Don't do this for a multiword operand, since it is
3676 		 only a small win and has the risk of requiring more
3677 		 spill registers, which could cause a large loss.
3678 
3679 		 Don't do this if the preferred class has only one
3680 		 register because we might otherwise exhaust the
3681 		 class.  */
3682 
3683 	      if (! win && ! did_match
3684 		  && this_alternative[i] != NO_REGS
3685 		  && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3686 		  && reg_class_size [(int) preferred_class[i]] > 0
3687 		  && ! small_register_class_p (preferred_class[i]))
3688 		{
3689 		  if (! reg_class_subset_p (this_alternative[i],
3690 					    preferred_class[i]))
3691 		    {
3692 		      /* Since we don't have a way of forming the intersection,
3693 			 we just do something special if the preferred class
3694 			 is a subset of the class we have; that's the most
3695 			 common case anyway.  */
3696 		      if (reg_class_subset_p (preferred_class[i],
3697 					      this_alternative[i]))
3698 			this_alternative[i] = preferred_class[i];
3699 		      else
3700 			reject += (2 + 2 * pref_or_nothing[i]);
3701 		    }
3702 		}
3703 	    }
3704 
3705 	  /* Now see if any output operands that are marked "earlyclobber"
3706 	     in this alternative conflict with any input operands
3707 	     or any memory addresses.  */
3708 
3709 	  for (i = 0; i < noperands; i++)
3710 	    if (this_alternative_earlyclobber[i]
3711 		&& (this_alternative_win[i] || this_alternative_match_win[i]))
3712 	      {
3713 		struct decomposition early_data;
3714 
3715 		early_data = decompose (recog_data.operand[i]);
3716 
3717 		gcc_assert (modified[i] != RELOAD_READ);
3718 
3719 		if (this_alternative[i] == NO_REGS)
3720 		  {
3721 		    this_alternative_earlyclobber[i] = 0;
3722 		    gcc_assert (this_insn_is_asm);
3723 		    error_for_asm (this_insn,
3724 			      "%<&%> constraint used with no register class");
3725 		  }
3726 
3727 		for (j = 0; j < noperands; j++)
3728 		  /* Is this an input operand or a memory ref?  */
3729 		  if ((MEM_P (recog_data.operand[j])
3730 		       || modified[j] != RELOAD_WRITE)
3731 		      && j != i
3732 		      /* Ignore things like match_operator operands.  */
3733 		      && !recog_data.is_operator[j]
3734 		      /* Don't count an input operand that is constrained to match
3735 			 the early clobber operand.  */
3736 		      && ! (this_alternative_matches[j] == i
3737 			    && rtx_equal_p (recog_data.operand[i],
3738 					    recog_data.operand[j]))
3739 		      /* Is it altered by storing the earlyclobber operand?  */
3740 		      && !immune_p (recog_data.operand[j], recog_data.operand[i],
3741 				    early_data))
3742 		    {
3743 		      /* If the output is in a non-empty few-regs class,
3744 			 it's costly to reload it, so reload the input instead.  */
3745 		      if (small_register_class_p (this_alternative[i])
3746 			  && (REG_P (recog_data.operand[j])
3747 			      || GET_CODE (recog_data.operand[j]) == SUBREG))
3748 			{
3749 			  losers++;
3750 			  this_alternative_win[j] = 0;
3751 			  this_alternative_match_win[j] = 0;
3752 			}
3753 		      else
3754 			break;
3755 		    }
3756 		/* If an earlyclobber operand conflicts with something,
3757 		   it must be reloaded, so request this and count the cost.  */
3758 		if (j != noperands)
3759 		  {
3760 		    losers++;
3761 		    this_alternative_win[i] = 0;
3762 		    this_alternative_match_win[j] = 0;
3763 		    for (j = 0; j < noperands; j++)
3764 		      if (this_alternative_matches[j] == i
3765 			  && this_alternative_match_win[j])
3766 			{
3767 			  this_alternative_win[j] = 0;
3768 			  this_alternative_match_win[j] = 0;
3769 			  losers++;
3770 			}
3771 		  }
3772 	      }
3773 
3774 	  /* If one alternative accepts all the operands, no reload required,
3775 	     choose that alternative; don't consider the remaining ones.  */
3776 	  if (losers == 0)
3777 	    {
3778 	      /* Unswap these so that they are never swapped at `finish'.  */
3779 	      if (swapped)
3780 		{
3781 		  recog_data.operand[commutative] = substed_operand[commutative];
3782 		  recog_data.operand[commutative + 1]
3783 		    = substed_operand[commutative + 1];
3784 		}
3785 	      for (i = 0; i < noperands; i++)
3786 		{
3787 		  goal_alternative_win[i] = this_alternative_win[i];
3788 		  goal_alternative_match_win[i] = this_alternative_match_win[i];
3789 		  goal_alternative[i] = this_alternative[i];
3790 		  goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3791 		  goal_alternative_matches[i] = this_alternative_matches[i];
3792 		  goal_alternative_earlyclobber[i]
3793 		    = this_alternative_earlyclobber[i];
3794 		}
3795 	      goal_alternative_number = this_alternative_number;
3796 	      goal_alternative_swapped = swapped;
3797 	      goal_earlyclobber = this_earlyclobber;
3798 	      goto finish;
3799 	    }
3800 
3801 	  /* REJECT, set by the ! and ? constraint characters and when a register
3802 	     would be reloaded into a non-preferred class, discourages the use of
3803 	     this alternative for a reload goal.  REJECT is incremented by six
3804 	     for each ? and two for each non-preferred class.  */
3805 	  losers = losers * 6 + reject;
3806 
3807 	  /* If this alternative can be made to work by reloading,
3808 	     and it needs less reloading than the others checked so far,
3809 	     record it as the chosen goal for reloading.  */
3810 	  if (! bad)
3811 	    {
3812 	      if (best > losers)
3813 		{
3814 		  for (i = 0; i < noperands; i++)
3815 		    {
3816 		      goal_alternative[i] = this_alternative[i];
3817 		      goal_alternative_win[i] = this_alternative_win[i];
3818 		      goal_alternative_match_win[i]
3819 			= this_alternative_match_win[i];
3820 		      goal_alternative_offmemok[i]
3821 			= this_alternative_offmemok[i];
3822 		      goal_alternative_matches[i] = this_alternative_matches[i];
3823 		      goal_alternative_earlyclobber[i]
3824 			= this_alternative_earlyclobber[i];
3825 		    }
3826 		  goal_alternative_swapped = swapped;
3827 		  best = losers;
3828 		  goal_alternative_number = this_alternative_number;
3829 		  goal_earlyclobber = this_earlyclobber;
3830 		}
3831 	    }
3832 
3833 	  if (swapped)
3834 	    {
3835 	      enum reg_class tclass;
3836 	      int t;
3837 
3838 	      /* If the commutative operands have been swapped, swap
3839 		 them back in order to check the next alternative.  */
3840 	      recog_data.operand[commutative] = substed_operand[commutative];
3841 	      recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3842 	      /* Unswap the duplicates too.  */
3843 	      for (i = 0; i < recog_data.n_dups; i++)
3844 		if (recog_data.dup_num[i] == commutative
3845 		    || recog_data.dup_num[i] == commutative + 1)
3846 		  *recog_data.dup_loc[i]
3847 		    = recog_data.operand[(int) recog_data.dup_num[i]];
3848 
3849 	      /* Unswap the operand related information as well.  */
3850 	      tclass = preferred_class[commutative];
3851 	      preferred_class[commutative] = preferred_class[commutative + 1];
3852 	      preferred_class[commutative + 1] = tclass;
3853 
3854 	      t = pref_or_nothing[commutative];
3855 	      pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3856 	      pref_or_nothing[commutative + 1] = t;
3857 
3858 	      t = address_reloaded[commutative];
3859 	      address_reloaded[commutative] = address_reloaded[commutative + 1];
3860 	      address_reloaded[commutative + 1] = t;
3861 	    }
3862 	}
3863     }
3864 
3865   /* The operands don't meet the constraints.
3866      goal_alternative describes the alternative
3867      that we could reach by reloading the fewest operands.
3868      Reload so as to fit it.  */
3869 
3870   if (best == MAX_RECOG_OPERANDS * 2 + 600)
3871     {
3872       /* No alternative works with reloads??  */
3873       if (insn_code_number >= 0)
3874 	fatal_insn ("unable to generate reloads for:", insn);
3875       error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3876       /* Avoid further trouble with this insn.  */
3877       PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3878       n_reloads = 0;
3879       return 0;
3880     }
3881 
3882   /* Jump to `finish' from above if all operands are valid already.
3883      In that case, goal_alternative_win is all 1.  */
3884  finish:
3885 
3886   /* Right now, for any pair of operands I and J that are required to match,
3887      with I < J,
3888      goal_alternative_matches[J] is I.
3889      Set up goal_alternative_matched as the inverse function:
3890      goal_alternative_matched[I] = J.  */
3891 
3892   for (i = 0; i < noperands; i++)
3893     goal_alternative_matched[i] = -1;
3894 
3895   for (i = 0; i < noperands; i++)
3896     if (! goal_alternative_win[i]
3897 	&& goal_alternative_matches[i] >= 0)
3898       goal_alternative_matched[goal_alternative_matches[i]] = i;
3899 
3900   for (i = 0; i < noperands; i++)
3901     goal_alternative_win[i] |= goal_alternative_match_win[i];
3902 
3903   /* If the best alternative is with operands 1 and 2 swapped,
3904      consider them swapped before reporting the reloads.  Update the
3905      operand numbers of any reloads already pushed.  */
3906 
3907   if (goal_alternative_swapped)
3908     {
3909       rtx tem;
3910 
3911       tem = substed_operand[commutative];
3912       substed_operand[commutative] = substed_operand[commutative + 1];
3913       substed_operand[commutative + 1] = tem;
3914       tem = recog_data.operand[commutative];
3915       recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3916       recog_data.operand[commutative + 1] = tem;
3917       tem = *recog_data.operand_loc[commutative];
3918       *recog_data.operand_loc[commutative]
3919 	= *recog_data.operand_loc[commutative + 1];
3920       *recog_data.operand_loc[commutative + 1] = tem;
3921 
3922       for (i = 0; i < n_reloads; i++)
3923 	{
3924 	  if (rld[i].opnum == commutative)
3925 	    rld[i].opnum = commutative + 1;
3926 	  else if (rld[i].opnum == commutative + 1)
3927 	    rld[i].opnum = commutative;
3928 	}
3929     }
3930 
3931   for (i = 0; i < noperands; i++)
3932     {
3933       operand_reloadnum[i] = -1;
3934 
3935       /* If this is an earlyclobber operand, we need to widen the scope.
3936 	 The reload must remain valid from the start of the insn being
3937 	 reloaded until after the operand is stored into its destination.
3938 	 We approximate this with RELOAD_OTHER even though we know that we
3939 	 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3940 
3941 	 One special case that is worth checking is when we have an
3942 	 output that is earlyclobber but isn't used past the insn (typically
3943 	 a SCRATCH).  In this case, we only need have the reload live
3944 	 through the insn itself, but not for any of our input or output
3945 	 reloads.
3946 	 But we must not accidentally narrow the scope of an existing
3947 	 RELOAD_OTHER reload - leave these alone.
3948 
3949 	 In any case, anything needed to address this operand can remain
3950 	 however they were previously categorized.  */
3951 
3952       if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3953 	operand_type[i]
3954 	  = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3955 	     ? RELOAD_FOR_INSN : RELOAD_OTHER);
3956     }
3957 
3958   /* Any constants that aren't allowed and can't be reloaded
3959      into registers are here changed into memory references.  */
3960   for (i = 0; i < noperands; i++)
3961     if (! goal_alternative_win[i])
3962       {
3963 	rtx op = recog_data.operand[i];
3964 	rtx subreg = NULL_RTX;
3965 	rtx plus = NULL_RTX;
3966 	machine_mode mode = operand_mode[i];
3967 
3968 	/* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3969 	   push_reload so we have to let them pass here.  */
3970 	if (GET_CODE (op) == SUBREG)
3971 	  {
3972 	    subreg = op;
3973 	    op = SUBREG_REG (op);
3974 	    mode = GET_MODE (op);
3975 	  }
3976 
3977 	if (GET_CODE (op) == PLUS)
3978 	  {
3979 	    plus = op;
3980 	    op = XEXP (op, 1);
3981 	  }
3982 
3983 	if (CONST_POOL_OK_P (mode, op)
3984 	    && ((targetm.preferred_reload_class (op, goal_alternative[i])
3985 		 == NO_REGS)
3986 		|| no_input_reloads))
3987 	  {
3988 	    int this_address_reloaded;
3989 	    rtx tem = force_const_mem (mode, op);
3990 
3991 	    /* If we stripped a SUBREG or a PLUS above add it back.  */
3992 	    if (plus != NULL_RTX)
3993 	      tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3994 
3995 	    if (subreg != NULL_RTX)
3996 	      tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3997 
3998 	    this_address_reloaded = 0;
3999 	    substed_operand[i] = recog_data.operand[i]
4000 	      = find_reloads_toplev (tem, i, address_type[i], ind_levels,
4001 				     0, insn, &this_address_reloaded);
4002 
4003 	    /* If the alternative accepts constant pool refs directly
4004 	       there will be no reload needed at all.  */
4005 	    if (plus == NULL_RTX
4006 		&& subreg == NULL_RTX
4007 		&& alternative_allows_const_pool_ref (this_address_reloaded == 0
4008 						      ? substed_operand[i]
4009 						      : NULL,
4010 						      recog_data.constraints[i],
4011 						      goal_alternative_number))
4012 	      goal_alternative_win[i] = 1;
4013 	  }
4014       }
4015 
4016   /* Record the values of the earlyclobber operands for the caller.  */
4017   if (goal_earlyclobber)
4018     for (i = 0; i < noperands; i++)
4019       if (goal_alternative_earlyclobber[i])
4020 	reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
4021 
4022   /* Now record reloads for all the operands that need them.  */
4023   for (i = 0; i < noperands; i++)
4024     if (! goal_alternative_win[i])
4025       {
4026 	/* Operands that match previous ones have already been handled.  */
4027 	if (goal_alternative_matches[i] >= 0)
4028 	  ;
4029 	/* Handle an operand with a nonoffsettable address
4030 	   appearing where an offsettable address will do
4031 	   by reloading the address into a base register.
4032 
4033 	   ??? We can also do this when the operand is a register and
4034 	   reg_equiv_mem is not offsettable, but this is a bit tricky,
4035 	   so we don't bother with it.  It may not be worth doing.  */
4036 	else if (goal_alternative_matched[i] == -1
4037 		 && goal_alternative_offmemok[i]
4038 		 && MEM_P (recog_data.operand[i]))
4039 	  {
4040 	    /* If the address to be reloaded is a VOIDmode constant,
4041 	       use the default address mode as mode of the reload register,
4042 	       as would have been done by find_reloads_address.  */
4043 	    addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4044 	    machine_mode address_mode;
4045 
4046 	    address_mode = get_address_mode (recog_data.operand[i]);
4047 	    operand_reloadnum[i]
4048 	      = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4049 			     &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4050 			     base_reg_class (VOIDmode, as, MEM, SCRATCH),
4051 			     address_mode,
4052 			     VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4053 	    rld[operand_reloadnum[i]].inc
4054 	      = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4055 
4056 	    /* If this operand is an output, we will have made any
4057 	       reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4058 	       now we are treating part of the operand as an input, so
4059 	       we must change these to RELOAD_FOR_INPUT_ADDRESS.  */
4060 
4061 	    if (modified[i] == RELOAD_WRITE)
4062 	      {
4063 		for (j = 0; j < n_reloads; j++)
4064 		  {
4065 		    if (rld[j].opnum == i)
4066 		      {
4067 			if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4068 			  rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4069 			else if (rld[j].when_needed
4070 				 == RELOAD_FOR_OUTADDR_ADDRESS)
4071 			  rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4072 		      }
4073 		  }
4074 	      }
4075 	  }
4076 	else if (goal_alternative_matched[i] == -1)
4077 	  {
4078 	    operand_reloadnum[i]
4079 	      = push_reload ((modified[i] != RELOAD_WRITE
4080 			      ? recog_data.operand[i] : 0),
4081 			     (modified[i] != RELOAD_READ
4082 			      ? recog_data.operand[i] : 0),
4083 			     (modified[i] != RELOAD_WRITE
4084 			      ? recog_data.operand_loc[i] : 0),
4085 			     (modified[i] != RELOAD_READ
4086 			      ? recog_data.operand_loc[i] : 0),
4087 			     (enum reg_class) goal_alternative[i],
4088 			     (modified[i] == RELOAD_WRITE
4089 			      ? VOIDmode : operand_mode[i]),
4090 			     (modified[i] == RELOAD_READ
4091 			      ? VOIDmode : operand_mode[i]),
4092 			     (insn_code_number < 0 ? 0
4093 			      : insn_data[insn_code_number].operand[i].strict_low),
4094 			     0, i, operand_type[i]);
4095 	  }
4096 	/* In a matching pair of operands, one must be input only
4097 	   and the other must be output only.
4098 	   Pass the input operand as IN and the other as OUT.  */
4099 	else if (modified[i] == RELOAD_READ
4100 		 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4101 	  {
4102 	    operand_reloadnum[i]
4103 	      = push_reload (recog_data.operand[i],
4104 			     recog_data.operand[goal_alternative_matched[i]],
4105 			     recog_data.operand_loc[i],
4106 			     recog_data.operand_loc[goal_alternative_matched[i]],
4107 			     (enum reg_class) goal_alternative[i],
4108 			     operand_mode[i],
4109 			     operand_mode[goal_alternative_matched[i]],
4110 			     0, 0, i, RELOAD_OTHER);
4111 	    operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4112 	  }
4113 	else if (modified[i] == RELOAD_WRITE
4114 		 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4115 	  {
4116 	    operand_reloadnum[goal_alternative_matched[i]]
4117 	      = push_reload (recog_data.operand[goal_alternative_matched[i]],
4118 			     recog_data.operand[i],
4119 			     recog_data.operand_loc[goal_alternative_matched[i]],
4120 			     recog_data.operand_loc[i],
4121 			     (enum reg_class) goal_alternative[i],
4122 			     operand_mode[goal_alternative_matched[i]],
4123 			     operand_mode[i],
4124 			     0, 0, i, RELOAD_OTHER);
4125 	    operand_reloadnum[i] = output_reloadnum;
4126 	  }
4127 	else
4128 	  {
4129 	    gcc_assert (insn_code_number < 0);
4130 	    error_for_asm (insn, "inconsistent operand constraints "
4131 			   "in an %<asm%>");
4132 	    /* Avoid further trouble with this insn.  */
4133 	    PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4134 	    n_reloads = 0;
4135 	    return 0;
4136 	  }
4137       }
4138     else if (goal_alternative_matched[i] < 0
4139 	     && goal_alternative_matches[i] < 0
4140 	     && address_operand_reloaded[i] != 1
4141 	     && optimize)
4142       {
4143 	/* For each non-matching operand that's a MEM or a pseudo-register
4144 	   that didn't get a hard register, make an optional reload.
4145 	   This may get done even if the insn needs no reloads otherwise.  */
4146 
4147 	rtx operand = recog_data.operand[i];
4148 
4149 	while (GET_CODE (operand) == SUBREG)
4150 	  operand = SUBREG_REG (operand);
4151 	if ((MEM_P (operand)
4152 	     || (REG_P (operand)
4153 		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4154 	    /* If this is only for an output, the optional reload would not
4155 	       actually cause us to use a register now, just note that
4156 	       something is stored here.  */
4157 	    && (goal_alternative[i] != NO_REGS
4158 		|| modified[i] == RELOAD_WRITE)
4159 	    && ! no_input_reloads
4160 	    /* An optional output reload might allow to delete INSN later.
4161 	       We mustn't make in-out reloads on insns that are not permitted
4162 	       output reloads.
4163 	       If this is an asm, we can't delete it; we must not even call
4164 	       push_reload for an optional output reload in this case,
4165 	       because we can't be sure that the constraint allows a register,
4166 	       and push_reload verifies the constraints for asms.  */
4167 	    && (modified[i] == RELOAD_READ
4168 		|| (! no_output_reloads && ! this_insn_is_asm)))
4169 	  operand_reloadnum[i]
4170 	    = push_reload ((modified[i] != RELOAD_WRITE
4171 			    ? recog_data.operand[i] : 0),
4172 			   (modified[i] != RELOAD_READ
4173 			    ? recog_data.operand[i] : 0),
4174 			   (modified[i] != RELOAD_WRITE
4175 			    ? recog_data.operand_loc[i] : 0),
4176 			   (modified[i] != RELOAD_READ
4177 			    ? recog_data.operand_loc[i] : 0),
4178 			   (enum reg_class) goal_alternative[i],
4179 			   (modified[i] == RELOAD_WRITE
4180 			    ? VOIDmode : operand_mode[i]),
4181 			   (modified[i] == RELOAD_READ
4182 			    ? VOIDmode : operand_mode[i]),
4183 			   (insn_code_number < 0 ? 0
4184 			    : insn_data[insn_code_number].operand[i].strict_low),
4185 			   1, i, operand_type[i]);
4186 	/* If a memory reference remains (either as a MEM or a pseudo that
4187 	   did not get a hard register), yet we can't make an optional
4188 	   reload, check if this is actually a pseudo register reference;
4189 	   we then need to emit a USE and/or a CLOBBER so that reload
4190 	   inheritance will do the right thing.  */
4191 	else if (replace
4192 		 && (MEM_P (operand)
4193 		     || (REG_P (operand)
4194 			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4195 			 && reg_renumber [REGNO (operand)] < 0)))
4196 	  {
4197 	    operand = *recog_data.operand_loc[i];
4198 
4199 	    while (GET_CODE (operand) == SUBREG)
4200 	      operand = SUBREG_REG (operand);
4201 	    if (REG_P (operand))
4202 	      {
4203 		if (modified[i] != RELOAD_WRITE)
4204 		  /* We mark the USE with QImode so that we recognize
4205 		     it as one that can be safely deleted at the end
4206 		     of reload.  */
4207 		  PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4208 					      insn), QImode);
4209 		if (modified[i] != RELOAD_READ)
4210 		  emit_insn_after (gen_clobber (operand), insn);
4211 	      }
4212 	  }
4213       }
4214     else if (goal_alternative_matches[i] >= 0
4215 	     && goal_alternative_win[goal_alternative_matches[i]]
4216 	     && modified[i] == RELOAD_READ
4217 	     && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4218 	     && ! no_input_reloads && ! no_output_reloads
4219 	     && optimize)
4220       {
4221 	/* Similarly, make an optional reload for a pair of matching
4222 	   objects that are in MEM or a pseudo that didn't get a hard reg.  */
4223 
4224 	rtx operand = recog_data.operand[i];
4225 
4226 	while (GET_CODE (operand) == SUBREG)
4227 	  operand = SUBREG_REG (operand);
4228 	if ((MEM_P (operand)
4229 	     || (REG_P (operand)
4230 		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4231 	    && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4232 	  operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4233 	    = push_reload (recog_data.operand[goal_alternative_matches[i]],
4234 			   recog_data.operand[i],
4235 			   recog_data.operand_loc[goal_alternative_matches[i]],
4236 			   recog_data.operand_loc[i],
4237 			   (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4238 			   operand_mode[goal_alternative_matches[i]],
4239 			   operand_mode[i],
4240 			   0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4241       }
4242 
4243   /* Perform whatever substitutions on the operands we are supposed
4244      to make due to commutativity or replacement of registers
4245      with equivalent constants or memory slots.  */
4246 
4247   for (i = 0; i < noperands; i++)
4248     {
4249       /* We only do this on the last pass through reload, because it is
4250 	 possible for some data (like reg_equiv_address) to be changed during
4251 	 later passes.  Moreover, we lose the opportunity to get a useful
4252 	 reload_{in,out}_reg when we do these replacements.  */
4253 
4254       if (replace)
4255 	{
4256 	  rtx substitution = substed_operand[i];
4257 
4258 	  *recog_data.operand_loc[i] = substitution;
4259 
4260 	  /* If we're replacing an operand with a LABEL_REF, we need to
4261 	     make sure that there's a REG_LABEL_OPERAND note attached to
4262 	     this instruction.  */
4263 	  if (GET_CODE (substitution) == LABEL_REF
4264 	      && !find_reg_note (insn, REG_LABEL_OPERAND,
4265 				 LABEL_REF_LABEL (substitution))
4266 	      /* For a JUMP_P, if it was a branch target it must have
4267 		 already been recorded as such.  */
4268 	      && (!JUMP_P (insn)
4269 		  || !label_is_jump_target_p (LABEL_REF_LABEL (substitution),
4270 					      insn)))
4271 	    {
4272 	      add_reg_note (insn, REG_LABEL_OPERAND,
4273 			    LABEL_REF_LABEL (substitution));
4274 	      if (LABEL_P (LABEL_REF_LABEL (substitution)))
4275 		++LABEL_NUSES (LABEL_REF_LABEL (substitution));
4276 	    }
4277 
4278 	}
4279       else
4280 	retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4281     }
4282 
4283   /* If this insn pattern contains any MATCH_DUP's, make sure that
4284      they will be substituted if the operands they match are substituted.
4285      Also do now any substitutions we already did on the operands.
4286 
4287      Don't do this if we aren't making replacements because we might be
4288      propagating things allocated by frame pointer elimination into places
4289      it doesn't expect.  */
4290 
4291   if (insn_code_number >= 0 && replace)
4292     for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4293       {
4294 	int opno = recog_data.dup_num[i];
4295 	*recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4296 	dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4297       }
4298 
4299 #if 0
4300   /* This loses because reloading of prior insns can invalidate the equivalence
4301      (or at least find_equiv_reg isn't smart enough to find it any more),
4302      causing this insn to need more reload regs than it needed before.
4303      It may be too late to make the reload regs available.
4304      Now this optimization is done safely in choose_reload_regs.  */
4305 
4306   /* For each reload of a reg into some other class of reg,
4307      search for an existing equivalent reg (same value now) in the right class.
4308      We can use it as long as we don't need to change its contents.  */
4309   for (i = 0; i < n_reloads; i++)
4310     if (rld[i].reg_rtx == 0
4311 	&& rld[i].in != 0
4312 	&& REG_P (rld[i].in)
4313 	&& rld[i].out == 0)
4314       {
4315 	rld[i].reg_rtx
4316 	  = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4317 			    static_reload_reg_p, 0, rld[i].inmode);
4318 	/* Prevent generation of insn to load the value
4319 	   because the one we found already has the value.  */
4320 	if (rld[i].reg_rtx)
4321 	  rld[i].in = rld[i].reg_rtx;
4322       }
4323 #endif
4324 
4325   /* If we detected error and replaced asm instruction by USE, forget about the
4326      reloads.  */
4327   if (GET_CODE (PATTERN (insn)) == USE
4328       && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4329     n_reloads = 0;
4330 
4331   /* Perhaps an output reload can be combined with another
4332      to reduce needs by one.  */
4333   if (!goal_earlyclobber)
4334     combine_reloads ();
4335 
4336   /* If we have a pair of reloads for parts of an address, they are reloading
4337      the same object, the operands themselves were not reloaded, and they
4338      are for two operands that are supposed to match, merge the reloads and
4339      change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS.  */
4340 
4341   for (i = 0; i < n_reloads; i++)
4342     {
4343       int k;
4344 
4345       for (j = i + 1; j < n_reloads; j++)
4346 	if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4347 	     || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4348 	     || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4349 	     || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4350 	    && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4351 		|| rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4352 		|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4353 		|| rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4354 	    && rtx_equal_p (rld[i].in, rld[j].in)
4355 	    && (operand_reloadnum[rld[i].opnum] < 0
4356 		|| rld[operand_reloadnum[rld[i].opnum]].optional)
4357 	    && (operand_reloadnum[rld[j].opnum] < 0
4358 		|| rld[operand_reloadnum[rld[j].opnum]].optional)
4359 	    && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4360 		|| (goal_alternative_matches[rld[j].opnum]
4361 		    == rld[i].opnum)))
4362 	  {
4363 	    for (k = 0; k < n_replacements; k++)
4364 	      if (replacements[k].what == j)
4365 		replacements[k].what = i;
4366 
4367 	    if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4368 		|| rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4369 	      rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4370 	    else
4371 	      rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4372 	    rld[j].in = 0;
4373 	  }
4374     }
4375 
4376   /* Scan all the reloads and update their type.
4377      If a reload is for the address of an operand and we didn't reload
4378      that operand, change the type.  Similarly, change the operand number
4379      of a reload when two operands match.  If a reload is optional, treat it
4380      as though the operand isn't reloaded.
4381 
4382      ??? This latter case is somewhat odd because if we do the optional
4383      reload, it means the object is hanging around.  Thus we need only
4384      do the address reload if the optional reload was NOT done.
4385 
4386      Change secondary reloads to be the address type of their operand, not
4387      the normal type.
4388 
4389      If an operand's reload is now RELOAD_OTHER, change any
4390      RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4391      RELOAD_FOR_OTHER_ADDRESS.  */
4392 
4393   for (i = 0; i < n_reloads; i++)
4394     {
4395       if (rld[i].secondary_p
4396 	  && rld[i].when_needed == operand_type[rld[i].opnum])
4397 	rld[i].when_needed = address_type[rld[i].opnum];
4398 
4399       if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4400 	   || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4401 	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4402 	   || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4403 	  && (operand_reloadnum[rld[i].opnum] < 0
4404 	      || rld[operand_reloadnum[rld[i].opnum]].optional))
4405 	{
4406 	  /* If we have a secondary reload to go along with this reload,
4407 	     change its type to RELOAD_FOR_OPADDR_ADDR.  */
4408 
4409 	  if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4410 	       || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4411 	      && rld[i].secondary_in_reload != -1)
4412 	    {
4413 	      int secondary_in_reload = rld[i].secondary_in_reload;
4414 
4415 	      rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4416 
4417 	      /* If there's a tertiary reload we have to change it also.  */
4418 	      if (secondary_in_reload > 0
4419 		  && rld[secondary_in_reload].secondary_in_reload != -1)
4420 		rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4421 		  = RELOAD_FOR_OPADDR_ADDR;
4422 	    }
4423 
4424 	  if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4425 	       || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4426 	      && rld[i].secondary_out_reload != -1)
4427 	    {
4428 	      int secondary_out_reload = rld[i].secondary_out_reload;
4429 
4430 	      rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4431 
4432 	      /* If there's a tertiary reload we have to change it also.  */
4433 	      if (secondary_out_reload
4434 		  && rld[secondary_out_reload].secondary_out_reload != -1)
4435 		rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4436 		  = RELOAD_FOR_OPADDR_ADDR;
4437 	    }
4438 
4439 	  if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4440 	      || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4441 	    rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4442 	  else
4443 	    rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4444 	}
4445 
4446       if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4447 	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4448 	  && operand_reloadnum[rld[i].opnum] >= 0
4449 	  && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4450 	      == RELOAD_OTHER))
4451 	rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4452 
4453       if (goal_alternative_matches[rld[i].opnum] >= 0)
4454 	rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4455     }
4456 
4457   /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4458      If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4459      reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4460 
4461      choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4462      conflict with RELOAD_FOR_OPERAND_ADDRESS reloads.  This is true for a
4463      single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4464      However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4465      then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4466      RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4467      This is complicated by the fact that a single operand can have more
4468      than one RELOAD_FOR_OPERAND_ADDRESS reload.  It is very difficult to fix
4469      choose_reload_regs without affecting code quality, and cases that
4470      actually fail are extremely rare, so it turns out to be better to fix
4471      the problem here by not generating cases that choose_reload_regs will
4472      fail for.  */
4473   /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4474      RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4475      a single operand.
4476      We can reduce the register pressure by exploiting that a
4477      RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4478      does not conflict with any of them, if it is only used for the first of
4479      the RELOAD_FOR_X_ADDRESS reloads.  */
4480   {
4481     int first_op_addr_num = -2;
4482     int first_inpaddr_num[MAX_RECOG_OPERANDS];
4483     int first_outpaddr_num[MAX_RECOG_OPERANDS];
4484     int need_change = 0;
4485     /* We use last_op_addr_reload and the contents of the above arrays
4486        first as flags - -2 means no instance encountered, -1 means exactly
4487        one instance encountered.
4488        If more than one instance has been encountered, we store the reload
4489        number of the first reload of the kind in question; reload numbers
4490        are known to be non-negative.  */
4491     for (i = 0; i < noperands; i++)
4492       first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4493     for (i = n_reloads - 1; i >= 0; i--)
4494       {
4495 	switch (rld[i].when_needed)
4496 	  {
4497 	  case RELOAD_FOR_OPERAND_ADDRESS:
4498 	    if (++first_op_addr_num >= 0)
4499 	      {
4500 		first_op_addr_num = i;
4501 		need_change = 1;
4502 	      }
4503 	    break;
4504 	  case RELOAD_FOR_INPUT_ADDRESS:
4505 	    if (++first_inpaddr_num[rld[i].opnum] >= 0)
4506 	      {
4507 		first_inpaddr_num[rld[i].opnum] = i;
4508 		need_change = 1;
4509 	      }
4510 	    break;
4511 	  case RELOAD_FOR_OUTPUT_ADDRESS:
4512 	    if (++first_outpaddr_num[rld[i].opnum] >= 0)
4513 	      {
4514 		first_outpaddr_num[rld[i].opnum] = i;
4515 		need_change = 1;
4516 	      }
4517 	    break;
4518 	  default:
4519 	    break;
4520 	  }
4521       }
4522 
4523     if (need_change)
4524       {
4525 	for (i = 0; i < n_reloads; i++)
4526 	  {
4527 	    int first_num;
4528 	    enum reload_type type;
4529 
4530 	    switch (rld[i].when_needed)
4531 	      {
4532 	      case RELOAD_FOR_OPADDR_ADDR:
4533 		first_num = first_op_addr_num;
4534 		type = RELOAD_FOR_OPERAND_ADDRESS;
4535 		break;
4536 	      case RELOAD_FOR_INPADDR_ADDRESS:
4537 		first_num = first_inpaddr_num[rld[i].opnum];
4538 		type = RELOAD_FOR_INPUT_ADDRESS;
4539 		break;
4540 	      case RELOAD_FOR_OUTADDR_ADDRESS:
4541 		first_num = first_outpaddr_num[rld[i].opnum];
4542 		type = RELOAD_FOR_OUTPUT_ADDRESS;
4543 		break;
4544 	      default:
4545 		continue;
4546 	      }
4547 	    if (first_num < 0)
4548 	      continue;
4549 	    else if (i > first_num)
4550 	      rld[i].when_needed = type;
4551 	    else
4552 	      {
4553 		/* Check if the only TYPE reload that uses reload I is
4554 		   reload FIRST_NUM.  */
4555 		for (j = n_reloads - 1; j > first_num; j--)
4556 		  {
4557 		    if (rld[j].when_needed == type
4558 			&& (rld[i].secondary_p
4559 			    ? rld[j].secondary_in_reload == i
4560 			    : reg_mentioned_p (rld[i].in, rld[j].in)))
4561 		      {
4562 			rld[i].when_needed = type;
4563 			break;
4564 		      }
4565 		  }
4566 	      }
4567 	  }
4568       }
4569   }
4570 
4571   /* See if we have any reloads that are now allowed to be merged
4572      because we've changed when the reload is needed to
4573      RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS.  Only
4574      check for the most common cases.  */
4575 
4576   for (i = 0; i < n_reloads; i++)
4577     if (rld[i].in != 0 && rld[i].out == 0
4578 	&& (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4579 	    || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4580 	    || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4581       for (j = 0; j < n_reloads; j++)
4582 	if (i != j && rld[j].in != 0 && rld[j].out == 0
4583 	    && rld[j].when_needed == rld[i].when_needed
4584 	    && MATCHES (rld[i].in, rld[j].in)
4585 	    && rld[i].rclass == rld[j].rclass
4586 	    && !rld[i].nocombine && !rld[j].nocombine
4587 	    && rld[i].reg_rtx == rld[j].reg_rtx)
4588 	  {
4589 	    rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4590 	    transfer_replacements (i, j);
4591 	    rld[j].in = 0;
4592 	  }
4593 
4594 #ifdef HAVE_cc0
4595   /* If we made any reloads for addresses, see if they violate a
4596      "no input reloads" requirement for this insn.  But loads that we
4597      do after the insn (such as for output addresses) are fine.  */
4598   if (no_input_reloads)
4599     for (i = 0; i < n_reloads; i++)
4600       gcc_assert (rld[i].in == 0
4601 		  || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4602 		  || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4603 #endif
4604 
4605   /* Compute reload_mode and reload_nregs.  */
4606   for (i = 0; i < n_reloads; i++)
4607     {
4608       rld[i].mode
4609 	= (rld[i].inmode == VOIDmode
4610 	   || (GET_MODE_SIZE (rld[i].outmode)
4611 	       > GET_MODE_SIZE (rld[i].inmode)))
4612 	  ? rld[i].outmode : rld[i].inmode;
4613 
4614       rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4615     }
4616 
4617   /* Special case a simple move with an input reload and a
4618      destination of a hard reg, if the hard reg is ok, use it.  */
4619   for (i = 0; i < n_reloads; i++)
4620     if (rld[i].when_needed == RELOAD_FOR_INPUT
4621 	&& GET_CODE (PATTERN (insn)) == SET
4622 	&& REG_P (SET_DEST (PATTERN (insn)))
4623 	&& (SET_SRC (PATTERN (insn)) == rld[i].in
4624 	    || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4625 	&& !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4626       {
4627 	rtx dest = SET_DEST (PATTERN (insn));
4628 	unsigned int regno = REGNO (dest);
4629 
4630 	if (regno < FIRST_PSEUDO_REGISTER
4631 	    && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4632 	    && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4633 	  {
4634 	    int nr = hard_regno_nregs[regno][rld[i].mode];
4635 	    int ok = 1, nri;
4636 
4637 	    for (nri = 1; nri < nr; nri ++)
4638 	      if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4639 		{
4640 		  ok = 0;
4641 		  break;
4642 		}
4643 
4644 	    if (ok)
4645 	      rld[i].reg_rtx = dest;
4646 	  }
4647       }
4648 
4649   return retval;
4650 }
4651 
4652 /* Return true if alternative number ALTNUM in constraint-string
4653    CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4654    MEM gives the reference if it didn't need any reloads, otherwise it
4655    is null.  */
4656 
4657 static bool
4658 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4659 				   const char *constraint, int altnum)
4660 {
4661   int c;
4662 
4663   /* Skip alternatives before the one requested.  */
4664   while (altnum > 0)
4665     {
4666       while (*constraint++ != ',')
4667 	;
4668       altnum--;
4669     }
4670   /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4671      If one of them is present, this alternative accepts the result of
4672      passing a constant-pool reference through find_reloads_toplev.
4673 
4674      The same is true of extra memory constraints if the address
4675      was reloaded into a register.  However, the target may elect
4676      to disallow the original constant address, forcing it to be
4677      reloaded into a register instead.  */
4678   for (; (c = *constraint) && c != ',' && c != '#';
4679        constraint += CONSTRAINT_LEN (c, constraint))
4680     {
4681       enum constraint_num cn = lookup_constraint (constraint);
4682       if (insn_extra_memory_constraint (cn)
4683 	  && (mem == NULL || constraint_satisfied_p (mem, cn)))
4684 	return true;
4685     }
4686   return false;
4687 }
4688 
4689 /* Scan X for memory references and scan the addresses for reloading.
4690    Also checks for references to "constant" regs that we want to eliminate
4691    and replaces them with the values they stand for.
4692    We may alter X destructively if it contains a reference to such.
4693    If X is just a constant reg, we return the equivalent value
4694    instead of X.
4695 
4696    IND_LEVELS says how many levels of indirect addressing this machine
4697    supports.
4698 
4699    OPNUM and TYPE identify the purpose of the reload.
4700 
4701    IS_SET_DEST is true if X is the destination of a SET, which is not
4702    appropriate to be replaced by a constant.
4703 
4704    INSN, if nonzero, is the insn in which we do the reload.  It is used
4705    to determine if we may generate output reloads, and where to put USEs
4706    for pseudos that we have to replace with stack slots.
4707 
4708    ADDRESS_RELOADED.  If nonzero, is a pointer to where we put the
4709    result of find_reloads_address.  */
4710 
4711 static rtx
4712 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4713 		     int ind_levels, int is_set_dest, rtx_insn *insn,
4714 		     int *address_reloaded)
4715 {
4716   RTX_CODE code = GET_CODE (x);
4717 
4718   const char *fmt = GET_RTX_FORMAT (code);
4719   int i;
4720   int copied;
4721 
4722   if (code == REG)
4723     {
4724       /* This code is duplicated for speed in find_reloads.  */
4725       int regno = REGNO (x);
4726       if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4727 	x = reg_equiv_constant (regno);
4728 #if 0
4729       /*  This creates (subreg (mem...)) which would cause an unnecessary
4730 	  reload of the mem.  */
4731       else if (reg_equiv_mem (regno) != 0)
4732 	x = reg_equiv_mem (regno);
4733 #endif
4734       else if (reg_equiv_memory_loc (regno)
4735 	       && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4736 	{
4737 	  rtx mem = make_memloc (x, regno);
4738 	  if (reg_equiv_address (regno)
4739 	      || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4740 	    {
4741 	      /* If this is not a toplevel operand, find_reloads doesn't see
4742 		 this substitution.  We have to emit a USE of the pseudo so
4743 		 that delete_output_reload can see it.  */
4744 	      if (replace_reloads && recog_data.operand[opnum] != x)
4745 		/* We mark the USE with QImode so that we recognize it
4746 		   as one that can be safely deleted at the end of
4747 		   reload.  */
4748 		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4749 			  QImode);
4750 	      x = mem;
4751 	      i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4752 					opnum, type, ind_levels, insn);
4753 	      if (!rtx_equal_p (x, mem))
4754 		push_reg_equiv_alt_mem (regno, x);
4755 	      if (address_reloaded)
4756 		*address_reloaded = i;
4757 	    }
4758 	}
4759       return x;
4760     }
4761   if (code == MEM)
4762     {
4763       rtx tem = x;
4764 
4765       i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4766 				opnum, type, ind_levels, insn);
4767       if (address_reloaded)
4768 	*address_reloaded = i;
4769 
4770       return tem;
4771     }
4772 
4773   if (code == SUBREG && REG_P (SUBREG_REG (x)))
4774     {
4775       /* Check for SUBREG containing a REG that's equivalent to a
4776 	 constant.  If the constant has a known value, truncate it
4777 	 right now.  Similarly if we are extracting a single-word of a
4778 	 multi-word constant.  If the constant is symbolic, allow it
4779 	 to be substituted normally.  push_reload will strip the
4780 	 subreg later.  The constant must not be VOIDmode, because we
4781 	 will lose the mode of the register (this should never happen
4782 	 because one of the cases above should handle it).  */
4783 
4784       int regno = REGNO (SUBREG_REG (x));
4785       rtx tem;
4786 
4787       if (regno >= FIRST_PSEUDO_REGISTER
4788 	  && reg_renumber[regno] < 0
4789 	  && reg_equiv_constant (regno) != 0)
4790 	{
4791 	  tem =
4792 	    simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4793 				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4794 	  gcc_assert (tem);
4795 	  if (CONSTANT_P (tem)
4796 	      && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4797 	    {
4798 	      tem = force_const_mem (GET_MODE (x), tem);
4799 	      i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4800 					&XEXP (tem, 0), opnum, type,
4801 					ind_levels, insn);
4802 	      if (address_reloaded)
4803 		*address_reloaded = i;
4804 	    }
4805 	  return tem;
4806 	}
4807 
4808       /* If the subreg contains a reg that will be converted to a mem,
4809 	 attempt to convert the whole subreg to a (narrower or wider)
4810 	 memory reference instead.  If this succeeds, we're done --
4811 	 otherwise fall through to check whether the inner reg still
4812 	 needs address reloads anyway.  */
4813 
4814       if (regno >= FIRST_PSEUDO_REGISTER
4815 	  && reg_equiv_memory_loc (regno) != 0)
4816 	{
4817 	  tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4818 					     insn, address_reloaded);
4819 	  if (tem)
4820 	    return tem;
4821 	}
4822     }
4823 
4824   for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4825     {
4826       if (fmt[i] == 'e')
4827 	{
4828 	  rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4829 					      ind_levels, is_set_dest, insn,
4830 					      address_reloaded);
4831 	  /* If we have replaced a reg with it's equivalent memory loc -
4832 	     that can still be handled here e.g. if it's in a paradoxical
4833 	     subreg - we must make the change in a copy, rather than using
4834 	     a destructive change.  This way, find_reloads can still elect
4835 	     not to do the change.  */
4836 	  if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4837 	    {
4838 	      x = shallow_copy_rtx (x);
4839 	      copied = 1;
4840 	    }
4841 	  XEXP (x, i) = new_part;
4842 	}
4843     }
4844   return x;
4845 }
4846 
4847 /* Return a mem ref for the memory equivalent of reg REGNO.
4848    This mem ref is not shared with anything.  */
4849 
4850 static rtx
4851 make_memloc (rtx ad, int regno)
4852 {
4853   /* We must rerun eliminate_regs, in case the elimination
4854      offsets have changed.  */
4855   rtx tem
4856     = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4857 	    0);
4858 
4859   /* If TEM might contain a pseudo, we must copy it to avoid
4860      modifying it when we do the substitution for the reload.  */
4861   if (rtx_varies_p (tem, 0))
4862     tem = copy_rtx (tem);
4863 
4864   tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4865   tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4866 
4867   /* Copy the result if it's still the same as the equivalence, to avoid
4868      modifying it when we do the substitution for the reload.  */
4869   if (tem == reg_equiv_memory_loc (regno))
4870     tem = copy_rtx (tem);
4871   return tem;
4872 }
4873 
4874 /* Returns true if AD could be turned into a valid memory reference
4875    to mode MODE in address space AS by reloading the part pointed to
4876    by PART into a register.  */
4877 
4878 static int
4879 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4880 				   addr_space_t as, rtx *part)
4881 {
4882   int retv;
4883   rtx tem = *part;
4884   rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4885 
4886   *part = reg;
4887   retv = memory_address_addr_space_p (mode, ad, as);
4888   *part = tem;
4889 
4890   return retv;
4891 }
4892 
4893 /* Record all reloads needed for handling memory address AD
4894    which appears in *LOC in a memory reference to mode MODE
4895    which itself is found in location  *MEMREFLOC.
4896    Note that we take shortcuts assuming that no multi-reg machine mode
4897    occurs as part of an address.
4898 
4899    OPNUM and TYPE specify the purpose of this reload.
4900 
4901    IND_LEVELS says how many levels of indirect addressing this machine
4902    supports.
4903 
4904    INSN, if nonzero, is the insn in which we do the reload.  It is used
4905    to determine if we may generate output reloads, and where to put USEs
4906    for pseudos that we have to replace with stack slots.
4907 
4908    Value is one if this address is reloaded or replaced as a whole; it is
4909    zero if the top level of this address was not reloaded or replaced, and
4910    it is -1 if it may or may not have been reloaded or replaced.
4911 
4912    Note that there is no verification that the address will be valid after
4913    this routine does its work.  Instead, we rely on the fact that the address
4914    was valid when reload started.  So we need only undo things that reload
4915    could have broken.  These are wrong register types, pseudos not allocated
4916    to a hard register, and frame pointer elimination.  */
4917 
4918 static int
4919 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4920 		      rtx *loc, int opnum, enum reload_type type,
4921 		      int ind_levels, rtx_insn *insn)
4922 {
4923   addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4924 			     : ADDR_SPACE_GENERIC;
4925   int regno;
4926   int removed_and = 0;
4927   int op_index;
4928   rtx tem;
4929 
4930   /* If the address is a register, see if it is a legitimate address and
4931      reload if not.  We first handle the cases where we need not reload
4932      or where we must reload in a non-standard way.  */
4933 
4934   if (REG_P (ad))
4935     {
4936       regno = REGNO (ad);
4937 
4938       if (reg_equiv_constant (regno) != 0)
4939 	{
4940 	  find_reloads_address_part (reg_equiv_constant (regno), loc,
4941 				     base_reg_class (mode, as, MEM, SCRATCH),
4942 				     GET_MODE (ad), opnum, type, ind_levels);
4943 	  return 1;
4944 	}
4945 
4946       tem = reg_equiv_memory_loc (regno);
4947       if (tem != 0)
4948 	{
4949 	  if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4950 	    {
4951 	      tem = make_memloc (ad, regno);
4952 	      if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4953 							XEXP (tem, 0),
4954 							MEM_ADDR_SPACE (tem)))
4955 		{
4956 		  rtx orig = tem;
4957 
4958 		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4959 					&XEXP (tem, 0), opnum,
4960 					ADDR_TYPE (type), ind_levels, insn);
4961 		  if (!rtx_equal_p (tem, orig))
4962 		    push_reg_equiv_alt_mem (regno, tem);
4963 		}
4964 	      /* We can avoid a reload if the register's equivalent memory
4965 		 expression is valid as an indirect memory address.
4966 		 But not all addresses are valid in a mem used as an indirect
4967 		 address: only reg or reg+constant.  */
4968 
4969 	      if (ind_levels > 0
4970 		  && strict_memory_address_addr_space_p (mode, tem, as)
4971 		  && (REG_P (XEXP (tem, 0))
4972 		      || (GET_CODE (XEXP (tem, 0)) == PLUS
4973 			  && REG_P (XEXP (XEXP (tem, 0), 0))
4974 			  && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4975 		{
4976 		  /* TEM is not the same as what we'll be replacing the
4977 		     pseudo with after reload, put a USE in front of INSN
4978 		     in the final reload pass.  */
4979 		  if (replace_reloads
4980 		      && num_not_at_initial_offset
4981 		      && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4982 		    {
4983 		      *loc = tem;
4984 		      /* We mark the USE with QImode so that we
4985 			 recognize it as one that can be safely
4986 			 deleted at the end of reload.  */
4987 		      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4988 						  insn), QImode);
4989 
4990 		      /* This doesn't really count as replacing the address
4991 			 as a whole, since it is still a memory access.  */
4992 		    }
4993 		  return 0;
4994 		}
4995 	      ad = tem;
4996 	    }
4997 	}
4998 
4999       /* The only remaining case where we can avoid a reload is if this is a
5000 	 hard register that is valid as a base register and which is not the
5001 	 subject of a CLOBBER in this insn.  */
5002 
5003       else if (regno < FIRST_PSEUDO_REGISTER
5004 	       && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
5005 	       && ! regno_clobbered_p (regno, this_insn, mode, 0))
5006 	return 0;
5007 
5008       /* If we do not have one of the cases above, we must do the reload.  */
5009       push_reload (ad, NULL_RTX, loc, (rtx*) 0,
5010 		   base_reg_class (mode, as, MEM, SCRATCH),
5011 		   GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
5012       return 1;
5013     }
5014 
5015   if (strict_memory_address_addr_space_p (mode, ad, as))
5016     {
5017       /* The address appears valid, so reloads are not needed.
5018 	 But the address may contain an eliminable register.
5019 	 This can happen because a machine with indirect addressing
5020 	 may consider a pseudo register by itself a valid address even when
5021 	 it has failed to get a hard reg.
5022 	 So do a tree-walk to find and eliminate all such regs.  */
5023 
5024       /* But first quickly dispose of a common case.  */
5025       if (GET_CODE (ad) == PLUS
5026 	  && CONST_INT_P (XEXP (ad, 1))
5027 	  && REG_P (XEXP (ad, 0))
5028 	  && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
5029 	return 0;
5030 
5031       subst_reg_equivs_changed = 0;
5032       *loc = subst_reg_equivs (ad, insn);
5033 
5034       if (! subst_reg_equivs_changed)
5035 	return 0;
5036 
5037       /* Check result for validity after substitution.  */
5038       if (strict_memory_address_addr_space_p (mode, ad, as))
5039 	return 0;
5040     }
5041 
5042 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5043   do
5044     {
5045       if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5046 	{
5047 	  LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5048 				     ind_levels, win);
5049 	}
5050       break;
5051     win:
5052       *memrefloc = copy_rtx (*memrefloc);
5053       XEXP (*memrefloc, 0) = ad;
5054       move_replacements (&ad, &XEXP (*memrefloc, 0));
5055       return -1;
5056     }
5057   while (0);
5058 #endif
5059 
5060   /* The address is not valid.  We have to figure out why.  First see if
5061      we have an outer AND and remove it if so.  Then analyze what's inside.  */
5062 
5063   if (GET_CODE (ad) == AND)
5064     {
5065       removed_and = 1;
5066       loc = &XEXP (ad, 0);
5067       ad = *loc;
5068     }
5069 
5070   /* One possibility for why the address is invalid is that it is itself
5071      a MEM.  This can happen when the frame pointer is being eliminated, a
5072      pseudo is not allocated to a hard register, and the offset between the
5073      frame and stack pointers is not its initial value.  In that case the
5074      pseudo will have been replaced by a MEM referring to the
5075      stack pointer.  */
5076   if (MEM_P (ad))
5077     {
5078       /* First ensure that the address in this MEM is valid.  Then, unless
5079 	 indirect addresses are valid, reload the MEM into a register.  */
5080       tem = ad;
5081       find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5082 			    opnum, ADDR_TYPE (type),
5083 			    ind_levels == 0 ? 0 : ind_levels - 1, insn);
5084 
5085       /* If tem was changed, then we must create a new memory reference to
5086 	 hold it and store it back into memrefloc.  */
5087       if (tem != ad && memrefloc)
5088 	{
5089 	  *memrefloc = copy_rtx (*memrefloc);
5090 	  copy_replacements (tem, XEXP (*memrefloc, 0));
5091 	  loc = &XEXP (*memrefloc, 0);
5092 	  if (removed_and)
5093 	    loc = &XEXP (*loc, 0);
5094 	}
5095 
5096       /* Check similar cases as for indirect addresses as above except
5097 	 that we can allow pseudos and a MEM since they should have been
5098 	 taken care of above.  */
5099 
5100       if (ind_levels == 0
5101 	  || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5102 	  || MEM_P (XEXP (tem, 0))
5103 	  || ! (REG_P (XEXP (tem, 0))
5104 		|| (GET_CODE (XEXP (tem, 0)) == PLUS
5105 		    && REG_P (XEXP (XEXP (tem, 0), 0))
5106 		    && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5107 	{
5108 	  /* Must use TEM here, not AD, since it is the one that will
5109 	     have any subexpressions reloaded, if needed.  */
5110 	  push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5111 		       base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5112 		       VOIDmode, 0,
5113 		       0, opnum, type);
5114 	  return ! removed_and;
5115 	}
5116       else
5117 	return 0;
5118     }
5119 
5120   /* If we have address of a stack slot but it's not valid because the
5121      displacement is too large, compute the sum in a register.
5122      Handle all base registers here, not just fp/ap/sp, because on some
5123      targets (namely SH) we can also get too large displacements from
5124      big-endian corrections.  */
5125   else if (GET_CODE (ad) == PLUS
5126 	   && REG_P (XEXP (ad, 0))
5127 	   && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5128 	   && CONST_INT_P (XEXP (ad, 1))
5129 	   && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5130 				    CONST_INT)
5131 	       /* Similarly, if we were to reload the base register and the
5132 		  mem+offset address is still invalid, then we want to reload
5133 		  the whole address, not just the base register.  */
5134 	       || ! maybe_memory_address_addr_space_p
5135 		     (mode, ad, as, &(XEXP (ad, 0)))))
5136 
5137     {
5138       /* Unshare the MEM rtx so we can safely alter it.  */
5139       if (memrefloc)
5140 	{
5141 	  *memrefloc = copy_rtx (*memrefloc);
5142 	  loc = &XEXP (*memrefloc, 0);
5143 	  if (removed_and)
5144 	    loc = &XEXP (*loc, 0);
5145 	}
5146 
5147       if (double_reg_address_ok
5148 	  && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5149 				  PLUS, CONST_INT))
5150 	{
5151 	  /* Unshare the sum as well.  */
5152 	  *loc = ad = copy_rtx (ad);
5153 
5154 	  /* Reload the displacement into an index reg.
5155 	     We assume the frame pointer or arg pointer is a base reg.  */
5156 	  find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5157 				     INDEX_REG_CLASS, GET_MODE (ad), opnum,
5158 				     type, ind_levels);
5159 	  return 0;
5160 	}
5161       else
5162 	{
5163 	  /* If the sum of two regs is not necessarily valid,
5164 	     reload the sum into a base reg.
5165 	     That will at least work.  */
5166 	  find_reloads_address_part (ad, loc,
5167 				     base_reg_class (mode, as, MEM, SCRATCH),
5168 				     GET_MODE (ad), opnum, type, ind_levels);
5169 	}
5170       return ! removed_and;
5171     }
5172 
5173   /* If we have an indexed stack slot, there are three possible reasons why
5174      it might be invalid: The index might need to be reloaded, the address
5175      might have been made by frame pointer elimination and hence have a
5176      constant out of range, or both reasons might apply.
5177 
5178      We can easily check for an index needing reload, but even if that is the
5179      case, we might also have an invalid constant.  To avoid making the
5180      conservative assumption and requiring two reloads, we see if this address
5181      is valid when not interpreted strictly.  If it is, the only problem is
5182      that the index needs a reload and find_reloads_address_1 will take care
5183      of it.
5184 
5185      Handle all base registers here, not just fp/ap/sp, because on some
5186      targets (namely SPARC) we can also get invalid addresses from preventive
5187      subreg big-endian corrections made by find_reloads_toplev.  We
5188      can also get expressions involving LO_SUM (rather than PLUS) from
5189      find_reloads_subreg_address.
5190 
5191      If we decide to do something, it must be that `double_reg_address_ok'
5192      is true.  We generate a reload of the base register + constant and
5193      rework the sum so that the reload register will be added to the index.
5194      This is safe because we know the address isn't shared.
5195 
5196      We check for the base register as both the first and second operand of
5197      the innermost PLUS and/or LO_SUM.  */
5198 
5199   for (op_index = 0; op_index < 2; ++op_index)
5200     {
5201       rtx operand, addend;
5202       enum rtx_code inner_code;
5203 
5204       if (GET_CODE (ad) != PLUS)
5205 	  continue;
5206 
5207       inner_code = GET_CODE (XEXP (ad, 0));
5208       if (!(GET_CODE (ad) == PLUS
5209 	    && CONST_INT_P (XEXP (ad, 1))
5210 	    && (inner_code == PLUS || inner_code == LO_SUM)))
5211 	continue;
5212 
5213       operand = XEXP (XEXP (ad, 0), op_index);
5214       if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5215 	continue;
5216 
5217       addend = XEXP (XEXP (ad, 0), 1 - op_index);
5218 
5219       if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5220 				GET_CODE (addend))
5221 	   || operand == frame_pointer_rtx
5222 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5223 	   || operand == hard_frame_pointer_rtx
5224 #endif
5225 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5226 	   || operand == arg_pointer_rtx
5227 #endif
5228 	   || operand == stack_pointer_rtx)
5229 	  && ! maybe_memory_address_addr_space_p
5230 		(mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5231 	{
5232 	  rtx offset_reg;
5233 	  enum reg_class cls;
5234 
5235 	  offset_reg = plus_constant (GET_MODE (ad), operand,
5236 				      INTVAL (XEXP (ad, 1)));
5237 
5238 	  /* Form the adjusted address.  */
5239 	  if (GET_CODE (XEXP (ad, 0)) == PLUS)
5240 	    ad = gen_rtx_PLUS (GET_MODE (ad),
5241 			       op_index == 0 ? offset_reg : addend,
5242 			       op_index == 0 ? addend : offset_reg);
5243 	  else
5244 	    ad = gen_rtx_LO_SUM (GET_MODE (ad),
5245 				 op_index == 0 ? offset_reg : addend,
5246 				 op_index == 0 ? addend : offset_reg);
5247 	  *loc = ad;
5248 
5249 	  cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5250 	  find_reloads_address_part (XEXP (ad, op_index),
5251 				     &XEXP (ad, op_index), cls,
5252 				     GET_MODE (ad), opnum, type, ind_levels);
5253 	  find_reloads_address_1 (mode, as,
5254 				  XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5255 				  GET_CODE (XEXP (ad, op_index)),
5256 				  &XEXP (ad, 1 - op_index), opnum,
5257 				  type, 0, insn);
5258 
5259 	  return 0;
5260 	}
5261     }
5262 
5263   /* See if address becomes valid when an eliminable register
5264      in a sum is replaced.  */
5265 
5266   tem = ad;
5267   if (GET_CODE (ad) == PLUS)
5268     tem = subst_indexed_address (ad);
5269   if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5270     {
5271       /* Ok, we win that way.  Replace any additional eliminable
5272 	 registers.  */
5273 
5274       subst_reg_equivs_changed = 0;
5275       tem = subst_reg_equivs (tem, insn);
5276 
5277       /* Make sure that didn't make the address invalid again.  */
5278 
5279       if (! subst_reg_equivs_changed
5280 	  || strict_memory_address_addr_space_p (mode, tem, as))
5281 	{
5282 	  *loc = tem;
5283 	  return 0;
5284 	}
5285     }
5286 
5287   /* If constants aren't valid addresses, reload the constant address
5288      into a register.  */
5289   if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5290     {
5291       machine_mode address_mode = GET_MODE (ad);
5292       if (address_mode == VOIDmode)
5293 	address_mode = targetm.addr_space.address_mode (as);
5294 
5295       /* If AD is an address in the constant pool, the MEM rtx may be shared.
5296 	 Unshare it so we can safely alter it.  */
5297       if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5298 	  && CONSTANT_POOL_ADDRESS_P (ad))
5299 	{
5300 	  *memrefloc = copy_rtx (*memrefloc);
5301 	  loc = &XEXP (*memrefloc, 0);
5302 	  if (removed_and)
5303 	    loc = &XEXP (*loc, 0);
5304 	}
5305 
5306       find_reloads_address_part (ad, loc,
5307 				 base_reg_class (mode, as, MEM, SCRATCH),
5308 				 address_mode, opnum, type, ind_levels);
5309       return ! removed_and;
5310     }
5311 
5312   return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5313 				 opnum, type, ind_levels, insn);
5314 }
5315 
5316 /* Find all pseudo regs appearing in AD
5317    that are eliminable in favor of equivalent values
5318    and do not have hard regs; replace them by their equivalents.
5319    INSN, if nonzero, is the insn in which we do the reload.  We put USEs in
5320    front of it for pseudos that we have to replace with stack slots.  */
5321 
5322 static rtx
5323 subst_reg_equivs (rtx ad, rtx_insn *insn)
5324 {
5325   RTX_CODE code = GET_CODE (ad);
5326   int i;
5327   const char *fmt;
5328 
5329   switch (code)
5330     {
5331     case HIGH:
5332     case CONST:
5333     CASE_CONST_ANY:
5334     case SYMBOL_REF:
5335     case LABEL_REF:
5336     case PC:
5337     case CC0:
5338       return ad;
5339 
5340     case REG:
5341       {
5342 	int regno = REGNO (ad);
5343 
5344 	if (reg_equiv_constant (regno) != 0)
5345 	  {
5346 	    subst_reg_equivs_changed = 1;
5347 	    return reg_equiv_constant (regno);
5348 	  }
5349 	if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5350 	  {
5351 	    rtx mem = make_memloc (ad, regno);
5352 	    if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5353 	      {
5354 		subst_reg_equivs_changed = 1;
5355 		/* We mark the USE with QImode so that we recognize it
5356 		   as one that can be safely deleted at the end of
5357 		   reload.  */
5358 		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5359 			  QImode);
5360 		return mem;
5361 	      }
5362 	  }
5363       }
5364       return ad;
5365 
5366     case PLUS:
5367       /* Quickly dispose of a common case.  */
5368       if (XEXP (ad, 0) == frame_pointer_rtx
5369 	  && CONST_INT_P (XEXP (ad, 1)))
5370 	return ad;
5371       break;
5372 
5373     default:
5374       break;
5375     }
5376 
5377   fmt = GET_RTX_FORMAT (code);
5378   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5379     if (fmt[i] == 'e')
5380       XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5381   return ad;
5382 }
5383 
5384 /* Compute the sum of X and Y, making canonicalizations assumed in an
5385    address, namely: sum constant integers, surround the sum of two
5386    constants with a CONST, put the constant as the second operand, and
5387    group the constant on the outermost sum.
5388 
5389    This routine assumes both inputs are already in canonical form.  */
5390 
5391 rtx
5392 form_sum (machine_mode mode, rtx x, rtx y)
5393 {
5394   rtx tem;
5395 
5396   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5397   gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5398 
5399   if (CONST_INT_P (x))
5400     return plus_constant (mode, y, INTVAL (x));
5401   else if (CONST_INT_P (y))
5402     return plus_constant (mode, x, INTVAL (y));
5403   else if (CONSTANT_P (x))
5404     tem = x, x = y, y = tem;
5405 
5406   if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5407     return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5408 
5409   /* Note that if the operands of Y are specified in the opposite
5410      order in the recursive calls below, infinite recursion will occur.  */
5411   if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5412     return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5413 
5414   /* If both constant, encapsulate sum.  Otherwise, just form sum.  A
5415      constant will have been placed second.  */
5416   if (CONSTANT_P (x) && CONSTANT_P (y))
5417     {
5418       if (GET_CODE (x) == CONST)
5419 	x = XEXP (x, 0);
5420       if (GET_CODE (y) == CONST)
5421 	y = XEXP (y, 0);
5422 
5423       return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5424     }
5425 
5426   return gen_rtx_PLUS (mode, x, y);
5427 }
5428 
5429 /* If ADDR is a sum containing a pseudo register that should be
5430    replaced with a constant (from reg_equiv_constant),
5431    return the result of doing so, and also apply the associative
5432    law so that the result is more likely to be a valid address.
5433    (But it is not guaranteed to be one.)
5434 
5435    Note that at most one register is replaced, even if more are
5436    replaceable.  Also, we try to put the result into a canonical form
5437    so it is more likely to be a valid address.
5438 
5439    In all other cases, return ADDR.  */
5440 
5441 static rtx
5442 subst_indexed_address (rtx addr)
5443 {
5444   rtx op0 = 0, op1 = 0, op2 = 0;
5445   rtx tem;
5446   int regno;
5447 
5448   if (GET_CODE (addr) == PLUS)
5449     {
5450       /* Try to find a register to replace.  */
5451       op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5452       if (REG_P (op0)
5453 	  && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5454 	  && reg_renumber[regno] < 0
5455 	  && reg_equiv_constant (regno) != 0)
5456 	op0 = reg_equiv_constant (regno);
5457       else if (REG_P (op1)
5458 	       && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5459 	       && reg_renumber[regno] < 0
5460 	       && reg_equiv_constant (regno) != 0)
5461 	op1 = reg_equiv_constant (regno);
5462       else if (GET_CODE (op0) == PLUS
5463 	       && (tem = subst_indexed_address (op0)) != op0)
5464 	op0 = tem;
5465       else if (GET_CODE (op1) == PLUS
5466 	       && (tem = subst_indexed_address (op1)) != op1)
5467 	op1 = tem;
5468       else
5469 	return addr;
5470 
5471       /* Pick out up to three things to add.  */
5472       if (GET_CODE (op1) == PLUS)
5473 	op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5474       else if (GET_CODE (op0) == PLUS)
5475 	op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5476 
5477       /* Compute the sum.  */
5478       if (op2 != 0)
5479 	op1 = form_sum (GET_MODE (addr), op1, op2);
5480       if (op1 != 0)
5481 	op0 = form_sum (GET_MODE (addr), op0, op1);
5482 
5483       return op0;
5484     }
5485   return addr;
5486 }
5487 
5488 /* Update the REG_INC notes for an insn.  It updates all REG_INC
5489    notes for the instruction which refer to REGNO the to refer
5490    to the reload number.
5491 
5492    INSN is the insn for which any REG_INC notes need updating.
5493 
5494    REGNO is the register number which has been reloaded.
5495 
5496    RELOADNUM is the reload number.  */
5497 
5498 static void
5499 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5500 		       int reloadnum ATTRIBUTE_UNUSED)
5501 {
5502 #ifdef AUTO_INC_DEC
5503   rtx link;
5504 
5505   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5506     if (REG_NOTE_KIND (link) == REG_INC
5507         && (int) REGNO (XEXP (link, 0)) == regno)
5508       push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5509 #endif
5510 }
5511 
5512 /* Record the pseudo registers we must reload into hard registers in a
5513    subexpression of a would-be memory address, X referring to a value
5514    in mode MODE.  (This function is not called if the address we find
5515    is strictly valid.)
5516 
5517    CONTEXT = 1 means we are considering regs as index regs,
5518    = 0 means we are considering them as base regs.
5519    OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5520    or an autoinc code.
5521    If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5522    is the code of the index part of the address.  Otherwise, pass SCRATCH
5523    for this argument.
5524    OPNUM and TYPE specify the purpose of any reloads made.
5525 
5526    IND_LEVELS says how many levels of indirect addressing are
5527    supported at this point in the address.
5528 
5529    INSN, if nonzero, is the insn in which we do the reload.  It is used
5530    to determine if we may generate output reloads.
5531 
5532    We return nonzero if X, as a whole, is reloaded or replaced.  */
5533 
5534 /* Note that we take shortcuts assuming that no multi-reg machine mode
5535    occurs as part of an address.
5536    Also, this is not fully machine-customizable; it works for machines
5537    such as VAXen and 68000's and 32000's, but other possible machines
5538    could have addressing modes that this does not handle right.
5539    If you add push_reload calls here, you need to make sure gen_reload
5540    handles those cases gracefully.  */
5541 
5542 static int
5543 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5544 			rtx x, int context,
5545 			enum rtx_code outer_code, enum rtx_code index_code,
5546 			rtx *loc, int opnum, enum reload_type type,
5547 			int ind_levels, rtx_insn *insn)
5548 {
5549 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX)	\
5550   ((CONTEXT) == 0							\
5551    ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX)		\
5552    : REGNO_OK_FOR_INDEX_P (REGNO))
5553 
5554   enum reg_class context_reg_class;
5555   RTX_CODE code = GET_CODE (x);
5556   bool reloaded_inner_of_autoinc = false;
5557 
5558   if (context == 1)
5559     context_reg_class = INDEX_REG_CLASS;
5560   else
5561     context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5562 
5563   switch (code)
5564     {
5565     case PLUS:
5566       {
5567 	rtx orig_op0 = XEXP (x, 0);
5568 	rtx orig_op1 = XEXP (x, 1);
5569 	RTX_CODE code0 = GET_CODE (orig_op0);
5570 	RTX_CODE code1 = GET_CODE (orig_op1);
5571 	rtx op0 = orig_op0;
5572 	rtx op1 = orig_op1;
5573 
5574 	if (GET_CODE (op0) == SUBREG)
5575 	  {
5576 	    op0 = SUBREG_REG (op0);
5577 	    code0 = GET_CODE (op0);
5578 	    if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5579 	      op0 = gen_rtx_REG (word_mode,
5580 				 (REGNO (op0) +
5581 				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5582 						       GET_MODE (SUBREG_REG (orig_op0)),
5583 						       SUBREG_BYTE (orig_op0),
5584 						       GET_MODE (orig_op0))));
5585 	  }
5586 
5587 	if (GET_CODE (op1) == SUBREG)
5588 	  {
5589 	    op1 = SUBREG_REG (op1);
5590 	    code1 = GET_CODE (op1);
5591 	    if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5592 	      /* ??? Why is this given op1's mode and above for
5593 		 ??? op0 SUBREGs we use word_mode?  */
5594 	      op1 = gen_rtx_REG (GET_MODE (op1),
5595 				 (REGNO (op1) +
5596 				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5597 						       GET_MODE (SUBREG_REG (orig_op1)),
5598 						       SUBREG_BYTE (orig_op1),
5599 						       GET_MODE (orig_op1))));
5600 	  }
5601 	/* Plus in the index register may be created only as a result of
5602 	   register rematerialization for expression like &localvar*4.  Reload it.
5603 	   It may be possible to combine the displacement on the outer level,
5604 	   but it is probably not worthwhile to do so.  */
5605 	if (context == 1)
5606 	  {
5607 	    find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5608 				  opnum, ADDR_TYPE (type), ind_levels, insn);
5609 	    push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5610 			 context_reg_class,
5611 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5612 	    return 1;
5613 	  }
5614 
5615 	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5616 	    || code0 == ZERO_EXTEND || code1 == MEM)
5617 	  {
5618 	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5619 				    &XEXP (x, 0), opnum, type, ind_levels,
5620 				    insn);
5621 	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5622 				    &XEXP (x, 1), opnum, type, ind_levels,
5623 				    insn);
5624 	  }
5625 
5626 	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5627 		 || code1 == ZERO_EXTEND || code0 == MEM)
5628 	  {
5629 	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5630 				    &XEXP (x, 0), opnum, type, ind_levels,
5631 				    insn);
5632 	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5633 				    &XEXP (x, 1), opnum, type, ind_levels,
5634 				    insn);
5635 	  }
5636 
5637 	else if (code0 == CONST_INT || code0 == CONST
5638 		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5639 	  find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5640 				  &XEXP (x, 1), opnum, type, ind_levels,
5641 				  insn);
5642 
5643 	else if (code1 == CONST_INT || code1 == CONST
5644 		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5645 	  find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5646 				  &XEXP (x, 0), opnum, type, ind_levels,
5647 				  insn);
5648 
5649 	else if (code0 == REG && code1 == REG)
5650 	  {
5651 	    if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5652 		&& regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5653 	      return 0;
5654 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5655 		     && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5656 	      return 0;
5657 	    else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5658 	      find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5659 				      &XEXP (x, 1), opnum, type, ind_levels,
5660 				      insn);
5661 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5662 	      find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5663 				      &XEXP (x, 0), opnum, type, ind_levels,
5664 				      insn);
5665 	    else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5666 	      find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5667 				      &XEXP (x, 0), opnum, type, ind_levels,
5668 				      insn);
5669 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5670 	      find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5671 				      &XEXP (x, 1), opnum, type, ind_levels,
5672 				      insn);
5673 	    else
5674 	      {
5675 		find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5676 					&XEXP (x, 0), opnum, type, ind_levels,
5677 					insn);
5678 		find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5679 					&XEXP (x, 1), opnum, type, ind_levels,
5680 					insn);
5681 	      }
5682 	  }
5683 
5684 	else if (code0 == REG)
5685 	  {
5686 	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5687 				    &XEXP (x, 0), opnum, type, ind_levels,
5688 				    insn);
5689 	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5690 				    &XEXP (x, 1), opnum, type, ind_levels,
5691 				    insn);
5692 	  }
5693 
5694 	else if (code1 == REG)
5695 	  {
5696 	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5697 				    &XEXP (x, 1), opnum, type, ind_levels,
5698 				    insn);
5699 	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5700 				    &XEXP (x, 0), opnum, type, ind_levels,
5701 				    insn);
5702 	  }
5703       }
5704 
5705       return 0;
5706 
5707     case POST_MODIFY:
5708     case PRE_MODIFY:
5709       {
5710 	rtx op0 = XEXP (x, 0);
5711 	rtx op1 = XEXP (x, 1);
5712 	enum rtx_code index_code;
5713 	int regno;
5714 	int reloadnum;
5715 
5716 	if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5717 	  return 0;
5718 
5719 	/* Currently, we only support {PRE,POST}_MODIFY constructs
5720 	   where a base register is {inc,dec}remented by the contents
5721 	   of another register or by a constant value.  Thus, these
5722 	   operands must match.  */
5723 	gcc_assert (op0 == XEXP (op1, 0));
5724 
5725 	/* Require index register (or constant).  Let's just handle the
5726 	   register case in the meantime... If the target allows
5727 	   auto-modify by a constant then we could try replacing a pseudo
5728 	   register with its equivalent constant where applicable.
5729 
5730 	   We also handle the case where the register was eliminated
5731 	   resulting in a PLUS subexpression.
5732 
5733 	   If we later decide to reload the whole PRE_MODIFY or
5734 	   POST_MODIFY, inc_for_reload might clobber the reload register
5735 	   before reading the index.  The index register might therefore
5736 	   need to live longer than a TYPE reload normally would, so be
5737 	   conservative and class it as RELOAD_OTHER.  */
5738 	if ((REG_P (XEXP (op1, 1))
5739 	     && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5740 	    || GET_CODE (XEXP (op1, 1)) == PLUS)
5741 	  find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5742 				  &XEXP (op1, 1), opnum, RELOAD_OTHER,
5743 				  ind_levels, insn);
5744 
5745 	gcc_assert (REG_P (XEXP (op1, 0)));
5746 
5747 	regno = REGNO (XEXP (op1, 0));
5748 	index_code = GET_CODE (XEXP (op1, 1));
5749 
5750 	/* A register that is incremented cannot be constant!  */
5751 	gcc_assert (regno < FIRST_PSEUDO_REGISTER
5752 		    || reg_equiv_constant (regno) == 0);
5753 
5754 	/* Handle a register that is equivalent to a memory location
5755 	    which cannot be addressed directly.  */
5756 	if (reg_equiv_memory_loc (regno) != 0
5757 	    && (reg_equiv_address (regno) != 0
5758 		|| num_not_at_initial_offset))
5759 	  {
5760 	    rtx tem = make_memloc (XEXP (x, 0), regno);
5761 
5762 	    if (reg_equiv_address (regno)
5763 		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5764 	      {
5765 		rtx orig = tem;
5766 
5767 		/* First reload the memory location's address.
5768 		    We can't use ADDR_TYPE (type) here, because we need to
5769 		    write back the value after reading it, hence we actually
5770 		    need two registers.  */
5771 		find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5772 				      &XEXP (tem, 0), opnum,
5773 				      RELOAD_OTHER,
5774 				      ind_levels, insn);
5775 
5776 		if (!rtx_equal_p (tem, orig))
5777 		  push_reg_equiv_alt_mem (regno, tem);
5778 
5779 		/* Then reload the memory location into a base
5780 		   register.  */
5781 		reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5782 					 &XEXP (op1, 0),
5783 					 base_reg_class (mode, as,
5784 							 code, index_code),
5785 					 GET_MODE (x), GET_MODE (x), 0,
5786 					 0, opnum, RELOAD_OTHER);
5787 
5788 		update_auto_inc_notes (this_insn, regno, reloadnum);
5789 		return 0;
5790 	      }
5791 	  }
5792 
5793 	if (reg_renumber[regno] >= 0)
5794 	  regno = reg_renumber[regno];
5795 
5796 	/* We require a base register here...  */
5797 	if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5798 	  {
5799 	    reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5800 				     &XEXP (op1, 0), &XEXP (x, 0),
5801 				     base_reg_class (mode, as,
5802 						     code, index_code),
5803 				     GET_MODE (x), GET_MODE (x), 0, 0,
5804 				     opnum, RELOAD_OTHER);
5805 
5806 	    update_auto_inc_notes (this_insn, regno, reloadnum);
5807 	    return 0;
5808 	  }
5809       }
5810       return 0;
5811 
5812     case POST_INC:
5813     case POST_DEC:
5814     case PRE_INC:
5815     case PRE_DEC:
5816       if (REG_P (XEXP (x, 0)))
5817 	{
5818 	  int regno = REGNO (XEXP (x, 0));
5819 	  int value = 0;
5820 	  rtx x_orig = x;
5821 
5822 	  /* A register that is incremented cannot be constant!  */
5823 	  gcc_assert (regno < FIRST_PSEUDO_REGISTER
5824 		      || reg_equiv_constant (regno) == 0);
5825 
5826 	  /* Handle a register that is equivalent to a memory location
5827 	     which cannot be addressed directly.  */
5828 	  if (reg_equiv_memory_loc (regno) != 0
5829 	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5830 	    {
5831 	      rtx tem = make_memloc (XEXP (x, 0), regno);
5832 	      if (reg_equiv_address (regno)
5833 		  || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5834 		{
5835 		  rtx orig = tem;
5836 
5837 		  /* First reload the memory location's address.
5838 		     We can't use ADDR_TYPE (type) here, because we need to
5839 		     write back the value after reading it, hence we actually
5840 		     need two registers.  */
5841 		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5842 					&XEXP (tem, 0), opnum, type,
5843 					ind_levels, insn);
5844 		  reloaded_inner_of_autoinc = true;
5845 		  if (!rtx_equal_p (tem, orig))
5846 		    push_reg_equiv_alt_mem (regno, tem);
5847 		  /* Put this inside a new increment-expression.  */
5848 		  x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5849 		  /* Proceed to reload that, as if it contained a register.  */
5850 		}
5851 	    }
5852 
5853 	  /* If we have a hard register that is ok in this incdec context,
5854 	     don't make a reload.  If the register isn't nice enough for
5855 	     autoincdec, we can reload it.  But, if an autoincrement of a
5856 	     register that we here verified as playing nice, still outside
5857 	     isn't "valid", it must be that no autoincrement is "valid".
5858 	     If that is true and something made an autoincrement anyway,
5859 	     this must be a special context where one is allowed.
5860 	     (For example, a "push" instruction.)
5861 	     We can't improve this address, so leave it alone.  */
5862 
5863 	  /* Otherwise, reload the autoincrement into a suitable hard reg
5864 	     and record how much to increment by.  */
5865 
5866 	  if (reg_renumber[regno] >= 0)
5867 	    regno = reg_renumber[regno];
5868 	  if (regno >= FIRST_PSEUDO_REGISTER
5869 	      || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5870 				      index_code))
5871 	    {
5872 	      int reloadnum;
5873 
5874 	      /* If we can output the register afterwards, do so, this
5875 		 saves the extra update.
5876 		 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5877 		 CALL_INSN - and it does not set CC0.
5878 		 But don't do this if we cannot directly address the
5879 		 memory location, since this will make it harder to
5880 		 reuse address reloads, and increases register pressure.
5881 		 Also don't do this if we can probably update x directly.  */
5882 	      rtx equiv = (MEM_P (XEXP (x, 0))
5883 			   ? XEXP (x, 0)
5884 			   : reg_equiv_mem (regno));
5885 	      enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5886 	      if (insn && NONJUMP_INSN_P (insn) && equiv
5887 		  && memory_operand (equiv, GET_MODE (equiv))
5888 #ifdef HAVE_cc0
5889 		  && ! sets_cc0_p (PATTERN (insn))
5890 #endif
5891 		  && ! (icode != CODE_FOR_nothing
5892 			&& insn_operand_matches (icode, 0, equiv)
5893 			&& insn_operand_matches (icode, 1, equiv))
5894 		  /* Using RELOAD_OTHER means we emit this and the reload we
5895 		     made earlier in the wrong order.  */
5896 		  && !reloaded_inner_of_autoinc)
5897 		{
5898 		  /* We use the original pseudo for loc, so that
5899 		     emit_reload_insns() knows which pseudo this
5900 		     reload refers to and updates the pseudo rtx, not
5901 		     its equivalent memory location, as well as the
5902 		     corresponding entry in reg_last_reload_reg.  */
5903 		  loc = &XEXP (x_orig, 0);
5904 		  x = XEXP (x, 0);
5905 		  reloadnum
5906 		    = push_reload (x, x, loc, loc,
5907 				   context_reg_class,
5908 				   GET_MODE (x), GET_MODE (x), 0, 0,
5909 				   opnum, RELOAD_OTHER);
5910 		}
5911 	      else
5912 		{
5913 		  reloadnum
5914 		    = push_reload (x, x, loc, (rtx*) 0,
5915 				   context_reg_class,
5916 				   GET_MODE (x), GET_MODE (x), 0, 0,
5917 				   opnum, type);
5918 		  rld[reloadnum].inc
5919 		    = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5920 
5921 		  value = 1;
5922 		}
5923 
5924 	      update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5925 				     reloadnum);
5926 	    }
5927 	  return value;
5928 	}
5929       return 0;
5930 
5931     case TRUNCATE:
5932     case SIGN_EXTEND:
5933     case ZERO_EXTEND:
5934       /* Look for parts to reload in the inner expression and reload them
5935 	 too, in addition to this operation.  Reloading all inner parts in
5936 	 addition to this one shouldn't be necessary, but at this point,
5937 	 we don't know if we can possibly omit any part that *can* be
5938 	 reloaded.  Targets that are better off reloading just either part
5939 	 (or perhaps even a different part of an outer expression), should
5940 	 define LEGITIMIZE_RELOAD_ADDRESS.  */
5941       find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5942 			      context, code, SCRATCH, &XEXP (x, 0), opnum,
5943 			      type, ind_levels, insn);
5944       push_reload (x, NULL_RTX, loc, (rtx*) 0,
5945 		   context_reg_class,
5946 		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5947       return 1;
5948 
5949     case MEM:
5950       /* This is probably the result of a substitution, by eliminate_regs, of
5951 	 an equivalent address for a pseudo that was not allocated to a hard
5952 	 register.  Verify that the specified address is valid and reload it
5953 	 into a register.
5954 
5955 	 Since we know we are going to reload this item, don't decrement for
5956 	 the indirection level.
5957 
5958 	 Note that this is actually conservative:  it would be slightly more
5959 	 efficient to use the value of SPILL_INDIRECT_LEVELS from
5960 	 reload1.c here.  */
5961 
5962       find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5963 			    opnum, ADDR_TYPE (type), ind_levels, insn);
5964       push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5965 		   context_reg_class,
5966 		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5967       return 1;
5968 
5969     case REG:
5970       {
5971 	int regno = REGNO (x);
5972 
5973 	if (reg_equiv_constant (regno) != 0)
5974 	  {
5975 	    find_reloads_address_part (reg_equiv_constant (regno), loc,
5976 				       context_reg_class,
5977 				       GET_MODE (x), opnum, type, ind_levels);
5978 	    return 1;
5979 	  }
5980 
5981 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5982 	 that feeds this insn.  */
5983 	if (reg_equiv_mem (regno) != 0)
5984 	  {
5985 	    push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5986 			 context_reg_class,
5987 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5988 	    return 1;
5989 	  }
5990 #endif
5991 
5992 	if (reg_equiv_memory_loc (regno)
5993 	    && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5994 	  {
5995 	    rtx tem = make_memloc (x, regno);
5996 	    if (reg_equiv_address (regno) != 0
5997 		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5998 	      {
5999 		x = tem;
6000 		find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
6001 				      &XEXP (x, 0), opnum, ADDR_TYPE (type),
6002 				      ind_levels, insn);
6003 		if (!rtx_equal_p (x, tem))
6004 		  push_reg_equiv_alt_mem (regno, x);
6005 	      }
6006 	  }
6007 
6008 	if (reg_renumber[regno] >= 0)
6009 	  regno = reg_renumber[regno];
6010 
6011 	if (regno >= FIRST_PSEUDO_REGISTER
6012 	    || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6013 				    index_code))
6014 	  {
6015 	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
6016 			 context_reg_class,
6017 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6018 	    return 1;
6019 	  }
6020 
6021 	/* If a register appearing in an address is the subject of a CLOBBER
6022 	   in this insn, reload it into some other register to be safe.
6023 	   The CLOBBER is supposed to make the register unavailable
6024 	   from before this insn to after it.  */
6025 	if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
6026 	  {
6027 	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
6028 			 context_reg_class,
6029 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6030 	    return 1;
6031 	  }
6032       }
6033       return 0;
6034 
6035     case SUBREG:
6036       if (REG_P (SUBREG_REG (x)))
6037 	{
6038 	  /* If this is a SUBREG of a hard register and the resulting register
6039 	     is of the wrong class, reload the whole SUBREG.  This avoids
6040 	     needless copies if SUBREG_REG is multi-word.  */
6041 	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6042 	    {
6043 	      int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6044 
6045 	      if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6046 				       index_code))
6047 		{
6048 		  push_reload (x, NULL_RTX, loc, (rtx*) 0,
6049 			       context_reg_class,
6050 			       GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6051 		  return 1;
6052 		}
6053 	    }
6054 	  /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6055 	     is larger than the class size, then reload the whole SUBREG.  */
6056 	  else
6057 	    {
6058 	      enum reg_class rclass = context_reg_class;
6059 	      if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6060 		  > reg_class_size[(int) rclass])
6061 		{
6062 		  /* If the inner register will be replaced by a memory
6063 		     reference, we can do this only if we can replace the
6064 		     whole subreg by a (narrower) memory reference.  If
6065 		     this is not possible, fall through and reload just
6066 		     the inner register (including address reloads).  */
6067 		  if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6068 		    {
6069 		      rtx tem = find_reloads_subreg_address (x, opnum,
6070 							     ADDR_TYPE (type),
6071 							     ind_levels, insn,
6072 							     NULL);
6073 		      if (tem)
6074 			{
6075 			  push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6076 				       GET_MODE (tem), VOIDmode, 0, 0,
6077 				       opnum, type);
6078 			  return 1;
6079 			}
6080 		    }
6081 		  else
6082 		    {
6083 		      push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6084 				   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6085 		      return 1;
6086 		    }
6087 		}
6088 	    }
6089 	}
6090       break;
6091 
6092     default:
6093       break;
6094     }
6095 
6096   {
6097     const char *fmt = GET_RTX_FORMAT (code);
6098     int i;
6099 
6100     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6101       {
6102 	if (fmt[i] == 'e')
6103 	  /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6104 	     we get here.  */
6105 	  find_reloads_address_1 (mode, as, XEXP (x, i), context,
6106 				  code, SCRATCH, &XEXP (x, i),
6107 				  opnum, type, ind_levels, insn);
6108       }
6109   }
6110 
6111 #undef REG_OK_FOR_CONTEXT
6112   return 0;
6113 }
6114 
6115 /* X, which is found at *LOC, is a part of an address that needs to be
6116    reloaded into a register of class RCLASS.  If X is a constant, or if
6117    X is a PLUS that contains a constant, check that the constant is a
6118    legitimate operand and that we are supposed to be able to load
6119    it into the register.
6120 
6121    If not, force the constant into memory and reload the MEM instead.
6122 
6123    MODE is the mode to use, in case X is an integer constant.
6124 
6125    OPNUM and TYPE describe the purpose of any reloads made.
6126 
6127    IND_LEVELS says how many levels of indirect addressing this machine
6128    supports.  */
6129 
6130 static void
6131 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6132 			   machine_mode mode, int opnum,
6133 			   enum reload_type type, int ind_levels)
6134 {
6135   if (CONSTANT_P (x)
6136       && (!targetm.legitimate_constant_p (mode, x)
6137 	  || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6138     {
6139       x = force_const_mem (mode, x);
6140       find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6141 			    opnum, type, ind_levels, 0);
6142     }
6143 
6144   else if (GET_CODE (x) == PLUS
6145 	   && CONSTANT_P (XEXP (x, 1))
6146 	   && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6147 	       || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6148 		   == NO_REGS))
6149     {
6150       rtx tem;
6151 
6152       tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6153       x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6154       find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6155 			    opnum, type, ind_levels, 0);
6156     }
6157 
6158   push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6159 	       mode, VOIDmode, 0, 0, opnum, type);
6160 }
6161 
6162 /* X, a subreg of a pseudo, is a part of an address that needs to be
6163    reloaded, and the pseusdo is equivalent to a memory location.
6164 
6165    Attempt to replace the whole subreg by a (possibly narrower or wider)
6166    memory reference.  If this is possible, return this new memory
6167    reference, and push all required address reloads.  Otherwise,
6168    return NULL.
6169 
6170    OPNUM and TYPE identify the purpose of the reload.
6171 
6172    IND_LEVELS says how many levels of indirect addressing are
6173    supported at this point in the address.
6174 
6175    INSN, if nonzero, is the insn in which we do the reload.  It is used
6176    to determine where to put USEs for pseudos that we have to replace with
6177    stack slots.  */
6178 
6179 static rtx
6180 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6181 			     int ind_levels, rtx_insn *insn,
6182 			     int *address_reloaded)
6183 {
6184   machine_mode outer_mode = GET_MODE (x);
6185   machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6186   int regno = REGNO (SUBREG_REG (x));
6187   int reloaded = 0;
6188   rtx tem, orig;
6189   int offset;
6190 
6191   gcc_assert (reg_equiv_memory_loc (regno) != 0);
6192 
6193   /* We cannot replace the subreg with a modified memory reference if:
6194 
6195      - we have a paradoxical subreg that implicitly acts as a zero or
6196        sign extension operation due to LOAD_EXTEND_OP;
6197 
6198      - we have a subreg that is implicitly supposed to act on the full
6199        register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6200 
6201      - the address of the equivalent memory location is mode-dependent;  or
6202 
6203      - we have a paradoxical subreg and the resulting memory is not
6204        sufficiently aligned to allow access in the wider mode.
6205 
6206     In addition, we choose not to perform the replacement for *any*
6207     paradoxical subreg, even if it were possible in principle.  This
6208     is to avoid generating wider memory references than necessary.
6209 
6210     This corresponds to how previous versions of reload used to handle
6211     paradoxical subregs where no address reload was required.  */
6212 
6213   if (paradoxical_subreg_p (x))
6214     return NULL;
6215 
6216 #ifdef WORD_REGISTER_OPERATIONS
6217   if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6218       && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6219           == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6220     return NULL;
6221 #endif
6222 
6223   /* Since we don't attempt to handle paradoxical subregs, we can just
6224      call into simplify_subreg, which will handle all remaining checks
6225      for us.  */
6226   orig = make_memloc (SUBREG_REG (x), regno);
6227   offset = SUBREG_BYTE (x);
6228   tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6229   if (!tem || !MEM_P (tem))
6230     return NULL;
6231 
6232   /* Now push all required address reloads, if any.  */
6233   reloaded = find_reloads_address (GET_MODE (tem), &tem,
6234 				   XEXP (tem, 0), &XEXP (tem, 0),
6235 				   opnum, type, ind_levels, insn);
6236   /* ??? Do we need to handle nonzero offsets somehow?  */
6237   if (!offset && !rtx_equal_p (tem, orig))
6238     push_reg_equiv_alt_mem (regno, tem);
6239 
6240   /* For some processors an address may be valid in the original mode but
6241      not in a smaller mode.  For example, ARM accepts a scaled index register
6242      in SImode but not in HImode.  Note that this is only a problem if the
6243      address in reg_equiv_mem is already invalid in the new mode; other
6244      cases would be fixed by find_reloads_address as usual.
6245 
6246      ??? We attempt to handle such cases here by doing an additional reload
6247      of the full address after the usual processing by find_reloads_address.
6248      Note that this may not work in the general case, but it seems to cover
6249      the cases where this situation currently occurs.  A more general fix
6250      might be to reload the *value* instead of the address, but this would
6251      not be expected by the callers of this routine as-is.
6252 
6253      If find_reloads_address already completed replaced the address, there
6254      is nothing further to do.  */
6255   if (reloaded == 0
6256       && reg_equiv_mem (regno) != 0
6257       && !strict_memory_address_addr_space_p
6258 		(GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6259 		 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6260     {
6261       push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6262 		   base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6263 				   MEM, SCRATCH),
6264 		   GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6265       reloaded = 1;
6266     }
6267 
6268   /* If this is not a toplevel operand, find_reloads doesn't see this
6269      substitution.  We have to emit a USE of the pseudo so that
6270      delete_output_reload can see it.  */
6271   if (replace_reloads && recog_data.operand[opnum] != x)
6272     /* We mark the USE with QImode so that we recognize it as one that
6273        can be safely deleted at the end of reload.  */
6274     PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6275 	      QImode);
6276 
6277   if (address_reloaded)
6278     *address_reloaded = reloaded;
6279 
6280   return tem;
6281 }
6282 
6283 /* Substitute into the current INSN the registers into which we have reloaded
6284    the things that need reloading.  The array `replacements'
6285    contains the locations of all pointers that must be changed
6286    and says what to replace them with.
6287 
6288    Return the rtx that X translates into; usually X, but modified.  */
6289 
6290 void
6291 subst_reloads (rtx_insn *insn)
6292 {
6293   int i;
6294 
6295   for (i = 0; i < n_replacements; i++)
6296     {
6297       struct replacement *r = &replacements[i];
6298       rtx reloadreg = rld[r->what].reg_rtx;
6299       if (reloadreg)
6300 	{
6301 #ifdef DEBUG_RELOAD
6302 	  /* This checking takes a very long time on some platforms
6303 	     causing the gcc.c-torture/compile/limits-fnargs.c test
6304 	     to time out during testing.  See PR 31850.
6305 
6306 	     Internal consistency test.  Check that we don't modify
6307 	     anything in the equivalence arrays.  Whenever something from
6308 	     those arrays needs to be reloaded, it must be unshared before
6309 	     being substituted into; the equivalence must not be modified.
6310 	     Otherwise, if the equivalence is used after that, it will
6311 	     have been modified, and the thing substituted (probably a
6312 	     register) is likely overwritten and not a usable equivalence.  */
6313 	  int check_regno;
6314 
6315 	  for (check_regno = 0; check_regno < max_regno; check_regno++)
6316 	    {
6317 #define CHECK_MODF(ARRAY)						\
6318 	      gcc_assert (!(*reg_equivs)[check_regno].ARRAY		\
6319 			  || !loc_mentioned_in_p (r->where,		\
6320 						  (*reg_equivs)[check_regno].ARRAY))
6321 
6322 	      CHECK_MODF (constant);
6323 	      CHECK_MODF (memory_loc);
6324 	      CHECK_MODF (address);
6325 	      CHECK_MODF (mem);
6326 #undef CHECK_MODF
6327 	    }
6328 #endif /* DEBUG_RELOAD */
6329 
6330 	  /* If we're replacing a LABEL_REF with a register, there must
6331 	     already be an indication (to e.g. flow) which label this
6332 	     register refers to.  */
6333 	  gcc_assert (GET_CODE (*r->where) != LABEL_REF
6334 		      || !JUMP_P (insn)
6335 		      || find_reg_note (insn,
6336 					REG_LABEL_OPERAND,
6337 					XEXP (*r->where, 0))
6338 		      || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6339 
6340 	  /* Encapsulate RELOADREG so its machine mode matches what
6341 	     used to be there.  Note that gen_lowpart_common will
6342 	     do the wrong thing if RELOADREG is multi-word.  RELOADREG
6343 	     will always be a REG here.  */
6344 	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6345 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6346 
6347 	  *r->where = reloadreg;
6348 	}
6349       /* If reload got no reg and isn't optional, something's wrong.  */
6350       else
6351 	gcc_assert (rld[r->what].optional);
6352     }
6353 }
6354 
6355 /* Make a copy of any replacements being done into X and move those
6356    copies to locations in Y, a copy of X.  */
6357 
6358 void
6359 copy_replacements (rtx x, rtx y)
6360 {
6361   copy_replacements_1 (&x, &y, n_replacements);
6362 }
6363 
6364 static void
6365 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6366 {
6367   int i, j;
6368   rtx x, y;
6369   struct replacement *r;
6370   enum rtx_code code;
6371   const char *fmt;
6372 
6373   for (j = 0; j < orig_replacements; j++)
6374     if (replacements[j].where == px)
6375       {
6376 	r = &replacements[n_replacements++];
6377 	r->where = py;
6378 	r->what = replacements[j].what;
6379 	r->mode = replacements[j].mode;
6380       }
6381 
6382   x = *px;
6383   y = *py;
6384   code = GET_CODE (x);
6385   fmt = GET_RTX_FORMAT (code);
6386 
6387   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6388     {
6389       if (fmt[i] == 'e')
6390 	copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6391       else if (fmt[i] == 'E')
6392 	for (j = XVECLEN (x, i); --j >= 0; )
6393 	  copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6394 			       orig_replacements);
6395     }
6396 }
6397 
6398 /* Change any replacements being done to *X to be done to *Y.  */
6399 
6400 void
6401 move_replacements (rtx *x, rtx *y)
6402 {
6403   int i;
6404 
6405   for (i = 0; i < n_replacements; i++)
6406     if (replacements[i].where == x)
6407       replacements[i].where = y;
6408 }
6409 
6410 /* If LOC was scheduled to be replaced by something, return the replacement.
6411    Otherwise, return *LOC.  */
6412 
6413 rtx
6414 find_replacement (rtx *loc)
6415 {
6416   struct replacement *r;
6417 
6418   for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6419     {
6420       rtx reloadreg = rld[r->what].reg_rtx;
6421 
6422       if (reloadreg && r->where == loc)
6423 	{
6424 	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6425 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6426 
6427 	  return reloadreg;
6428 	}
6429       else if (reloadreg && GET_CODE (*loc) == SUBREG
6430 	       && r->where == &SUBREG_REG (*loc))
6431 	{
6432 	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6433 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6434 
6435 	  return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6436 				      GET_MODE (SUBREG_REG (*loc)),
6437 				      SUBREG_BYTE (*loc));
6438 	}
6439     }
6440 
6441   /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6442      what's inside and make a new rtl if so.  */
6443   if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6444       || GET_CODE (*loc) == MULT)
6445     {
6446       rtx x = find_replacement (&XEXP (*loc, 0));
6447       rtx y = find_replacement (&XEXP (*loc, 1));
6448 
6449       if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6450 	return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6451     }
6452 
6453   return *loc;
6454 }
6455 
6456 /* Return nonzero if register in range [REGNO, ENDREGNO)
6457    appears either explicitly or implicitly in X
6458    other than being stored into (except for earlyclobber operands).
6459 
6460    References contained within the substructure at LOC do not count.
6461    LOC may be zero, meaning don't ignore anything.
6462 
6463    This is similar to refers_to_regno_p in rtlanal.c except that we
6464    look at equivalences for pseudos that didn't get hard registers.  */
6465 
6466 static int
6467 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6468 			      rtx x, rtx *loc)
6469 {
6470   int i;
6471   unsigned int r;
6472   RTX_CODE code;
6473   const char *fmt;
6474 
6475   if (x == 0)
6476     return 0;
6477 
6478  repeat:
6479   code = GET_CODE (x);
6480 
6481   switch (code)
6482     {
6483     case REG:
6484       r = REGNO (x);
6485 
6486       /* If this is a pseudo, a hard register must not have been allocated.
6487 	 X must therefore either be a constant or be in memory.  */
6488       if (r >= FIRST_PSEUDO_REGISTER)
6489 	{
6490 	  if (reg_equiv_memory_loc (r))
6491 	    return refers_to_regno_for_reload_p (regno, endregno,
6492 						 reg_equiv_memory_loc (r),
6493 						 (rtx*) 0);
6494 
6495 	  gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6496 	  return 0;
6497 	}
6498 
6499       return (endregno > r
6500 	      && regno < r + (r < FIRST_PSEUDO_REGISTER
6501 			      ? hard_regno_nregs[r][GET_MODE (x)]
6502 			      : 1));
6503 
6504     case SUBREG:
6505       /* If this is a SUBREG of a hard reg, we can see exactly which
6506 	 registers are being modified.  Otherwise, handle normally.  */
6507       if (REG_P (SUBREG_REG (x))
6508 	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6509 	{
6510 	  unsigned int inner_regno = subreg_regno (x);
6511 	  unsigned int inner_endregno
6512 	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6513 			     ? subreg_nregs (x) : 1);
6514 
6515 	  return endregno > inner_regno && regno < inner_endregno;
6516 	}
6517       break;
6518 
6519     case CLOBBER:
6520     case SET:
6521       if (&SET_DEST (x) != loc
6522 	  /* Note setting a SUBREG counts as referring to the REG it is in for
6523 	     a pseudo but not for hard registers since we can
6524 	     treat each word individually.  */
6525 	  && ((GET_CODE (SET_DEST (x)) == SUBREG
6526 	       && loc != &SUBREG_REG (SET_DEST (x))
6527 	       && REG_P (SUBREG_REG (SET_DEST (x)))
6528 	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6529 	       && refers_to_regno_for_reload_p (regno, endregno,
6530 						SUBREG_REG (SET_DEST (x)),
6531 						loc))
6532 	      /* If the output is an earlyclobber operand, this is
6533 		 a conflict.  */
6534 	      || ((!REG_P (SET_DEST (x))
6535 		   || earlyclobber_operand_p (SET_DEST (x)))
6536 		  && refers_to_regno_for_reload_p (regno, endregno,
6537 						   SET_DEST (x), loc))))
6538 	return 1;
6539 
6540       if (code == CLOBBER || loc == &SET_SRC (x))
6541 	return 0;
6542       x = SET_SRC (x);
6543       goto repeat;
6544 
6545     default:
6546       break;
6547     }
6548 
6549   /* X does not match, so try its subexpressions.  */
6550 
6551   fmt = GET_RTX_FORMAT (code);
6552   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6553     {
6554       if (fmt[i] == 'e' && loc != &XEXP (x, i))
6555 	{
6556 	  if (i == 0)
6557 	    {
6558 	      x = XEXP (x, 0);
6559 	      goto repeat;
6560 	    }
6561 	  else
6562 	    if (refers_to_regno_for_reload_p (regno, endregno,
6563 					      XEXP (x, i), loc))
6564 	      return 1;
6565 	}
6566       else if (fmt[i] == 'E')
6567 	{
6568 	  int j;
6569 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6570 	    if (loc != &XVECEXP (x, i, j)
6571 		&& refers_to_regno_for_reload_p (regno, endregno,
6572 						 XVECEXP (x, i, j), loc))
6573 	      return 1;
6574 	}
6575     }
6576   return 0;
6577 }
6578 
6579 /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
6580    we check if any register number in X conflicts with the relevant register
6581    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
6582    contains a MEM (we don't bother checking for memory addresses that can't
6583    conflict because we expect this to be a rare case.
6584 
6585    This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6586    that we look at equivalences for pseudos that didn't get hard registers.  */
6587 
6588 int
6589 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6590 {
6591   int regno, endregno;
6592 
6593   /* Overly conservative.  */
6594   if (GET_CODE (x) == STRICT_LOW_PART
6595       || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6596     x = XEXP (x, 0);
6597 
6598   /* If either argument is a constant, then modifying X can not affect IN.  */
6599   if (CONSTANT_P (x) || CONSTANT_P (in))
6600     return 0;
6601   else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6602     return refers_to_mem_for_reload_p (in);
6603   else if (GET_CODE (x) == SUBREG)
6604     {
6605       regno = REGNO (SUBREG_REG (x));
6606       if (regno < FIRST_PSEUDO_REGISTER)
6607 	regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6608 				      GET_MODE (SUBREG_REG (x)),
6609 				      SUBREG_BYTE (x),
6610 				      GET_MODE (x));
6611       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6612 			  ? subreg_nregs (x) : 1);
6613 
6614       return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6615     }
6616   else if (REG_P (x))
6617     {
6618       regno = REGNO (x);
6619 
6620       /* If this is a pseudo, it must not have been assigned a hard register.
6621 	 Therefore, it must either be in memory or be a constant.  */
6622 
6623       if (regno >= FIRST_PSEUDO_REGISTER)
6624 	{
6625 	  if (reg_equiv_memory_loc (regno))
6626 	    return refers_to_mem_for_reload_p (in);
6627 	  gcc_assert (reg_equiv_constant (regno));
6628 	  return 0;
6629 	}
6630 
6631       endregno = END_HARD_REGNO (x);
6632 
6633       return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6634     }
6635   else if (MEM_P (x))
6636     return refers_to_mem_for_reload_p (in);
6637   else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6638 	   || GET_CODE (x) == CC0)
6639     return reg_mentioned_p (x, in);
6640   else
6641     {
6642       gcc_assert (GET_CODE (x) == PLUS);
6643 
6644       /* We actually want to know if X is mentioned somewhere inside IN.
6645 	 We must not say that (plus (sp) (const_int 124)) is in
6646 	 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6647 	 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6648 	 into a RELOAD_OTHER on behalf of another RELOAD_OTHER.  */
6649       while (MEM_P (in))
6650 	in = XEXP (in, 0);
6651       if (REG_P (in))
6652 	return 0;
6653       else if (GET_CODE (in) == PLUS)
6654 	return (rtx_equal_p (x, in)
6655 		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6656 		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6657       else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6658 		   || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6659     }
6660 
6661   gcc_unreachable ();
6662 }
6663 
6664 /* Return nonzero if anything in X contains a MEM.  Look also for pseudo
6665    registers.  */
6666 
6667 static int
6668 refers_to_mem_for_reload_p (rtx x)
6669 {
6670   const char *fmt;
6671   int i;
6672 
6673   if (MEM_P (x))
6674     return 1;
6675 
6676   if (REG_P (x))
6677     return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6678 	    && reg_equiv_memory_loc (REGNO (x)));
6679 
6680   fmt = GET_RTX_FORMAT (GET_CODE (x));
6681   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6682     if (fmt[i] == 'e'
6683 	&& (MEM_P (XEXP (x, i))
6684 	    || refers_to_mem_for_reload_p (XEXP (x, i))))
6685       return 1;
6686 
6687   return 0;
6688 }
6689 
6690 /* Check the insns before INSN to see if there is a suitable register
6691    containing the same value as GOAL.
6692    If OTHER is -1, look for a register in class RCLASS.
6693    Otherwise, just see if register number OTHER shares GOAL's value.
6694 
6695    Return an rtx for the register found, or zero if none is found.
6696 
6697    If RELOAD_REG_P is (short *)1,
6698    we reject any hard reg that appears in reload_reg_rtx
6699    because such a hard reg is also needed coming into this insn.
6700 
6701    If RELOAD_REG_P is any other nonzero value,
6702    it is a vector indexed by hard reg number
6703    and we reject any hard reg whose element in the vector is nonnegative
6704    as well as any that appears in reload_reg_rtx.
6705 
6706    If GOAL is zero, then GOALREG is a register number; we look
6707    for an equivalent for that register.
6708 
6709    MODE is the machine mode of the value we want an equivalence for.
6710    If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6711 
6712    This function is used by jump.c as well as in the reload pass.
6713 
6714    If GOAL is the sum of the stack pointer and a constant, we treat it
6715    as if it were a constant except that sp is required to be unchanging.  */
6716 
6717 rtx
6718 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6719 		short *reload_reg_p, int goalreg, machine_mode mode)
6720 {
6721   rtx_insn *p = insn;
6722   rtx goaltry, valtry, value;
6723   rtx_insn *where;
6724   rtx pat;
6725   int regno = -1;
6726   int valueno;
6727   int goal_mem = 0;
6728   int goal_const = 0;
6729   int goal_mem_addr_varies = 0;
6730   int need_stable_sp = 0;
6731   int nregs;
6732   int valuenregs;
6733   int num = 0;
6734 
6735   if (goal == 0)
6736     regno = goalreg;
6737   else if (REG_P (goal))
6738     regno = REGNO (goal);
6739   else if (MEM_P (goal))
6740     {
6741       enum rtx_code code = GET_CODE (XEXP (goal, 0));
6742       if (MEM_VOLATILE_P (goal))
6743 	return 0;
6744       if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6745 	return 0;
6746       /* An address with side effects must be reexecuted.  */
6747       switch (code)
6748 	{
6749 	case POST_INC:
6750 	case PRE_INC:
6751 	case POST_DEC:
6752 	case PRE_DEC:
6753 	case POST_MODIFY:
6754 	case PRE_MODIFY:
6755 	  return 0;
6756 	default:
6757 	  break;
6758 	}
6759       goal_mem = 1;
6760     }
6761   else if (CONSTANT_P (goal))
6762     goal_const = 1;
6763   else if (GET_CODE (goal) == PLUS
6764 	   && XEXP (goal, 0) == stack_pointer_rtx
6765 	   && CONSTANT_P (XEXP (goal, 1)))
6766     goal_const = need_stable_sp = 1;
6767   else if (GET_CODE (goal) == PLUS
6768 	   && XEXP (goal, 0) == frame_pointer_rtx
6769 	   && CONSTANT_P (XEXP (goal, 1)))
6770     goal_const = 1;
6771   else
6772     return 0;
6773 
6774   num = 0;
6775   /* Scan insns back from INSN, looking for one that copies
6776      a value into or out of GOAL.
6777      Stop and give up if we reach a label.  */
6778 
6779   while (1)
6780     {
6781       p = PREV_INSN (p);
6782       if (p && DEBUG_INSN_P (p))
6783 	continue;
6784       num++;
6785       if (p == 0 || LABEL_P (p)
6786 	  || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6787 	return 0;
6788 
6789       /* Don't reuse register contents from before a setjmp-type
6790 	 function call; on the second return (from the longjmp) it
6791 	 might have been clobbered by a later reuse.  It doesn't
6792 	 seem worthwhile to actually go and see if it is actually
6793 	 reused even if that information would be readily available;
6794 	 just don't reuse it across the setjmp call.  */
6795       if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6796 	return 0;
6797 
6798       if (NONJUMP_INSN_P (p)
6799 	  /* If we don't want spill regs ...  */
6800 	  && (! (reload_reg_p != 0
6801 		 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6802 	      /* ... then ignore insns introduced by reload; they aren't
6803 		 useful and can cause results in reload_as_needed to be
6804 		 different from what they were when calculating the need for
6805 		 spills.  If we notice an input-reload insn here, we will
6806 		 reject it below, but it might hide a usable equivalent.
6807 		 That makes bad code.  It may even fail: perhaps no reg was
6808 		 spilled for this insn because it was assumed we would find
6809 		 that equivalent.  */
6810 	      || INSN_UID (p) < reload_first_uid))
6811 	{
6812 	  rtx tem;
6813 	  pat = single_set (p);
6814 
6815 	  /* First check for something that sets some reg equal to GOAL.  */
6816 	  if (pat != 0
6817 	      && ((regno >= 0
6818 		   && true_regnum (SET_SRC (pat)) == regno
6819 		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6820 		  ||
6821 		  (regno >= 0
6822 		   && true_regnum (SET_DEST (pat)) == regno
6823 		   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6824 		  ||
6825 		  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6826 		   /* When looking for stack pointer + const,
6827 		      make sure we don't use a stack adjust.  */
6828 		   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6829 		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6830 		  || (goal_mem
6831 		      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6832 		      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6833 		  || (goal_mem
6834 		      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6835 		      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6836 		  /* If we are looking for a constant,
6837 		     and something equivalent to that constant was copied
6838 		     into a reg, we can use that reg.  */
6839 		  || (goal_const && REG_NOTES (p) != 0
6840 		      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6841 		      && ((rtx_equal_p (XEXP (tem, 0), goal)
6842 			   && (valueno
6843 			       = true_regnum (valtry = SET_DEST (pat))) >= 0)
6844 			  || (REG_P (SET_DEST (pat))
6845 			      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6846 			      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6847 			      && CONST_INT_P (goal)
6848 			      && 0 != (goaltry
6849 				       = operand_subword (XEXP (tem, 0), 0, 0,
6850 							  VOIDmode))
6851 			      && rtx_equal_p (goal, goaltry)
6852 			      && (valtry
6853 				  = operand_subword (SET_DEST (pat), 0, 0,
6854 						     VOIDmode))
6855 			      && (valueno = true_regnum (valtry)) >= 0)))
6856 		  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6857 							  NULL_RTX))
6858 		      && REG_P (SET_DEST (pat))
6859 		      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6860 		      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6861 		      && CONST_INT_P (goal)
6862 		      && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6863 							  VOIDmode))
6864 		      && rtx_equal_p (goal, goaltry)
6865 		      && (valtry
6866 			  = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6867 		      && (valueno = true_regnum (valtry)) >= 0)))
6868 	    {
6869 	      if (other >= 0)
6870 		{
6871 		  if (valueno != other)
6872 		    continue;
6873 		}
6874 	      else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6875 		continue;
6876 	      else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6877 					  mode, valueno))
6878 		continue;
6879 	      value = valtry;
6880 	      where = p;
6881 	      break;
6882 	    }
6883 	}
6884     }
6885 
6886   /* We found a previous insn copying GOAL into a suitable other reg VALUE
6887      (or copying VALUE into GOAL, if GOAL is also a register).
6888      Now verify that VALUE is really valid.  */
6889 
6890   /* VALUENO is the register number of VALUE; a hard register.  */
6891 
6892   /* Don't try to re-use something that is killed in this insn.  We want
6893      to be able to trust REG_UNUSED notes.  */
6894   if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6895     return 0;
6896 
6897   /* If we propose to get the value from the stack pointer or if GOAL is
6898      a MEM based on the stack pointer, we need a stable SP.  */
6899   if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6900       || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6901 							  goal)))
6902     need_stable_sp = 1;
6903 
6904   /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
6905   if (GET_MODE (value) != mode)
6906     return 0;
6907 
6908   /* Reject VALUE if it was loaded from GOAL
6909      and is also a register that appears in the address of GOAL.  */
6910 
6911   if (goal_mem && value == SET_DEST (single_set (where))
6912       && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6913 				       goal, (rtx*) 0))
6914     return 0;
6915 
6916   /* Reject registers that overlap GOAL.  */
6917 
6918   if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6919     nregs = hard_regno_nregs[regno][mode];
6920   else
6921     nregs = 1;
6922   valuenregs = hard_regno_nregs[valueno][mode];
6923 
6924   if (!goal_mem && !goal_const
6925       && regno + nregs > valueno && regno < valueno + valuenregs)
6926     return 0;
6927 
6928   /* Reject VALUE if it is one of the regs reserved for reloads.
6929      Reload1 knows how to reuse them anyway, and it would get
6930      confused if we allocated one without its knowledge.
6931      (Now that insns introduced by reload are ignored above,
6932      this case shouldn't happen, but I'm not positive.)  */
6933 
6934   if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6935     {
6936       int i;
6937       for (i = 0; i < valuenregs; ++i)
6938 	if (reload_reg_p[valueno + i] >= 0)
6939 	  return 0;
6940     }
6941 
6942   /* Reject VALUE if it is a register being used for an input reload
6943      even if it is not one of those reserved.  */
6944 
6945   if (reload_reg_p != 0)
6946     {
6947       int i;
6948       for (i = 0; i < n_reloads; i++)
6949 	if (rld[i].reg_rtx != 0 && rld[i].in)
6950 	  {
6951 	    int regno1 = REGNO (rld[i].reg_rtx);
6952 	    int nregs1 = hard_regno_nregs[regno1]
6953 					 [GET_MODE (rld[i].reg_rtx)];
6954 	    if (regno1 < valueno + valuenregs
6955 		&& regno1 + nregs1 > valueno)
6956 	      return 0;
6957 	  }
6958     }
6959 
6960   if (goal_mem)
6961     /* We must treat frame pointer as varying here,
6962        since it can vary--in a nonlocal goto as generated by expand_goto.  */
6963     goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6964 
6965   /* Now verify that the values of GOAL and VALUE remain unaltered
6966      until INSN is reached.  */
6967 
6968   p = insn;
6969   while (1)
6970     {
6971       p = PREV_INSN (p);
6972       if (p == where)
6973 	return value;
6974 
6975       /* Don't trust the conversion past a function call
6976 	 if either of the two is in a call-clobbered register, or memory.  */
6977       if (CALL_P (p))
6978 	{
6979 	  int i;
6980 
6981 	  if (goal_mem || need_stable_sp)
6982 	    return 0;
6983 
6984 	  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6985 	    for (i = 0; i < nregs; ++i)
6986 	      if (call_used_regs[regno + i]
6987 		  || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6988 		return 0;
6989 
6990 	  if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6991 	    for (i = 0; i < valuenregs; ++i)
6992 	      if (call_used_regs[valueno + i]
6993 		  || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6994 		return 0;
6995 	}
6996 
6997       if (INSN_P (p))
6998 	{
6999 	  pat = PATTERN (p);
7000 
7001 	  /* Watch out for unspec_volatile, and volatile asms.  */
7002 	  if (volatile_insn_p (pat))
7003 	    return 0;
7004 
7005 	  /* If this insn P stores in either GOAL or VALUE, return 0.
7006 	     If GOAL is a memory ref and this insn writes memory, return 0.
7007 	     If GOAL is a memory ref and its address is not constant,
7008 	     and this insn P changes a register used in GOAL, return 0.  */
7009 
7010 	  if (GET_CODE (pat) == COND_EXEC)
7011 	    pat = COND_EXEC_CODE (pat);
7012 	  if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
7013 	    {
7014 	      rtx dest = SET_DEST (pat);
7015 	      while (GET_CODE (dest) == SUBREG
7016 		     || GET_CODE (dest) == ZERO_EXTRACT
7017 		     || GET_CODE (dest) == STRICT_LOW_PART)
7018 		dest = XEXP (dest, 0);
7019 	      if (REG_P (dest))
7020 		{
7021 		  int xregno = REGNO (dest);
7022 		  int xnregs;
7023 		  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7024 		    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7025 		  else
7026 		    xnregs = 1;
7027 		  if (xregno < regno + nregs && xregno + xnregs > regno)
7028 		    return 0;
7029 		  if (xregno < valueno + valuenregs
7030 		      && xregno + xnregs > valueno)
7031 		    return 0;
7032 		  if (goal_mem_addr_varies
7033 		      && reg_overlap_mentioned_for_reload_p (dest, goal))
7034 		    return 0;
7035 		  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7036 		    return 0;
7037 		}
7038 	      else if (goal_mem && MEM_P (dest)
7039 		       && ! push_operand (dest, GET_MODE (dest)))
7040 		return 0;
7041 	      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7042 		       && reg_equiv_memory_loc (regno) != 0)
7043 		return 0;
7044 	      else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7045 		return 0;
7046 	    }
7047 	  else if (GET_CODE (pat) == PARALLEL)
7048 	    {
7049 	      int i;
7050 	      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7051 		{
7052 		  rtx v1 = XVECEXP (pat, 0, i);
7053 		  if (GET_CODE (v1) == COND_EXEC)
7054 		    v1 = COND_EXEC_CODE (v1);
7055 		  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7056 		    {
7057 		      rtx dest = SET_DEST (v1);
7058 		      while (GET_CODE (dest) == SUBREG
7059 			     || GET_CODE (dest) == ZERO_EXTRACT
7060 			     || GET_CODE (dest) == STRICT_LOW_PART)
7061 			dest = XEXP (dest, 0);
7062 		      if (REG_P (dest))
7063 			{
7064 			  int xregno = REGNO (dest);
7065 			  int xnregs;
7066 			  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7067 			    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7068 			  else
7069 			    xnregs = 1;
7070 			  if (xregno < regno + nregs
7071 			      && xregno + xnregs > regno)
7072 			    return 0;
7073 			  if (xregno < valueno + valuenregs
7074 			      && xregno + xnregs > valueno)
7075 			    return 0;
7076 			  if (goal_mem_addr_varies
7077 			      && reg_overlap_mentioned_for_reload_p (dest,
7078 								     goal))
7079 			    return 0;
7080 			  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7081 			    return 0;
7082 			}
7083 		      else if (goal_mem && MEM_P (dest)
7084 			       && ! push_operand (dest, GET_MODE (dest)))
7085 			return 0;
7086 		      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7087 			       && reg_equiv_memory_loc (regno) != 0)
7088 			return 0;
7089 		      else if (need_stable_sp
7090 			       && push_operand (dest, GET_MODE (dest)))
7091 			return 0;
7092 		    }
7093 		}
7094 	    }
7095 
7096 	  if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7097 	    {
7098 	      rtx link;
7099 
7100 	      for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7101 		   link = XEXP (link, 1))
7102 		{
7103 		  pat = XEXP (link, 0);
7104 		  if (GET_CODE (pat) == CLOBBER)
7105 		    {
7106 		      rtx dest = SET_DEST (pat);
7107 
7108 		      if (REG_P (dest))
7109 			{
7110 			  int xregno = REGNO (dest);
7111 			  int xnregs
7112 			    = hard_regno_nregs[xregno][GET_MODE (dest)];
7113 
7114 			  if (xregno < regno + nregs
7115 			      && xregno + xnregs > regno)
7116 			    return 0;
7117 			  else if (xregno < valueno + valuenregs
7118 				   && xregno + xnregs > valueno)
7119 			    return 0;
7120 			  else if (goal_mem_addr_varies
7121 				   && reg_overlap_mentioned_for_reload_p (dest,
7122 								     goal))
7123 			    return 0;
7124 			}
7125 
7126 		      else if (goal_mem && MEM_P (dest)
7127 			       && ! push_operand (dest, GET_MODE (dest)))
7128 			return 0;
7129 		      else if (need_stable_sp
7130 			       && push_operand (dest, GET_MODE (dest)))
7131 			return 0;
7132 		    }
7133 		}
7134 	    }
7135 
7136 #ifdef AUTO_INC_DEC
7137 	  /* If this insn auto-increments or auto-decrements
7138 	     either regno or valueno, return 0 now.
7139 	     If GOAL is a memory ref and its address is not constant,
7140 	     and this insn P increments a register used in GOAL, return 0.  */
7141 	  {
7142 	    rtx link;
7143 
7144 	    for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7145 	      if (REG_NOTE_KIND (link) == REG_INC
7146 		  && REG_P (XEXP (link, 0)))
7147 		{
7148 		  int incno = REGNO (XEXP (link, 0));
7149 		  if (incno < regno + nregs && incno >= regno)
7150 		    return 0;
7151 		  if (incno < valueno + valuenregs && incno >= valueno)
7152 		    return 0;
7153 		  if (goal_mem_addr_varies
7154 		      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7155 							     goal))
7156 		    return 0;
7157 		}
7158 	  }
7159 #endif
7160 	}
7161     }
7162 }
7163 
7164 /* Find a place where INCED appears in an increment or decrement operator
7165    within X, and return the amount INCED is incremented or decremented by.
7166    The value is always positive.  */
7167 
7168 static int
7169 find_inc_amount (rtx x, rtx inced)
7170 {
7171   enum rtx_code code = GET_CODE (x);
7172   const char *fmt;
7173   int i;
7174 
7175   if (code == MEM)
7176     {
7177       rtx addr = XEXP (x, 0);
7178       if ((GET_CODE (addr) == PRE_DEC
7179 	   || GET_CODE (addr) == POST_DEC
7180 	   || GET_CODE (addr) == PRE_INC
7181 	   || GET_CODE (addr) == POST_INC)
7182 	  && XEXP (addr, 0) == inced)
7183 	return GET_MODE_SIZE (GET_MODE (x));
7184       else if ((GET_CODE (addr) == PRE_MODIFY
7185 		|| GET_CODE (addr) == POST_MODIFY)
7186 	       && GET_CODE (XEXP (addr, 1)) == PLUS
7187 	       && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7188 	       && XEXP (addr, 0) == inced
7189 	       && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7190 	{
7191 	  i = INTVAL (XEXP (XEXP (addr, 1), 1));
7192 	  return i < 0 ? -i : i;
7193 	}
7194     }
7195 
7196   fmt = GET_RTX_FORMAT (code);
7197   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7198     {
7199       if (fmt[i] == 'e')
7200 	{
7201 	  int tem = find_inc_amount (XEXP (x, i), inced);
7202 	  if (tem != 0)
7203 	    return tem;
7204 	}
7205       if (fmt[i] == 'E')
7206 	{
7207 	  int j;
7208 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7209 	    {
7210 	      int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7211 	      if (tem != 0)
7212 		return tem;
7213 	    }
7214 	}
7215     }
7216 
7217   return 0;
7218 }
7219 
7220 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7221    REG_INC note in insn INSN.  REGNO must refer to a hard register.  */
7222 
7223 #ifdef AUTO_INC_DEC
7224 static int
7225 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7226 			   rtx insn)
7227 {
7228   rtx link;
7229 
7230   gcc_assert (insn);
7231 
7232   if (! INSN_P (insn))
7233     return 0;
7234 
7235   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7236     if (REG_NOTE_KIND (link) == REG_INC)
7237       {
7238 	unsigned int test = (int) REGNO (XEXP (link, 0));
7239 	if (test >= regno && test < endregno)
7240 	  return 1;
7241       }
7242   return 0;
7243 }
7244 #else
7245 
7246 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7247 
7248 #endif
7249 
7250 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7251    If SETS is 1, also consider SETs.  If SETS is 2, enable checking
7252    REG_INC.  REGNO must refer to a hard register.  */
7253 
7254 int
7255 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7256 		   int sets)
7257 {
7258   unsigned int nregs, endregno;
7259 
7260   /* regno must be a hard register.  */
7261   gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7262 
7263   nregs = hard_regno_nregs[regno][mode];
7264   endregno = regno + nregs;
7265 
7266   if ((GET_CODE (PATTERN (insn)) == CLOBBER
7267        || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7268       && REG_P (XEXP (PATTERN (insn), 0)))
7269     {
7270       unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7271 
7272       return test >= regno && test < endregno;
7273     }
7274 
7275   if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7276     return 1;
7277 
7278   if (GET_CODE (PATTERN (insn)) == PARALLEL)
7279     {
7280       int i = XVECLEN (PATTERN (insn), 0) - 1;
7281 
7282       for (; i >= 0; i--)
7283 	{
7284 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7285 	  if ((GET_CODE (elt) == CLOBBER
7286 	       || (sets == 1 && GET_CODE (elt) == SET))
7287 	      && REG_P (XEXP (elt, 0)))
7288 	    {
7289 	      unsigned int test = REGNO (XEXP (elt, 0));
7290 
7291 	      if (test >= regno && test < endregno)
7292 		return 1;
7293 	    }
7294 	  if (sets == 2
7295 	      && reg_inc_found_and_valid_p (regno, endregno, elt))
7296 	    return 1;
7297 	}
7298     }
7299 
7300   return 0;
7301 }
7302 
7303 /* Find the low part, with mode MODE, of a hard regno RELOADREG.  */
7304 rtx
7305 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7306 {
7307   int regno;
7308 
7309   if (GET_MODE (reloadreg) == mode)
7310     return reloadreg;
7311 
7312   regno = REGNO (reloadreg);
7313 
7314   if (REG_WORDS_BIG_ENDIAN)
7315     regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7316       - (int) hard_regno_nregs[regno][mode];
7317 
7318   return gen_rtx_REG (mode, regno);
7319 }
7320 
7321 static const char *const reload_when_needed_name[] =
7322 {
7323   "RELOAD_FOR_INPUT",
7324   "RELOAD_FOR_OUTPUT",
7325   "RELOAD_FOR_INSN",
7326   "RELOAD_FOR_INPUT_ADDRESS",
7327   "RELOAD_FOR_INPADDR_ADDRESS",
7328   "RELOAD_FOR_OUTPUT_ADDRESS",
7329   "RELOAD_FOR_OUTADDR_ADDRESS",
7330   "RELOAD_FOR_OPERAND_ADDRESS",
7331   "RELOAD_FOR_OPADDR_ADDR",
7332   "RELOAD_OTHER",
7333   "RELOAD_FOR_OTHER_ADDRESS"
7334 };
7335 
7336 /* These functions are used to print the variables set by 'find_reloads' */
7337 
7338 DEBUG_FUNCTION void
7339 debug_reload_to_stream (FILE *f)
7340 {
7341   int r;
7342   const char *prefix;
7343 
7344   if (! f)
7345     f = stderr;
7346   for (r = 0; r < n_reloads; r++)
7347     {
7348       fprintf (f, "Reload %d: ", r);
7349 
7350       if (rld[r].in != 0)
7351 	{
7352 	  fprintf (f, "reload_in (%s) = ",
7353 		   GET_MODE_NAME (rld[r].inmode));
7354 	  print_inline_rtx (f, rld[r].in, 24);
7355 	  fprintf (f, "\n\t");
7356 	}
7357 
7358       if (rld[r].out != 0)
7359 	{
7360 	  fprintf (f, "reload_out (%s) = ",
7361 		   GET_MODE_NAME (rld[r].outmode));
7362 	  print_inline_rtx (f, rld[r].out, 24);
7363 	  fprintf (f, "\n\t");
7364 	}
7365 
7366       fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7367 
7368       fprintf (f, "%s (opnum = %d)",
7369 	       reload_when_needed_name[(int) rld[r].when_needed],
7370 	       rld[r].opnum);
7371 
7372       if (rld[r].optional)
7373 	fprintf (f, ", optional");
7374 
7375       if (rld[r].nongroup)
7376 	fprintf (f, ", nongroup");
7377 
7378       if (rld[r].inc != 0)
7379 	fprintf (f, ", inc by %d", rld[r].inc);
7380 
7381       if (rld[r].nocombine)
7382 	fprintf (f, ", can't combine");
7383 
7384       if (rld[r].secondary_p)
7385 	fprintf (f, ", secondary_reload_p");
7386 
7387       if (rld[r].in_reg != 0)
7388 	{
7389 	  fprintf (f, "\n\treload_in_reg: ");
7390 	  print_inline_rtx (f, rld[r].in_reg, 24);
7391 	}
7392 
7393       if (rld[r].out_reg != 0)
7394 	{
7395 	  fprintf (f, "\n\treload_out_reg: ");
7396 	  print_inline_rtx (f, rld[r].out_reg, 24);
7397 	}
7398 
7399       if (rld[r].reg_rtx != 0)
7400 	{
7401 	  fprintf (f, "\n\treload_reg_rtx: ");
7402 	  print_inline_rtx (f, rld[r].reg_rtx, 24);
7403 	}
7404 
7405       prefix = "\n\t";
7406       if (rld[r].secondary_in_reload != -1)
7407 	{
7408 	  fprintf (f, "%ssecondary_in_reload = %d",
7409 		   prefix, rld[r].secondary_in_reload);
7410 	  prefix = ", ";
7411 	}
7412 
7413       if (rld[r].secondary_out_reload != -1)
7414 	fprintf (f, "%ssecondary_out_reload = %d\n",
7415 		 prefix, rld[r].secondary_out_reload);
7416 
7417       prefix = "\n\t";
7418       if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7419 	{
7420 	  fprintf (f, "%ssecondary_in_icode = %s", prefix,
7421 		   insn_data[rld[r].secondary_in_icode].name);
7422 	  prefix = ", ";
7423 	}
7424 
7425       if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7426 	fprintf (f, "%ssecondary_out_icode = %s", prefix,
7427 		 insn_data[rld[r].secondary_out_icode].name);
7428 
7429       fprintf (f, "\n");
7430     }
7431 }
7432 
7433 DEBUG_FUNCTION void
7434 debug_reload (void)
7435 {
7436   debug_reload_to_stream (stderr);
7437 }
7438