xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/reload.c (revision cef8759bd76c1b621f8eab8faa6f208faabc2e15)
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2    Copyright (C) 1987-2017 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains subroutines used only from the file reload1.c.
21    It knows how to scan one insn for operands and values
22    that need to be copied into registers to make valid code.
23    It also finds other operands and values which are valid
24    but for which equivalent values in registers exist and
25    ought to be used instead.
26 
27    Before processing the first insn of the function, call `init_reload'.
28    init_reload actually has to be called earlier anyway.
29 
30    To scan an insn, call `find_reloads'.  This does two things:
31    1. sets up tables describing which values must be reloaded
32    for this insn, and what kind of hard regs they must be reloaded into;
33    2. optionally record the locations where those values appear in
34    the data, so they can be replaced properly later.
35    This is done only if the second arg to `find_reloads' is nonzero.
36 
37    The third arg to `find_reloads' specifies the number of levels
38    of indirect addressing supported by the machine.  If it is zero,
39    indirect addressing is not valid.  If it is one, (MEM (REG n))
40    is valid even if (REG n) did not get a hard register; if it is two,
41    (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42    hard register, and similarly for higher values.
43 
44    Then you must choose the hard regs to reload those pseudo regs into,
45    and generate appropriate load insns before this insn and perhaps
46    also store insns after this insn.  Set up the array `reload_reg_rtx'
47    to contain the REG rtx's for the registers you used.  In some
48    cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49    for certain reloads.  Then that tells you which register to use,
50    so you do not need to allocate one.  But you still do need to add extra
51    instructions to copy the value into and out of that register.
52 
53    Finally you must call `subst_reloads' to substitute the reload reg rtx's
54    into the locations already recorded.
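
   As a rough illustration of the intended sequence (the actual caller is
   reload1.c; the steps in angle brackets are the caller's responsibility,
   not functions defined in this file):

	find_reloads (insn, replace, ind_levels, live_known, reload_reg_p);
	<choose a hard reg for each reload and record it in reload_reg_rtx>
	<emit load insns before INSN, and store insns after it if needed>
	subst_reloads (insn);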
55 
56 NOTE SIDE EFFECTS:
57 
58    find_reloads can alter the operands of the instruction it is called on.
59 
60    1. Two operands of any sort may be interchanged, if they are in a
61    commutative instruction.
62    This happens only if find_reloads thinks the instruction will compile
63    better that way.
64 
65    2. Pseudo-registers that are equivalent to constants are replaced
66    with those constants if they are not in hard registers.
67 
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71 
72 Using a reload register for several reloads in one insn:
73 
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77 
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81 
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload.  */
85 
86 #define REG_OK_STRICT
87 
88 /* We do not enable this with CHECKING_P, since it is awfully slow.  */
89 #undef DEBUG_RELOAD
90 
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "memmodel.h"
100 #include "tm_p.h"
101 #include "optabs.h"
102 #include "regs.h"
103 #include "ira.h"
104 #include "recog.h"
105 #include "rtl-error.h"
106 #include "reload.h"
107 #include "addresses.h"
108 #include "params.h"
109 
110 /* True if X is a constant that can be forced into the constant pool.
111    MODE is the mode of the operand, or VOIDmode if not known.  */
112 #define CONST_POOL_OK_P(MODE, X)		\
113   ((MODE) != VOIDmode				\
114    && CONSTANT_P (X)				\
115    && GET_CODE (X) != HIGH			\
116    && !targetm.cannot_force_const_mem (MODE, X))
117 
118 /* True if RCLASS is a non-empty register class that has too few registers
119    to be safely used as a reload target class.  */
120 
121 static inline bool
122 small_register_class_p (reg_class_t rclass)
123 {
124   return (reg_class_size [(int) rclass] == 1
125 	  || (reg_class_size [(int) rclass] >= 1
126 	      && targetm.class_likely_spilled_p (rclass)));
127 }
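
/* For example, a class containing a single hard register qualifies, as does
   any class for which targetm.class_likely_spilled_p returns true.  */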
128 
129 
130 /* All reloads of the current insn are recorded here.  See reload.h for
131    comments.  */
132 int n_reloads;
133 struct reload rld[MAX_RELOADS];
134 
135 /* All the "earlyclobber" operands of the current insn
136    are recorded here.  */
137 int n_earlyclobbers;
138 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139 
140 int reload_n_operands;
141 
142 /* Replacing reloads.
143 
144    If `replace_reloads' is nonzero, then as each reload is recorded
145    an entry is made for it in the table `replacements'.
146    Then later `subst_reloads' can look through that table and
147    perform all the replacements needed.  */
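
/* For example, when a pseudo appearing inside an address is reloaded, the
   address of that slot in the insn is recorded here together with the
   reload number, and `subst_reloads' later stores the chosen reload
   register into that slot.  */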
148 
149 /* Nonzero means record the places to replace.  */
150 static int replace_reloads;
151 
152 /* Each replacement is recorded with a structure like this.  */
153 struct replacement
154 {
155   rtx *where;			/* Location to store in */
156   int what;			/* which reload this is for */
157   machine_mode mode;	/* mode it must have */
158 };
159 
160 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
161 
162 /* Number of replacements currently recorded.  */
163 static int n_replacements;
164 
165 /* Used to track what is modified by an operand.  */
166 struct decomposition
167 {
168   int reg_flag;		/* Nonzero if referencing a register.  */
169   int safe;		/* Nonzero if this can't conflict with anything.  */
170   rtx base;		/* Base address for MEM.  */
171   HOST_WIDE_INT start;	/* Starting offset or register number.  */
172   HOST_WIDE_INT end;	/* Ending offset or register number.  */
173 };
174 
175 #ifdef SECONDARY_MEMORY_NEEDED
176 
177 /* Save MEMs needed to copy from one class of registers to another.  One MEM
178    is used per mode, but normally only one or two modes are ever used.
179 
180    We keep two versions, before and after register elimination.  The one
181    after register elimination is record separately for each operand.  This
182    is done in case the address is not valid to be sure that we separately
183    reload each.  */
184 
185 static rtx secondary_memlocs[NUM_MACHINE_MODES];
186 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
187 static int secondary_memlocs_elim_used = 0;
188 #endif
189 
190 /* The instruction we are doing reloads for;
191    so we can test whether a register dies in it.  */
192 static rtx_insn *this_insn;
193 
194 /* Nonzero if this instruction is a user-specified asm with operands.  */
195 static int this_insn_is_asm;
196 
197 /* If hard_regs_live_known is nonzero,
198    we can tell which hard regs are currently live,
199    at least enough to succeed in choosing dummy reloads.  */
200 static int hard_regs_live_known;
201 
202 /* Indexed by hard reg number,
203    element is nonnegative if hard reg has been spilled.
204    This vector is passed to `find_reloads' as an argument
205    and is not changed here.  */
206 static short *static_reload_reg_p;
207 
208 /* Set to 1 in subst_reg_equivs if it changes anything.  */
209 static int subst_reg_equivs_changed;
210 
211 /* On return from push_reload, holds the reload-number for the OUT
212    operand, which can be different from that for the input operand.  */
213 static int output_reloadnum;
214 
215   /* Compare two RTX's.  */
216 #define MATCHES(x, y) \
217  (x == y || (x != 0 && (REG_P (x)				\
218 			? REG_P (y) && REGNO (x) == REGNO (y)	\
219 			: rtx_equal_p (x, y) && ! side_effects_p (x))))
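
/* For example, (reg:SI 3) MATCHES (reg:QI 3), since only the register number
   is compared for REGs; two identical MEMs match only if they have no side
   effects (e.g. no auto-increment address).  */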
220 
221   /* Indicates whether two reload purposes are for similar enough things that we
222      can merge their reloads.  */
223 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
224   ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
225    || ((when1) == (when2) && (op1) == (op2))		\
226    || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
227    || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
228        && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
229    || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
230        && (when2) == RELOAD_FOR_OTHER_ADDRESS))
231 
232   /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
233 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
234   ((when1) != (when2)					\
235    || ! ((op1) == (op2)					\
236 	 || (when1) == RELOAD_FOR_INPUT			\
237 	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
238 	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
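
/* For example, two RELOAD_FOR_INPUT reloads are mergeable even when they are
   for different operands, and the merged reload keeps that type; merging a
   RELOAD_OTHER reload with a reload of any other type yields RELOAD_OTHER.  */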
239 
240   /* If we are going to reload an address, compute the reload type to
241      use.  */
242 #define ADDR_TYPE(type)					\
243   ((type) == RELOAD_FOR_INPUT_ADDRESS			\
244    ? RELOAD_FOR_INPADDR_ADDRESS				\
245    : ((type) == RELOAD_FOR_OUTPUT_ADDRESS		\
246       ? RELOAD_FOR_OUTADDR_ADDRESS			\
247       : (type)))
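
/* Thus reloading the address used by a RELOAD_FOR_INPUT_ADDRESS reload gives
   a RELOAD_FOR_INPADDR_ADDRESS reload, and likewise for outputs; every other
   reload type is passed through unchanged.  */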
248 
249 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
250 				  machine_mode, enum reload_type,
251 				  enum insn_code *, secondary_reload_info *);
252 static enum reg_class find_valid_class (machine_mode, machine_mode,
253 					int, unsigned int);
254 static void push_replacement (rtx *, int, machine_mode);
255 static void dup_replacements (rtx *, rtx *);
256 static void combine_reloads (void);
257 static int find_reusable_reload (rtx *, rtx, enum reg_class,
258 				 enum reload_type, int, int);
259 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
260 			      machine_mode, reg_class_t, int, int);
261 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
262 static struct decomposition decompose (rtx);
263 static int immune_p (rtx, rtx, struct decomposition);
264 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
265 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
266 				rtx_insn *, int *);
267 static rtx make_memloc (rtx, int);
268 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
269 					      addr_space_t, rtx *);
270 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
271 				 int, enum reload_type, int, rtx_insn *);
272 static rtx subst_reg_equivs (rtx, rtx_insn *);
273 static rtx subst_indexed_address (rtx);
274 static void update_auto_inc_notes (rtx_insn *, int, int);
275 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
276 				   enum rtx_code, enum rtx_code, rtx *,
277 				   int, enum reload_type,int, rtx_insn *);
278 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
279 				       machine_mode, int,
280 				       enum reload_type, int);
281 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
282 					int, rtx_insn *, int *);
283 static void copy_replacements_1 (rtx *, rtx *, int);
284 static int find_inc_amount (rtx, rtx);
285 static int refers_to_mem_for_reload_p (rtx);
286 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
287 					 rtx, rtx *);
288 
289 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
290    list yet.  */
291 
292 static void
293 push_reg_equiv_alt_mem (int regno, rtx mem)
294 {
295   rtx it;
296 
297   for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
298     if (rtx_equal_p (XEXP (it, 0), mem))
299       return;
300 
301   reg_equiv_alt_mem_list (regno)
302     = alloc_EXPR_LIST (REG_EQUIV, mem,
303 		       reg_equiv_alt_mem_list (regno));
304 }
305 
306 /* Determine if any secondary reloads are needed for loading (if IN_P is
307    nonzero) or storing (if IN_P is zero) X to or from a reload register of
308    register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
309    are needed, push them.
310 
311    Return the reload number of the secondary reload we made, or -1 if
312    we didn't need one.  *PICODE is set to the insn_code to use if we do
313    need a secondary reload.  */
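
/* For illustration only: on a machine where memory cannot be moved directly
   into a floating-point register, the targetm.secondary_reload hook may ask
   for an intermediate general register; this function then pushes that extra
   reload and, recursively, any further intermediate or scratch register that
   the target's reload pattern requires.  */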
314 
315 static int
316 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
317 		       enum reg_class reload_class,
318 		       machine_mode reload_mode, enum reload_type type,
319 		       enum insn_code *picode, secondary_reload_info *prev_sri)
320 {
321   enum reg_class rclass = NO_REGS;
322   enum reg_class scratch_class;
323   machine_mode mode = reload_mode;
324   enum insn_code icode = CODE_FOR_nothing;
325   enum insn_code t_icode = CODE_FOR_nothing;
326   enum reload_type secondary_type;
327   int s_reload, t_reload = -1;
328   const char *scratch_constraint;
329   secondary_reload_info sri;
330 
331   if (type == RELOAD_FOR_INPUT_ADDRESS
332       || type == RELOAD_FOR_OUTPUT_ADDRESS
333       || type == RELOAD_FOR_INPADDR_ADDRESS
334       || type == RELOAD_FOR_OUTADDR_ADDRESS)
335     secondary_type = type;
336   else
337     secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
338 
339   *picode = CODE_FOR_nothing;
340 
341   /* If X is a paradoxical SUBREG, use the inner value to determine both the
342      mode and object being reloaded.  */
343   if (paradoxical_subreg_p (x))
344     {
345       x = SUBREG_REG (x);
346       reload_mode = GET_MODE (x);
347     }
348 
349   /* If X is a pseudo-register that has an equivalent MEM (actually, if it
350      is still a pseudo-register by now, it *must* have an equivalent MEM
351      but we don't want to assume that), use that equivalent when seeing if
352      a secondary reload is needed since whether or not a reload is needed
353      might be sensitive to the form of the MEM.  */
354 
355   if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
356       && reg_equiv_mem (REGNO (x)))
357     x = reg_equiv_mem (REGNO (x));
358 
359   sri.icode = CODE_FOR_nothing;
360   sri.prev_sri = prev_sri;
361   rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
362 						      reload_mode, &sri);
363   icode = (enum insn_code) sri.icode;
364 
365   /* If we don't need any secondary registers, done.  */
366   if (rclass == NO_REGS && icode == CODE_FOR_nothing)
367     return -1;
368 
369   if (rclass != NO_REGS)
370     t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
371 				      reload_mode, type, &t_icode, &sri);
372 
373   /* If we will be using an insn, the secondary reload is for a
374      scratch register.  */
375 
376   if (icode != CODE_FOR_nothing)
377     {
378       /* If IN_P is nonzero, the reload register will be the output in
379 	 operand 0.  If IN_P is zero, the reload register will be the input
380 	 in operand 1.  Outputs should have an initial "=", which we must
381 	 skip.  */
382 
383       /* ??? It would be useful to be able to handle only two, or more than
384 	 three, operands, but for now we can only handle the case of having
385 	 exactly three: output, input and one temp/scratch.  */
386       gcc_assert (insn_data[(int) icode].n_operands == 3);
387 
388       /* ??? We currently have no way to represent a reload that needs
389 	 an icode to reload from an intermediate tertiary reload register.
390 	 We should probably have a new field in struct reload to tag a
391 	 chain of scratch operand reloads onto.   */
392       gcc_assert (rclass == NO_REGS);
393 
394       scratch_constraint = insn_data[(int) icode].operand[2].constraint;
395       gcc_assert (*scratch_constraint == '=');
396       scratch_constraint++;
397       if (*scratch_constraint == '&')
398 	scratch_constraint++;
399       scratch_class = (reg_class_for_constraint
400 		       (lookup_constraint (scratch_constraint)));
401 
402       rclass = scratch_class;
403       mode = insn_data[(int) icode].operand[2].mode;
404     }
405 
406   /* This case isn't valid, so fail.  Reload is allowed to use the same
407      register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
408      in the case of a secondary register, we actually need two different
409      registers for correct code.  We fail here to prevent the possibility of
410      silently generating incorrect code later.
411 
412      The convention is that secondary input reloads are valid only if the
413      secondary_class is different from class.  If you have such a case, you
414    cannot use secondary reloads; you must work around the problem some
415      other way.
416 
417      Allow this when a reload_in/out pattern is being used.  I.e. assume
418      that the generated code handles this case.  */
419 
420   gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
421 	      || t_icode != CODE_FOR_nothing);
422 
423   /* See if we can reuse an existing secondary reload.  */
424   for (s_reload = 0; s_reload < n_reloads; s_reload++)
425     if (rld[s_reload].secondary_p
426 	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
427 	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
428 	&& ((in_p && rld[s_reload].inmode == mode)
429 	    || (! in_p && rld[s_reload].outmode == mode))
430 	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
431 	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
432 	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
433 	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
434 	&& (small_register_class_p (rclass)
435 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
436 	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
437 			     opnum, rld[s_reload].opnum))
438       {
439 	if (in_p)
440 	  rld[s_reload].inmode = mode;
441 	if (! in_p)
442 	  rld[s_reload].outmode = mode;
443 
444 	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
445 	  rld[s_reload].rclass = rclass;
446 
447 	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
448 	rld[s_reload].optional &= optional;
449 	rld[s_reload].secondary_p = 1;
450 	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
451 			    opnum, rld[s_reload].opnum))
452 	  rld[s_reload].when_needed = RELOAD_OTHER;
453 
454 	break;
455       }
456 
457   if (s_reload == n_reloads)
458     {
459 #ifdef SECONDARY_MEMORY_NEEDED
460       /* If we need a memory location to copy between the two reload regs,
461 	 set it up now.  Note that we do the input case before making
462 	 the reload and the output case after.  This is due to the
463 	 way reloads are output.  */
464 
465       if (in_p && icode == CODE_FOR_nothing
466 	  && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
467 	{
468 	  get_secondary_mem (x, reload_mode, opnum, type);
469 
470 	  /* We may have just added new reloads.  Make sure we add
471 	     the new reload at the end.  */
472 	  s_reload = n_reloads;
473 	}
474 #endif
475 
476       /* We need to make a new secondary reload for this register class.  */
477       rld[s_reload].in = rld[s_reload].out = 0;
478       rld[s_reload].rclass = rclass;
479 
480       rld[s_reload].inmode = in_p ? mode : VOIDmode;
481       rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
482       rld[s_reload].reg_rtx = 0;
483       rld[s_reload].optional = optional;
484       rld[s_reload].inc = 0;
485       /* Maybe we could combine these, but it seems too tricky.  */
486       rld[s_reload].nocombine = 1;
487       rld[s_reload].in_reg = 0;
488       rld[s_reload].out_reg = 0;
489       rld[s_reload].opnum = opnum;
490       rld[s_reload].when_needed = secondary_type;
491       rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
492       rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
493       rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
494       rld[s_reload].secondary_out_icode
495 	= ! in_p ? t_icode : CODE_FOR_nothing;
496       rld[s_reload].secondary_p = 1;
497 
498       n_reloads++;
499 
500 #ifdef SECONDARY_MEMORY_NEEDED
501       if (! in_p && icode == CODE_FOR_nothing
502 	  && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
503 	get_secondary_mem (x, mode, opnum, type);
504 #endif
505     }
506 
507   *picode = icode;
508   return s_reload;
509 }
510 
511 /* If a secondary reload is needed, return its class.  If both an intermediate
512    register and a scratch register are needed, we return the class of the
513    intermediate register.  */
514 reg_class_t
515 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
516 			rtx x)
517 {
518   enum insn_code icode;
519   secondary_reload_info sri;
520 
521   sri.icode = CODE_FOR_nothing;
522   sri.prev_sri = NULL;
523   rclass
524     = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
525   icode = (enum insn_code) sri.icode;
526 
527   /* If there are no secondary reloads at all, we return NO_REGS.
528      If an intermediate register is needed, we return its class.  */
529   if (icode == CODE_FOR_nothing || rclass != NO_REGS)
530     return rclass;
531 
532   /* No intermediate register is needed, but we have a special reload
533      pattern, which we assume for now needs a scratch register.  */
534   return scratch_reload_class (icode);
535 }
536 
537 /* ICODE is the insn_code of a reload pattern.  Check that it has exactly
538    three operands, verify that operand 2 is an output operand, and return
539    its register class.
540    ??? We'd like to be able to handle any pattern with at least 2 operands,
541    for zero or more scratch registers, but that needs more infrastructure.  */
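
/* For example, a reload pattern whose operand 2 has the constraint "=&r"
   yields GENERAL_REGS here; the leading '=' (and optional '&') is skipped
   before the remaining constraint is looked up.  */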
542 enum reg_class
543 scratch_reload_class (enum insn_code icode)
544 {
545   const char *scratch_constraint;
546   enum reg_class rclass;
547 
548   gcc_assert (insn_data[(int) icode].n_operands == 3);
549   scratch_constraint = insn_data[(int) icode].operand[2].constraint;
550   gcc_assert (*scratch_constraint == '=');
551   scratch_constraint++;
552   if (*scratch_constraint == '&')
553     scratch_constraint++;
554   rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
555   gcc_assert (rclass != NO_REGS);
556   return rclass;
557 }
558 
559 #ifdef SECONDARY_MEMORY_NEEDED
560 
561 /* Return a memory location that will be used to copy X in mode MODE.
562    If we haven't already made a location for this mode in this insn,
563    call find_reloads_address on the location being returned.  */
564 
565 rtx
566 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
567 		   int opnum, enum reload_type type)
568 {
569   rtx loc;
570   int mem_valid;
571 
572   /* By default, if MODE is narrower than a word, widen it to a word.
573      This is required because most machines that require these memory
574      locations do not support short loads and stores from all registers
575      (e.g., FP registers).  */
576 
577 #ifdef SECONDARY_MEMORY_NEEDED_MODE
578   mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
579 #else
580   if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
581     mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
582 #endif
583 
584   /* If we already have made a MEM for this operand in MODE, return it.  */
585   if (secondary_memlocs_elim[(int) mode][opnum] != 0)
586     return secondary_memlocs_elim[(int) mode][opnum];
587 
588   /* If this is the first time we've tried to get a MEM for this mode,
589      allocate a new one.  `something_changed' in reload will get set
590      by noticing that the frame size has changed.  */
591 
592   if (secondary_memlocs[(int) mode] == 0)
593     {
594 #ifdef SECONDARY_MEMORY_NEEDED_RTX
595       secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
596 #else
597       secondary_memlocs[(int) mode]
598 	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
599 #endif
600     }
601 
602   /* Get a version of the address doing any eliminations needed.  If that
603      didn't give us a new MEM, make a new one if it isn't valid.  */
604 
605   loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
606   mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
607 						  MEM_ADDR_SPACE (loc));
608 
609   if (! mem_valid && loc == secondary_memlocs[(int) mode])
610     loc = copy_rtx (loc);
611 
612   /* The only time the call below will do anything is if the stack
613      offset is too large.  In that case IND_LEVELS doesn't matter, so we
614      can just pass a zero.  Adjust the type to be the address of the
615      corresponding object.  If the address was valid, save the eliminated
616      address.  If it wasn't valid, we need to make a reload each time, so
617      don't save it.  */
618 
619   if (! mem_valid)
620     {
621       type =  (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
622 	       : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
623 	       : RELOAD_OTHER);
624 
625       find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
626 			    opnum, type, 0, 0);
627     }
628 
629   secondary_memlocs_elim[(int) mode][opnum] = loc;
630   if (secondary_memlocs_elim_used <= (int)mode)
631     secondary_memlocs_elim_used = (int)mode + 1;
632   return loc;
633 }
634 
635 /* Clear any secondary memory locations we've made.  */
636 
637 void
638 clear_secondary_mem (void)
639 {
640   memset (secondary_memlocs, 0, sizeof secondary_memlocs);
641 }
642 #endif /* SECONDARY_MEMORY_NEEDED */
643 
644 
645 /* Find the largest class which has at least one register valid in
646    mode INNER, and which for every such register, that register number
647    plus N is also valid in OUTER (if in range) and is cheap to move
648    into DEST_REGNO.  Such a class must exist.  */
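
/* This is used by push_reload when reloading the inner register of a SUBREG
   of a hard register: N is then the subreg's register-number offset, so the
   chosen class must contain registers that are valid in INNER and whose N'th
   successor is valid in OUTER and cheap to copy into DEST_REGNO's class.  */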
649 
650 static enum reg_class
651 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
652 		  machine_mode inner ATTRIBUTE_UNUSED, int n,
653 		  unsigned int dest_regno ATTRIBUTE_UNUSED)
654 {
655   int best_cost = -1;
656   int rclass;
657   int regno;
658   enum reg_class best_class = NO_REGS;
659   enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
660   unsigned int best_size = 0;
661   int cost;
662 
663   for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
664     {
665       int bad = 0;
666       int good = 0;
667       for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
668 	if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
669 	  {
670 	    if (HARD_REGNO_MODE_OK (regno, inner))
671 	      {
672 		good = 1;
673 		if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
674 		    && ! HARD_REGNO_MODE_OK (regno + n, outer))
675 		  bad = 1;
676 	      }
677 	  }
678 
679       if (bad || !good)
680 	continue;
681       cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
682 
683       if ((reg_class_size[rclass] > best_size
684 	   && (best_cost < 0 || best_cost >= cost))
685 	  || best_cost > cost)
686 	{
687 	  best_class = (enum reg_class) rclass;
688 	  best_size = reg_class_size[rclass];
689 	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
690 					  dest_class);
691 	}
692     }
693 
694   gcc_assert (best_size != 0);
695 
696   return best_class;
697 }
698 
699 /* We are trying to reload a subreg of something that is not a register.
700    Find the largest class which contains only registers valid in
701    mode MODE.  OUTER is the mode of the subreg, and DEST_CLASS is the class
702    in which we would eventually like to obtain the object.  */
703 
704 static enum reg_class
705 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
706 		    machine_mode mode ATTRIBUTE_UNUSED,
707 		    enum reg_class dest_class ATTRIBUTE_UNUSED)
708 {
709   int best_cost = -1;
710   int rclass;
711   int regno;
712   enum reg_class best_class = NO_REGS;
713   unsigned int best_size = 0;
714   int cost;
715 
716   for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
717     {
718       unsigned int computed_rclass_size = 0;
719 
720       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
721         {
722           if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
723               && (HARD_REGNO_MODE_OK (regno, mode)))
724             computed_rclass_size++;
725         }
726 
727       cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
728 
729       if ((computed_rclass_size > best_size
730 	   && (best_cost < 0 || best_cost >= cost))
731 	  || best_cost > cost)
732 	{
733 	  best_class = (enum reg_class) rclass;
734 	  best_size = computed_rclass_size;
735 	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
736 					  dest_class);
737 	}
738     }
739 
740   gcc_assert (best_size != 0);
741 
742 #ifdef LIMIT_RELOAD_CLASS
743   best_class = LIMIT_RELOAD_CLASS (mode, best_class);
744 #endif
745   return best_class;
746 }
747 
748 /* Return the number of a previously made reload that can be combined with
749    a new one, or n_reloads if none of the existing reloads can be used.
750    OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
751    push_reload; they determine the kind of the new reload that we try to
752    combine.  P_IN points to the corresponding value of IN, which can be
753    modified by this function.
754    DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */
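
/* For example, if an earlier operand already pushed an input reload of
   (reg:SI N) into a compatible class, a later request to reload the same
   pseudo for input can simply return that reload's number instead of
   creating a new entry in rld[].  */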
755 
756 static int
757 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
758 		      enum reload_type type, int opnum, int dont_share)
759 {
760   rtx in = *p_in;
761   int i;
762   /* We can't merge two reloads if the output of either one is
763      earlyclobbered.  */
764 
765   if (earlyclobber_operand_p (out))
766     return n_reloads;
767 
768   /* We can use an existing reload if the class is right
769      and at least one of IN and OUT is a match
770      and the other is at worst neutral.
771      (A zero compared against anything is neutral.)
772 
773      For targets with small register classes, don't use existing reloads
774      unless they are for the same thing since that can cause us to need
775      more reload registers than we otherwise would.  */
776 
777   for (i = 0; i < n_reloads; i++)
778     if ((reg_class_subset_p (rclass, rld[i].rclass)
779 	 || reg_class_subset_p (rld[i].rclass, rclass))
780 	/* If the existing reload has a register, it must fit our class.  */
781 	&& (rld[i].reg_rtx == 0
782 	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
783 				  true_regnum (rld[i].reg_rtx)))
784 	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
785 	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
786 	    || (out != 0 && MATCHES (rld[i].out, out)
787 		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
788 	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
789 	&& (small_register_class_p (rclass)
790 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
791 	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
792       return i;
793 
794   /* Reloading a plain reg for input can match a reload to postincrement
795      that reg, since the postincrement's value is the right value.
796      Likewise, it can match a preincrement reload, since we regard
797      the preincrementation as happening before any ref in this insn
798      to that register.  */
799   for (i = 0; i < n_reloads; i++)
800     if ((reg_class_subset_p (rclass, rld[i].rclass)
801 	 || reg_class_subset_p (rld[i].rclass, rclass))
802 	/* If the existing reload has a register, it must fit our
803 	   class.  */
804 	&& (rld[i].reg_rtx == 0
805 	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
806 				  true_regnum (rld[i].reg_rtx)))
807 	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
808 	&& ((REG_P (in)
809 	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
810 	     && MATCHES (XEXP (rld[i].in, 0), in))
811 	    || (REG_P (rld[i].in)
812 		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
813 		&& MATCHES (XEXP (in, 0), rld[i].in)))
814 	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
815 	&& (small_register_class_p (rclass)
816 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
817 	&& MERGABLE_RELOADS (type, rld[i].when_needed,
818 			     opnum, rld[i].opnum))
819       {
820 	/* Make sure reload_in ultimately has the increment,
821 	   not the plain register.  */
822 	if (REG_P (in))
823 	  *p_in = rld[i].in;
824 	return i;
825       }
826   return n_reloads;
827 }
828 
829 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
830    expression.  MODE is the mode that X will be used in.  OUTPUT is true if
831    the function is invoked for the output part of an enclosing reload.  */
832 
833 static bool
834 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
835 {
836   rtx inner;
837   int regno;
838 
839   /* Only SUBREGs are problematical.  */
840   if (GET_CODE (x) != SUBREG)
841     return false;
842 
843   inner = SUBREG_REG (x);
844 
845   /* If INNER is a constant or PLUS, then INNER will need reloading.  */
846   if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
847     return true;
848 
849   /* If INNER is not a register, then INNER will not need reloading.  */
850   if (!REG_P (inner))
851     return false;
852 
853   regno = REGNO (inner);
854 
855   /* If INNER is not a hard register, then INNER will not need reloading
856      unless it's a mode dependent memory reference.  */
857   if (regno >= FIRST_PSEUDO_REGISTER)
858     return !output
859 	   && reg_equiv_mem (regno) != 0
860 	   && mode_dependent_address_p (XEXP (reg_equiv_mem (regno), 0),
861 					MEM_ADDR_SPACE (reg_equiv_mem (regno)));
862 
863   /* If INNER is not ok for MODE, then INNER will need reloading.  */
864   if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
865     return true;
866 
867   /* If this is for an output, and the outer part is a word or smaller,
868      INNER is larger than a word and the number of registers in INNER is
869      not the same as the number of words in INNER, then INNER will need
870      reloading (with an in-out reload).  */
871   return (output
872 	  && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
873 	  && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
874 	  && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
875 	      != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
876 }
877 
878 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
879    requiring an extra reload register.  The caller has already found that
880    IN contains some reference to REGNO, so check that we can produce the
881    new value in a single step.  E.g. if we have
882    (set (reg r13) (plus (reg r13) (const int 1))), and there is an
883    instruction that adds one to a register, this should succeed.
884    However, if we have something like
885    (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
886    needs to be loaded into a register first, we need a separate reload
887    register.
888    Such PLUS reloads are generated by find_reloads_address_part.
889    The out-of-range PLUS expressions are usually introduced in the instruction
890    patterns by register elimination and substituting pseudos without a home
891    by their function-invariant equivalences.  */
892 static int
893 can_reload_into (rtx in, int regno, machine_mode mode)
894 {
895   rtx dst;
896   rtx_insn *test_insn;
897   int r = 0;
898   struct recog_data_d save_recog_data;
899 
900   /* For matching constraints, we often get notional input reloads where
901      we want to use the original register as the reload register.  I.e.
902      technically this is a non-optional input-output reload, but IN is
903      already a valid register, and has been chosen as the reload register.
904      Speed this up, since it trivially works.  */
905   if (REG_P (in))
906     return 1;
907 
908   /* To test MEMs properly, we'd have to take into account all the reloads
909      that are already scheduled, which can become quite complicated.
910      And since we've already handled address reloads for this MEM, it
911      should always succeed anyway.  */
912   if (MEM_P (in))
913     return 1;
914 
915   /* If we can make a simple SET insn that does the job, everything should
916      be fine.  */
917   dst =  gen_rtx_REG (mode, regno);
918   test_insn = make_insn_raw (gen_rtx_SET (dst, in));
919   save_recog_data = recog_data;
920   if (recog_memoized (test_insn) >= 0)
921     {
922       extract_insn (test_insn);
923       r = constrain_operands (1, get_enabled_alternatives (test_insn));
924     }
925   recog_data = save_recog_data;
926   return r;
927 }
928 
929 /* Record one reload that needs to be performed.
930    IN is an rtx saying where the data are to be found before this instruction.
931    OUT says where they must be stored after the instruction.
932    (IN is zero for data not read, and OUT is zero for data not written.)
933    INLOC and OUTLOC point to the places in the instructions where
934    IN and OUT were found.
935    If IN and OUT are both nonzero, it means the same register must be used
936    to reload both IN and OUT.
937 
938    RCLASS is a register class required for the reloaded data.
939    INMODE is the machine mode that the instruction requires
940    for the reg that replaces IN and OUTMODE is likewise for OUT.
941 
942    If IN is zero, then OUT's location and mode should be passed as
943    INLOC and INMODE.
944 
945    STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
946 
947    OPTIONAL nonzero means this reload does not need to be performed:
948    it can be discarded if that is more convenient.
949 
950    OPNUM and TYPE say what the purpose of this reload is.
951 
952    The return value is the reload-number for this reload.
953 
954    If both IN and OUT are nonzero, in some rare cases we might
955    want to make two separate reloads.  (Actually we never do this now.)
956    Therefore, the reload-number for OUT is stored in
957    output_reloadnum when we return; the return value applies to IN.
958    Usually (presently always), when IN and OUT are nonzero,
959    the two reload-numbers are equal, but the caller should be careful to
960    distinguish them.  */
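
/* A hedged usage sketch (X, LOC and OPNUM stand for the caller's values and
   are not names defined here): an input-only reload of X found at *LOC, to
   be placed in a general register, would be pushed as

     push_reload (x, NULL_RTX, loc, (rtx *) 0, GENERAL_REGS,
		  GET_MODE (x), VOIDmode, 0, 0, opnum, RELOAD_FOR_INPUT);

   and the return value is the index of the new (or reused) entry in rld[].  */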
961 
962 int
963 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
964 	     enum reg_class rclass, machine_mode inmode,
965 	     machine_mode outmode, int strict_low, int optional,
966 	     int opnum, enum reload_type type)
967 {
968   int i;
969   int dont_share = 0;
970   int dont_remove_subreg = 0;
971 #ifdef LIMIT_RELOAD_CLASS
972   rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
973 #endif
974   int secondary_in_reload = -1, secondary_out_reload = -1;
975   enum insn_code secondary_in_icode = CODE_FOR_nothing;
976   enum insn_code secondary_out_icode = CODE_FOR_nothing;
977   enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
978   subreg_in_class = NO_REGS;
979 
980   /* INMODE and/or OUTMODE could be VOIDmode if no mode
981      has been specified for the operand.  In that case,
982      use the operand's mode as the mode to reload.  */
983   if (inmode == VOIDmode && in != 0)
984     inmode = GET_MODE (in);
985   if (outmode == VOIDmode && out != 0)
986     outmode = GET_MODE (out);
987 
988   /* If find_reloads and friends have so far failed to replace a pseudo
989      with a constant from reg_equiv_constant, something went wrong
990      beforehand.
991      Note that it can't simply be done here if we missed it earlier
992      since the constant might need to be pushed into the literal pool
993      and the resulting memref would probably need further
994      reloading.  */
995   if (in != 0 && REG_P (in))
996     {
997       int regno = REGNO (in);
998 
999       gcc_assert (regno < FIRST_PSEUDO_REGISTER
1000 		  || reg_renumber[regno] >= 0
1001 		  || reg_equiv_constant (regno) == NULL_RTX);
1002     }
1003 
1004   /* reg_equiv_constant only contains constants which are obviously
1005      not appropriate as a destination.  So if we would need to replace
1006      the destination pseudo with a constant, we are in real
1007      trouble.  */
1008   if (out != 0 && REG_P (out))
1009     {
1010       int regno = REGNO (out);
1011 
1012       gcc_assert (regno < FIRST_PSEUDO_REGISTER
1013 		  || reg_renumber[regno] >= 0
1014 		  || reg_equiv_constant (regno) == NULL_RTX);
1015     }
1016 
1017   /* If we have a read-write operand with an address side-effect,
1018      change either IN or OUT so the side-effect happens only once.  */
1019   if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1020     switch (GET_CODE (XEXP (in, 0)))
1021       {
1022       case POST_INC: case POST_DEC:   case POST_MODIFY:
1023 	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1024 	break;
1025 
1026       case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1027 	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1028 	break;
1029 
1030       default:
1031 	break;
1032       }
1033 
1034   /* If we are reloading a (SUBREG constant ...), really reload just the
1035      inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
1036      If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1037      a pseudo and hence will become a MEM) with M1 wider than M2 and the
1038      register is a pseudo, also reload the inside expression.
1039      For machines that extend byte loads, do this for any SUBREG of a pseudo
1040      where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1041      M2 is an integral mode that gets extended when loaded.
1042      Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1043      where either M1 is not valid for R or M2 is wider than a word but we
1044      only need one register to store an M2-sized quantity in R.
1045      (However, if OUT is nonzero, we need to reload the reg *and*
1046      the subreg, so do nothing here, and let following statement handle it.)
1047 
1048      Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1049      we can't handle it here because CONST_INT does not indicate a mode.
1050 
1051      Similarly, we must reload the inside expression if we have a
1052      STRICT_LOW_PART (presumably, in == out in this case).
1053 
1054      Also reload the inner expression if it does not require a secondary
1055      reload but the SUBREG does.
1056 
1057      Finally, reload the inner expression if it is a register that is in
1058      the class whose registers cannot be referenced in a different size
1059      and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
1060      cannot reload just the inside since we might end up with the wrong
1061      register class.  But if it is inside a STRICT_LOW_PART, we have
1062      no choice, so we hope we do get the right register class there.  */
1063 
1064   if (in != 0 && GET_CODE (in) == SUBREG
1065       && (subreg_lowpart_p (in) || strict_low)
1066 #ifdef CANNOT_CHANGE_MODE_CLASS
1067       && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1068 #endif
1069       && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1070       && (CONSTANT_P (SUBREG_REG (in))
1071 	  || GET_CODE (SUBREG_REG (in)) == PLUS
1072 	  || strict_low
1073 	  || (((REG_P (SUBREG_REG (in))
1074 		&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1075 	       || MEM_P (SUBREG_REG (in)))
1076 	      && ((GET_MODE_PRECISION (inmode)
1077 		   > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1078 		  || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1079 		      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1080 			  <= UNITS_PER_WORD)
1081 		      && (GET_MODE_PRECISION (inmode)
1082 			  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1083 		      && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1084 		      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1085 		  || (WORD_REGISTER_OPERATIONS
1086 		      && (GET_MODE_PRECISION (inmode)
1087 			  < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1088 		      && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1089 			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1090 			   / UNITS_PER_WORD)))))
1091 	  || (REG_P (SUBREG_REG (in))
1092 	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1093 	      /* The case where out is nonzero
1094 		 is handled differently in the following statement.  */
1095 	      && (out == 0 || subreg_lowpart_p (in))
1096 	      && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1097 		   && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1098 		       > UNITS_PER_WORD)
1099 		   && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1100 			/ UNITS_PER_WORD)
1101 		       != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1102 						[GET_MODE (SUBREG_REG (in))]))
1103 		  || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1104 	  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1105 	      && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1106 					  SUBREG_REG (in))
1107 		  == NO_REGS))
1108 #ifdef CANNOT_CHANGE_MODE_CLASS
1109 	  || (REG_P (SUBREG_REG (in))
1110 	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1111 	      && REG_CANNOT_CHANGE_MODE_P
1112 	      (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1113 #endif
1114 	  ))
1115     {
1116 #ifdef LIMIT_RELOAD_CLASS
1117       in_subreg_loc = inloc;
1118 #endif
1119       inloc = &SUBREG_REG (in);
1120       in = *inloc;
1121 
1122       if (!WORD_REGISTER_OPERATIONS
1123 	  && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1124 	  && MEM_P (in))
1125 	/* This is supposed to happen only for paradoxical subregs made by
1126 	   combine.c.  (SUBREG (MEM)) isn't supposed to occur in other ways.  */
1127 	gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1128 
1129       inmode = GET_MODE (in);
1130     }
1131 
1132   /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1133      where M1 is not valid for R if it was not handled by the code above.
1134 
1135      Similar issue for (SUBREG constant ...) if it was not handled by the
1136      code above.  This can happen if SUBREG_BYTE != 0.
1137 
1138      However, we must reload the inner reg *as well as* the subreg in
1139      that case.  */
1140 
1141   if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1142     {
1143       if (REG_P (SUBREG_REG (in)) && HARD_REGISTER_P (SUBREG_REG (in)))
1144 	subreg_in_class
1145 	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1146 			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
1147 						   GET_MODE (SUBREG_REG (in)),
1148 						   SUBREG_BYTE (in),
1149 						   GET_MODE (in)),
1150 			      REGNO (SUBREG_REG (in)));
1151 #if 1 // XXXMRG
1152       else if (REG_P (SUBREG_REG (in))
1153                || GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1154 #else
1155       else if (CONSTANT_P (SUBREG_REG (in))
1156                || GET_CODE (SUBREG_REG (in)) == PLUS)
1157 #endif
1158 	subreg_in_class = find_valid_class_1 (inmode,
1159 					      GET_MODE (SUBREG_REG (in)),
1160 					      rclass);
1161 
1162       /* This relies on the fact that emit_reload_insns outputs the
1163 	 instructions for input reloads of type RELOAD_OTHER in the same
1164 	 order as the reloads.  Thus if the outer reload is also of type
1165 	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1166 	 output before the outer reload.  */
1167       push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1168 		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1169       dont_remove_subreg = 1;
1170     }
1171 
1172   /* Similarly for paradoxical and problematical SUBREGs on the output.
1173      Note that there is no reason we need worry about the previous value
1174      of SUBREG_REG (out); even if wider than out, storing in a subreg is
1175      entitled to clobber it all (except in the case of a word mode subreg
1176      or of a STRICT_LOW_PART; in the latter case the constraint should
1177      label it input-output.)  */
1178   if (out != 0 && GET_CODE (out) == SUBREG
1179       && (subreg_lowpart_p (out) || strict_low)
1180 #ifdef CANNOT_CHANGE_MODE_CLASS
1181       && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1182 #endif
1183       && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1184       && (CONSTANT_P (SUBREG_REG (out))
1185 	  || strict_low
1186 	  || (((REG_P (SUBREG_REG (out))
1187 		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1188 	       || MEM_P (SUBREG_REG (out)))
1189 	      && ((GET_MODE_PRECISION (outmode)
1190 		   > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1191 		  || (WORD_REGISTER_OPERATIONS
1192 		      && (GET_MODE_PRECISION (outmode)
1193 			  < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1194 		      && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1195 			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1196 			   / UNITS_PER_WORD)))))
1197 	  || (REG_P (SUBREG_REG (out))
1198 	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1199 	      /* The case of a word mode subreg
1200 		 is handled differently in the following statement.  */
1201 	      && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1202 		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1203 		        > UNITS_PER_WORD))
1204 	      && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1205 	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1206 	      && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1207 					  SUBREG_REG (out))
1208 		  == NO_REGS))
1209 #ifdef CANNOT_CHANGE_MODE_CLASS
1210 	  || (REG_P (SUBREG_REG (out))
1211 	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1212 	      && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1213 					   GET_MODE (SUBREG_REG (out)),
1214 					   outmode))
1215 #endif
1216 	  ))
1217     {
1218 #ifdef LIMIT_RELOAD_CLASS
1219       out_subreg_loc = outloc;
1220 #endif
1221       outloc = &SUBREG_REG (out);
1222       out = *outloc;
1223       gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1224 		  || GET_MODE_SIZE (GET_MODE (out))
1225 		     <= GET_MODE_SIZE (outmode));
1226       outmode = GET_MODE (out);
1227     }
1228 
1229   /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1230      where either M1 is not valid for R or M2 is wider than a word but we
1231      only need one register to store an M2-sized quantity in R.
1232 
1233      However, we must reload the inner reg *as well as* the subreg in
1234      that case and the inner reg is an in-out reload.  */
1235 
1236   if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1237     {
1238       enum reg_class in_out_class
1239 	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1240 			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
1241 						 GET_MODE (SUBREG_REG (out)),
1242 						 SUBREG_BYTE (out),
1243 						 GET_MODE (out)),
1244 			    REGNO (SUBREG_REG (out)));
1245 
1246       /* This relies on the fact that emit_reload_insns outputs the
1247 	 instructions for output reloads of type RELOAD_OTHER in reverse
1248 	 order of the reloads.  Thus if the outer reload is also of type
1249 	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1250 	 output after the outer reload.  */
1251       push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1252 		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1253 		   0, 0, opnum, RELOAD_OTHER);
1254       dont_remove_subreg = 1;
1255     }
1256 
1257   /* If IN appears in OUT, we can't share any input-only reload for IN.  */
1258   if (in != 0 && out != 0 && MEM_P (out)
1259       && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1260       && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1261     dont_share = 1;
1262 
1263   /* If IN is a SUBREG of a hard register, make a new REG.  This
1264      simplifies some of the cases below.  */
1265 
1266   if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1267       && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1268       && ! dont_remove_subreg)
1269     in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1270 
1271   /* Similarly for OUT.  */
1272   if (out != 0 && GET_CODE (out) == SUBREG
1273       && REG_P (SUBREG_REG (out))
1274       && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1275       && ! dont_remove_subreg)
1276     out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1277 
1278   /* Narrow down the class of register wanted if that is
1279      desirable on this machine for efficiency.  */
1280   {
1281     reg_class_t preferred_class = rclass;
1282 
1283     if (in != 0)
1284       preferred_class = targetm.preferred_reload_class (in, rclass);
1285 
1286     /* Output reloads may need analogous treatment, different in detail.  */
1287     if (out != 0)
1288       preferred_class
1289 	= targetm.preferred_output_reload_class (out, preferred_class);
1290 
1291     /* Discard what the target said if we cannot do it.  */
1292     if (preferred_class != NO_REGS
1293 	|| (optional && type == RELOAD_FOR_OUTPUT))
1294       rclass = (enum reg_class) preferred_class;
1295   }
1296 
1297   /* Make sure we use a class that can handle the actual pseudo
1298      inside any subreg.  For example, on the 386, QImode regs
1299      can appear within SImode subregs.  Although GENERAL_REGS
1300      can handle SImode, QImode needs a smaller class.  */
1301 #ifdef LIMIT_RELOAD_CLASS
1302   if (in_subreg_loc)
1303     rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1304   else if (in != 0 && GET_CODE (in) == SUBREG)
1305     rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1306 
1307   if (out_subreg_loc)
1308     rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1309   if (out != 0 && GET_CODE (out) == SUBREG)
1310     rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1311 #endif
1312 
1313   /* Verify that this class is at least possible for the mode that
1314      is specified.  */
1315   if (this_insn_is_asm)
1316     {
1317       machine_mode mode;
1318       if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1319 	mode = inmode;
1320       else
1321 	mode = outmode;
1322       if (mode == VOIDmode)
1323 	{
1324 	  error_for_asm (this_insn, "cannot reload integer constant "
1325 			 "operand in %<asm%>");
1326 	  mode = word_mode;
1327 	  if (in != 0)
1328 	    inmode = word_mode;
1329 	  if (out != 0)
1330 	    outmode = word_mode;
1331 	}
1332       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1333 	if (HARD_REGNO_MODE_OK (i, mode)
1334 	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1335 	  break;
1336       if (i == FIRST_PSEUDO_REGISTER)
1337 	{
1338 	  error_for_asm (this_insn, "impossible register constraint "
1339 			 "in %<asm%>");
1340 	  /* Avoid further trouble with this insn.  */
1341 	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1342 	  /* We used to continue here setting class to ALL_REGS, but it triggers
1343 	     sanity check on i386 for:
1344 	     void foo(long double d)
1345 	     {
1346 	       asm("" :: "a" (d));
1347 	     }
1348 	     Returning zero here ought to be safe as we take care in
1349 	     find_reloads to not process the reloads when instruction was
1350 	     replaced by USE.  */
1351 
1352 	  return 0;
1353 	}
1354     }
1355 
1356   /* Optional output reloads are always OK even if we have no register class,
1357      since the function of these reloads is only to have spill_reg_store etc.
1358      set, so that the storing insn can be deleted later.  */
1359   gcc_assert (rclass != NO_REGS
1360 	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1361 
1362   i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1363 
1364   if (i == n_reloads)
1365     {
1366       /* See if we need a secondary reload register to move between CLASS
1367 	 and IN or CLASS and OUT.  Get the icode and push any required reloads
1368 	 needed for each of them if so.  */
1369 
1370       if (in != 0)
1371 	secondary_in_reload
1372 	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1373 				   &secondary_in_icode, NULL);
1374       if (out != 0 && GET_CODE (out) != SCRATCH)
1375 	secondary_out_reload
1376 	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1377 				   type, &secondary_out_icode, NULL);
1378 
1379       /* We found no existing reload suitable for re-use.
1380 	 So add an additional reload.  */
1381 
1382 #ifdef SECONDARY_MEMORY_NEEDED
1383       if (subreg_in_class == NO_REGS
1384 	  && in != 0
1385 	  && (REG_P (in)
1386 	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1387 	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1388 	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1389       /* If a memory location is needed for the copy, make one.  */
1390       if (subreg_in_class != NO_REGS
1391 	  && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1392 	get_secondary_mem (in, inmode, opnum, type);
1393 #endif
1394 
1395       i = n_reloads;
1396       rld[i].in = in;
1397       rld[i].out = out;
1398       rld[i].rclass = rclass;
1399       rld[i].inmode = inmode;
1400       rld[i].outmode = outmode;
1401       rld[i].reg_rtx = 0;
1402       rld[i].optional = optional;
1403       rld[i].inc = 0;
1404       rld[i].nocombine = 0;
1405       rld[i].in_reg = inloc ? *inloc : 0;
1406       rld[i].out_reg = outloc ? *outloc : 0;
1407       rld[i].opnum = opnum;
1408       rld[i].when_needed = type;
1409       rld[i].secondary_in_reload = secondary_in_reload;
1410       rld[i].secondary_out_reload = secondary_out_reload;
1411       rld[i].secondary_in_icode = secondary_in_icode;
1412       rld[i].secondary_out_icode = secondary_out_icode;
1413       rld[i].secondary_p = 0;
1414 
1415       n_reloads++;
1416 
1417 #ifdef SECONDARY_MEMORY_NEEDED
1418       if (out != 0
1419           && (REG_P (out)
1420 	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1421 	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1422 	  && SECONDARY_MEMORY_NEEDED (rclass,
1423 				      REGNO_REG_CLASS (reg_or_subregno (out)),
1424 				      outmode))
1425 	get_secondary_mem (out, outmode, opnum, type);
1426 #endif
1427     }
1428   else
1429     {
1430       /* We are reusing an existing reload,
1431 	 but we may have additional information for it.
1432 	 For example, we may now have both IN and OUT
1433 	 while the old one may have just one of them.  */
1434 
1435       /* The modes can be different.  If they are, we want to reload in
1436 	 the larger mode, so that the value is valid for both modes.  */
1437       if (inmode != VOIDmode
1438 	  && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1439 	rld[i].inmode = inmode;
1440       if (outmode != VOIDmode
1441 	  && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1442 	rld[i].outmode = outmode;
1443       if (in != 0)
1444 	{
1445 	  rtx in_reg = inloc ? *inloc : 0;
1446 	  /* If we merge reloads for two distinct rtl expressions that
1447 	     are identical in content, there might be duplicate address
1448 	     reloads.  Remove the extra set now, so that if we later find
1449 	     that we can inherit this reload, we can get rid of the
1450 	     address reloads altogether.
1451 
1452 	     Do not do this if both reloads are optional since the result
1453 	     would be an optional reload which could potentially leave
1454 	     unresolved address replacements.
1455 
1456 	     It is not sufficient to call transfer_replacements since
1457 	     choose_reload_regs will remove the replacements for address
1458 	     reloads of inherited reloads which results in the same
1459 	     problem.  */
1460 	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1461 	      && ! (rld[i].optional && optional))
1462 	    {
1463 	      /* We must keep the address reload with the lower operand
1464 		 number alive.  */
1465 	      if (opnum > rld[i].opnum)
1466 		{
1467 		  remove_address_replacements (in);
1468 		  in = rld[i].in;
1469 		  in_reg = rld[i].in_reg;
1470 		}
1471 	      else
1472 		remove_address_replacements (rld[i].in);
1473 	    }
1474 	  /* When emitting reloads we don't look only at the in-
1475 	     and outmode, but also directly at the operands (in and out).
1476 	     So we can't simply overwrite them with whatever we have found
1477 	     for this (to-be-merged) reload, we have to "merge" that too.
1478 	     Reusing another reload already verified that we deal with the
1479 	     same operands, just possibly in different modes.  So we
1480 	     overwrite the operands only when the new mode is larger.
1481 	     See also PR33613.  */
1482 	  if (!rld[i].in
1483 	      || GET_MODE_SIZE (GET_MODE (in))
1484 	           > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1485 	    rld[i].in = in;
1486 	  if (!rld[i].in_reg
1487 	      || (in_reg
1488 		  && GET_MODE_SIZE (GET_MODE (in_reg))
1489 	             > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1490 	    rld[i].in_reg = in_reg;
1491 	}
1492       if (out != 0)
1493 	{
1494 	  if (!rld[i].out
1495 	      || (out
1496 		  && GET_MODE_SIZE (GET_MODE (out))
1497 	             > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1498 	    rld[i].out = out;
1499 	  if (outloc
1500 	      && (!rld[i].out_reg
1501 		  || GET_MODE_SIZE (GET_MODE (*outloc))
1502 		     > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1503 	    rld[i].out_reg = *outloc;
1504 	}
1505       if (reg_class_subset_p (rclass, rld[i].rclass))
1506 	rld[i].rclass = rclass;
1507       rld[i].optional &= optional;
1508       if (MERGE_TO_OTHER (type, rld[i].when_needed,
1509 			  opnum, rld[i].opnum))
1510 	rld[i].when_needed = RELOAD_OTHER;
1511       rld[i].opnum = MIN (rld[i].opnum, opnum);
1512     }
1513 
1514   /* If the ostensible rtx being reloaded differs from the rtx found
1515      in the location to substitute, this reload is not safe to combine
1516      because we cannot reliably tell whether it appears in the insn.  */
1517 
1518   if (in != 0 && in != *inloc)
1519     rld[i].nocombine = 1;
1520 
1521 #if 0
1522   /* This was replaced by changes in find_reloads_address_1 and the new
1523      function inc_for_reload, which go with a new meaning of reload_inc.  */
1524 
1525   /* If this is an IN/OUT reload in an insn that sets the CC,
1526      it must be for an autoincrement.  It doesn't work to store
1527      the incremented value after the insn because that would clobber the CC.
1528      So we must do the increment of the value reloaded from,
1529      increment it, store it back, then decrement again.  */
1530   if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1531     {
1532       out = 0;
1533       rld[i].out = 0;
1534       rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1535       /* If we did not find a nonzero amount-to-increment-by,
1536 	 that contradicts the belief that IN is being incremented
1537 	 in an address in this insn.  */
1538       gcc_assert (rld[i].inc != 0);
1539     }
1540 #endif
1541 
1542   /* If we will replace IN and OUT with the reload-reg,
1543      record where they are located so that substitution need
1544      not do a tree walk.  */
1545 
1546   if (replace_reloads)
1547     {
1548       if (inloc != 0)
1549 	{
1550 	  struct replacement *r = &replacements[n_replacements++];
1551 	  r->what = i;
1552 	  r->where = inloc;
1553 	  r->mode = inmode;
1554 	}
1555       if (outloc != 0 && outloc != inloc)
1556 	{
1557 	  struct replacement *r = &replacements[n_replacements++];
1558 	  r->what = i;
1559 	  r->where = outloc;
1560 	  r->mode = outmode;
1561 	}
1562     }
1563 
1564   /* If this reload is just being introduced and it has both
1565      an incoming quantity and an outgoing quantity that are
1566      supposed to be made to match, see if either one of the two
1567      can serve as the place to reload into.
1568 
1569      If one of them is acceptable, set rld[i].reg_rtx
1570      to that one.  */
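  /* A hedged illustration (not taken from any particular target): if IN is
     a pseudo, say (reg:SI 100), that got no hard register, OUT is
     (reg:SI 2), and hard register 2 is in RCLASS and is not referenced
     elsewhere in the insn, find_dummy_reload can return (reg:SI 2), so the
     value is reloaded directly into the output register and no separate
     spill register is needed.  */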
1571 
1572   if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1573     {
1574       rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1575 					  inmode, outmode,
1576 					  rld[i].rclass, i,
1577 					  earlyclobber_operand_p (out));
1578 
1579       /* If the outgoing register already contains the same value
1580 	 as the incoming one, we can dispense with loading it.
1581 	 The easiest way to tell the caller that is to give a phony
1582 	 value for the incoming operand (same as outgoing one).  */
1583       if (rld[i].reg_rtx == out
1584 	  && (REG_P (in) || CONSTANT_P (in))
1585 	  && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1586 				  static_reload_reg_p, i, inmode))
1587 	rld[i].in = out;
1588     }
1589 
1590   /* If this is an input reload and the operand contains a register that
1591      dies in this insn and is used nowhere else, see if it is the right class
1592      to be used for this reload.  Use it if so.  (This occurs most commonly
1593      in the case of paradoxical SUBREGs and in-out reloads).  We cannot do
1594      this if it is also an output reload that mentions the register unless
1595      the output is a SUBREG that clobbers an entire register.
1596 
1597      Note that the operand might be one of the spill regs, if it is a
1598      pseudo reg and we are in a block where spilling has not taken place.
1599      But if there is no spilling in this block, that is OK.
1600      An explicitly used hard reg cannot be a spill reg.  */
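  /* A hedged example: if IN is (plus:SI (reg:SI 5) (const_int 4)), hard
     register 5 has a REG_DEAD note on this insn, belongs to RCLASS, and is
     used nowhere else in the insn, the loop below can pick (reg:SI 5)
     itself as rld[i].reg_rtx, again avoiding a fresh spill register.  */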
1601 
1602   if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1603     {
1604       rtx note;
1605       int regno;
1606       machine_mode rel_mode = inmode;
1607 
1608       if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1609 	rel_mode = outmode;
1610 
1611       for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1612 	if (REG_NOTE_KIND (note) == REG_DEAD
1613 	    && REG_P (XEXP (note, 0))
1614 	    && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1615 	    && reg_mentioned_p (XEXP (note, 0), in)
1616 	    /* Check that a former pseudo is valid; see find_dummy_reload.  */
1617 	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1618 		|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1619 				    ORIGINAL_REGNO (XEXP (note, 0)))
1620 		    && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1621 	    && ! refers_to_regno_for_reload_p (regno,
1622 					       end_hard_regno (rel_mode,
1623 							       regno),
1624 					       PATTERN (this_insn), inloc)
1625 	    && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1626 	    /* If this is also an output reload, IN cannot be used as
1627 	       the reload register if it is set in this insn unless IN
1628 	       is also OUT.  */
1629 	    && (out == 0 || in == out
1630 		|| ! hard_reg_set_here_p (regno,
1631 					  end_hard_regno (rel_mode, regno),
1632 					  PATTERN (this_insn)))
1633 	    /* ??? Why is this code so different from the previous?
1634 	       Is there any simple coherent way to describe the two together?
1635 	       What's going on here?  */
1636 	    && (in != out
1637 		|| (GET_CODE (in) == SUBREG
1638 		    && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1639 			 / UNITS_PER_WORD)
1640 			== ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1641 			     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1642 	    /* Make sure the operand fits in the reg that dies.  */
1643 	    && (GET_MODE_SIZE (rel_mode)
1644 		<= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1645 	    && HARD_REGNO_MODE_OK (regno, inmode)
1646 	    && HARD_REGNO_MODE_OK (regno, outmode))
1647 	  {
1648 	    unsigned int offs;
1649 	    unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1650 				      hard_regno_nregs[regno][outmode]);
1651 
1652 	    for (offs = 0; offs < nregs; offs++)
1653 	      if (fixed_regs[regno + offs]
1654 		  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1655 					  regno + offs))
1656 		break;
1657 
1658 	    if (offs == nregs
1659 		&& (! (refers_to_regno_for_reload_p
1660 		       (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1661 		    || can_reload_into (in, regno, inmode)))
1662 	      {
1663 		rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1664 		break;
1665 	      }
1666 	  }
1667     }
1668 
1669   if (out)
1670     output_reloadnum = i;
1671 
1672   return i;
1673 }
1674 
1675 /* Record an additional place we must replace a value
1676    for which we have already recorded a reload.
1677    RELOADNUM is the value returned by push_reload
1678    when the reload was recorded.
1679    This is used in insn patterns that use match_dup.  */
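/* An illustrative sketch only: in a machine-description pattern such as

     (set (match_operand:SI 0 "register_operand" "=r")
	  (plus:SI (match_dup 0)
		   (match_operand:SI 1 "register_operand" "r")))

   operand 0 appears twice, so any reload register chosen for it must be
   substituted at the match_dup location as well; push_replacement (and
   dup_replacements below) record those extra locations.  */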
1680 
1681 static void
1682 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1683 {
1684   if (replace_reloads)
1685     {
1686       struct replacement *r = &replacements[n_replacements++];
1687       r->what = reloadnum;
1688       r->where = loc;
1689       r->mode = mode;
1690     }
1691 }
1692 
1693 /* Duplicate any replacement we have recorded to apply at
1694    location ORIG_LOC to also be performed at DUP_LOC.
1695    This is used in insn patterns that use match_dup.  */
1696 
1697 static void
1698 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1699 {
1700   int i, n = n_replacements;
1701 
1702   for (i = 0; i < n; i++)
1703     {
1704       struct replacement *r = &replacements[i];
1705       if (r->where == orig_loc)
1706 	push_replacement (dup_loc, r->what, r->mode);
1707     }
1708 }
1709 
1710 /* Transfer all replacements that used to be in reload FROM to be in
1711    reload TO.  */
1712 
1713 void
1714 transfer_replacements (int to, int from)
1715 {
1716   int i;
1717 
1718   for (i = 0; i < n_replacements; i++)
1719     if (replacements[i].what == from)
1720       replacements[i].what = to;
1721 }
1722 
1723 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1724    or a subpart of it.  If we have any replacements registered for IN_RTX,
1725    cancel the reloads that were supposed to load them.
1726    Return nonzero if we canceled any reloads.  */
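/* A hedged sketch of the effect: if IN_RTX is
     (mem:SI (plus:SI (reg:SI 100) (const_int 4)))
   and some reload R exists only to replace (reg:SI 100) inside that
   address, then once the whole memory value is inherited R is useless; its
   reload register is deallocated, its own address replacements are removed
   recursively, and rld[R].in is cleared.  */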
1727 int
1728 remove_address_replacements (rtx in_rtx)
1729 {
1730   int i, j;
1731   char reload_flags[MAX_RELOADS];
1732   int something_changed = 0;
1733 
1734   memset (reload_flags, 0, sizeof reload_flags);
1735   for (i = 0, j = 0; i < n_replacements; i++)
1736     {
1737       if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1738 	reload_flags[replacements[i].what] |= 1;
1739       else
1740 	{
1741 	  replacements[j++] = replacements[i];
1742 	  reload_flags[replacements[i].what] |= 2;
1743 	}
1744     }
1745   /* Note that the following store must be done before the recursive calls.  */
1746   n_replacements = j;
1747 
1748   for (i = n_reloads - 1; i >= 0; i--)
1749     {
1750       if (reload_flags[i] == 1)
1751 	{
1752 	  deallocate_reload_reg (i);
1753 	  remove_address_replacements (rld[i].in);
1754 	  rld[i].in = 0;
1755 	  something_changed = 1;
1756 	}
1757     }
1758   return something_changed;
1759 }
1760 
1761 /* If there is only one output reload, and it is not for an earlyclobber
1762    operand, try to combine it with a (logically unrelated) input reload
1763    to reduce the number of reload registers needed.
1764 
1765    This is safe if the input reload does not appear in
1766    the value being output-reloaded, because this implies
1767    it is not needed any more once the original insn completes.
1768 
1769    If that doesn't work, see if we can use any of the registers that
1770    die in this insn as a reload register.  We can if it is of the right
1771    class and does not appear in the value being output-reloaded.  */
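/* A purely illustrative case: suppose both pseudos in
     (set (reg:SI 101) (plus:SI (reg:SI 100) (const_int 1)))
   ended up in stack slots, so operand 0 needs an output reload and
   operand 1 an input reload.  One hard register can then serve both: it is
   loaded from pseudo 100's slot, used by the add, and finally stored to
   pseudo 101's slot, because the input value is dead once the insn has
   executed and does not appear in the value being output-reloaded.  */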
1772 
1773 static void
1774 combine_reloads (void)
1775 {
1776   int i, regno;
1777   int output_reload = -1;
1778   int secondary_out = -1;
1779   rtx note;
1780 
1781   /* Find the output reload; return unless there is exactly one
1782      and that one is mandatory.  */
1783 
1784   for (i = 0; i < n_reloads; i++)
1785     if (rld[i].out != 0)
1786       {
1787 	if (output_reload >= 0)
1788 	  return;
1789 	output_reload = i;
1790       }
1791 
1792   if (output_reload < 0 || rld[output_reload].optional)
1793     return;
1794 
1795   /* An input-output reload isn't combinable.  */
1796 
1797   if (rld[output_reload].in != 0)
1798     return;
1799 
1800   /* If this reload is for an earlyclobber operand, we can't do anything.  */
1801   if (earlyclobber_operand_p (rld[output_reload].out))
1802     return;
1803 
1804   /* If there is a reload for part of the address of this operand, we would
1805      need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
1806      its life to the point where doing this combine would not lower the
1807      number of spill registers needed.  */
1808   for (i = 0; i < n_reloads; i++)
1809     if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1810 	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1811 	&& rld[i].opnum == rld[output_reload].opnum)
1812       return;
1813 
1814   /* Check each input reload; can we combine it?  */
1815 
1816   for (i = 0; i < n_reloads; i++)
1817     if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1818 	/* Life span of this reload must not extend past main insn.  */
1819 	&& rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1820 	&& rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1821 	&& rld[i].when_needed != RELOAD_OTHER
1822 	&& (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1823 	    == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1824 				       [(int) rld[output_reload].outmode])
1825 	&& rld[i].inc == 0
1826 	&& rld[i].reg_rtx == 0
1827 #ifdef SECONDARY_MEMORY_NEEDED
1828 	/* Don't combine two reloads with different secondary
1829 	   memory locations.  */
1830 	&& (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1831 	    || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1832 	    || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1833 			    secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1834 #endif
1835 	&& (targetm.small_register_classes_for_mode_p (VOIDmode)
1836 	    ? (rld[i].rclass == rld[output_reload].rclass)
1837 	    : (reg_class_subset_p (rld[i].rclass,
1838 				   rld[output_reload].rclass)
1839 	       || reg_class_subset_p (rld[output_reload].rclass,
1840 				      rld[i].rclass)))
1841 	&& (MATCHES (rld[i].in, rld[output_reload].out)
1842 	    /* Args reversed because the first arg seems to be
1843 	       the one that we imagine being modified
1844 	       while the second is the one that might be affected.  */
1845 	    || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1846 						      rld[i].in)
1847 		/* However, if the input is a register that appears inside
1848 		   the output, then we also can't share.
1849 		   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1850 		   If the same reload reg is used for both reg 69 and the
1851 		   result to be stored in memory, then that result
1852 		   will clobber the address of the memory ref.  */
1853 		&& ! (REG_P (rld[i].in)
1854 		      && reg_overlap_mentioned_for_reload_p (rld[i].in,
1855 							     rld[output_reload].out))))
1856 	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1857 					 rld[i].when_needed != RELOAD_FOR_INPUT)
1858 	&& (reg_class_size[(int) rld[i].rclass]
1859 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
1860 	/* We will allow making things slightly worse by combining an
1861 	   input and an output, but no worse than that.  */
1862 	&& (rld[i].when_needed == RELOAD_FOR_INPUT
1863 	    || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1864       {
1865 	int j;
1866 
1867 	/* We have found a reload to combine with!  */
1868 	rld[i].out = rld[output_reload].out;
1869 	rld[i].out_reg = rld[output_reload].out_reg;
1870 	rld[i].outmode = rld[output_reload].outmode;
1871 	/* Mark the old output reload as inoperative.  */
1872 	rld[output_reload].out = 0;
1873 	/* The combined reload is needed for the entire insn.  */
1874 	rld[i].when_needed = RELOAD_OTHER;
1875 	/* If the output reload had a secondary reload, copy it.  */
1876 	if (rld[output_reload].secondary_out_reload != -1)
1877 	  {
1878 	    rld[i].secondary_out_reload
1879 	      = rld[output_reload].secondary_out_reload;
1880 	    rld[i].secondary_out_icode
1881 	      = rld[output_reload].secondary_out_icode;
1882 	  }
1883 
1884 #ifdef SECONDARY_MEMORY_NEEDED
1885 	/* Copy any secondary MEM.  */
1886 	if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1887 	  secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1888 	    = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1889 #endif
1890 	/* If required, minimize the register class.  */
1891 	if (reg_class_subset_p (rld[output_reload].rclass,
1892 				rld[i].rclass))
1893 	  rld[i].rclass = rld[output_reload].rclass;
1894 
1895 	/* Transfer all replacements from the old reload to the combined.  */
1896 	for (j = 0; j < n_replacements; j++)
1897 	  if (replacements[j].what == output_reload)
1898 	    replacements[j].what = i;
1899 
1900 	return;
1901       }
1902 
1903   /* If this insn has only one operand that is modified or written (assumed
1904      to be the first), it must be the one corresponding to this reload.  It
1905      is safe to use anything that dies in this insn for that output provided
1906      that it does not occur in the output (we already know it isn't an
1907      earlyclobber).  If this is an asm insn, give up.  */
1908 
1909   if (INSN_CODE (this_insn) == -1)
1910     return;
1911 
1912   for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1913     if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1914 	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1915       return;
1916 
1917   /* See if some hard register that dies in this insn and is not used in
1918      the output is of the right class.  This only works if the register we pick
1919      up can fully hold our output reload.  */
1920   for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1921     if (REG_NOTE_KIND (note) == REG_DEAD
1922 	&& REG_P (XEXP (note, 0))
1923 	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1924 						rld[output_reload].out)
1925 	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1926 	&& HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1927 	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1928 			      regno)
1929 	&& (hard_regno_nregs[regno][rld[output_reload].outmode]
1930 	    <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1931 	/* Ensure that a secondary or tertiary reload for this output
1932 	   won't want this register.  */
1933 	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1934 	    || (!(TEST_HARD_REG_BIT
1935 		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1936 		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1937 		    || !(TEST_HARD_REG_BIT
1938 			 (reg_class_contents[(int) rld[secondary_out].rclass],
1939 			  regno)))))
1940 	&& !fixed_regs[regno]
1941 	/* Check that a former pseudo is valid; see find_dummy_reload.  */
1942 	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1943 	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1944 			       ORIGINAL_REGNO (XEXP (note, 0)))
1945 		&& hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1946       {
1947 	rld[output_reload].reg_rtx
1948 	  = gen_rtx_REG (rld[output_reload].outmode, regno);
1949 	return;
1950       }
1951 }
1952 
1953 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1954    See if one of IN and OUT is a register that may be used;
1955    this is desirable since a spill-register won't be needed.
1956    If so, return the register rtx that proves acceptable.
1957 
1958    INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1959    RCLASS is the register class required for the reload.
1960 
1961    If FOR_REAL is >= 0, it is the number of the reload,
1962    and in some cases when it can be discovered that OUT doesn't need
1963    to be computed, clear out rld[FOR_REAL].out.
1964 
1965    If FOR_REAL is -1, this should not be done, because this call
1966    is just to see if a register can be found, not to find and install it.
1967 
1968    EARLYCLOBBER is nonzero if OUT is an earlyclobber operand.  This
1969    puts an additional constraint on being able to use IN for OUT since
1970    IN must not appear elsewhere in the insn (it is assumed that IN itself
1971    is safe from the earlyclobber).  */
1972 
1973 static rtx
1974 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1975 		   machine_mode inmode, machine_mode outmode,
1976 		   reg_class_t rclass, int for_real, int earlyclobber)
1977 {
1978   rtx in = real_in;
1979   rtx out = real_out;
1980   int in_offset = 0;
1981   int out_offset = 0;
1982   rtx value = 0;
1983 
1984   /* If operands exceed a word, we can't use either of them
1985      unless they have the same size.  */
1986   if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1987       && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1988 	  || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1989     return 0;
1990 
1991   /* Note that {in,out}_offset are needed only when 'in' or 'out'
1992      respectively refers to a hard register.  */
1993 
1994   /* Find the inside of any subregs.  */
1995   while (GET_CODE (out) == SUBREG)
1996     {
1997       if (REG_P (SUBREG_REG (out))
1998 	  && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1999 	out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
2000 					   GET_MODE (SUBREG_REG (out)),
2001 					   SUBREG_BYTE (out),
2002 					   GET_MODE (out));
2003       out = SUBREG_REG (out);
2004     }
2005   while (GET_CODE (in) == SUBREG)
2006     {
2007       if (REG_P (SUBREG_REG (in))
2008 	  && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
2009 	in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2010 					  GET_MODE (SUBREG_REG (in)),
2011 					  SUBREG_BYTE (in),
2012 					  GET_MODE (in));
2013       in = SUBREG_REG (in);
2014     }
2015 
2016   /* Narrow down the reg class, the same way push_reload will;
2017      otherwise we might find a dummy now, but push_reload won't.  */
2018   {
2019     reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2020     if (preferred_class != NO_REGS)
2021       rclass = (enum reg_class) preferred_class;
2022   }
2023 
2024   /* See if OUT will do.  */
2025   if (REG_P (out)
2026       && REGNO (out) < FIRST_PSEUDO_REGISTER)
2027     {
2028       unsigned int regno = REGNO (out) + out_offset;
2029       unsigned int nwords = hard_regno_nregs[regno][outmode];
2030       rtx saved_rtx;
2031 
2032       /* When we consider whether the insn uses OUT,
2033 	 ignore references within IN.  They don't prevent us
2034 	 from copying IN into OUT, because those refs would
2035 	 move into the insn that reloads IN.
2036 
2037 	 However, we only ignore IN in its role as this reload.
2038 	 If the insn uses IN elsewhere and it contains OUT,
2039 	 that counts.  We can't be sure it's the "same" operand
2040 	 so it might not go through this reload.
2041 
2042          We also need to avoid using OUT if it, or part of it, is a
2043          fixed register.  Modifying such registers, even transiently,
2044          may have undefined effects on the machine, such as modifying
2045          the stack pointer.  */
2046       saved_rtx = *inloc;
2047       *inloc = const0_rtx;
2048 
2049       if (regno < FIRST_PSEUDO_REGISTER
2050 	  && HARD_REGNO_MODE_OK (regno, outmode)
2051 	  && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2052 					     PATTERN (this_insn), outloc))
2053 	{
2054 	  unsigned int i;
2055 
2056 	  for (i = 0; i < nwords; i++)
2057 	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2058 				     regno + i)
2059 		|| fixed_regs[regno + i])
2060 	      break;
2061 
2062 	  if (i == nwords)
2063 	    {
2064 	      if (REG_P (real_out))
2065 		value = real_out;
2066 	      else
2067 		value = gen_rtx_REG (outmode, regno);
2068 	    }
2069 	}
2070 
2071       *inloc = saved_rtx;
2072     }
2073 
2074   /* Consider using IN if OUT was not acceptable
2075      or if OUT dies in this insn (like the quotient in a divmod insn).
2076      We can't use IN unless it dies in this insn,
2077      which means we must know accurately which hard regs are live.
2078      Also, the result can't go in IN if IN is used within OUT,
2079      or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
2080   if (hard_regs_live_known
2081       && REG_P (in)
2082       && REGNO (in) < FIRST_PSEUDO_REGISTER
2083       && (value == 0
2084 	  || find_reg_note (this_insn, REG_UNUSED, real_out))
2085       && find_reg_note (this_insn, REG_DEAD, real_in)
2086       && !fixed_regs[REGNO (in)]
2087       && HARD_REGNO_MODE_OK (REGNO (in),
2088 			     /* The only case where out and real_out might
2089 				have different modes is where real_out
2090 				is a subreg, and in that case, out
2091 				has a real mode.  */
2092 			     (GET_MODE (out) != VOIDmode
2093 			      ? GET_MODE (out) : outmode))
2094       && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2095 	  /* However only do this if we can be sure that this input
2096 	     operand doesn't correspond with an uninitialized pseudo.
2097 	     global can assign some hardreg to it that is the same as
2098 	     the one assigned to a different, also live pseudo (as it
2099 	     can ignore the conflict).  We must never introduce writes
2100 	     to such hardregs, as they would clobber the other live
2101 	     pseudo.  See PR 20973.  */
2102 	  || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2103 			     ORIGINAL_REGNO (in))
2104 	      /* Similarly, only do this if we can be sure that the death
2105 		 note is still valid.  global can assign some hardreg to
2106 		 the pseudo referenced in the note and simultaneously a
2107 		 subword of this hardreg to a different, also live pseudo,
2108 		 because only another subword of the hardreg is actually
2109 		 used in the insn.  This cannot happen if the pseudo has
2110 		 been assigned exactly one hardreg.  See PR 33732.  */
2111 	      && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2112     {
2113       unsigned int regno = REGNO (in) + in_offset;
2114       unsigned int nwords = hard_regno_nregs[regno][inmode];
2115 
2116       if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2117 	  && ! hard_reg_set_here_p (regno, regno + nwords,
2118 				    PATTERN (this_insn))
2119 	  && (! earlyclobber
2120 	      || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2121 						 PATTERN (this_insn), inloc)))
2122 	{
2123 	  unsigned int i;
2124 
2125 	  for (i = 0; i < nwords; i++)
2126 	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2127 				     regno + i))
2128 	      break;
2129 
2130 	  if (i == nwords)
2131 	    {
2132 	      /* If we were going to use OUT as the reload reg
2133 		 and changed our mind, it means OUT is a dummy that
2134 		 dies here.  So don't bother copying value to it.  */
2135 	      if (for_real >= 0 && value == real_out)
2136 		rld[for_real].out = 0;
2137 	      if (REG_P (real_in))
2138 		value = real_in;
2139 	      else
2140 		value = gen_rtx_REG (inmode, regno);
2141 	    }
2142 	}
2143     }
2144 
2145   return value;
2146 }
2147 
2148 /* This page contains subroutines used mainly for determining
2149    whether the IN or an OUT of a reload can serve as the
2150    reload register.  */
2151 
2152 /* Return 1 if X is an operand of an insn that is being earlyclobbered.  */
2153 
2154 int
2155 earlyclobber_operand_p (rtx x)
2156 {
2157   int i;
2158 
2159   for (i = 0; i < n_earlyclobbers; i++)
2160     if (reload_earlyclobbers[i] == x)
2161       return 1;
2162 
2163   return 0;
2164 }
2165 
2166 /* Return 1 if expression X alters a hard reg in the range
2167    from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2168    either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2169    X should be the body of an instruction.  */
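/* For example (hedged, target-dependent): checking the range [2, 3) against
   a body of (set (reg:DI 1) (const_int 0)) returns 1 on a target where
   DImode occupies hard registers 1 and 2, since the store clobbers register
   2; a body such as (set (reg:SI 0) (reg:SI 2)) only reads register 2 and
   returns 0.  */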
2170 
2171 static int
2172 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2173 {
2174   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2175     {
2176       rtx op0 = SET_DEST (x);
2177 
2178       while (GET_CODE (op0) == SUBREG)
2179 	op0 = SUBREG_REG (op0);
2180       if (REG_P (op0))
2181 	{
2182 	  unsigned int r = REGNO (op0);
2183 
2184 	  /* See if this reg overlaps range under consideration.  */
2185 	  if (r < end_regno
2186 	      && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2187 	    return 1;
2188 	}
2189     }
2190   else if (GET_CODE (x) == PARALLEL)
2191     {
2192       int i = XVECLEN (x, 0) - 1;
2193 
2194       for (; i >= 0; i--)
2195 	if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2196 	  return 1;
2197     }
2198 
2199   return 0;
2200 }
2201 
2202 /* Return 1 if ADDR is a valid memory address for mode MODE
2203    in address space AS, and check that each pseudo reg has the
2204    proper kind of hard reg.  */
2205 
2206 int
2207 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2208 				    rtx addr, addr_space_t as)
2209 {
2210 #ifdef GO_IF_LEGITIMATE_ADDRESS
2211   gcc_assert (ADDR_SPACE_GENERIC_P (as));
2212   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2213   return 0;
2214 
2215  win:
2216   return 1;
2217 #else
2218   return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2219 #endif
2220 }
2221 
2222 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2223    if they are the same hard reg, and has special hacks for
2224    autoincrement and autodecrement.
2225    This is specifically intended for find_reloads to use
2226    in determining whether two operands match.
2227    X is the operand whose number is the lower of the two.
2228 
2229    The value is 2 if Y contains a pre-increment that matches
2230    a non-incrementing address in X.  */
2231 
2232 /* ??? To be completely correct, we should arrange to pass
2233    for X the output operand and for Y the input operand.
2234    For now, we assume that the output operand has the lower number
2235    because that is natural in (SET output (... input ...)).  */
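/* Two hedged examples: on a typical 32-bit little-endian target,
   operands_match_p ((reg:SI 3), (subreg:SI (reg:DI 3) 0)) returns 1, since
   both refer to hard register 3; and
   operands_match_p ((mem:SI (reg:SI 2)),
		     (mem:SI (pre_inc:SI (reg:SI 2))))
   returns 2, the pre-increment-matches-plain-address case described
   above.  */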
2236 
2237 int
2238 operands_match_p (rtx x, rtx y)
2239 {
2240   int i;
2241   RTX_CODE code = GET_CODE (x);
2242   const char *fmt;
2243   int success_2;
2244 
2245   if (x == y)
2246     return 1;
2247   if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2248       && (REG_P (y) || (GET_CODE (y) == SUBREG
2249 				  && REG_P (SUBREG_REG (y)))))
2250     {
2251       int j;
2252 
2253       if (code == SUBREG)
2254 	{
2255 	  i = REGNO (SUBREG_REG (x));
2256 	  if (i >= FIRST_PSEUDO_REGISTER)
2257 	    goto slow;
2258 	  i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2259 				    GET_MODE (SUBREG_REG (x)),
2260 				    SUBREG_BYTE (x),
2261 				    GET_MODE (x));
2262 	}
2263       else
2264 	i = REGNO (x);
2265 
2266       if (GET_CODE (y) == SUBREG)
2267 	{
2268 	  j = REGNO (SUBREG_REG (y));
2269 	  if (j >= FIRST_PSEUDO_REGISTER)
2270 	    goto slow;
2271 	  j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2272 				    GET_MODE (SUBREG_REG (y)),
2273 				    SUBREG_BYTE (y),
2274 				    GET_MODE (y));
2275 	}
2276       else
2277 	j = REGNO (y);
2278 
2279       /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2280 	 multiple hard register group of scalar integer registers, so that
2281 	 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2282 	 register.  */
2283       if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2284 	  && SCALAR_INT_MODE_P (GET_MODE (x))
2285 	  && i < FIRST_PSEUDO_REGISTER)
2286 	i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2287       if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2288 	  && SCALAR_INT_MODE_P (GET_MODE (y))
2289 	  && j < FIRST_PSEUDO_REGISTER)
2290 	j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2291 
2292       return i == j;
2293     }
2294   /* If two operands must match, because they are really a single
2295      operand of an assembler insn, then two postincrements are invalid
2296      because the assembler insn would increment only once.
2297      On the other hand, a postincrement matches ordinary indexing
2298      if the postincrement is the output operand.  */
2299   if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2300     return operands_match_p (XEXP (x, 0), y);
2301   /* Two preincrements are invalid
2302      because the assembler insn would increment only once.
2303      On the other hand, a preincrement matches ordinary indexing
2304      if the preincrement is the input operand.
2305      In this case, return 2, since some callers need to do special
2306      things when this happens.  */
2307   if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2308       || GET_CODE (y) == PRE_MODIFY)
2309     return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2310 
2311  slow:
2312 
2313   /* Now we have disposed of all the cases in which different rtx codes
2314      can match.  */
2315   if (code != GET_CODE (y))
2316     return 0;
2317 
2318   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2319   if (GET_MODE (x) != GET_MODE (y))
2320     return 0;
2321 
2322   /* MEMs referring to different address space are not equivalent.  */
2323   if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2324     return 0;
2325 
2326   switch (code)
2327     {
2328     CASE_CONST_UNIQUE:
2329       return 0;
2330 
2331     case LABEL_REF:
2332       return label_ref_label (x) == label_ref_label (y);
2333     case SYMBOL_REF:
2334       return XSTR (x, 0) == XSTR (y, 0);
2335 
2336     default:
2337       break;
2338     }
2339 
2340   /* Compare the elements.  If any pair of corresponding elements
2341      fails to match, return 0 for the whole thing.  */
2342 
2343   success_2 = 0;
2344   fmt = GET_RTX_FORMAT (code);
2345   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2346     {
2347       int val, j;
2348       switch (fmt[i])
2349 	{
2350 	case 'w':
2351 	  if (XWINT (x, i) != XWINT (y, i))
2352 	    return 0;
2353 	  break;
2354 
2355 	case 'i':
2356 	  if (XINT (x, i) != XINT (y, i))
2357 	    return 0;
2358 	  break;
2359 
2360 	case 'e':
2361 	  val = operands_match_p (XEXP (x, i), XEXP (y, i));
2362 	  if (val == 0)
2363 	    return 0;
2364 	  /* If any subexpression returns 2,
2365 	     we should return 2 if we are successful.  */
2366 	  if (val == 2)
2367 	    success_2 = 1;
2368 	  break;
2369 
2370 	case '0':
2371 	  break;
2372 
2373 	case 'E':
2374 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2375 	    return 0;
2376 	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2377 	    {
2378 	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2379 	      if (val == 0)
2380 		return 0;
2381 	      if (val == 2)
2382 		success_2 = 1;
2383 	    }
2384 	  break;
2385 
2386 	  /* It is believed that rtx's at this level will never
2387 	     contain anything but integers and other rtx's,
2388 	     except for within LABEL_REFs and SYMBOL_REFs.  */
2389 	default:
2390 	  gcc_unreachable ();
2391 	}
2392     }
2393   return 1 + success_2;
2394 }
2395 
2396 /* Describe the range of registers or memory referenced by X.
2397    If X is a register, set REG_FLAG and put the first register
2398    number into START and the last plus one into END.
2399    If X is a memory reference, put a base address into BASE
2400    and a range of integer offsets into START and END.
2401    If X is pushing on the stack, we can assume it causes no trouble,
2402    so we set the SAFE field.  */
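/* A rough illustration, assuming SImode is four bytes: decomposing
   (mem:SI (plus:SI (reg:SI 6) (const_int 8))) yields BASE (reg:SI 6),
   START 8 and END 12; decomposing a push such as
   (mem:SI (pre_dec:SI (reg:SI 7))), with register 7 the stack pointer,
   additionally sets SAFE because stack pushes cause no trouble.  */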
2403 
2404 static struct decomposition
2405 decompose (rtx x)
2406 {
2407   struct decomposition val;
2408   int all_const = 0;
2409 
2410   memset (&val, 0, sizeof (val));
2411 
2412   switch (GET_CODE (x))
2413     {
2414     case MEM:
2415       {
2416 	rtx base = NULL_RTX, offset = 0;
2417 	rtx addr = XEXP (x, 0);
2418 
2419 	if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2420 	    || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2421 	  {
2422 	    val.base = XEXP (addr, 0);
2423 	    val.start = -GET_MODE_SIZE (GET_MODE (x));
2424 	    val.end = GET_MODE_SIZE (GET_MODE (x));
2425 	    val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2426 	    return val;
2427 	  }
2428 
2429 	if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2430 	  {
2431 	    if (GET_CODE (XEXP (addr, 1)) == PLUS
2432 		&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2433 		&& CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2434 	      {
2435 		val.base  = XEXP (addr, 0);
2436 		val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2437 		val.end   = INTVAL (XEXP (XEXP (addr, 1), 1));
2438 		val.safe  = REGNO (val.base) == STACK_POINTER_REGNUM;
2439 		return val;
2440 	      }
2441 	  }
2442 
2443 	if (GET_CODE (addr) == CONST)
2444 	  {
2445 	    addr = XEXP (addr, 0);
2446 	    all_const = 1;
2447 	  }
2448 	if (GET_CODE (addr) == PLUS)
2449 	  {
2450 	    if (CONSTANT_P (XEXP (addr, 0)))
2451 	      {
2452 		base = XEXP (addr, 1);
2453 		offset = XEXP (addr, 0);
2454 	      }
2455 	    else if (CONSTANT_P (XEXP (addr, 1)))
2456 	      {
2457 		base = XEXP (addr, 0);
2458 		offset = XEXP (addr, 1);
2459 	      }
2460 	  }
2461 
2462 	if (offset == 0)
2463 	  {
2464 	    base = addr;
2465 	    offset = const0_rtx;
2466 	  }
2467 	if (GET_CODE (offset) == CONST)
2468 	  offset = XEXP (offset, 0);
2469 	if (GET_CODE (offset) == PLUS)
2470 	  {
2471 	    if (CONST_INT_P (XEXP (offset, 0)))
2472 	      {
2473 		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2474 		offset = XEXP (offset, 0);
2475 	      }
2476 	    else if (CONST_INT_P (XEXP (offset, 1)))
2477 	      {
2478 		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2479 		offset = XEXP (offset, 1);
2480 	      }
2481 	    else
2482 	      {
2483 		base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2484 		offset = const0_rtx;
2485 	      }
2486 	  }
2487 	else if (!CONST_INT_P (offset))
2488 	  {
2489 	    base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2490 	    offset = const0_rtx;
2491 	  }
2492 
2493 	if (all_const && GET_CODE (base) == PLUS)
2494 	  base = gen_rtx_CONST (GET_MODE (base), base);
2495 
2496 	gcc_assert (CONST_INT_P (offset));
2497 
2498 	val.start = INTVAL (offset);
2499 	val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2500 	val.base = base;
2501       }
2502       break;
2503 
2504     case REG:
2505       val.reg_flag = 1;
2506       val.start = true_regnum (x);
2507       if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2508 	{
2509 	  /* A pseudo with no hard reg.  */
2510 	  val.start = REGNO (x);
2511 	  val.end = val.start + 1;
2512 	}
2513       else
2514 	/* A hard reg.  */
2515 	val.end = end_hard_regno (GET_MODE (x), val.start);
2516       break;
2517 
2518     case SUBREG:
2519       if (!REG_P (SUBREG_REG (x)))
2520 	/* This could be more precise, but it's good enough.  */
2521 	return decompose (SUBREG_REG (x));
2522       val.reg_flag = 1;
2523       val.start = true_regnum (x);
2524       if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2525 	return decompose (SUBREG_REG (x));
2526       else
2527 	/* A hard reg.  */
2528 	val.end = val.start + subreg_nregs (x);
2529       break;
2530 
2531     case SCRATCH:
2532       /* This hasn't been assigned yet, so it can't conflict yet.  */
2533       val.safe = 1;
2534       break;
2535 
2536     default:
2537       gcc_assert (CONSTANT_P (x));
2538       val.safe = 1;
2539       break;
2540     }
2541   return val;
2542 }
2543 
2544 /* Return 1 if altering Y will not modify the value of X.
2545    Y is also described by YDATA, which should be decompose (Y).  */
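/* Illustrative, again assuming four-byte SImode: for two slots off the same
   base register, say X = (mem:SI (reg:SI 6)) and
   Y = (mem:SI (plus:SI (reg:SI 6) (const_int 4))), the ranges [0, 4) and
   [4, 8) do not overlap, so altering Y cannot change X and immune_p
   returns 1.  */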
2546 
2547 static int
2548 immune_p (rtx x, rtx y, struct decomposition ydata)
2549 {
2550   struct decomposition xdata;
2551 
2552   if (ydata.reg_flag)
2553     return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2554   if (ydata.safe)
2555     return 1;
2556 
2557   gcc_assert (MEM_P (y));
2558   /* If Y is memory and X is not, Y can't affect X.  */
2559   if (!MEM_P (x))
2560     return 1;
2561 
2562   xdata = decompose (x);
2563 
2564   if (! rtx_equal_p (xdata.base, ydata.base))
2565     {
2566       /* If bases are distinct symbolic constants, there is no overlap.  */
2567       if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2568 	return 1;
2569       /* Constants and stack slots never overlap.  */
2570       if (CONSTANT_P (xdata.base)
2571 	  && (ydata.base == frame_pointer_rtx
2572 	      || ydata.base == hard_frame_pointer_rtx
2573 	      || ydata.base == stack_pointer_rtx))
2574 	return 1;
2575       if (CONSTANT_P (ydata.base)
2576 	  && (xdata.base == frame_pointer_rtx
2577 	      || xdata.base == hard_frame_pointer_rtx
2578 	      || xdata.base == stack_pointer_rtx))
2579 	return 1;
2580       /* If either base is variable, we don't know anything.  */
2581       return 0;
2582     }
2583 
2584   return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2585 }
2586 
2587 /* Similar, but calls decompose.  */
2588 
2589 int
2590 safe_from_earlyclobber (rtx op, rtx clobber)
2591 {
2592   struct decomposition early_data;
2593 
2594   early_data = decompose (clobber);
2595   return immune_p (op, clobber, early_data);
2596 }
2597 
2598 /* Main entry point of this file: search the body of INSN
2599    for values that need reloading and record them with push_reload.
2600    REPLACE nonzero means record also where the values occur
2601    so that subst_reloads can be used.
2602 
2603    IND_LEVELS says how many levels of indirection are supported by this
2604    machine; a value of zero means that a memory reference is not a valid
2605    memory address.
2606 
2607    LIVE_KNOWN says we have valid information about which hard
2608    regs are live at each point in the program; this is true when
2609    we are called from global_alloc but false when stupid register
2610    allocation has been done.
2611 
2612    RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2613    which is nonnegative if the reg has been commandeered for reloading into.
2614    It is copied into STATIC_RELOAD_REG_P and referenced from there
2615    by various subroutines.
2616 
2617    Return TRUE if some operands need to be changed, because of swapping
2618    commutative operands, reg_equiv_address substitution, or whatever.  */
2619 
2620 int
2621 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2622 	      short *reload_reg_p)
2623 {
2624   int insn_code_number;
2625   int i, j;
2626   int noperands;
2627   /* These start out as the constraints for the insn
2628      and they are chewed up as we consider alternatives.  */
2629   const char *constraints[MAX_RECOG_OPERANDS];
2630   /* These are the preferred classes for an operand, or NO_REGS if it isn't
2631      a register.  */
2632   enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2633   char pref_or_nothing[MAX_RECOG_OPERANDS];
2634   /* Nonzero for a MEM operand whose entire address needs a reload.
2635      May be -1 to indicate the entire address may or may not need a reload.  */
2636   int address_reloaded[MAX_RECOG_OPERANDS];
2637   /* Nonzero for an address operand that needs to be completely reloaded.
2638      May be -1 to indicate the entire operand may or may not need a reload.  */
2639   int address_operand_reloaded[MAX_RECOG_OPERANDS];
2640   /* Value of enum reload_type to use for operand.  */
2641   enum reload_type operand_type[MAX_RECOG_OPERANDS];
2642   /* Value of enum reload_type to use within address of operand.  */
2643   enum reload_type address_type[MAX_RECOG_OPERANDS];
2644   /* Save the usage of each operand.  */
2645   enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2646   int no_input_reloads = 0, no_output_reloads = 0;
2647   int n_alternatives;
2648   reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2649   char this_alternative_match_win[MAX_RECOG_OPERANDS];
2650   char this_alternative_win[MAX_RECOG_OPERANDS];
2651   char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2652   char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2653   int this_alternative_matches[MAX_RECOG_OPERANDS];
2654   reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2655   int this_alternative_number;
2656   int goal_alternative_number = 0;
2657   int operand_reloadnum[MAX_RECOG_OPERANDS];
2658   int goal_alternative_matches[MAX_RECOG_OPERANDS];
2659   int goal_alternative_matched[MAX_RECOG_OPERANDS];
2660   char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2661   char goal_alternative_win[MAX_RECOG_OPERANDS];
2662   char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2663   char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2664   int goal_alternative_swapped;
2665   int best;
2666   int commutative;
2667   char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2668   rtx substed_operand[MAX_RECOG_OPERANDS];
2669   rtx body = PATTERN (insn);
2670   rtx set = single_set (insn);
2671   int goal_earlyclobber = 0, this_earlyclobber;
2672   machine_mode operand_mode[MAX_RECOG_OPERANDS];
2673   int retval = 0;
2674 
2675   this_insn = insn;
2676   n_reloads = 0;
2677   n_replacements = 0;
2678   n_earlyclobbers = 0;
2679   replace_reloads = replace;
2680   hard_regs_live_known = live_known;
2681   static_reload_reg_p = reload_reg_p;
2682 
2683   /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2684      neither are insns that SET cc0.  Insns that use CC0 are not allowed
2685      to have any input reloads.  */
2686   if (JUMP_P (insn) || CALL_P (insn))
2687     no_output_reloads = 1;
2688 
2689   if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2690     no_input_reloads = 1;
2691   if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2692     no_output_reloads = 1;
2693 
2694 #ifdef SECONDARY_MEMORY_NEEDED
2695   /* The eliminated forms of any secondary memory locations are per-insn, so
2696      clear them out here.  */
2697 
2698   if (secondary_memlocs_elim_used)
2699     {
2700       memset (secondary_memlocs_elim, 0,
2701 	      sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2702       secondary_memlocs_elim_used = 0;
2703     }
2704 #endif
2705 
2706   /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2707      is cheap to move between them.  If it is not, there may not be an insn
2708      to do the copy, so we may need a reload.  */
2709   if (GET_CODE (body) == SET
2710       && REG_P (SET_DEST (body))
2711       && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2712       && REG_P (SET_SRC (body))
2713       && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2714       && register_move_cost (GET_MODE (SET_SRC (body)),
2715 			     REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2716 			     REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2717     return 0;
2718 
2719   extract_insn (insn);
2720 
2721   noperands = reload_n_operands = recog_data.n_operands;
2722   n_alternatives = recog_data.n_alternatives;
2723 
2724   /* Just return "no reloads" if insn has no operands with constraints.  */
2725   if (noperands == 0 || n_alternatives == 0)
2726     return 0;
2727 
2728   insn_code_number = INSN_CODE (insn);
2729   this_insn_is_asm = insn_code_number < 0;
2730 
2731   memcpy (operand_mode, recog_data.operand_mode,
2732 	  noperands * sizeof (machine_mode));
2733   memcpy (constraints, recog_data.constraints,
2734 	  noperands * sizeof (const char *));
2735 
2736   commutative = -1;
2737 
2738   /* If we will need to know, later, whether some pair of operands
2739      are the same, we must compare them now and save the result.
2740      Reloading the base and index registers will clobber them
2741      and afterward they will fail to match.  */
2742 
2743   for (i = 0; i < noperands; i++)
2744     {
2745       const char *p;
2746       int c;
2747       char *end;
2748 
2749       substed_operand[i] = recog_data.operand[i];
2750       p = constraints[i];
2751 
2752       modified[i] = RELOAD_READ;
2753 
2754       /* Scan this operand's constraint to see if it is an output operand,
2755 	 an in-out operand, is commutative, or should match another.  */
2756 
2757       while ((c = *p))
2758 	{
2759 	  p += CONSTRAINT_LEN (c, p);
2760 	  switch (c)
2761 	    {
2762 	    case '=':
2763 	      modified[i] = RELOAD_WRITE;
2764 	      break;
2765 	    case '+':
2766 	      modified[i] = RELOAD_READ_WRITE;
2767 	      break;
2768 	    case '%':
2769 	      {
2770 		/* The last operand should not be marked commutative.  */
2771 		gcc_assert (i != noperands - 1);
2772 
2773 		/* We currently only support one commutative pair of
2774 		   operands.  Some existing asm code currently uses more
2775 		   than one pair.  Previously, that would usually work,
2776 		   but sometimes it would crash the compiler.  We
2777 		   continue supporting that case as well as we can by
2778 		   silently ignoring all but the first pair.  In the
2779 		   future we may handle it correctly.  */
2780 		if (commutative < 0)
2781 		  commutative = i;
2782 		else
2783 		  gcc_assert (this_insn_is_asm);
2784 	      }
2785 	      break;
2786 	    /* Use of ISDIGIT is tempting here, but it may get expensive because
2787 	       of locale support we don't want.  */
2788 	    case '0': case '1': case '2': case '3': case '4':
2789 	    case '5': case '6': case '7': case '8': case '9':
2790 	      {
2791 		c = strtoul (p - 1, &end, 10);
2792 		p = end;
2793 
2794 		operands_match[c][i]
2795 		  = operands_match_p (recog_data.operand[c],
2796 				      recog_data.operand[i]);
2797 
2798 		/* An operand may not match itself.  */
2799 		gcc_assert (c != i);
2800 
2801 		/* If C can be commuted with C+1, and C might need to match I,
2802 		   then C+1 might also need to match I.  */
2803 		if (commutative >= 0)
2804 		  {
2805 		    if (c == commutative || c == commutative + 1)
2806 		      {
2807 			int other = c + (c == commutative ? 1 : -1);
2808 			operands_match[other][i]
2809 			  = operands_match_p (recog_data.operand[other],
2810 					      recog_data.operand[i]);
2811 		      }
2812 		    if (i == commutative || i == commutative + 1)
2813 		      {
2814 			int other = i + (i == commutative ? 1 : -1);
2815 			operands_match[c][other]
2816 			  = operands_match_p (recog_data.operand[c],
2817 					      recog_data.operand[other]);
2818 		      }
2819 		    /* Note that C is supposed to be less than I.
2820 		       No need to consider altering both C and I because in
2821 		       that case we would alter one into the other.  */
2822 		  }
2823 	      }
2824 	    }
2825 	}
2826     }
2827 
2828   /* Examine each operand that is a memory reference or memory address
2829      and reload parts of the addresses into index registers.
2830      Also here any references to pseudo regs that didn't get hard regs
2831      but are equivalent to constants get replaced in the insn itself
2832      with those constants.  Nobody will ever see them again.
2833 
2834      Finally, set up the preferred classes of each operand.  */
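  /* A hedged example: an operand such as
	(mem:SI (plus:SI (reg:SI 117) (const_int 4)))
     where pseudo 117 received no hard register causes find_reloads_address
     below to push whatever reloads are needed to make the address valid,
     typically a reload of the base register into an address register.  */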
2835 
2836   for (i = 0; i < noperands; i++)
2837     {
2838       RTX_CODE code = GET_CODE (recog_data.operand[i]);
2839 
2840       address_reloaded[i] = 0;
2841       address_operand_reloaded[i] = 0;
2842       operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2843 			 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2844 			 : RELOAD_OTHER);
2845       address_type[i]
2846 	= (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2847 	   : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2848 	   : RELOAD_OTHER);
2849 
2850       if (*constraints[i] == 0)
2851 	/* Ignore things like match_operator operands.  */
2852 	;
2853       else if (insn_extra_address_constraint
2854 	       (lookup_constraint (constraints[i])))
2855 	{
2856 	  address_operand_reloaded[i]
2857 	    = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2858 				    recog_data.operand[i],
2859 				    recog_data.operand_loc[i],
2860 				    i, operand_type[i], ind_levels, insn);
2861 
2862 	  /* If we now have a simple operand where we used to have a
2863 	     PLUS or MULT, re-recognize and try again.  */
2864 	  if ((OBJECT_P (*recog_data.operand_loc[i])
2865 	       || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2866 	      && (GET_CODE (recog_data.operand[i]) == MULT
2867 		  || GET_CODE (recog_data.operand[i]) == PLUS))
2868 	    {
2869 	      INSN_CODE (insn) = -1;
2870 	      retval = find_reloads (insn, replace, ind_levels, live_known,
2871 				     reload_reg_p);
2872 	      return retval;
2873 	    }
2874 
2875 	  recog_data.operand[i] = *recog_data.operand_loc[i];
2876 	  substed_operand[i] = recog_data.operand[i];
2877 
2878 	  /* Address operands are reloaded in their existing mode,
2879 	     no matter what is specified in the machine description.  */
2880 	  operand_mode[i] = GET_MODE (recog_data.operand[i]);
2881 
2882 	  /* If the address is a single CONST_INT, pick the address mode
2883 	     instead; otherwise we will not know later in which mode
2884 	     the reload should be performed.  */
2885 	  if (operand_mode[i] == VOIDmode)
2886 	    operand_mode[i] = Pmode;
2887 
2888 	}
2889       else if (code == MEM)
2890 	{
2891 	  address_reloaded[i]
2892 	    = find_reloads_address (GET_MODE (recog_data.operand[i]),
2893 				    recog_data.operand_loc[i],
2894 				    XEXP (recog_data.operand[i], 0),
2895 				    &XEXP (recog_data.operand[i], 0),
2896 				    i, address_type[i], ind_levels, insn);
2897 	  recog_data.operand[i] = *recog_data.operand_loc[i];
2898 	  substed_operand[i] = recog_data.operand[i];
2899 	}
2900       else if (code == SUBREG)
2901 	{
2902 	  rtx reg = SUBREG_REG (recog_data.operand[i]);
2903 	  rtx op
2904 	    = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2905 				   ind_levels,
2906 				   set != 0
2907 				   && &SET_DEST (set) == recog_data.operand_loc[i],
2908 				   insn,
2909 				   &address_reloaded[i]);
2910 
2911 	  /* If we made a MEM to load (a part of) the stack slot of a pseudo
2912 	     that didn't get a hard register, emit a USE with a REG_EQUAL
2913 	     note in front so that we might inherit a previous, possibly
2914 	     wider reload.  */
2915 
2916 	  if (replace
2917 	      && MEM_P (op)
2918 	      && REG_P (reg)
2919 	      && (GET_MODE_SIZE (GET_MODE (reg))
2920 		  >= GET_MODE_SIZE (GET_MODE (op)))
2921 	      && reg_equiv_constant (REGNO (reg)) == 0)
2922 	    set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2923 						   insn),
2924 				 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2925 
2926 	  substed_operand[i] = recog_data.operand[i] = op;
2927 	}
2928       else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2929 	/* We can get a PLUS as an "operand" as a result of register
2930 	   elimination.  See eliminate_regs and gen_reload.  We handle
2931 	   a unary operator by reloading the operand.  */
2932 	substed_operand[i] = recog_data.operand[i]
2933 	  = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2934 				 ind_levels, 0, insn,
2935 				 &address_reloaded[i]);
2936       else if (code == REG)
2937 	{
2938 	  /* This is equivalent to calling find_reloads_toplev.
2939 	     The code is duplicated for speed.
2940 	     When we find a pseudo always equivalent to a constant,
2941 	     we replace it by the constant.  We must be sure, however,
2942 	     that we don't try to replace it in the insn in which it
2943 	     is being set.  */
2944 	  int regno = REGNO (recog_data.operand[i]);
2945 	  if (reg_equiv_constant (regno) != 0
2946 	      && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2947 	    {
2948 	      /* Record the existing mode so that the check whether constants
2949 		 are allowed will work when operand_mode isn't specified.  */
2950 
2951 	      if (operand_mode[i] == VOIDmode)
2952 		operand_mode[i] = GET_MODE (recog_data.operand[i]);
2953 
2954 	      substed_operand[i] = recog_data.operand[i]
2955 		= reg_equiv_constant (regno);
2956 	    }
2957 	  if (reg_equiv_memory_loc (regno) != 0
2958 	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2959 	    /* We need not give a valid is_set_dest argument since the case
2960 	       of a constant equivalence was checked above.  */
2961 	    substed_operand[i] = recog_data.operand[i]
2962 	      = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2963 				     ind_levels, 0, insn,
2964 				     &address_reloaded[i]);
2965 	}
2966       /* If the operand is still a register (we didn't replace it with an
2967 	 equivalent), get the preferred class to reload it into.  */
2968       code = GET_CODE (recog_data.operand[i]);
2969       preferred_class[i]
2970 	= ((code == REG && REGNO (recog_data.operand[i])
2971 	    >= FIRST_PSEUDO_REGISTER)
2972 	   ? reg_preferred_class (REGNO (recog_data.operand[i]))
2973 	   : NO_REGS);
2974       pref_or_nothing[i]
2975 	= (code == REG
2976 	   && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2977 	   && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2978     }
2979 
2980   /* If this is simply a copy from operand 1 to operand 0, merge the
2981      preferred classes for the operands.  */
2982   if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2983       && recog_data.operand[1] == SET_SRC (set))
2984     {
2985       preferred_class[0] = preferred_class[1]
2986 	= reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2987       pref_or_nothing[0] |= pref_or_nothing[1];
2988       pref_or_nothing[1] |= pref_or_nothing[0];
2989     }
2990 
2991   /* Now see what we need for pseudo-regs that didn't get hard regs
2992      or got the wrong kind of hard reg.  For this, we must consider
2993      all the operands together against the register constraints.  */
2994 
2995   best = MAX_RECOG_OPERANDS * 2 + 600;
2996 
2997   goal_alternative_swapped = 0;
2998 
2999   /* The constraints are made of several alternatives.
3000      Each operand's constraint looks like foo,bar,... with commas
3001      separating the alternatives.  The first alternatives for all
3002      operands go together, the second alternatives go together, etc.
3003 
3004      First loop over alternatives.  */
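  /* Editor's note (hypothetical example, not from this file): for a
     two-operand move pattern with constraints "=r,m" for operand 0 and
     "rm,r" for operand 1, alternative 0 allows a register destination
     with a register-or-memory source, and alternative 1 allows a memory
     destination with a register source.  The loop below scores one such
     column of constraints at a time across all operands.  */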
3005 
3006   alternative_mask enabled = get_enabled_alternatives (insn);
3007   for (this_alternative_number = 0;
3008        this_alternative_number < n_alternatives;
3009        this_alternative_number++)
3010     {
3011       int swapped;
3012 
3013       if (!TEST_BIT (enabled, this_alternative_number))
3014 	{
3015 	  int i;
3016 
3017 	  for (i = 0; i < recog_data.n_operands; i++)
3018 	    constraints[i] = skip_alternative (constraints[i]);
3019 
3020 	  continue;
3021 	}
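/* Editor's sketch (an assumption about the effect of skip_alternative
   above, not its actual implementation): skipping an alternative means
   advancing the constraint pointer past the rest of the current
   comma-separated field so that the next pass sees the next
   alternative's letters.  Standalone illustration, never compiled.  */
#if 0
#include <stdio.h>
#include <string.h>

static const char *
skip_one_alternative (const char *p)
{
  p += strcspn (p, ",");	/* skip to the ',' or the final NUL */
  if (*p == ',')
    p++;			/* and step over the ',' itself */
  return p;
}

int
main (void)
{
  const char *constraint = "=r,m,!rm";
  printf ("%s\n", skip_one_alternative (constraint));	/* prints "m,!rm" */
  return 0;
}
#endif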
3022 
3023       /* If insn is commutative (it's safe to exchange a certain pair
3024 	 of operands) then we need to try each alternative twice, the
3025 	 second time matching those two operands as if we had
3026 	 exchanged them.  To do this, really exchange them in
3027 	 operands.  */
3028       for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3029 	{
3030 	  /* Loop over operands for one constraint alternative.  */
3031 	  /* LOSERS counts those that don't fit this alternative
3032 	     and would require loading.  */
3033 	  int losers = 0;
3034 	  /* BAD is set to 1 if some operand can't fit this alternative
3035 	     even after reloading.  */
3036 	  int bad = 0;
3037 	  /* REJECT is a count of how undesirable this alternative says it is
3038 	     if any reloading is required.  If the alternative matches exactly
3039 	     then REJECT is ignored, but otherwise it gets this much
3040 	     counted against it in addition to the reloading needed.  Each
3041 	     ? counts three times here since we want the disparagement caused
3042 	     by a bad register class to count only 1/3 as much.  */
3043 	  int reject = 0;
3044 
3045 	  if (swapped)
3046 	    {
3047 	      recog_data.operand[commutative] = substed_operand[commutative + 1];
3048 	      recog_data.operand[commutative + 1] = substed_operand[commutative];
3049 	      /* Swap the duplicates too.  */
3050 	      for (i = 0; i < recog_data.n_dups; i++)
3051 		if (recog_data.dup_num[i] == commutative
3052 		    || recog_data.dup_num[i] == commutative + 1)
3053 		  *recog_data.dup_loc[i]
3054 		    = recog_data.operand[(int) recog_data.dup_num[i]];
3055 
3056 	      std::swap (preferred_class[commutative],
3057 			 preferred_class[commutative + 1]);
3058 	      std::swap (pref_or_nothing[commutative],
3059 			 pref_or_nothing[commutative + 1]);
3060 	      std::swap (address_reloaded[commutative],
3061 			 address_reloaded[commutative + 1]);
3062 	    }
3063 
3064 	  this_earlyclobber = 0;
3065 
3066 	  for (i = 0; i < noperands; i++)
3067 	    {
3068 	      const char *p = constraints[i];
3069 	      char *end;
3070 	      int len;
3071 	      int win = 0;
3072 	      int did_match = 0;
3073 	      /* 0 => this operand can be reloaded somehow for this alternative.  */
3074 	      int badop = 1;
3075 	      /* 0 => this operand can be reloaded if the alternative allows regs.  */
3076 	      int winreg = 0;
3077 	      int c;
3078 	      int m;
3079 	      rtx operand = recog_data.operand[i];
3080 	      int offset = 0;
3081 	      /* Nonzero means this is a MEM that must be reloaded into a reg
3082 		 regardless of what the constraint says.  */
3083 	      int force_reload = 0;
3084 	      int offmemok = 0;
3085 	      /* Nonzero if a constant forced into memory would be OK for this
3086 		 operand.  */
3087 	      int constmemok = 0;
3088 	      int earlyclobber = 0;
3089 	      enum constraint_num cn;
3090 	      enum reg_class cl;
3091 
3092 	      /* If the predicate accepts a unary operator, it means that
3093 		 we need to reload the operand, but do not do this for
3094 		 match_operator and friends.  */
3095 	      if (UNARY_P (operand) && *p != 0)
3096 		operand = XEXP (operand, 0);
3097 
3098 	      /* If the operand is a SUBREG, extract
3099 		 the REG or MEM (or maybe even a constant) within.
3100 		 (Constants can occur as a result of reg_equiv_constant.)  */
3101 
3102 	      while (GET_CODE (operand) == SUBREG)
3103 		{
3104 		  /* Offset only matters when operand is a REG and
3105 		     it is a hard reg.  This is because OFFSET is only
3106 		     passed to reg_fits_class_p for a REG, and that
3107 		     function returns 0 for all pseudos.  */
3108 		  if (REG_P (SUBREG_REG (operand))
3109 		      && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3110 		    {
3111 		      if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3112 						 GET_MODE (SUBREG_REG (operand)),
3113 						 SUBREG_BYTE (operand),
3114 						 GET_MODE (operand)) < 0)
3115 			force_reload = 1;
3116 		      offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3117 						     GET_MODE (SUBREG_REG (operand)),
3118 						     SUBREG_BYTE (operand),
3119 						     GET_MODE (operand));
3120 		    }
3121 		  operand = SUBREG_REG (operand);
3122 		  /* Force reload if this is a constant or PLUS or if there may
3123 		     be a problem accessing OPERAND in the outer mode.  */
3124 		  if (CONSTANT_P (operand)
3125 		      || GET_CODE (operand) == PLUS
3126 		      /* We must force a reload of paradoxical SUBREGs
3127 			 of a MEM because the alignment of the inner value
3128 			 may not be enough to do the outer reference.  On
3129 			 big-endian machines, it may also reference outside
3130 			 the object.
3131 
3132 			 On machines that extend byte operations, if we have a
3133 			 SUBREG where both the inner and outer modes are no wider
3134 			 than a word and the inner mode is narrower, integral,
3135 			 and gets extended when loaded from memory, combine.c has
3136 			 made assumptions about the behavior of the machine in
3137 			 such a register access.  If the data is, in fact, in
3138 			 memory, we must always load using the size assumed to be
3139 			 in the register and let the insn do the different-sized
3140 			 accesses.
3141 
3142 			 This is doubly true if WORD_REGISTER_OPERATIONS.  In
3143 			 this case eliminate_regs has left non-paradoxical
3144 			 subregs for push_reload to see.  Make sure it does
3145 			 by forcing the reload.
3146 
3147 			 ??? When is it right at this stage to have a subreg
3148 			 of a mem that is _not_ to be handled specially?  IMO
3149 			 those should have been reduced to just a mem.  */
3150 		      || ((MEM_P (operand)
3151 			   || (REG_P (operand)
3152 			       && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3153 			  && (WORD_REGISTER_OPERATIONS
3154 			      || ((GET_MODE_BITSIZE (GET_MODE (operand))
3155 				   < BIGGEST_ALIGNMENT)
3156 				 && (GET_MODE_SIZE (operand_mode[i])
3157 				     > GET_MODE_SIZE (GET_MODE (operand))))
3158 			      || BYTES_BIG_ENDIAN
3159 			      || ((GET_MODE_SIZE (operand_mode[i])
3160 				   <= UNITS_PER_WORD)
3161 				  && (GET_MODE_SIZE (GET_MODE (operand))
3162 				      <= UNITS_PER_WORD)
3163 				  && (GET_MODE_SIZE (operand_mode[i])
3164 				      > GET_MODE_SIZE (GET_MODE (operand)))
3165 				  && INTEGRAL_MODE_P (GET_MODE (operand))
3166 				  && LOAD_EXTEND_OP (GET_MODE (operand))
3167 				     != UNKNOWN)))
3168 		      )
3169 		    force_reload = 1;
3170 		}
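/* Editor's sketch (deliberately simplified; the real test above also
   covers unsimplifiable hard-reg subregs, WORD_REGISTER_OPERATIONS,
   BYTES_BIG_ENDIAN, alignment and LOAD_EXTEND_OP): the core
   "paradoxical SUBREG of a MEM" case is simply an outer mode wider
   than the inner one, e.g. (subreg:SI (mem:QI ...) 0), which must be
   reloaded because the MEM may not be wide or aligned enough for the
   outer access.  Never compiled.  */
#if 0
static int
paradoxical_mem_subreg_p (machine_mode outer_mode, rtx inner)
{
  return MEM_P (inner)
	 && GET_MODE_SIZE (outer_mode) > GET_MODE_SIZE (GET_MODE (inner));
}
#endif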
3171 
3172 	      this_alternative[i] = NO_REGS;
3173 	      this_alternative_win[i] = 0;
3174 	      this_alternative_match_win[i] = 0;
3175 	      this_alternative_offmemok[i] = 0;
3176 	      this_alternative_earlyclobber[i] = 0;
3177 	      this_alternative_matches[i] = -1;
3178 
3179 	      /* An empty constraint or empty alternative
3180 		 allows anything which matched the pattern.  */
3181 	      if (*p == 0 || *p == ',')
3182 		win = 1, badop = 0;
3183 
3184 	      /* Scan this alternative's specs for this operand;
3185 		 set WIN if the operand fits any letter in this alternative.
3186 		 Otherwise, clear BADOP if this operand could
3187 		 fit some letter after reloads,
3188 		 or set WINREG if this operand could fit after reloads
3189 		 provided the constraint allows some registers.  */
3190 
3191 	      do
3192 		switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3193 		  {
3194 		  case '\0':
3195 		    len = 0;
3196 		    break;
3197 		  case ',':
3198 		    c = '\0';
3199 		    break;
3200 
3201 		  case '?':
3202 		    reject += 6;
3203 		    break;
3204 
3205 		  case '!':
3206 		    reject = 600;
3207 		    break;
3208 
3209 		  case '#':
3210 		    /* Ignore rest of this alternative as far as
3211 		       reloading is concerned.  */
3212 		    do
3213 		      p++;
3214 		    while (*p && *p != ',');
3215 		    len = 0;
3216 		    break;
3217 
3218 		  case '0':  case '1':  case '2':  case '3':  case '4':
3219 		  case '5':  case '6':  case '7':  case '8':  case '9':
3220 		    m = strtoul (p, &end, 10);
3221 		    p = end;
3222 		    len = 0;
3223 
3224 		    this_alternative_matches[i] = m;
3225 		    /* We are supposed to match a previous operand.
3226 		       If we do, we win if that one did.
3227 		       If we do not, count both of the operands as losers.
3228 		       (This is too conservative, since most of the time
3229 		       only a single reload insn will be needed to make
3230 		       the two operands win.  As a result, this alternative
3231 		       may be rejected when it is actually desirable.)  */
3232 		    if ((swapped && (m != commutative || i != commutative + 1))
3233 			/* If we are matching as if two operands were swapped,
3234 			   also pretend that operands_match had been computed
3235 			   with swapped.
3236 			   But if I is the second of those and C is the first,
3237 			   don't exchange them, because operands_match is valid
3238 			   only on one side of its diagonal.  */
3239 			? (operands_match
3240 			   [(m == commutative || m == commutative + 1)
3241 			    ? 2 * commutative + 1 - m : m]
3242 			   [(i == commutative || i == commutative + 1)
3243 			    ? 2 * commutative + 1 - i : i])
3244 			: operands_match[m][i])
3245 		      {
3246 			/* If we are matching a non-offsettable address where an
3247 			   offsettable address was expected, then we must reject
3248 			   this combination, because we can't reload it.  */
3249 			if (this_alternative_offmemok[m]
3250 			    && MEM_P (recog_data.operand[m])
3251 			    && this_alternative[m] == NO_REGS
3252 			    && ! this_alternative_win[m])
3253 			  bad = 1;
3254 
3255 			did_match = this_alternative_win[m];
3256 		      }
3257 		    else
3258 		      {
3259 			/* Operands don't match.  */
3260 			rtx value;
3261 			int loc1, loc2;
3262 			/* Retroactively mark the operand we had to match
3263 			   as a loser, if it wasn't already.  */
3264 			if (this_alternative_win[m])
3265 			  losers++;
3266 			this_alternative_win[m] = 0;
3267 			if (this_alternative[m] == NO_REGS)
3268 			  bad = 1;
3269 			/* But count the pair only once in the total badness of
3270 			   this alternative, if the pair can be a dummy reload.
3271 			   The pointers in operand_loc are not swapped; swap
3272 			   them by hand if necessary.  */
3273 			if (swapped && i == commutative)
3274 			  loc1 = commutative + 1;
3275 			else if (swapped && i == commutative + 1)
3276 			  loc1 = commutative;
3277 			else
3278 			  loc1 = i;
3279 			if (swapped && m == commutative)
3280 			  loc2 = commutative + 1;
3281 			else if (swapped && m == commutative + 1)
3282 			  loc2 = commutative;
3283 			else
3284 			  loc2 = m;
3285 			value
3286 			  = find_dummy_reload (recog_data.operand[i],
3287 					       recog_data.operand[m],
3288 					       recog_data.operand_loc[loc1],
3289 					       recog_data.operand_loc[loc2],
3290 					       operand_mode[i], operand_mode[m],
3291 					       this_alternative[m], -1,
3292 					       this_alternative_earlyclobber[m]);
3293 
3294 			if (value != 0)
3295 			  losers--;
3296 		      }
3297 		    /* This can be fixed with reloads if the operand
3298 		       we are supposed to match can be fixed with reloads.  */
3299 		    badop = 0;
3300 		    this_alternative[i] = this_alternative[m];
3301 
3302 		    /* If we have to reload this operand and some previous
3303 		       operand also had to match the same thing as this
3304 		       operand, we don't know how to do that.  So reject this
3305 		       alternative.  */
3306 		    if (! did_match || force_reload)
3307 		      for (j = 0; j < i; j++)
3308 			if (this_alternative_matches[j]
3309 			    == this_alternative_matches[i])
3310 			  {
3311 			    badop = 1;
3312 			    break;
3313 			  }
3314 		    break;
3315 
3316 		  case 'p':
3317 		    /* All necessary reloads for an address_operand
3318 		       were handled in find_reloads_address.  */
3319 		    this_alternative[i]
3320 		      = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3321 					ADDRESS, SCRATCH);
3322 		    win = 1;
3323 		    badop = 0;
3324 		    break;
3325 
3326 		  case TARGET_MEM_CONSTRAINT:
3327 		    if (force_reload)
3328 		      break;
3329 		    if (MEM_P (operand)
3330 			|| (REG_P (operand)
3331 			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3332 			    && reg_renumber[REGNO (operand)] < 0))
3333 		      win = 1;
3334 		    if (CONST_POOL_OK_P (operand_mode[i], operand))
3335 		      badop = 0;
3336 		    constmemok = 1;
3337 		    break;
3338 
3339 		  case '<':
3340 		    if (MEM_P (operand)
3341 			&& ! address_reloaded[i]
3342 			&& (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3343 			    || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3344 		      win = 1;
3345 		    break;
3346 
3347 		  case '>':
3348 		    if (MEM_P (operand)
3349 			&& ! address_reloaded[i]
3350 			&& (GET_CODE (XEXP (operand, 0)) == PRE_INC
3351 			    || GET_CODE (XEXP (operand, 0)) == POST_INC))
3352 		      win = 1;
3353 		    break;
3354 
3355 		    /* Memory operand whose address is not offsettable.  */
3356 		  case 'V':
3357 		    if (force_reload)
3358 		      break;
3359 		    if (MEM_P (operand)
3360 			&& ! (ind_levels ? offsettable_memref_p (operand)
3361 			      : offsettable_nonstrict_memref_p (operand))
3362 			/* Certain mem addresses will become offsettable
3363 			   after they themselves are reloaded.  This is important;
3364 			   we don't want our own handling of unoffsettables
3365 			   to override the handling of reg_equiv_address.  */
3366 			&& !(REG_P (XEXP (operand, 0))
3367 			     && (ind_levels == 0
3368 				 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3369 		      win = 1;
3370 		    break;
3371 
3372 		    /* Memory operand whose address is offsettable.  */
3373 		  case 'o':
3374 		    if (force_reload)
3375 		      break;
3376 		    if ((MEM_P (operand)
3377 			 /* If IND_LEVELS, find_reloads_address won't reload a
3378 			    pseudo that didn't get a hard reg, so we have to
3379 			    reject that case.  */
3380 			 && ((ind_levels ? offsettable_memref_p (operand)
3381 			      : offsettable_nonstrict_memref_p (operand))
3382 			     /* A reloaded address is offsettable because it is now
3383 				just a simple register indirect.  */
3384 			     || address_reloaded[i] == 1))
3385 			|| (REG_P (operand)
3386 			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3387 			    && reg_renumber[REGNO (operand)] < 0
3388 			    /* If reg_equiv_address is nonzero, we will be
3389 			       loading it into a register; hence it will be
3390 			       offsettable, but we cannot say that reg_equiv_mem
3391 			       is offsettable without checking.  */
3392 			    && ((reg_equiv_mem (REGNO (operand)) != 0
3393 				 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3394 				|| (reg_equiv_address (REGNO (operand)) != 0))))
3395 		      win = 1;
3396 		    if (CONST_POOL_OK_P (operand_mode[i], operand)
3397 			|| MEM_P (operand))
3398 		      badop = 0;
3399 		    constmemok = 1;
3400 		    offmemok = 1;
3401 		    break;
3402 
3403 		  case '&':
3404 		    /* Output operand that is stored before the need for the
3405 		       input operands (and their index registers) is over.  */
3406 		    earlyclobber = 1, this_earlyclobber = 1;
3407 		    break;
3408 
3409 		  case 'X':
3410 		    force_reload = 0;
3411 		    win = 1;
3412 		    break;
3413 
3414 		  case 'g':
3415 		    if (! force_reload
3416 			/* A PLUS is never a valid operand, but reload can make
3417 			   it from a register when eliminating registers.  */
3418 			&& GET_CODE (operand) != PLUS
3419 			/* A SCRATCH is not a valid operand.  */
3420 			&& GET_CODE (operand) != SCRATCH
3421 			&& (! CONSTANT_P (operand)
3422 			    || ! flag_pic
3423 			    || LEGITIMATE_PIC_OPERAND_P (operand))
3424 			&& (GENERAL_REGS == ALL_REGS
3425 			    || !REG_P (operand)
3426 			    || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3427 				&& reg_renumber[REGNO (operand)] < 0)))
3428 		      win = 1;
3429 		    cl = GENERAL_REGS;
3430 		    goto reg;
3431 
3432 		  default:
3433 		    cn = lookup_constraint (p);
3434 		    switch (get_constraint_type (cn))
3435 		      {
3436 		      case CT_REGISTER:
3437 			cl = reg_class_for_constraint (cn);
3438 			if (cl != NO_REGS)
3439 			  goto reg;
3440 			break;
3441 
3442 		      case CT_CONST_INT:
3443 			if (CONST_INT_P (operand)
3444 			    && (insn_const_int_ok_for_constraint
3445 				(INTVAL (operand), cn)))
3446 			  win = true;
3447 			break;
3448 
3449 		      case CT_MEMORY:
3450 			if (force_reload)
3451 			  break;
3452 			if (constraint_satisfied_p (operand, cn))
3453 			  win = 1;
3454 			/* If the address was already reloaded,
3455 			   we win as well.  */
3456 			else if (MEM_P (operand) && address_reloaded[i] == 1)
3457 			  win = 1;
3458 			/* Likewise if the address will be reloaded because
3459 			   reg_equiv_address is nonzero.  For reg_equiv_mem
3460 			   we have to check.  */
3461 			else if (REG_P (operand)
3462 				 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3463 				 && reg_renumber[REGNO (operand)] < 0
3464 				 && ((reg_equiv_mem (REGNO (operand)) != 0
3465 				      && (constraint_satisfied_p
3466 					  (reg_equiv_mem (REGNO (operand)),
3467 					   cn)))
3468 				     || (reg_equiv_address (REGNO (operand))
3469 					 != 0)))
3470 			  win = 1;
3471 
3472 			/* If we didn't already win, we can reload
3473 			   constants via force_const_mem, and other
3474 			   MEMs by reloading the address like for 'o'.  */
3475 			if (CONST_POOL_OK_P (operand_mode[i], operand)
3476 			    || MEM_P (operand))
3477 			  badop = 0;
3478 			constmemok = 1;
3479 			offmemok = 1;
3480 			break;
3481 
3482 		      case CT_SPECIAL_MEMORY:
3483 			if (force_reload)
3484 			  break;
3485 			if (constraint_satisfied_p (operand, cn))
3486 			  win = 1;
3487 			/* Likewise if the address will be reloaded because
3488 			   reg_equiv_address is nonzero.  For reg_equiv_mem
3489 			   we have to check.  */
3490 			else if (REG_P (operand)
3491 				 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3492 				 && reg_renumber[REGNO (operand)] < 0
3493 				 && reg_equiv_mem (REGNO (operand)) != 0
3494 				 && (constraint_satisfied_p
3495 				     (reg_equiv_mem (REGNO (operand)), cn)))
3496 			  win = 1;
3497 			break;
3498 
3499 		      case CT_ADDRESS:
3500 			if (constraint_satisfied_p (operand, cn))
3501 			  win = 1;
3502 
3503 			/* If we didn't already win, we can reload
3504 			   the address into a base register.  */
3505 			this_alternative[i]
3506 			  = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3507 					    ADDRESS, SCRATCH);
3508 			badop = 0;
3509 			break;
3510 
3511 		      case CT_FIXED_FORM:
3512 			if (constraint_satisfied_p (operand, cn))
3513 			  win = 1;
3514 			break;
3515 		      }
3516 		    break;
3517 
3518 		  reg:
3519 		    this_alternative[i]
3520 		      = reg_class_subunion[this_alternative[i]][cl];
3521 		    if (GET_MODE (operand) == BLKmode)
3522 		      break;
3523 		    winreg = 1;
3524 		    if (REG_P (operand)
3525 			&& reg_fits_class_p (operand, this_alternative[i],
3526 						 offset, GET_MODE (recog_data.operand[i])))
3527 		      win = 1;
3528 		    break;
3529 		  }
3530 	      while ((p += len), c);
3531 
3532 	      if (swapped == (commutative >= 0 ? 1 : 0))
3533 		constraints[i] = p;
3534 
3535 	      /* If this operand could be handled with a reg,
3536 		 and some reg is allowed, then this operand can be handled.  */
3537 	      if (winreg && this_alternative[i] != NO_REGS
3538 		  && (win || !class_only_fixed_regs[this_alternative[i]]))
3539 		badop = 0;
3540 
3541 	      /* Record which operands fit this alternative.  */
3542 	      this_alternative_earlyclobber[i] = earlyclobber;
3543 	      if (win && ! force_reload)
3544 		this_alternative_win[i] = 1;
3545 	      else if (did_match && ! force_reload)
3546 		this_alternative_match_win[i] = 1;
3547 	      else
3548 		{
3549 		  int const_to_mem = 0;
3550 
3551 		  this_alternative_offmemok[i] = offmemok;
3552 		  losers++;
3553 		  if (badop)
3554 		    bad = 1;
3555 		  /* Alternative loses if it has no regs for a reg operand.  */
3556 		  if (REG_P (operand)
3557 		      && this_alternative[i] == NO_REGS
3558 		      && this_alternative_matches[i] < 0)
3559 		    bad = 1;
3560 
3561 		  /* If this is a constant that is reloaded into the desired
3562 		     class by copying it to memory first, count that as another
3563 		     reload.  This is consistent with other code and is
3564 		     required to avoid choosing another alternative when
3565 		     the constant is moved into memory by this function on
3566 		     an early reload pass.  Note that the test here is
3567 		     precisely the same as in the code below that calls
3568 		     force_const_mem.  */
3569 		  if (CONST_POOL_OK_P (operand_mode[i], operand)
3570 		      && ((targetm.preferred_reload_class (operand,
3571 							   this_alternative[i])
3572 			   == NO_REGS)
3573 			  || no_input_reloads))
3574 		    {
3575 		      const_to_mem = 1;
3576 		      if (this_alternative[i] != NO_REGS)
3577 			losers++;
3578 		    }
3579 
3580 		  /* Alternative loses if it requires a type of reload not
3581 		     permitted for this insn.  We can always reload SCRATCH
3582 		     and objects with a REG_UNUSED note.  */
3583 		  if (GET_CODE (operand) != SCRATCH
3584 		      && modified[i] != RELOAD_READ && no_output_reloads
3585 		      && ! find_reg_note (insn, REG_UNUSED, operand))
3586 		    bad = 1;
3587 		  else if (modified[i] != RELOAD_WRITE && no_input_reloads
3588 			   && ! const_to_mem)
3589 		    bad = 1;
3590 
3591 		  /* If we can't reload this value at all, reject this
3592 		     alternative.  Note that we could also lose due to
3593 		     LIMIT_RELOAD_CLASS, but we don't check that
3594 		     here.  */
3595 
3596 		  if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3597 		    {
3598 		      if (targetm.preferred_reload_class (operand,
3599 							  this_alternative[i])
3600 			  == NO_REGS)
3601 			reject = 600;
3602 
3603 		      if (operand_type[i] == RELOAD_FOR_OUTPUT
3604 			  && (targetm.preferred_output_reload_class (operand,
3605 							    this_alternative[i])
3606 			      == NO_REGS))
3607 			reject = 600;
3608 		    }
3609 
3610 		  /* We prefer to reload pseudos over reloading other things,
3611 		     since such reloads may be able to be eliminated later.
3612 		     If we are reloading a SCRATCH, we won't be generating any
3613 		     insns, just using a register, so it is also preferred.
3614 		     So bump REJECT in other cases.  Don't do this in the
3615 		     case where we are forcing a constant into memory and
3616 		     it will then win, since we don't want a different
3617 		     alternative to match in that case.  */
3618 		  if (! (REG_P (operand)
3619 			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3620 		      && GET_CODE (operand) != SCRATCH
3621 		      && ! (const_to_mem && constmemok))
3622 		    reject += 2;
3623 
3624 		  /* Input reloads can be inherited more often than output
3625 		     reloads can be removed, so penalize output reloads.  */
3626 		  if (operand_type[i] != RELOAD_FOR_INPUT
3627 		      && GET_CODE (operand) != SCRATCH)
3628 		    reject++;
3629 		}
3630 
3631 	      /* If this operand is a pseudo register that didn't get
3632 		 a hard reg and this alternative accepts some
3633 		 register, see if the class that we want is a subset
3634 		 of the preferred class for this register.  If not,
3635 		 but it intersects that class, use the preferred class
3636 		 instead.  If it does not intersect the preferred
3637 		 class, show that usage of this alternative should be
3638 		 discouraged; it will be discouraged more still if the
3639 		 register is `preferred or nothing'.  We do this
3640 		 because it increases the chance of reusing our spill
3641 		 register in a later insn and avoiding a pair of
3642 		 memory stores and loads.
3643 
3644 		 Don't bother with this if this alternative will
3645 		 accept this operand.
3646 
3647 		 Don't do this for a multiword operand, since it is
3648 		 only a small win and has the risk of requiring more
3649 		 spill registers, which could cause a large loss.
3650 
3651 		 Don't do this if the preferred class has only one
3652 		 register because we might otherwise exhaust the
3653 		 class.  */
3654 
3655 	      if (! win && ! did_match
3656 		  && this_alternative[i] != NO_REGS
3657 		  && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3658 		  && reg_class_size [(int) preferred_class[i]] > 0
3659 		  && ! small_register_class_p (preferred_class[i]))
3660 		{
3661 		  if (! reg_class_subset_p (this_alternative[i],
3662 					    preferred_class[i]))
3663 		    {
3664 		      /* Since we don't have a way of forming the intersection,
3665 			 we just do something special if the preferred class
3666 			 is a subset of the class we have; that's the most
3667 			 common case anyway.  */
3668 		      if (reg_class_subset_p (preferred_class[i],
3669 					      this_alternative[i]))
3670 			this_alternative[i] = preferred_class[i];
3671 		      else
3672 			reject += (2 + 2 * pref_or_nothing[i]);
3673 		    }
3674 		}
3675 	    }
3676 
3677 	  /* Now see if any output operands that are marked "earlyclobber"
3678 	     in this alternative conflict with any input operands
3679 	     or any memory addresses.  */
3680 
3681 	  for (i = 0; i < noperands; i++)
3682 	    if (this_alternative_earlyclobber[i]
3683 		&& (this_alternative_win[i] || this_alternative_match_win[i]))
3684 	      {
3685 		struct decomposition early_data;
3686 
3687 		early_data = decompose (recog_data.operand[i]);
3688 
3689 		gcc_assert (modified[i] != RELOAD_READ);
3690 
3691 		if (this_alternative[i] == NO_REGS)
3692 		  {
3693 		    this_alternative_earlyclobber[i] = 0;
3694 		    gcc_assert (this_insn_is_asm);
3695 		    error_for_asm (this_insn,
3696 			      "%<&%> constraint used with no register class");
3697 		  }
3698 
3699 		for (j = 0; j < noperands; j++)
3700 		  /* Is this an input operand or a memory ref?  */
3701 		  if ((MEM_P (recog_data.operand[j])
3702 		       || modified[j] != RELOAD_WRITE)
3703 		      && j != i
3704 		      /* Ignore things like match_operator operands.  */
3705 		      && !recog_data.is_operator[j]
3706 		      /* Don't count an input operand that is constrained to match
3707 			 the early clobber operand.  */
3708 		      && ! (this_alternative_matches[j] == i
3709 			    && rtx_equal_p (recog_data.operand[i],
3710 					    recog_data.operand[j]))
3711 		      /* Is it altered by storing the earlyclobber operand?  */
3712 		      && !immune_p (recog_data.operand[j], recog_data.operand[i],
3713 				    early_data))
3714 		    {
3715 		      /* If the output is in a non-empty few-regs class,
3716 			 it's costly to reload it, so reload the input instead.  */
3717 		      if (small_register_class_p (this_alternative[i])
3718 			  && (REG_P (recog_data.operand[j])
3719 			      || GET_CODE (recog_data.operand[j]) == SUBREG))
3720 			{
3721 			  losers++;
3722 			  this_alternative_win[j] = 0;
3723 			  this_alternative_match_win[j] = 0;
3724 			}
3725 		      else
3726 			break;
3727 		    }
3728 		/* If an earlyclobber operand conflicts with something,
3729 		   it must be reloaded, so request this and count the cost.  */
3730 		if (j != noperands)
3731 		  {
3732 		    losers++;
3733 		    this_alternative_win[i] = 0;
3734 		    this_alternative_match_win[j] = 0;
3735 		    for (j = 0; j < noperands; j++)
3736 		      if (this_alternative_matches[j] == i
3737 			  && this_alternative_match_win[j])
3738 			{
3739 			  this_alternative_win[j] = 0;
3740 			  this_alternative_match_win[j] = 0;
3741 			  losers++;
3742 			}
3743 		  }
3744 	      }
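/* Editor's sketch (hypothetical asm with an empty template, not from
   this file): the "&" marks OUT as earlyclobber, i.e. written before
   IN is finished being read, so OUT and IN must not share a register.
   The loop above detects the analogous conflict for ordinary insn
   patterns and reloads one of the operands when it occurs.  */
#if 0
static long
earlyclobber_example (long in)
{
  long out;
  asm ("" : "=&r" (out) : "r" (in));
  return out;
}
#endif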
3745 
3746 	  /* If one alternative accepts all the operands, no reload required,
3747 	     choose that alternative; don't consider the remaining ones.  */
3748 	  if (losers == 0)
3749 	    {
3750 	      /* Unswap these so that they are never swapped at `finish'.  */
3751 	      if (swapped)
3752 		{
3753 		  recog_data.operand[commutative] = substed_operand[commutative];
3754 		  recog_data.operand[commutative + 1]
3755 		    = substed_operand[commutative + 1];
3756 		}
3757 	      for (i = 0; i < noperands; i++)
3758 		{
3759 		  goal_alternative_win[i] = this_alternative_win[i];
3760 		  goal_alternative_match_win[i] = this_alternative_match_win[i];
3761 		  goal_alternative[i] = this_alternative[i];
3762 		  goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3763 		  goal_alternative_matches[i] = this_alternative_matches[i];
3764 		  goal_alternative_earlyclobber[i]
3765 		    = this_alternative_earlyclobber[i];
3766 		}
3767 	      goal_alternative_number = this_alternative_number;
3768 	      goal_alternative_swapped = swapped;
3769 	      goal_earlyclobber = this_earlyclobber;
3770 	      goto finish;
3771 	    }
3772 
3773 	  /* REJECT, set by the ! and ? constraint characters and when a register
3774 	     would be reloaded into a non-preferred class, discourages the use of
3775 	     this alternative for a reload goal.  REJECT is incremented by six
3776 	     for each ? and two for each non-preferred class.  */
3777 	  losers = losers * 6 + reject;
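	  /* Editor's note (worked example of the scoring): a '?' adds 6 to
	     REJECT, so one '?' costs exactly as much as one additional
	     reloaded operand: LOSERS = 1 with one '?' scores 1 * 6 + 6 = 12,
	     the same as LOSERS = 2 with no '?' (2 * 6 + 0 = 12).  A
	     non-preferred register class adds only 2 (4 for a "preferred
	     or nothing" pseudo), a third of a '?'.  */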
3778 
3779 	  /* If this alternative can be made to work by reloading,
3780 	     and it needs less reloading than the others checked so far,
3781 	     record it as the chosen goal for reloading.  */
3782 	  if (! bad)
3783 	    {
3784 	      if (best > losers)
3785 		{
3786 		  for (i = 0; i < noperands; i++)
3787 		    {
3788 		      goal_alternative[i] = this_alternative[i];
3789 		      goal_alternative_win[i] = this_alternative_win[i];
3790 		      goal_alternative_match_win[i]
3791 			= this_alternative_match_win[i];
3792 		      goal_alternative_offmemok[i]
3793 			= this_alternative_offmemok[i];
3794 		      goal_alternative_matches[i] = this_alternative_matches[i];
3795 		      goal_alternative_earlyclobber[i]
3796 			= this_alternative_earlyclobber[i];
3797 		    }
3798 		  goal_alternative_swapped = swapped;
3799 		  best = losers;
3800 		  goal_alternative_number = this_alternative_number;
3801 		  goal_earlyclobber = this_earlyclobber;
3802 		}
3803 	    }
3804 
3805 	  if (swapped)
3806 	    {
3807 	      /* If the commutative operands have been swapped, swap
3808 		 them back in order to check the next alternative.  */
3809 	      recog_data.operand[commutative] = substed_operand[commutative];
3810 	      recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3811 	      /* Unswap the duplicates too.  */
3812 	      for (i = 0; i < recog_data.n_dups; i++)
3813 		if (recog_data.dup_num[i] == commutative
3814 		    || recog_data.dup_num[i] == commutative + 1)
3815 		  *recog_data.dup_loc[i]
3816 		    = recog_data.operand[(int) recog_data.dup_num[i]];
3817 
3818 	      /* Unswap the operand related information as well.  */
3819 	      std::swap (preferred_class[commutative],
3820 			 preferred_class[commutative + 1]);
3821 	      std::swap (pref_or_nothing[commutative],
3822 			 pref_or_nothing[commutative + 1]);
3823 	      std::swap (address_reloaded[commutative],
3824 			 address_reloaded[commutative + 1]);
3825 	    }
3826 	}
3827     }
3828 
3829   /* The operands don't meet the constraints.
3830      goal_alternative describes the alternative
3831      that we could reach by reloading the fewest operands.
3832      Reload so as to fit it.  */
3833 
3834   if (best == MAX_RECOG_OPERANDS * 2 + 600)
3835     {
3836       /* No alternative works with reloads??  */
3837       if (insn_code_number >= 0)
3838 	fatal_insn ("unable to generate reloads for:", insn);
3839       error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3840       /* Avoid further trouble with this insn.  */
3841       PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3842       n_reloads = 0;
3843       return 0;
3844     }
3845 
3846   /* Jump to `finish' from above if all operands are valid already.
3847      In that case, goal_alternative_win is all 1.  */
3848  finish:
3849 
3850   /* Right now, for any pair of operands I and J that are required to match,
3851      with I < J,
3852      goal_alternative_matches[J] is I.
3853      Set up goal_alternative_matched as the inverse function:
3854      goal_alternative_matched[I] = J.  */
3855 
3856   for (i = 0; i < noperands; i++)
3857     goal_alternative_matched[i] = -1;
3858 
3859   for (i = 0; i < noperands; i++)
3860     if (! goal_alternative_win[i]
3861 	&& goal_alternative_matches[i] >= 0)
3862       goal_alternative_matched[goal_alternative_matches[i]] = i;
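/* Editor's sketch (standalone illustration of the inversion above,
   ignoring the goal_alternative_win filter): if matches[] is
   { -1, -1, -1, 1 } for a four-operand insn, i.e. operand 3 must match
   operand 1, then afterwards matched[] is { -1, 3, -1, -1 }.  Never
   compiled.  */
#if 0
static void
invert_matches (const int *matches, int *matched, int n)
{
  int k;

  for (k = 0; k < n; k++)
    matched[k] = -1;
  for (k = 0; k < n; k++)
    if (matches[k] >= 0)
      matched[matches[k]] = k;
}
#endif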
3863 
3864   for (i = 0; i < noperands; i++)
3865     goal_alternative_win[i] |= goal_alternative_match_win[i];
3866 
3867   /* If the best alternative is with operands 1 and 2 swapped,
3868      consider them swapped before reporting the reloads.  Update the
3869      operand numbers of any reloads already pushed.  */
3870 
3871   if (goal_alternative_swapped)
3872     {
3873       std::swap (substed_operand[commutative],
3874 		 substed_operand[commutative + 1]);
3875       std::swap (recog_data.operand[commutative],
3876 		 recog_data.operand[commutative + 1]);
3877       std::swap (*recog_data.operand_loc[commutative],
3878 		 *recog_data.operand_loc[commutative + 1]);
3879 
3880       for (i = 0; i < recog_data.n_dups; i++)
3881 	if (recog_data.dup_num[i] == commutative
3882 	    || recog_data.dup_num[i] == commutative + 1)
3883 	  *recog_data.dup_loc[i]
3884 	    = recog_data.operand[(int) recog_data.dup_num[i]];
3885 
3886       for (i = 0; i < n_reloads; i++)
3887 	{
3888 	  if (rld[i].opnum == commutative)
3889 	    rld[i].opnum = commutative + 1;
3890 	  else if (rld[i].opnum == commutative + 1)
3891 	    rld[i].opnum = commutative;
3892 	}
3893     }
3894 
3895   for (i = 0; i < noperands; i++)
3896     {
3897       operand_reloadnum[i] = -1;
3898 
3899       /* If this is an earlyclobber operand, we need to widen the scope.
3900 	 The reload must remain valid from the start of the insn being
3901 	 reloaded until after the operand is stored into its destination.
3902 	 We approximate this with RELOAD_OTHER even though we know that we
3903 	 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3904 
3905 	 One special case that is worth checking is when we have an
3906 	 output that is earlyclobber but isn't used past the insn (typically
3907 	 a SCRATCH).  In this case, we need only have the reload live
3908 	 through the insn itself, but not for any of our input or output
3909 	 reloads.
3910 	 But we must not accidentally narrow the scope of an existing
3911 	 RELOAD_OTHER reload - leave these alone.
3912 
3913 	 In any case, anything needed to address this operand can remain
3914 	 however it was previously categorized.  */
3915 
3916       if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3917 	operand_type[i]
3918 	  = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3919 	     ? RELOAD_FOR_INSN : RELOAD_OTHER);
3920     }
3921 
3922   /* Any constants that aren't allowed and can't be reloaded
3923      into registers are here changed into memory references.  */
3924   for (i = 0; i < noperands; i++)
3925     if (! goal_alternative_win[i])
3926       {
3927 	rtx op = recog_data.operand[i];
3928 	rtx subreg = NULL_RTX;
3929 	rtx plus = NULL_RTX;
3930 	machine_mode mode = operand_mode[i];
3931 
3932 	/* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3933 	   push_reload so we have to let them pass here.  */
3934 	if (GET_CODE (op) == SUBREG)
3935 	  {
3936 	    subreg = op;
3937 	    op = SUBREG_REG (op);
3938 	    mode = GET_MODE (op);
3939 	  }
3940 
3941 	if (GET_CODE (op) == PLUS)
3942 	  {
3943 	    plus = op;
3944 	    op = XEXP (op, 1);
3945 	  }
3946 
3947 	if (CONST_POOL_OK_P (mode, op)
3948 	    && ((targetm.preferred_reload_class (op, goal_alternative[i])
3949 		 == NO_REGS)
3950 		|| no_input_reloads))
3951 	  {
3952 	    int this_address_reloaded;
3953 	    rtx tem = force_const_mem (mode, op);
3954 
3955 	    /* If we stripped a SUBREG or a PLUS above, add it back.  */
3956 	    if (plus != NULL_RTX)
3957 	      tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3958 
3959 	    if (subreg != NULL_RTX)
3960 	      tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3961 
3962 	    this_address_reloaded = 0;
3963 	    substed_operand[i] = recog_data.operand[i]
3964 	      = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3965 				     0, insn, &this_address_reloaded);
3966 
3967 	    /* If the alternative accepts constant pool refs directly
3968 	       there will be no reload needed at all.  */
3969 	    if (plus == NULL_RTX
3970 		&& subreg == NULL_RTX
3971 		&& alternative_allows_const_pool_ref (this_address_reloaded != 1
3972 						      ? substed_operand[i]
3973 						      : NULL,
3974 						      recog_data.constraints[i],
3975 						      goal_alternative_number))
3976 	      goal_alternative_win[i] = 1;
3977 	  }
3978       }
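  /* Editor's note (hypothetical, target-dependent example): a constant
     that no register class of the chosen alternative will accept, such
     as a floating-point literal on a target without FP immediates, is
     sent through force_const_mem above; the operand then becomes a
     reference to a constant-pool MEM, which an alternative that accepts
     memory may match with no further reload.  */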
3979 
3980   /* Record the values of the earlyclobber operands for the caller.  */
3981   if (goal_earlyclobber)
3982     for (i = 0; i < noperands; i++)
3983       if (goal_alternative_earlyclobber[i])
3984 	reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3985 
3986   /* Now record reloads for all the operands that need them.  */
3987   for (i = 0; i < noperands; i++)
3988     if (! goal_alternative_win[i])
3989       {
3990 	/* Operands that match previous ones have already been handled.  */
3991 	if (goal_alternative_matches[i] >= 0)
3992 	  ;
3993 	/* Handle an operand with a nonoffsettable address
3994 	   appearing where an offsettable address will do
3995 	   by reloading the address into a base register.
3996 
3997 	   ??? We can also do this when the operand is a register and
3998 	   reg_equiv_mem is not offsettable, but this is a bit tricky,
3999 	   so we don't bother with it.  It may not be worth doing.  */
4000 	else if (goal_alternative_matched[i] == -1
4001 		 && goal_alternative_offmemok[i]
4002 		 && MEM_P (recog_data.operand[i]))
4003 	  {
4004 	    /* If the address to be reloaded is a VOIDmode constant,
4005 	       use the default address mode as mode of the reload register,
4006 	       as would have been done by find_reloads_address.  */
4007 	    addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4008 	    machine_mode address_mode;
4009 
4010 	    address_mode = get_address_mode (recog_data.operand[i]);
4011 	    operand_reloadnum[i]
4012 	      = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4013 			     &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4014 			     base_reg_class (VOIDmode, as, MEM, SCRATCH),
4015 			     address_mode,
4016 			     VOIDmode, 0, 0, i, RELOAD_OTHER);
4017 	    rld[operand_reloadnum[i]].inc
4018 	      = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4019 
4020 	    /* If this operand is an output, we will have made any
4021 	       reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4022 	       now we are treating part of the operand as an input, so
4023 	       we must change these to RELOAD_FOR_OTHER_ADDRESS.  */
4024 
4025 	    if (modified[i] == RELOAD_WRITE)
4026 	      {
4027 		for (j = 0; j < n_reloads; j++)
4028 		  {
4029 		    if (rld[j].opnum == i)
4030 		      {
4031 			if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4032 			  rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4033 			else if (rld[j].when_needed
4034 				 == RELOAD_FOR_OUTADDR_ADDRESS)
4035 			  rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4036 		      }
4037 		  }
4038 	      }
4039 	  }
4040 	else if (goal_alternative_matched[i] == -1)
4041 	  {
4042 	    operand_reloadnum[i]
4043 	      = push_reload ((modified[i] != RELOAD_WRITE
4044 			      ? recog_data.operand[i] : 0),
4045 			     (modified[i] != RELOAD_READ
4046 			      ? recog_data.operand[i] : 0),
4047 			     (modified[i] != RELOAD_WRITE
4048 			      ? recog_data.operand_loc[i] : 0),
4049 			     (modified[i] != RELOAD_READ
4050 			      ? recog_data.operand_loc[i] : 0),
4051 			     (enum reg_class) goal_alternative[i],
4052 			     (modified[i] == RELOAD_WRITE
4053 			      ? VOIDmode : operand_mode[i]),
4054 			     (modified[i] == RELOAD_READ
4055 			      ? VOIDmode : operand_mode[i]),
4056 			     (insn_code_number < 0 ? 0
4057 			      : insn_data[insn_code_number].operand[i].strict_low),
4058 			     0, i, operand_type[i]);
4059 	  }
4060 	/* In a matching pair of operands, one must be input only
4061 	   and the other must be output only.
4062 	   Pass the input operand as IN and the other as OUT.  */
4063 	else if (modified[i] == RELOAD_READ
4064 		 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4065 	  {
4066 	    operand_reloadnum[i]
4067 	      = push_reload (recog_data.operand[i],
4068 			     recog_data.operand[goal_alternative_matched[i]],
4069 			     recog_data.operand_loc[i],
4070 			     recog_data.operand_loc[goal_alternative_matched[i]],
4071 			     (enum reg_class) goal_alternative[i],
4072 			     operand_mode[i],
4073 			     operand_mode[goal_alternative_matched[i]],
4074 			     0, 0, i, RELOAD_OTHER);
4075 	    operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4076 	  }
4077 	else if (modified[i] == RELOAD_WRITE
4078 		 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4079 	  {
4080 	    operand_reloadnum[goal_alternative_matched[i]]
4081 	      = push_reload (recog_data.operand[goal_alternative_matched[i]],
4082 			     recog_data.operand[i],
4083 			     recog_data.operand_loc[goal_alternative_matched[i]],
4084 			     recog_data.operand_loc[i],
4085 			     (enum reg_class) goal_alternative[i],
4086 			     operand_mode[goal_alternative_matched[i]],
4087 			     operand_mode[i],
4088 			     0, 0, i, RELOAD_OTHER);
4089 	    operand_reloadnum[i] = output_reloadnum;
4090 	  }
4091 	else
4092 	  {
4093 	    gcc_assert (insn_code_number < 0);
4094 	    error_for_asm (insn, "inconsistent operand constraints "
4095 			   "in an %<asm%>");
4096 	    /* Avoid further trouble with this insn.  */
4097 	    PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4098 	    n_reloads = 0;
4099 	    return 0;
4100 	  }
4101       }
4102     else if (goal_alternative_matched[i] < 0
4103 	     && goal_alternative_matches[i] < 0
4104 	     && address_operand_reloaded[i] != 1
4105 	     && optimize)
4106       {
4107 	/* For each non-matching operand that's a MEM or a pseudo-register
4108 	   that didn't get a hard register, make an optional reload.
4109 	   This may get done even if the insn needs no reloads otherwise.  */
4110 
4111 	rtx operand = recog_data.operand[i];
4112 
4113 	while (GET_CODE (operand) == SUBREG)
4114 	  operand = SUBREG_REG (operand);
4115 	if ((MEM_P (operand)
4116 	     || (REG_P (operand)
4117 		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4118 	    /* If this is only for an output, the optional reload would not
4119 	       actually cause us to use a register now, just note that
4120 	       something is stored here.  */
4121 	    && (goal_alternative[i] != NO_REGS
4122 		|| modified[i] == RELOAD_WRITE)
4123 	    && ! no_input_reloads
4124 	    /* An optional output reload might allow us to delete INSN later.
4125 	       We mustn't make in-out reloads for insns in which output
4126 	       reloads are not permitted.
4127 	       If this is an asm, we can't delete it; we must not even call
4128 	       push_reload for an optional output reload in this case,
4129 	       because we can't be sure that the constraint allows a register,
4130 	       and push_reload verifies the constraints for asms.  */
4131 	    && (modified[i] == RELOAD_READ
4132 		|| (! no_output_reloads && ! this_insn_is_asm)))
4133 	  operand_reloadnum[i]
4134 	    = push_reload ((modified[i] != RELOAD_WRITE
4135 			    ? recog_data.operand[i] : 0),
4136 			   (modified[i] != RELOAD_READ
4137 			    ? recog_data.operand[i] : 0),
4138 			   (modified[i] != RELOAD_WRITE
4139 			    ? recog_data.operand_loc[i] : 0),
4140 			   (modified[i] != RELOAD_READ
4141 			    ? recog_data.operand_loc[i] : 0),
4142 			   (enum reg_class) goal_alternative[i],
4143 			   (modified[i] == RELOAD_WRITE
4144 			    ? VOIDmode : operand_mode[i]),
4145 			   (modified[i] == RELOAD_READ
4146 			    ? VOIDmode : operand_mode[i]),
4147 			   (insn_code_number < 0 ? 0
4148 			    : insn_data[insn_code_number].operand[i].strict_low),
4149 			   1, i, operand_type[i]);
4150 	/* If a memory reference remains (either as a MEM or a pseudo that
4151 	   did not get a hard register), yet we can't make an optional
4152 	   reload, check if this is actually a pseudo register reference;
4153 	   we then need to emit a USE and/or a CLOBBER so that reload
4154 	   inheritance will do the right thing.  */
4155 	else if (replace
4156 		 && (MEM_P (operand)
4157 		     || (REG_P (operand)
4158 			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4159 			 && reg_renumber [REGNO (operand)] < 0)))
4160 	  {
4161 	    operand = *recog_data.operand_loc[i];
4162 
4163 	    while (GET_CODE (operand) == SUBREG)
4164 	      operand = SUBREG_REG (operand);
4165 	    if (REG_P (operand))
4166 	      {
4167 		if (modified[i] != RELOAD_WRITE)
4168 		  /* We mark the USE with QImode so that we recognize
4169 		     it as one that can be safely deleted at the end
4170 		     of reload.  */
4171 		  PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4172 					      insn), QImode);
4173 		if (modified[i] != RELOAD_READ)
4174 		  emit_insn_after (gen_clobber (operand), insn);
4175 	      }
4176 	  }
4177       }
4178     else if (goal_alternative_matches[i] >= 0
4179 	     && goal_alternative_win[goal_alternative_matches[i]]
4180 	     && modified[i] == RELOAD_READ
4181 	     && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4182 	     && ! no_input_reloads && ! no_output_reloads
4183 	     && optimize)
4184       {
4185 	/* Similarly, make an optional reload for a pair of matching
4186 	   objects that are in MEM or a pseudo that didn't get a hard reg.  */
4187 
4188 	rtx operand = recog_data.operand[i];
4189 
4190 	while (GET_CODE (operand) == SUBREG)
4191 	  operand = SUBREG_REG (operand);
4192 	if ((MEM_P (operand)
4193 	     || (REG_P (operand)
4194 		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4195 	    && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4196 	  operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4197 	    = push_reload (recog_data.operand[goal_alternative_matches[i]],
4198 			   recog_data.operand[i],
4199 			   recog_data.operand_loc[goal_alternative_matches[i]],
4200 			   recog_data.operand_loc[i],
4201 			   (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4202 			   operand_mode[goal_alternative_matches[i]],
4203 			   operand_mode[i],
4204 			   0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4205       }
4206 
4207   /* Perform whatever substitutions on the operands we are supposed
4208      to make due to commutativity or replacement of registers
4209      with equivalent constants or memory slots.  */
4210 
4211   for (i = 0; i < noperands; i++)
4212     {
4213       /* We only do this on the last pass through reload, because it is
4214 	 possible for some data (like reg_equiv_address) to be changed during
4215 	 later passes.  Moreover, we lose the opportunity to get a useful
4216 	 reload_{in,out}_reg when we do these replacements.  */
4217 
4218       if (replace)
4219 	{
4220 	  rtx substitution = substed_operand[i];
4221 
4222 	  *recog_data.operand_loc[i] = substitution;
4223 
4224 	  /* If we're replacing an operand with a LABEL_REF, we need to
4225 	     make sure that there's a REG_LABEL_OPERAND note attached to
4226 	     this instruction.  */
4227 	  if (GET_CODE (substitution) == LABEL_REF
4228 	      && !find_reg_note (insn, REG_LABEL_OPERAND,
4229 				 label_ref_label (substitution))
4230 	      /* For a JUMP_P, if it was a branch target it must have
4231 		 already been recorded as such.  */
4232 	      && (!JUMP_P (insn)
4233 		  || !label_is_jump_target_p (label_ref_label (substitution),
4234 					      insn)))
4235 	    {
4236 	      add_reg_note (insn, REG_LABEL_OPERAND,
4237 			    label_ref_label (substitution));
4238 	      if (LABEL_P (label_ref_label (substitution)))
4239 		++LABEL_NUSES (label_ref_label (substitution));
4240 	    }
4241 
4242 	}
4243       else
4244 	retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4245     }
4246 
4247   /* If this insn pattern contains any MATCH_DUP's, make sure that
4248      they will be substituted if the operands they match are substituted.
4249      Also do now any substitutions we already did on the operands.
4250 
4251      Don't do this if we aren't making replacements because we might be
4252      propagating things allocated by frame pointer elimination into places
4253      it doesn't expect.  */
4254 
4255   if (insn_code_number >= 0 && replace)
4256     for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4257       {
4258 	int opno = recog_data.dup_num[i];
4259 	*recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4260 	dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4261       }
4262 
4263 #if 0
4264   /* This loses because reloading of prior insns can invalidate the equivalence
4265      (or at least find_equiv_reg isn't smart enough to find it any more),
4266      causing this insn to need more reload regs than it needed before.
4267      It may be too late to make the reload regs available.
4268      Now this optimization is done safely in choose_reload_regs.  */
4269 
4270   /* For each reload of a reg into some other class of reg,
4271      search for an existing equivalent reg (same value now) in the right class.
4272      We can use it as long as we don't need to change its contents.  */
4273   for (i = 0; i < n_reloads; i++)
4274     if (rld[i].reg_rtx == 0
4275 	&& rld[i].in != 0
4276 	&& REG_P (rld[i].in)
4277 	&& rld[i].out == 0)
4278       {
4279 	rld[i].reg_rtx
4280 	  = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4281 			    static_reload_reg_p, 0, rld[i].inmode);
4282 	/* Prevent generation of insn to load the value
4283 	   because the one we found already has the value.  */
4284 	if (rld[i].reg_rtx)
4285 	  rld[i].in = rld[i].reg_rtx;
4286       }
4287 #endif
4288 
4289   /* If we detected an error and replaced the asm instruction with a USE,
4290      forget about the reloads.  */
4291   if (GET_CODE (PATTERN (insn)) == USE
4292       && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4293     n_reloads = 0;
4294 
4295   /* Perhaps an output reload can be combined with another
4296      to reduce needs by one.  */
4297   if (!goal_earlyclobber)
4298     combine_reloads ();
4299 
4300   /* If we have a pair of reloads for parts of an address, they are reloading
4301      the same object, the operands themselves were not reloaded, and they
4302      are for two operands that are supposed to match, merge the reloads and
4303      change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS.  */
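  /* Illustrative example (an assumption, not taken from the original
     comment): when operand 0 and operand 1 must match and both refer to the
     same stack slot, the slot's address may have been reloaded once as
     RELOAD_FOR_INPUT_ADDRESS and once as RELOAD_FOR_OUTPUT_ADDRESS.  The
     loop below redirects the replacements of the discarded reload J to the
     surviving reload I and reclassifies I as RELOAD_FOR_OPERAND_ADDRESS
     (or RELOAD_FOR_OPADDR_ADDR for the inner address-of-address kinds).  */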
4304 
4305   for (i = 0; i < n_reloads; i++)
4306     {
4307       int k;
4308 
4309       for (j = i + 1; j < n_reloads; j++)
4310 	if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4311 	     || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4312 	     || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4313 	     || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4314 	    && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4315 		|| rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4316 		|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4317 		|| rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4318 	    && rtx_equal_p (rld[i].in, rld[j].in)
4319 	    && (operand_reloadnum[rld[i].opnum] < 0
4320 		|| rld[operand_reloadnum[rld[i].opnum]].optional)
4321 	    && (operand_reloadnum[rld[j].opnum] < 0
4322 		|| rld[operand_reloadnum[rld[j].opnum]].optional)
4323 	    && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4324 		|| (goal_alternative_matches[rld[j].opnum]
4325 		    == rld[i].opnum)))
4326 	  {
4327 	    for (k = 0; k < n_replacements; k++)
4328 	      if (replacements[k].what == j)
4329 		replacements[k].what = i;
4330 
4331 	    if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4332 		|| rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4333 	      rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4334 	    else
4335 	      rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4336 	    rld[j].in = 0;
4337 	  }
4338     }
4339 
4340   /* Scan all the reloads and update their type.
4341      If a reload is for the address of an operand and we didn't reload
4342      that operand, change the type.  Similarly, change the operand number
4343      of a reload when two operands match.  If a reload is optional, treat it
4344      as though the operand isn't reloaded.
4345 
4346      ??? This latter case is somewhat odd because if we do the optional
4347      reload, it means the object is hanging around.  Thus we need only
4348      do the address reload if the optional reload was NOT done.
4349 
4350      Change secondary reloads to be the address type of their operand, not
4351      the normal type.
4352 
4353      If an operand's reload is now RELOAD_OTHER, change any
4354      RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4355      RELOAD_FOR_OTHER_ADDRESS.  */
4356 
4357   for (i = 0; i < n_reloads; i++)
4358     {
4359       if (rld[i].secondary_p
4360 	  && rld[i].when_needed == operand_type[rld[i].opnum])
4361 	rld[i].when_needed = address_type[rld[i].opnum];
4362 
4363       if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4364 	   || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4365 	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4366 	   || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4367 	  && (operand_reloadnum[rld[i].opnum] < 0
4368 	      || rld[operand_reloadnum[rld[i].opnum]].optional))
4369 	{
4370 	  /* If we have a secondary reload to go along with this reload,
4371 	     change its type to RELOAD_FOR_OPADDR_ADDR.  */
4372 
4373 	  if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4374 	       || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4375 	      && rld[i].secondary_in_reload != -1)
4376 	    {
4377 	      int secondary_in_reload = rld[i].secondary_in_reload;
4378 
4379 	      rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4380 
4381 	      /* If there's a tertiary reload we have to change it also.  */
4382 	      if (secondary_in_reload > 0
4383 		  && rld[secondary_in_reload].secondary_in_reload != -1)
4384 		rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4385 		  = RELOAD_FOR_OPADDR_ADDR;
4386 	    }
4387 
4388 	  if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4389 	       || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4390 	      && rld[i].secondary_out_reload != -1)
4391 	    {
4392 	      int secondary_out_reload = rld[i].secondary_out_reload;
4393 
4394 	      rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4395 
4396 	      /* If there's a tertiary reload we have to change it also.  */
4397 	      if (secondary_out_reload
4398 		  && rld[secondary_out_reload].secondary_out_reload != -1)
4399 		rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4400 		  = RELOAD_FOR_OPADDR_ADDR;
4401 	    }
4402 
4403 	  if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4404 	      || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4405 	    rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4406 	  else
4407 	    rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4408 	}
4409 
4410       if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4411 	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4412 	  && operand_reloadnum[rld[i].opnum] >= 0
4413 	  && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4414 	      == RELOAD_OTHER))
4415 	rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4416 
4417       if (goal_alternative_matches[rld[i].opnum] >= 0)
4418 	rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4419     }
4420 
4421   /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4422      If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4423      reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4424 
4425      choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4426      conflict with RELOAD_FOR_OPERAND_ADDRESS reloads.  This is true for a
4427      single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4428      However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4429      then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4430      RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4431      This is complicated by the fact that a single operand can have more
4432      than one RELOAD_FOR_OPERAND_ADDRESS reload.  It is very difficult to fix
4433      choose_reload_regs without affecting code quality, and cases that
4434      actually fail are extremely rare, so it turns out to be better to fix
4435      the problem here by not generating cases that choose_reload_regs will
4436      fail for.  */
4437   /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4438      RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4439      a single operand.
4440      We can reduce the register pressure by exploiting that a
4441      RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4442      does not conflict with any of them, if it is only used for the first of
4443      the RELOAD_FOR_X_ADDRESS reloads.  */
4444   {
4445     int first_op_addr_num = -2;
4446     int first_inpaddr_num[MAX_RECOG_OPERANDS];
4447     int first_outpaddr_num[MAX_RECOG_OPERANDS];
4448     int need_change = 0;
4449     /* We use first_op_addr_num and the contents of the above arrays
4450        first as flags: -2 means no instance encountered, -1 means exactly
4451        one instance encountered.
4452        If more than one instance has been encountered, we store the reload
4453        number of the first reload of the kind in question; reload numbers
4454        are known to be non-negative.  */
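    /* For illustration (a sketch of the bookkeeping, not part of the
       original comment): first_op_addr_num starts at -2; the first
       RELOAD_FOR_OPERAND_ADDRESS reload seen while scanning from the
       highest-numbered reload downwards bumps it to -1 and records nothing;
       a second one bumps it to 0, so it is overwritten with that reload's
       index and NEED_CHANGE is set; later (lower-numbered) instances keep
       overwriting it, leaving the lowest-numbered reload of that kind.  */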
4455     for (i = 0; i < noperands; i++)
4456       first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4457     for (i = n_reloads - 1; i >= 0; i--)
4458       {
4459 	switch (rld[i].when_needed)
4460 	  {
4461 	  case RELOAD_FOR_OPERAND_ADDRESS:
4462 	    if (++first_op_addr_num >= 0)
4463 	      {
4464 		first_op_addr_num = i;
4465 		need_change = 1;
4466 	      }
4467 	    break;
4468 	  case RELOAD_FOR_INPUT_ADDRESS:
4469 	    if (++first_inpaddr_num[rld[i].opnum] >= 0)
4470 	      {
4471 		first_inpaddr_num[rld[i].opnum] = i;
4472 		need_change = 1;
4473 	      }
4474 	    break;
4475 	  case RELOAD_FOR_OUTPUT_ADDRESS:
4476 	    if (++first_outpaddr_num[rld[i].opnum] >= 0)
4477 	      {
4478 		first_outpaddr_num[rld[i].opnum] = i;
4479 		need_change = 1;
4480 	      }
4481 	    break;
4482 	  default:
4483 	    break;
4484 	  }
4485       }
4486 
4487     if (need_change)
4488       {
4489 	for (i = 0; i < n_reloads; i++)
4490 	  {
4491 	    int first_num;
4492 	    enum reload_type type;
4493 
4494 	    switch (rld[i].when_needed)
4495 	      {
4496 	      case RELOAD_FOR_OPADDR_ADDR:
4497 		first_num = first_op_addr_num;
4498 		type = RELOAD_FOR_OPERAND_ADDRESS;
4499 		break;
4500 	      case RELOAD_FOR_INPADDR_ADDRESS:
4501 		first_num = first_inpaddr_num[rld[i].opnum];
4502 		type = RELOAD_FOR_INPUT_ADDRESS;
4503 		break;
4504 	      case RELOAD_FOR_OUTADDR_ADDRESS:
4505 		first_num = first_outpaddr_num[rld[i].opnum];
4506 		type = RELOAD_FOR_OUTPUT_ADDRESS;
4507 		break;
4508 	      default:
4509 		continue;
4510 	      }
4511 	    if (first_num < 0)
4512 	      continue;
4513 	    else if (i > first_num)
4514 	      rld[i].when_needed = type;
4515 	    else
4516 	      {
4517 		/* Check if the only TYPE reload that uses reload I is
4518 		   reload FIRST_NUM.  */
4519 		for (j = n_reloads - 1; j > first_num; j--)
4520 		  {
4521 		    if (rld[j].when_needed == type
4522 			&& (rld[i].secondary_p
4523 			    ? rld[j].secondary_in_reload == i
4524 			    : reg_mentioned_p (rld[i].in, rld[j].in)))
4525 		      {
4526 			rld[i].when_needed = type;
4527 			break;
4528 		      }
4529 		  }
4530 	      }
4531 	  }
4532       }
4533   }
4534 
4535   /* See if we have any reloads that are now allowed to be merged
4536      because we've changed when the reload is needed to
4537      RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS.  Only
4538      check for the most common cases.  */
4539 
4540   for (i = 0; i < n_reloads; i++)
4541     if (rld[i].in != 0 && rld[i].out == 0
4542 	&& (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4543 	    || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4544 	    || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4545       for (j = 0; j < n_reloads; j++)
4546 	if (i != j && rld[j].in != 0 && rld[j].out == 0
4547 	    && rld[j].when_needed == rld[i].when_needed
4548 	    && MATCHES (rld[i].in, rld[j].in)
4549 	    && rld[i].rclass == rld[j].rclass
4550 	    && !rld[i].nocombine && !rld[j].nocombine
4551 	    && rld[i].reg_rtx == rld[j].reg_rtx)
4552 	  {
4553 	    rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4554 	    transfer_replacements (i, j);
4555 	    rld[j].in = 0;
4556 	  }
4557 
4558   /* If we made any reloads for addresses, see if they violate a
4559      "no input reloads" requirement for this insn.  But loads that we
4560      do after the insn (such as for output addresses) are fine.  */
4561   if (HAVE_cc0 && no_input_reloads)
4562     for (i = 0; i < n_reloads; i++)
4563       gcc_assert (rld[i].in == 0
4564 		  || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4565 		  || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4566 
4567   /* Compute reload_mode and reload_nregs.  */
4568   for (i = 0; i < n_reloads; i++)
4569     {
4570       rld[i].mode
4571 	= (rld[i].inmode == VOIDmode
4572 	   || (GET_MODE_SIZE (rld[i].outmode)
4573 	       > GET_MODE_SIZE (rld[i].inmode)))
4574 	  ? rld[i].outmode : rld[i].inmode;
4575 
4576       rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4577     }
4578 
4579   /* Special case a simple move with an input reload and a
4580      destination of a hard reg: if the hard reg is ok, use it.  */
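  /* Illustrative example (assumed operands, not from the original sources):
     for (set (reg:SI 2) (reg:SI 123)) where pseudo 123 needs an input
     reload into GENERAL_REGS, hard register 2 itself can serve as the
     reload register, provided every hard register making up the value is in
     the reload's class, the mode is OK for that register, and the
     destination is not an elimination target.  */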
4581   for (i = 0; i < n_reloads; i++)
4582     if (rld[i].when_needed == RELOAD_FOR_INPUT
4583 	&& GET_CODE (PATTERN (insn)) == SET
4584 	&& REG_P (SET_DEST (PATTERN (insn)))
4585 	&& (SET_SRC (PATTERN (insn)) == rld[i].in
4586 	    || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4587 	&& !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4588       {
4589 	rtx dest = SET_DEST (PATTERN (insn));
4590 	unsigned int regno = REGNO (dest);
4591 
4592 	if (regno < FIRST_PSEUDO_REGISTER
4593 	    && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4594 	    && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4595 	  {
4596 	    int nr = hard_regno_nregs[regno][rld[i].mode];
4597 	    int ok = 1, nri;
4598 
4599 	    for (nri = 1; nri < nr; nri ++)
4600 	      if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4601 		{
4602 		  ok = 0;
4603 		  break;
4604 		}
4605 
4606 	    if (ok)
4607 	      rld[i].reg_rtx = dest;
4608 	  }
4609       }
4610 
4611   return retval;
4612 }
4613 
4614 /* Return true if alternative number ALTNUM in constraint-string
4615    CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4616    MEM gives the reference if its address hasn't been fully reloaded,
4617    otherwise it is NULL.  */
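/* For instance (an illustrative assumption): given the constraint string
   "=r,m" and ALTNUM 1, the scan below skips the first alternative, finds
   the memory constraint 'm' in the second one, and returns true (provided
   MEM, if given, satisfies it); with ALTNUM 0 only 'r' is seen and the
   function returns false.  */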
4618 
4619 static bool
4620 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4621 				   const char *constraint, int altnum)
4622 {
4623   int c;
4624 
4625   /* Skip alternatives before the one requested.  */
4626   while (altnum > 0)
4627     {
4628       while (*constraint++ != ',')
4629 	;
4630       altnum--;
4631     }
4632   /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4633      If one of them is present, this alternative accepts the result of
4634      passing a constant-pool reference through find_reloads_toplev.
4635 
4636      The same is true of extra memory constraints if the address
4637      was reloaded into a register.  However, the target may elect
4638      to disallow the original constant address, forcing it to be
4639      reloaded into a register instead.  */
4640   for (; (c = *constraint) && c != ',' && c != '#';
4641        constraint += CONSTRAINT_LEN (c, constraint))
4642     {
4643       enum constraint_num cn = lookup_constraint (constraint);
4644       if (insn_extra_memory_constraint (cn)
4645 	  && (mem == NULL || constraint_satisfied_p (mem, cn)))
4646 	return true;
4647     }
4648   return false;
4649 }
4650 
4651 /* Scan X for memory references and scan the addresses for reloading.
4652    Also checks for references to "constant" regs that we want to eliminate
4653    and replaces them with the values they stand for.
4654    We may alter X destructively if it contains a reference to such.
4655    If X is just a constant reg, we return the equivalent value
4656    instead of X.
4657 
4658    IND_LEVELS says how many levels of indirect addressing this machine
4659    supports.
4660 
4661    OPNUM and TYPE identify the purpose of the reload.
4662 
4663    IS_SET_DEST is true if X is the destination of a SET, in which case
4664    it must not be replaced by a constant.
4665 
4666    INSN, if nonzero, is the insn in which we do the reload.  It is used
4667    to determine if we may generate output reloads, and where to put USEs
4668    for pseudos that we have to replace with stack slots.
4669 
4670    ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4671    result of find_reloads_address.  */
4672 
4673 static rtx
4674 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4675 		     int ind_levels, int is_set_dest, rtx_insn *insn,
4676 		     int *address_reloaded)
4677 {
4678   RTX_CODE code = GET_CODE (x);
4679 
4680   const char *fmt = GET_RTX_FORMAT (code);
4681   int i;
4682   int copied;
4683 
4684   if (code == REG)
4685     {
4686       /* This code is duplicated for speed in find_reloads.  */
4687       int regno = REGNO (x);
4688       if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4689 	x = reg_equiv_constant (regno);
4690 #if 0
4691       /*  This creates (subreg (mem...)) which would cause an unnecessary
4692 	  reload of the mem.  */
4693       else if (reg_equiv_mem (regno) != 0)
4694 	x = reg_equiv_mem (regno);
4695 #endif
4696       else if (reg_equiv_memory_loc (regno)
4697 	       && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4698 	{
4699 	  rtx mem = make_memloc (x, regno);
4700 	  if (reg_equiv_address (regno)
4701 	      || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4702 	    {
4703 	      /* If this is not a toplevel operand, find_reloads doesn't see
4704 		 this substitution.  We have to emit a USE of the pseudo so
4705 		 that delete_output_reload can see it.  */
4706 	      if (replace_reloads && recog_data.operand[opnum] != x)
4707 		/* We mark the USE with QImode so that we recognize it
4708 		   as one that can be safely deleted at the end of
4709 		   reload.  */
4710 		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4711 			  QImode);
4712 	      x = mem;
4713 	      i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4714 					opnum, type, ind_levels, insn);
4715 	      if (!rtx_equal_p (x, mem))
4716 		push_reg_equiv_alt_mem (regno, x);
4717 	      if (address_reloaded)
4718 		*address_reloaded = i;
4719 	    }
4720 	}
4721       return x;
4722     }
4723   if (code == MEM)
4724     {
4725       rtx tem = x;
4726 
4727       i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4728 				opnum, type, ind_levels, insn);
4729       if (address_reloaded)
4730 	*address_reloaded = i;
4731 
4732       return tem;
4733     }
4734 
4735   if (code == SUBREG && REG_P (SUBREG_REG (x)))
4736     {
4737       /* Check for SUBREG containing a REG that's equivalent to a
4738 	 constant.  If the constant has a known value, truncate it
4739 	 right now.  Similarly if we are extracting a single-word of a
4740 	 multi-word constant.  If the constant is symbolic, allow it
4741 	 to be substituted normally.  push_reload will strip the
4742 	 subreg later.  The constant must not be VOIDmode, because we
4743 	 will lose the mode of the register (this should never happen
4744 	 because one of the cases above should handle it).  */
4745 
4746       int regno = REGNO (SUBREG_REG (x));
4747       rtx tem;
4748 
4749       if (regno >= FIRST_PSEUDO_REGISTER
4750 	  && reg_renumber[regno] < 0
4751 	  && reg_equiv_constant (regno) != 0)
4752 	{
4753 	  tem =
4754 	    simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4755 				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4756 	  gcc_assert (tem);
4757 	  if (CONSTANT_P (tem)
4758 	      && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4759 	    {
4760 	      tem = force_const_mem (GET_MODE (x), tem);
4761 	      i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4762 					&XEXP (tem, 0), opnum, type,
4763 					ind_levels, insn);
4764 	      if (address_reloaded)
4765 		*address_reloaded = i;
4766 	    }
4767 	  return tem;
4768 	}
4769 
4770       /* If the subreg contains a reg that will be converted to a mem,
4771 	 attempt to convert the whole subreg to a (narrower or wider)
4772 	 memory reference instead.  If this succeeds, we're done --
4773 	 otherwise fall through to check whether the inner reg still
4774 	 needs address reloads anyway.  */
4775 
4776       if (regno >= FIRST_PSEUDO_REGISTER
4777 	  && reg_equiv_memory_loc (regno) != 0)
4778 	{
4779 	  tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4780 					     insn, address_reloaded);
4781 	  if (tem)
4782 	    return tem;
4783 	}
4784     }
4785 
4786   for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4787     {
4788       if (fmt[i] == 'e')
4789 	{
4790 	  rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4791 					      ind_levels, is_set_dest, insn,
4792 					      address_reloaded);
4793 	  /* If we have replaced a reg with its equivalent memory loc -
4794 	     that can still be handled here e.g. if it's in a paradoxical
4795 	     subreg - we must make the change in a copy, rather than using
4796 	     a destructive change.  This way, find_reloads can still elect
4797 	     not to do the change.  */
4798 	  if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4799 	    {
4800 	      x = shallow_copy_rtx (x);
4801 	      copied = 1;
4802 	    }
4803 	  XEXP (x, i) = new_part;
4804 	}
4805     }
4806   return x;
4807 }
4808 
4809 /* Return a mem ref for the memory equivalent of reg REGNO.
4810    This mem ref is not shared with anything.  */
4811 
4812 static rtx
4813 make_memloc (rtx ad, int regno)
4814 {
4815   /* We must rerun eliminate_regs, in case the elimination
4816      offsets have changed.  */
4817   rtx tem
4818     = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4819 	    0);
4820 
4821   /* If TEM might contain a pseudo, we must copy it to avoid
4822      modifying it when we do the substitution for the reload.  */
4823   if (rtx_varies_p (tem, 0))
4824     tem = copy_rtx (tem);
4825 
4826   tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4827   tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4828 
4829   /* Copy the result if it's still the same as the equivalence, to avoid
4830      modifying it when we do the substitution for the reload.  */
4831   if (tem == reg_equiv_memory_loc (regno))
4832     tem = copy_rtx (tem);
4833   return tem;
4834 }
4835 
4836 /* Returns true if AD could be turned into a valid memory reference
4837    to mode MODE in address space AS by reloading the part pointed to
4838    by PART into a register.  */
4839 
4840 static int
4841 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4842 				   addr_space_t as, rtx *part)
4843 {
4844   int retv;
4845   rtx tem = *part;
4846   rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4847 
4848   *part = reg;
4849   retv = memory_address_addr_space_p (mode, ad, as);
4850   *part = tem;
4851 
4852   return retv;
4853 }
4854 
4855 /* Record all reloads needed for handling memory address AD
4856    which appears in *LOC in a memory reference to mode MODE
4857    which itself is found in location *MEMREFLOC.
4858    Note that we take shortcuts assuming that no multi-reg machine mode
4859    occurs as part of an address.
4860 
4861    OPNUM and TYPE specify the purpose of this reload.
4862 
4863    IND_LEVELS says how many levels of indirect addressing this machine
4864    supports.
4865 
4866    INSN, if nonzero, is the insn in which we do the reload.  It is used
4867    to determine if we may generate output reloads, and where to put USEs
4868    for pseudos that we have to replace with stack slots.
4869 
4870    Value is one if this address is reloaded or replaced as a whole; it is
4871    zero if the top level of this address was not reloaded or replaced, and
4872    it is -1 if it may or may not have been reloaded or replaced.
4873 
4874    Note that there is no verification that the address will be valid after
4875    this routine does its work.  Instead, we rely on the fact that the address
4876    was valid when reload started.  So we need only undo things that reload
4877    could have broken.  These are wrong register types, pseudos not allocated
4878    to a hard register, and frame pointer elimination.  */
4879 
4880 static int
4881 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4882 		      rtx *loc, int opnum, enum reload_type type,
4883 		      int ind_levels, rtx_insn *insn)
4884 {
4885   addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4886 			     : ADDR_SPACE_GENERIC;
4887   int regno;
4888   int removed_and = 0;
4889   int op_index;
4890   rtx tem;
4891 
4892   /* If the address is a register, see if it is a legitimate address and
4893      reload if not.  We first handle the cases where we need not reload
4894      or where we must reload in a non-standard way.  */
4895 
4896   if (REG_P (ad))
4897     {
4898       regno = REGNO (ad);
4899 
4900       if (reg_equiv_constant (regno) != 0)
4901 	{
4902 	  find_reloads_address_part (reg_equiv_constant (regno), loc,
4903 				     base_reg_class (mode, as, MEM, SCRATCH),
4904 				     GET_MODE (ad), opnum, type, ind_levels);
4905 	  return 1;
4906 	}
4907 
4908       tem = reg_equiv_memory_loc (regno);
4909       if (tem != 0)
4910 	{
4911 	  if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4912 	    {
4913 	      tem = make_memloc (ad, regno);
4914 	      if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4915 							XEXP (tem, 0),
4916 							MEM_ADDR_SPACE (tem)))
4917 		{
4918 		  rtx orig = tem;
4919 
4920 		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4921 					&XEXP (tem, 0), opnum,
4922 					ADDR_TYPE (type), ind_levels, insn);
4923 		  if (!rtx_equal_p (tem, orig))
4924 		    push_reg_equiv_alt_mem (regno, tem);
4925 		}
4926 	      /* We can avoid a reload if the register's equivalent memory
4927 		 expression is valid as an indirect memory address.
4928 		 But not all addresses are valid in a mem used as an indirect
4929 		 address: only reg or reg+constant.  */
4930 
4931 	      if (ind_levels > 0
4932 		  && strict_memory_address_addr_space_p (mode, tem, as)
4933 		  && (REG_P (XEXP (tem, 0))
4934 		      || (GET_CODE (XEXP (tem, 0)) == PLUS
4935 			  && REG_P (XEXP (XEXP (tem, 0), 0))
4936 			  && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4937 		{
4938 		  /* TEM is not the same as what we'll be replacing the
4939 		     pseudo with after reload; put a USE in front of INSN
4940 		     in the final reload pass.  */
4941 		  if (replace_reloads
4942 		      && num_not_at_initial_offset
4943 		      && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4944 		    {
4945 		      *loc = tem;
4946 		      /* We mark the USE with QImode so that we
4947 			 recognize it as one that can be safely
4948 			 deleted at the end of reload.  */
4949 		      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4950 						  insn), QImode);
4951 
4952 		      /* This doesn't really count as replacing the address
4953 			 as a whole, since it is still a memory access.  */
4954 		    }
4955 		  return 0;
4956 		}
4957 	      ad = tem;
4958 	    }
4959 	}
4960 
4961       /* The only remaining case where we can avoid a reload is if this is a
4962 	 hard register that is valid as a base register and which is not the
4963 	 subject of a CLOBBER in this insn.  */
4964 
4965       else if (regno < FIRST_PSEUDO_REGISTER
4966 	       && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4967 	       && ! regno_clobbered_p (regno, this_insn, mode, 0))
4968 	return 0;
4969 
4970       /* If we do not have one of the cases above, we must do the reload.  */
4971       push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4972 		   base_reg_class (mode, as, MEM, SCRATCH),
4973 		   GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4974       return 1;
4975     }
4976 
4977   if (strict_memory_address_addr_space_p (mode, ad, as))
4978     {
4979       /* The address appears valid, so reloads are not needed.
4980 	 But the address may contain an eliminable register.
4981 	 This can happen because a machine with indirect addressing
4982 	 may consider a pseudo register by itself a valid address even when
4983 	 it has failed to get a hard reg.
4984 	 So do a tree-walk to find and eliminate all such regs.  */
4985 
4986       /* But first quickly dispose of a common case.  */
4987       if (GET_CODE (ad) == PLUS
4988 	  && CONST_INT_P (XEXP (ad, 1))
4989 	  && REG_P (XEXP (ad, 0))
4990 	  && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4991 	return 0;
4992 
4993       subst_reg_equivs_changed = 0;
4994       *loc = subst_reg_equivs (ad, insn);
4995 
4996       if (! subst_reg_equivs_changed)
4997 	return 0;
4998 
4999       /* Check result for validity after substitution.  */
5000       if (strict_memory_address_addr_space_p (mode, ad, as))
5001 	return 0;
5002     }
5003 
5004 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5005   do
5006     {
5007       if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5008 	{
5009 	  LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5010 				     ind_levels, win);
5011 	}
5012       break;
5013     win:
5014       *memrefloc = copy_rtx (*memrefloc);
5015       XEXP (*memrefloc, 0) = ad;
5016       move_replacements (&ad, &XEXP (*memrefloc, 0));
5017       return -1;
5018     }
5019   while (0);
5020 #endif
5021 
5022   /* The address is not valid.  We have to figure out why.  First see if
5023      we have an outer AND and remove it if so.  Then analyze what's inside.  */
5024 
5025   if (GET_CODE (ad) == AND)
5026     {
5027       removed_and = 1;
5028       loc = &XEXP (ad, 0);
5029       ad = *loc;
5030     }
5031 
5032   /* One possibility for why the address is invalid is that it is itself
5033      a MEM.  This can happen when the frame pointer is being eliminated, a
5034      pseudo is not allocated to a hard register, and the offset between the
5035      frame and stack pointers is not its initial value.  In that case the
5036      pseudo will have been replaced by a MEM referring to the
5037      stack pointer.  */
5038   if (MEM_P (ad))
5039     {
5040       /* First ensure that the address in this MEM is valid.  Then, unless
5041 	 indirect addresses are valid, reload the MEM into a register.  */
5042       tem = ad;
5043       find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5044 			    opnum, ADDR_TYPE (type),
5045 			    ind_levels == 0 ? 0 : ind_levels - 1, insn);
5046 
5047       /* If tem was changed, then we must create a new memory reference to
5048 	 hold it and store it back into memrefloc.  */
5049       if (tem != ad && memrefloc)
5050 	{
5051 	  *memrefloc = copy_rtx (*memrefloc);
5052 	  copy_replacements (tem, XEXP (*memrefloc, 0));
5053 	  loc = &XEXP (*memrefloc, 0);
5054 	  if (removed_and)
5055 	    loc = &XEXP (*loc, 0);
5056 	}
5057 
5058       /* Check cases similar to the indirect-address cases above, except
5059 	 that we can allow pseudos and a MEM since they should have been
5060 	 taken care of above.  */
5061 
5062       if (ind_levels == 0
5063 	  || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5064 	  || MEM_P (XEXP (tem, 0))
5065 	  || ! (REG_P (XEXP (tem, 0))
5066 		|| (GET_CODE (XEXP (tem, 0)) == PLUS
5067 		    && REG_P (XEXP (XEXP (tem, 0), 0))
5068 		    && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5069 	{
5070 	  /* Must use TEM here, not AD, since it is the one that will
5071 	     have any subexpressions reloaded, if needed.  */
5072 	  push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5073 		       base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5074 		       VOIDmode, 0,
5075 		       0, opnum, type);
5076 	  return ! removed_and;
5077 	}
5078       else
5079 	return 0;
5080     }
5081 
5082   /* If we have address of a stack slot but it's not valid because the
5083      displacement is too large, compute the sum in a register.
5084      Handle all base registers here, not just fp/ap/sp, because on some
5085      targets (namely SH) we can also get too large displacements from
5086      big-endian corrections.  */
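  /* Illustrative example (assumed numbers, not from the original comment):
     after elimination an address such as (plus (reg sp) (const_int 100000))
     may exceed the target's displacement range.  If reg+reg addressing is
     allowed (double_reg_address_ok), the constant is reloaded into an index
     register, giving (plus (reg sp) (reg <index>)); otherwise the whole sum
     is reloaded into a single base register.  */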
5087   else if (GET_CODE (ad) == PLUS
5088 	   && REG_P (XEXP (ad, 0))
5089 	   && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5090 	   && CONST_INT_P (XEXP (ad, 1))
5091 	   && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5092 				    CONST_INT)
5093 	       /* Similarly, if we were to reload the base register and the
5094 		  mem+offset address is still invalid, then we want to reload
5095 		  the whole address, not just the base register.  */
5096 	       || ! maybe_memory_address_addr_space_p
5097 		     (mode, ad, as, &(XEXP (ad, 0)))))
5098 
5099     {
5100       /* Unshare the MEM rtx so we can safely alter it.  */
5101       if (memrefloc)
5102 	{
5103 	  *memrefloc = copy_rtx (*memrefloc);
5104 	  loc = &XEXP (*memrefloc, 0);
5105 	  if (removed_and)
5106 	    loc = &XEXP (*loc, 0);
5107 	}
5108 
5109       if (double_reg_address_ok[mode]
5110 	  && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5111 				  PLUS, CONST_INT))
5112 	{
5113 	  /* Unshare the sum as well.  */
5114 	  *loc = ad = copy_rtx (ad);
5115 
5116 	  /* Reload the displacement into an index reg.
5117 	     We assume the frame pointer or arg pointer is a base reg.  */
5118 	  find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5119 				     INDEX_REG_CLASS, GET_MODE (ad), opnum,
5120 				     type, ind_levels);
5121 	  return 0;
5122 	}
5123       else
5124 	{
5125 	  /* If the sum of two regs is not necessarily valid,
5126 	     reload the sum into a base reg.
5127 	     That will at least work.  */
5128 	  find_reloads_address_part (ad, loc,
5129 				     base_reg_class (mode, as, MEM, SCRATCH),
5130 				     GET_MODE (ad), opnum, type, ind_levels);
5131 	}
5132       return ! removed_and;
5133     }
5134 
5135   /* If we have an indexed stack slot, there are three possible reasons why
5136      it might be invalid: The index might need to be reloaded, the address
5137      might have been made by frame pointer elimination and hence have a
5138      constant out of range, or both reasons might apply.
5139 
5140      We can easily check for an index needing reload, but even if that is the
5141      case, we might also have an invalid constant.  To avoid making the
5142      conservative assumption and requiring two reloads, we see if this address
5143      is valid when not interpreted strictly.  If it is, the only problem is
5144      that the index needs a reload and find_reloads_address_1 will take care
5145      of it.
5146 
5147      Handle all base registers here, not just fp/ap/sp, because on some
5148      targets (namely SPARC) we can also get invalid addresses from preventive
5149      subreg big-endian corrections made by find_reloads_toplev.  We
5150      can also get expressions involving LO_SUM (rather than PLUS) from
5151      find_reloads_subreg_address.
5152 
5153      If we decide to do something, it must be that `double_reg_address_ok'
5154      is true.  We generate a reload of the base register + constant and
5155      rework the sum so that the reload register will be added to the index.
5156      This is safe because we know the address isn't shared.
5157 
5158      We check for the base register as both the first and second operand of
5159      the innermost PLUS and/or LO_SUM.  */
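  /* Worked example (an illustrative assumption): for
     ad = (plus (plus (reg fp) (reg ix)) (const_int 8)) whose displacement
     became invalid through elimination, the loop below rewrites the address
     as (plus (plus (reg fp) (const_int 8)) (reg ix)) and reloads the inner
     base+constant part into a base register, so the reload register ends up
     being added to the index, as described above.  */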
5160 
5161   for (op_index = 0; op_index < 2; ++op_index)
5162     {
5163       rtx operand, addend;
5164       enum rtx_code inner_code;
5165 
5166       if (GET_CODE (ad) != PLUS)
5167 	  continue;
5168 
5169       inner_code = GET_CODE (XEXP (ad, 0));
5170       if (!(GET_CODE (ad) == PLUS
5171 	    && CONST_INT_P (XEXP (ad, 1))
5172 	    && (inner_code == PLUS || inner_code == LO_SUM)))
5173 	continue;
5174 
5175       operand = XEXP (XEXP (ad, 0), op_index);
5176       if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5177 	continue;
5178 
5179       addend = XEXP (XEXP (ad, 0), 1 - op_index);
5180 
5181       if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5182 				GET_CODE (addend))
5183 	   || operand == frame_pointer_rtx
5184 	   || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5185 	       && operand == hard_frame_pointer_rtx)
5186 	   || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5187 	       && operand == arg_pointer_rtx)
5188 	   || operand == stack_pointer_rtx)
5189 	  && ! maybe_memory_address_addr_space_p
5190 		(mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5191 	{
5192 	  rtx offset_reg;
5193 	  enum reg_class cls;
5194 
5195 	  offset_reg = plus_constant (GET_MODE (ad), operand,
5196 				      INTVAL (XEXP (ad, 1)));
5197 
5198 	  /* Form the adjusted address.  */
5199 	  if (GET_CODE (XEXP (ad, 0)) == PLUS)
5200 	    ad = gen_rtx_PLUS (GET_MODE (ad),
5201 			       op_index == 0 ? offset_reg : addend,
5202 			       op_index == 0 ? addend : offset_reg);
5203 	  else
5204 	    ad = gen_rtx_LO_SUM (GET_MODE (ad),
5205 				 op_index == 0 ? offset_reg : addend,
5206 				 op_index == 0 ? addend : offset_reg);
5207 	  *loc = ad;
5208 
5209 	  cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5210 	  find_reloads_address_part (XEXP (ad, op_index),
5211 				     &XEXP (ad, op_index), cls,
5212 				     GET_MODE (ad), opnum, type, ind_levels);
5213 	  find_reloads_address_1 (mode, as,
5214 				  XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5215 				  GET_CODE (XEXP (ad, op_index)),
5216 				  &XEXP (ad, 1 - op_index), opnum,
5217 				  type, 0, insn);
5218 
5219 	  return 0;
5220 	}
5221     }
5222 
5223   /* See if address becomes valid when an eliminable register
5224      in a sum is replaced.  */
5225 
5226   tem = ad;
5227   if (GET_CODE (ad) == PLUS)
5228     tem = subst_indexed_address (ad);
5229   if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5230     {
5231       /* Ok, we win that way.  Replace any additional eliminable
5232 	 registers.  */
5233 
5234       subst_reg_equivs_changed = 0;
5235       tem = subst_reg_equivs (tem, insn);
5236 
5237       /* Make sure that didn't make the address invalid again.  */
5238 
5239       if (! subst_reg_equivs_changed
5240 	  || strict_memory_address_addr_space_p (mode, tem, as))
5241 	{
5242 	  *loc = tem;
5243 	  return 0;
5244 	}
5245     }
5246 
5247   /* If constants aren't valid addresses, reload the constant address
5248      into a register.  */
5249   if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5250     {
5251       machine_mode address_mode = GET_MODE (ad);
5252       if (address_mode == VOIDmode)
5253 	address_mode = targetm.addr_space.address_mode (as);
5254 
5255       /* If AD is an address in the constant pool, the MEM rtx may be shared.
5256 	 Unshare it so we can safely alter it.  */
5257       if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5258 	  && CONSTANT_POOL_ADDRESS_P (ad))
5259 	{
5260 	  *memrefloc = copy_rtx (*memrefloc);
5261 	  loc = &XEXP (*memrefloc, 0);
5262 	  if (removed_and)
5263 	    loc = &XEXP (*loc, 0);
5264 	}
5265 
5266       find_reloads_address_part (ad, loc,
5267 				 base_reg_class (mode, as, MEM, SCRATCH),
5268 				 address_mode, opnum, type, ind_levels);
5269       return ! removed_and;
5270     }
5271 
5272   return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5273 				 opnum, type, ind_levels, insn);
5274 }
5275 
5276 /* Find all pseudo regs appearing in AD
5277    that are eliminable in favor of equivalent values
5278    and do not have hard regs; replace them by their equivalents.
5279    INSN, if nonzero, is the insn in which we do the reload.  We put USEs in
5280    front of it for pseudos that we have to replace with stack slots.  */
5281 
5282 static rtx
5283 subst_reg_equivs (rtx ad, rtx_insn *insn)
5284 {
5285   RTX_CODE code = GET_CODE (ad);
5286   int i;
5287   const char *fmt;
5288 
5289   switch (code)
5290     {
5291     case HIGH:
5292     case CONST:
5293     CASE_CONST_ANY:
5294     case SYMBOL_REF:
5295     case LABEL_REF:
5296     case PC:
5297     case CC0:
5298       return ad;
5299 
5300     case REG:
5301       {
5302 	int regno = REGNO (ad);
5303 
5304 	if (reg_equiv_constant (regno) != 0)
5305 	  {
5306 	    subst_reg_equivs_changed = 1;
5307 	    return reg_equiv_constant (regno);
5308 	  }
5309 	if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5310 	  {
5311 	    rtx mem = make_memloc (ad, regno);
5312 	    if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5313 	      {
5314 		subst_reg_equivs_changed = 1;
5315 		/* We mark the USE with QImode so that we recognize it
5316 		   as one that can be safely deleted at the end of
5317 		   reload.  */
5318 		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5319 			  QImode);
5320 		return mem;
5321 	      }
5322 	  }
5323       }
5324       return ad;
5325 
5326     case PLUS:
5327       /* Quickly dispose of a common case.  */
5328       if (XEXP (ad, 0) == frame_pointer_rtx
5329 	  && CONST_INT_P (XEXP (ad, 1)))
5330 	return ad;
5331       break;
5332 
5333     default:
5334       break;
5335     }
5336 
5337   fmt = GET_RTX_FORMAT (code);
5338   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5339     if (fmt[i] == 'e')
5340       XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5341   return ad;
5342 }
5343 
5344 /* Compute the sum of X and Y, making canonicalizations assumed in an
5345    address, namely: sum constant integers, surround the sum of two
5346    constants with a CONST, put the constant as the second operand, and
5347    group the constant on the outermost sum.
5348 
5349    This routine assumes both inputs are already in canonical form.  */
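/* For illustration (not part of the original documentation):
   form_sum (SImode, (plus:SI (reg:SI 65) (const_int 4)), (const_int 8))
   yields (plus:SI (reg:SI 65) (const_int 12)), while summing a SYMBOL_REF
   with a nonzero CONST_INT yields a CONST-wrapped PLUS so that the result
   remains a single canonical constant.  */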
5350 
5351 rtx
5352 form_sum (machine_mode mode, rtx x, rtx y)
5353 {
5354   rtx tem;
5355 
5356   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5357   gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5358 
5359   if (CONST_INT_P (x))
5360     return plus_constant (mode, y, INTVAL (x));
5361   else if (CONST_INT_P (y))
5362     return plus_constant (mode, x, INTVAL (y));
5363   else if (CONSTANT_P (x))
5364     tem = x, x = y, y = tem;
5365 
5366   if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5367     return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5368 
5369   /* Note that if the operands of Y are specified in the opposite
5370      order in the recursive calls below, infinite recursion will occur.  */
5371   if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5372     return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5373 
5374   /* If both constant, encapsulate sum.  Otherwise, just form sum.  A
5375      constant will have been placed second.  */
5376   if (CONSTANT_P (x) && CONSTANT_P (y))
5377     {
5378       if (GET_CODE (x) == CONST)
5379 	x = XEXP (x, 0);
5380       if (GET_CODE (y) == CONST)
5381 	y = XEXP (y, 0);
5382 
5383       return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5384     }
5385 
5386   return gen_rtx_PLUS (mode, x, y);
5387 }
5388 
5389 /* If ADDR is a sum containing a pseudo register that should be
5390    replaced with a constant (from reg_equiv_constant),
5391    return the result of doing so, and also apply the associative
5392    law so that the result is more likely to be a valid address.
5393    (But it is not guaranteed to be one.)
5394 
5395    Note that at most one register is replaced, even if more are
5396    replaceable.  Also, we try to put the result into a canonical form
5397    so it is more likely to be a valid address.
5398 
5399    In all other cases, return ADDR.  */
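/* Illustrative example (assumes a pseudo with a known constant
   equivalence): if ADDR is (plus (reg 70) (const_int 4)), pseudo 70 did not
   get a hard register, and it is equivalent to (symbol_ref "x"), the
   substitution plus re-association through form_sum yields
   (const (plus (symbol_ref "x") (const_int 4))).  */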
5400 
5401 static rtx
5402 subst_indexed_address (rtx addr)
5403 {
5404   rtx op0 = 0, op1 = 0, op2 = 0;
5405   rtx tem;
5406   int regno;
5407 
5408   if (GET_CODE (addr) == PLUS)
5409     {
5410       /* Try to find a register to replace.  */
5411       op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5412       if (REG_P (op0)
5413 	  && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5414 	  && reg_renumber[regno] < 0
5415 	  && reg_equiv_constant (regno) != 0)
5416 	op0 = reg_equiv_constant (regno);
5417       else if (REG_P (op1)
5418 	       && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5419 	       && reg_renumber[regno] < 0
5420 	       && reg_equiv_constant (regno) != 0)
5421 	op1 = reg_equiv_constant (regno);
5422       else if (GET_CODE (op0) == PLUS
5423 	       && (tem = subst_indexed_address (op0)) != op0)
5424 	op0 = tem;
5425       else if (GET_CODE (op1) == PLUS
5426 	       && (tem = subst_indexed_address (op1)) != op1)
5427 	op1 = tem;
5428       else
5429 	return addr;
5430 
5431       /* Pick out up to three things to add.  */
5432       if (GET_CODE (op1) == PLUS)
5433 	op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5434       else if (GET_CODE (op0) == PLUS)
5435 	op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5436 
5437       /* Compute the sum.  */
5438       if (op2 != 0)
5439 	op1 = form_sum (GET_MODE (addr), op1, op2);
5440       if (op1 != 0)
5441 	op0 = form_sum (GET_MODE (addr), op0, op1);
5442 
5443       return op0;
5444     }
5445   return addr;
5446 }
5447 
5448 /* Update the REG_INC notes for an insn.  It updates all REG_INC
5449    notes for the instruction which refer to REGNO so that they refer
5450    to the reload number.
5451 
5452    INSN is the insn for which any REG_INC notes need updating.
5453 
5454    REGNO is the register number which has been reloaded.
5455 
5456    RELOADNUM is the reload number.  */
5457 
5458 static void
5459 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5460 		       int reloadnum ATTRIBUTE_UNUSED)
5461 {
5462   if (!AUTO_INC_DEC)
5463     return;
5464 
5465   for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5466     if (REG_NOTE_KIND (link) == REG_INC
5467         && (int) REGNO (XEXP (link, 0)) == regno)
5468       push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5469 }
5470 
5471 /* Record the pseudo registers we must reload into hard registers in a
5472    subexpression of a would-be memory address, X referring to a value
5473    in mode MODE.  (This function is not called if the address we find
5474    is strictly valid.)
5475 
5476    CONTEXT = 1 means we are considering regs as index regs,
5477    = 0 means we are considering them as base regs.
5478    OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5479    or an autoinc code.
5480    If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5481    is the code of the index part of the address.  Otherwise, pass SCRATCH
5482    for this argument.
5483    OPNUM and TYPE specify the purpose of any reloads made.
5484 
5485    IND_LEVELS says how many levels of indirect addressing are
5486    supported at this point in the address.
5487 
5488    INSN, if nonzero, is the insn in which we do the reload.  It is used
5489    to determine if we may generate output reloads.
5490 
5491    We return nonzero if X, as a whole, is reloaded or replaced.  */
5492 
5493 /* Note that we take shortcuts assuming that no multi-reg machine mode
5494    occurs as part of an address.
5495    Also, this is not fully machine-customizable; it works for machines
5496    such as VAXen and 68000's and 32000's, but other possible machines
5497    could have addressing modes that this does not handle right.
5498    If you add push_reload calls here, you need to make sure gen_reload
5499    handles those cases gracefully.  */
5500 
5501 static int
5502 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5503 			rtx x, int context,
5504 			enum rtx_code outer_code, enum rtx_code index_code,
5505 			rtx *loc, int opnum, enum reload_type type,
5506 			int ind_levels, rtx_insn *insn)
5507 {
5508 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX)	\
5509   ((CONTEXT) == 0							\
5510    ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX)		\
5511    : REGNO_OK_FOR_INDEX_P (REGNO))
5512 
5513   enum reg_class context_reg_class;
5514   RTX_CODE code = GET_CODE (x);
5515   bool reloaded_inner_of_autoinc = false;
5516 
5517   if (context == 1)
5518     context_reg_class = INDEX_REG_CLASS;
5519   else
5520     context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5521 
5522   switch (code)
5523     {
5524     case PLUS:
5525       {
5526 	rtx orig_op0 = XEXP (x, 0);
5527 	rtx orig_op1 = XEXP (x, 1);
5528 	RTX_CODE code0 = GET_CODE (orig_op0);
5529 	RTX_CODE code1 = GET_CODE (orig_op1);
5530 	rtx op0 = orig_op0;
5531 	rtx op1 = orig_op1;
5532 
5533 	if (GET_CODE (op0) == SUBREG)
5534 	  {
5535 	    op0 = SUBREG_REG (op0);
5536 	    code0 = GET_CODE (op0);
5537 	    if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5538 	      op0 = gen_rtx_REG (word_mode,
5539 				 (REGNO (op0) +
5540 				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5541 						       GET_MODE (SUBREG_REG (orig_op0)),
5542 						       SUBREG_BYTE (orig_op0),
5543 						       GET_MODE (orig_op0))));
5544 	  }
5545 
5546 	if (GET_CODE (op1) == SUBREG)
5547 	  {
5548 	    op1 = SUBREG_REG (op1);
5549 	    code1 = GET_CODE (op1);
5550 	    if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5551 	      /* ??? Why is this given op1's mode and above for
5552 		 ??? op0 SUBREGs we use word_mode?  */
5553 	      op1 = gen_rtx_REG (GET_MODE (op1),
5554 				 (REGNO (op1) +
5555 				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5556 						       GET_MODE (SUBREG_REG (orig_op1)),
5557 						       SUBREG_BYTE (orig_op1),
5558 						       GET_MODE (orig_op1))));
5559 	  }
5560 	/* A PLUS in the index register may be created only as a result of
5561 	   register rematerialization for an expression like &localvar*4.
5562 	   Reload it.  It may be possible to combine the displacement on the
5563 	   outer level, but it is probably not worthwhile to do so.  */
5564 	if (context == 1)
5565 	  {
5566 	    find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5567 				  opnum, ADDR_TYPE (type), ind_levels, insn);
5568 	    push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5569 			 context_reg_class,
5570 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5571 	    return 1;
5572 	  }
5573 
5574 	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5575 	    || code0 == ZERO_EXTEND || code1 == MEM)
5576 	  {
5577 	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5578 				    &XEXP (x, 0), opnum, type, ind_levels,
5579 				    insn);
5580 	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5581 				    &XEXP (x, 1), opnum, type, ind_levels,
5582 				    insn);
5583 	  }
5584 
5585 	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5586 		 || code1 == ZERO_EXTEND || code0 == MEM)
5587 	  {
5588 	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5589 				    &XEXP (x, 0), opnum, type, ind_levels,
5590 				    insn);
5591 	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5592 				    &XEXP (x, 1), opnum, type, ind_levels,
5593 				    insn);
5594 	  }
5595 
5596 	else if (code0 == CONST_INT || code0 == CONST
5597 		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5598 	  find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5599 				  &XEXP (x, 1), opnum, type, ind_levels,
5600 				  insn);
5601 
5602 	else if (code1 == CONST_INT || code1 == CONST
5603 		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5604 	  find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5605 				  &XEXP (x, 0), opnum, type, ind_levels,
5606 				  insn);
5607 
5608 	else if (code0 == REG && code1 == REG)
5609 	  {
5610 	    if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5611 		&& regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5612 	      return 0;
5613 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5614 		     && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5615 	      return 0;
5616 	    else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5617 	      find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5618 				      &XEXP (x, 1), opnum, type, ind_levels,
5619 				      insn);
5620 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5621 	      find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5622 				      &XEXP (x, 0), opnum, type, ind_levels,
5623 				      insn);
5624 	    else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5625 	      find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5626 				      &XEXP (x, 0), opnum, type, ind_levels,
5627 				      insn);
5628 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5629 	      find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5630 				      &XEXP (x, 1), opnum, type, ind_levels,
5631 				      insn);
5632 	    else
5633 	      {
5634 		find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5635 					&XEXP (x, 0), opnum, type, ind_levels,
5636 					insn);
5637 		find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5638 					&XEXP (x, 1), opnum, type, ind_levels,
5639 					insn);
5640 	      }
5641 	  }
5642 
5643 	else if (code0 == REG)
5644 	  {
5645 	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5646 				    &XEXP (x, 0), opnum, type, ind_levels,
5647 				    insn);
5648 	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5649 				    &XEXP (x, 1), opnum, type, ind_levels,
5650 				    insn);
5651 	  }
5652 
5653 	else if (code1 == REG)
5654 	  {
5655 	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5656 				    &XEXP (x, 1), opnum, type, ind_levels,
5657 				    insn);
5658 	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5659 				    &XEXP (x, 0), opnum, type, ind_levels,
5660 				    insn);
5661 	  }
5662       }
5663 
5664       return 0;
5665 
5666     case POST_MODIFY:
5667     case PRE_MODIFY:
5668       {
5669 	rtx op0 = XEXP (x, 0);
5670 	rtx op1 = XEXP (x, 1);
5671 	enum rtx_code index_code;
5672 	int regno;
5673 	int reloadnum;
5674 
5675 	if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5676 	  return 0;
5677 
5678 	/* Currently, we only support {PRE,POST}_MODIFY constructs
5679 	   where a base register is {inc,dec}remented by the contents
5680 	   of another register or by a constant value.  Thus, these
5681 	   operands must match.  */
5682 	gcc_assert (op0 == XEXP (op1, 0));
5683 
5684 	/* Require index register (or constant).  Let's just handle the
5685 	   register case in the meantime... If the target allows
5686 	   auto-modify by a constant then we could try replacing a pseudo
5687 	   register with its equivalent constant where applicable.
5688 
5689 	   We also handle the case where the register was eliminated
5690 	   resulting in a PLUS subexpression.
5691 
5692 	   If we later decide to reload the whole PRE_MODIFY or
5693 	   POST_MODIFY, inc_for_reload might clobber the reload register
5694 	   before reading the index.  The index register might therefore
5695 	   need to live longer than a TYPE reload normally would, so be
5696 	   conservative and class it as RELOAD_OTHER.  */
5697 	if ((REG_P (XEXP (op1, 1))
5698 	     && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5699 	    || GET_CODE (XEXP (op1, 1)) == PLUS)
5700 	  find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5701 				  &XEXP (op1, 1), opnum, RELOAD_OTHER,
5702 				  ind_levels, insn);
5703 
5704 	gcc_assert (REG_P (XEXP (op1, 0)));
5705 
5706 	regno = REGNO (XEXP (op1, 0));
5707 	index_code = GET_CODE (XEXP (op1, 1));
5708 
5709 	/* A register that is incremented cannot be constant!  */
5710 	gcc_assert (regno < FIRST_PSEUDO_REGISTER
5711 		    || reg_equiv_constant (regno) == 0);
5712 
5713 	/* Handle a register that is equivalent to a memory location
5714 	    which cannot be addressed directly.  */
5715 	if (reg_equiv_memory_loc (regno) != 0
5716 	    && (reg_equiv_address (regno) != 0
5717 		|| num_not_at_initial_offset))
5718 	  {
5719 	    rtx tem = make_memloc (XEXP (x, 0), regno);
5720 
5721 	    if (reg_equiv_address (regno)
5722 		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5723 	      {
5724 		rtx orig = tem;
5725 
5726 		/* First reload the memory location's address.
5727 		    We can't use ADDR_TYPE (type) here, because we need to
5728 		    write back the value after reading it, hence we actually
5729 		    need two registers.  */
5730 		find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5731 				      &XEXP (tem, 0), opnum,
5732 				      RELOAD_OTHER,
5733 				      ind_levels, insn);
5734 
5735 		if (!rtx_equal_p (tem, orig))
5736 		  push_reg_equiv_alt_mem (regno, tem);
5737 
5738 		/* Then reload the memory location into a base
5739 		   register.  */
5740 		reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5741 					 &XEXP (op1, 0),
5742 					 base_reg_class (mode, as,
5743 							 code, index_code),
5744 					 GET_MODE (x), GET_MODE (x), 0,
5745 					 0, opnum, RELOAD_OTHER);
5746 
5747 		update_auto_inc_notes (this_insn, regno, reloadnum);
5748 		return 0;
5749 	      }
5750 	  }
5751 
5752 	if (reg_renumber[regno] >= 0)
5753 	  regno = reg_renumber[regno];
5754 
5755 	/* We require a base register here...  */
5756 	if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5757 	  {
5758 	    reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5759 				     &XEXP (op1, 0), &XEXP (x, 0),
5760 				     base_reg_class (mode, as,
5761 						     code, index_code),
5762 				     GET_MODE (x), GET_MODE (x), 0, 0,
5763 				     opnum, RELOAD_OTHER);
5764 
5765 	    update_auto_inc_notes (this_insn, regno, reloadnum);
5766 	    return 0;
5767 	  }
5768       }
5769       return 0;
5770 
5771     case POST_INC:
5772     case POST_DEC:
5773     case PRE_INC:
5774     case PRE_DEC:
5775       if (REG_P (XEXP (x, 0)))
5776 	{
5777 	  int regno = REGNO (XEXP (x, 0));
5778 	  int value = 0;
5779 	  rtx x_orig = x;
5780 
5781 	  /* A register that is incremented cannot be constant!  */
5782 	  gcc_assert (regno < FIRST_PSEUDO_REGISTER
5783 		      || reg_equiv_constant (regno) == 0);
5784 
5785 	  /* Handle a register that is equivalent to a memory location
5786 	     which cannot be addressed directly.  */
5787 	  if (reg_equiv_memory_loc (regno) != 0
5788 	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5789 	    {
5790 	      rtx tem = make_memloc (XEXP (x, 0), regno);
5791 	      if (reg_equiv_address (regno)
5792 		  || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5793 		{
5794 		  rtx orig = tem;
5795 
5796 		  /* First reload the memory location's address.
5797 		     We can't use ADDR_TYPE (type) here, because we need to
5798 		     write back the value after reading it, hence we actually
5799 		     need two registers.  */
5800 		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5801 					&XEXP (tem, 0), opnum, type,
5802 					ind_levels, insn);
5803 		  reloaded_inner_of_autoinc = true;
5804 		  if (!rtx_equal_p (tem, orig))
5805 		    push_reg_equiv_alt_mem (regno, tem);
5806 		  /* Put this inside a new increment-expression.  */
5807 		  x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5808 		  /* Proceed to reload that, as if it contained a register.  */
5809 		}
5810 	    }
5811 
5812 	  /* If we have a hard register that is ok in this incdec context,
5813 	     don't make a reload.  If the register isn't nice enough for
5814 	     autoincdec, we can reload it.  But if an autoincrement of a
5815 	     register that we verified here as acceptable is nevertheless
5816 	     not "valid" outside, it must be that no autoincrement is
5817 	     "valid".  If that is true and something made an autoincrement
5818 	     anyway, this must be a special context where one is allowed.
5819 	     (For example, a "push" instruction.)
5820 	     We can't improve this address, so leave it alone.  */
5821 
5822 	  /* Otherwise, reload the autoincrement into a suitable hard reg
5823 	     and record how much to increment by.  */
5824 
5825 	  if (reg_renumber[regno] >= 0)
5826 	    regno = reg_renumber[regno];
5827 	  if (regno >= FIRST_PSEUDO_REGISTER
5828 	      || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5829 				      index_code))
5830 	    {
5831 	      int reloadnum;
5832 
5833 	      /* If we can output the register afterwards, do so, this
5834 		 saves the extra update.
5835 		 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5836 		 CALL_INSN - and it does not set CC0.
5837 		 But don't do this if we cannot directly address the
5838 		 memory location, since this will make it harder to
5839 		 reuse address reloads, and increases register pressure.
5840 		 Also don't do this if we can probably update x directly.  */
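	      /* Concretely (restating the test below): EQUIV is the memory
		 location the register is equivalent to (or the MEM already
		 substituted above), and ICODE is the target's add pattern
		 for this mode; if that add can operate directly on EQUIV,
		 updating X in place is likely cheaper than reloading and
		 storing back afterwards.  */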
5841 	      rtx equiv = (MEM_P (XEXP (x, 0))
5842 			   ? XEXP (x, 0)
5843 			   : reg_equiv_mem (regno));
5844 	      enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5845 	      if (insn && NONJUMP_INSN_P (insn)
5846 #if HAVE_cc0
5847 		  && ! sets_cc0_p (PATTERN (insn))
5848 #endif
5849 		  && (regno < FIRST_PSEUDO_REGISTER
5850 		      || (equiv
5851 			  && memory_operand (equiv, GET_MODE (equiv))
5852 			  && ! (icode != CODE_FOR_nothing
5853 				&& insn_operand_matches (icode, 0, equiv)
5854 				&& insn_operand_matches (icode, 1, equiv))))
5855 		  /* Using RELOAD_OTHER means we emit this and the reload we
5856 		     made earlier in the wrong order.  */
5857 		  && !reloaded_inner_of_autoinc)
5858 		{
5859 		  /* We use the original pseudo for loc, so that
5860 		     emit_reload_insns() knows which pseudo this
5861 		     reload refers to and updates the pseudo rtx, not
5862 		     its equivalent memory location, as well as the
5863 		     corresponding entry in reg_last_reload_reg.  */
5864 		  loc = &XEXP (x_orig, 0);
5865 		  x = XEXP (x, 0);
5866 		  reloadnum
5867 		    = push_reload (x, x, loc, loc,
5868 				   context_reg_class,
5869 				   GET_MODE (x), GET_MODE (x), 0, 0,
5870 				   opnum, RELOAD_OTHER);
5871 		}
5872 	      else
5873 		{
5874 		  reloadnum
5875 		    = push_reload (x, x, loc, (rtx*) 0,
5876 				   context_reg_class,
5877 				   GET_MODE (x), GET_MODE (x), 0, 0,
5878 				   opnum, type);
5879 		  rld[reloadnum].inc
5880 		    = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5881 
5882 		  value = 1;
5883 		}
5884 
5885 	      update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5886 				     reloadnum);
5887 	    }
5888 	  return value;
5889 	}
5890       return 0;
5891 
5892     case TRUNCATE:
5893     case SIGN_EXTEND:
5894     case ZERO_EXTEND:
5895       /* Look for parts to reload in the inner expression and reload them
5896 	 too, in addition to this operation.  Reloading all inner parts in
5897 	 addition to this one shouldn't be necessary, but at this point,
5898 	 we don't know if we can possibly omit any part that *can* be
5899 	 reloaded.  Targets that are better off reloading just either part
5900 	 (or perhaps even a different part of an outer expression), should
5901 	 define LEGITIMIZE_RELOAD_ADDRESS.  */
5902       find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5903 			      context, code, SCRATCH, &XEXP (x, 0), opnum,
5904 			      type, ind_levels, insn);
5905       push_reload (x, NULL_RTX, loc, (rtx*) 0,
5906 		   context_reg_class,
5907 		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5908       return 1;
5909 
5910     case MEM:
5911       /* This is probably the result of a substitution, by eliminate_regs, of
5912 	 an equivalent address for a pseudo that was not allocated to a hard
5913 	 register.  Verify that the specified address is valid and reload it
5914 	 into a register.
5915 
5916 	 Since we know we are going to reload this item, don't decrement for
5917 	 the indirection level.
5918 
5919 	 Note that this is actually conservative:  it would be slightly more
5920 	 efficient to use the value of SPILL_INDIRECT_LEVELS from
5921 	 reload1.c here.  */
5922 
5923       find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5924 			    opnum, ADDR_TYPE (type), ind_levels, insn);
5925       push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5926 		   context_reg_class,
5927 		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5928       return 1;
5929 
5930     case REG:
5931       {
5932 	int regno = REGNO (x);
5933 
5934 	if (reg_equiv_constant (regno) != 0)
5935 	  {
5936 	    find_reloads_address_part (reg_equiv_constant (regno), loc,
5937 				       context_reg_class,
5938 				       GET_MODE (x), opnum, type, ind_levels);
5939 	    return 1;
5940 	  }
5941 
5942 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5943 	 that feeds this insn.  */
5944 	if (reg_equiv_mem (regno) != 0)
5945 	  {
5946 	    push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5947 			 context_reg_class,
5948 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5949 	    return 1;
5950 	  }
5951 #endif
5952 
5953 	if (reg_equiv_memory_loc (regno)
5954 	    && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5955 	  {
5956 	    rtx tem = make_memloc (x, regno);
5957 	    if (reg_equiv_address (regno) != 0
5958 		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5959 	      {
5960 		x = tem;
5961 		find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5962 				      &XEXP (x, 0), opnum, ADDR_TYPE (type),
5963 				      ind_levels, insn);
5964 		if (!rtx_equal_p (x, tem))
5965 		  push_reg_equiv_alt_mem (regno, x);
5966 	      }
5967 	  }
5968 
5969 	if (reg_renumber[regno] >= 0)
5970 	  regno = reg_renumber[regno];
5971 
5972 	if (regno >= FIRST_PSEUDO_REGISTER
5973 	    || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5974 				    index_code))
5975 	  {
5976 	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
5977 			 context_reg_class,
5978 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5979 	    return 1;
5980 	  }
5981 
5982 	/* If a register appearing in an address is the subject of a CLOBBER
5983 	   in this insn, reload it into some other register to be safe.
5984 	   The CLOBBER is supposed to make the register unavailable
5985 	   from before this insn to after it.  */
5986 	if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5987 	  {
5988 	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
5989 			 context_reg_class,
5990 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5991 	    return 1;
5992 	  }
5993       }
5994       return 0;
5995 
5996     case SUBREG:
5997       if (REG_P (SUBREG_REG (x)))
5998 	{
5999 	  /* If this is a SUBREG of a hard register and the resulting register
6000 	     is of the wrong class, reload the whole SUBREG.  This avoids
6001 	     needless copies if SUBREG_REG is multi-word.  */
6002 	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6003 	    {
6004 	      int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6005 
6006 	      if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6007 				       index_code))
6008 		{
6009 		  push_reload (x, NULL_RTX, loc, (rtx*) 0,
6010 			       context_reg_class,
6011 			       GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6012 		  return 1;
6013 		}
6014 	    }
6015 	  /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6016 	     is larger than the class size, then reload the whole SUBREG.  */
6017 	  else
6018 	    {
6019 	      enum reg_class rclass = context_reg_class;
6020 	      if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6021 		  > reg_class_size[(int) rclass])
6022 		{
6023 		  /* If the inner register will be replaced by a memory
6024 		     reference, we can do this only if we can replace the
6025 		     whole subreg by a (narrower) memory reference.  If
6026 		     this is not possible, fall through and reload just
6027 		     the inner register (including address reloads).  */
6028 		  if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6029 		    {
6030 		      rtx tem = find_reloads_subreg_address (x, opnum,
6031 							     ADDR_TYPE (type),
6032 							     ind_levels, insn,
6033 							     NULL);
6034 		      if (tem)
6035 			{
6036 			  push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6037 				       GET_MODE (tem), VOIDmode, 0, 0,
6038 				       opnum, type);
6039 			  return 1;
6040 			}
6041 		    }
6042 		  else
6043 		    {
6044 		      push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6045 				   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6046 		      return 1;
6047 		    }
6048 		}
6049 	    }
6050 	}
6051       break;
6052 
6053     default:
6054       break;
6055     }
6056 
6057   {
6058     const char *fmt = GET_RTX_FORMAT (code);
6059     int i;
6060 
6061     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6062       {
6063 	if (fmt[i] == 'e')
6064 	  /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6065 	     we get here.  */
6066 	  find_reloads_address_1 (mode, as, XEXP (x, i), context,
6067 				  code, SCRATCH, &XEXP (x, i),
6068 				  opnum, type, ind_levels, insn);
6069       }
6070   }
6071 
6072 #undef REG_OK_FOR_CONTEXT
6073   return 0;
6074 }
6075 
6076 /* X, which is found at *LOC, is a part of an address that needs to be
6077    reloaded into a register of class RCLASS.  If X is a constant, or if
6078    X is a PLUS that contains a constant, check that the constant is a
6079    legitimate operand and that we are supposed to be able to load
6080    it into the register.
6081 
6082    If not, force the constant into memory and reload the MEM instead.
6083 
6084    MODE is the mode to use, in case X is an integer constant.
6085 
6086    OPNUM and TYPE describe the purpose of any reloads made.
6087 
6088    IND_LEVELS says how many levels of indirect addressing this machine
6089    supports.  */
6090 
6091 static void
6092 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6093 			   machine_mode mode, int opnum,
6094 			   enum reload_type type, int ind_levels)
6095 {
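  /* Illustrative note (target-dependent): a constant the target cannot
     use directly, e.g. a CONST_DOUBLE with no matching immediate form, is
     forced into the constant pool by force_const_mem below, and the
     resulting MEM's address is then reloaded like any other address.  */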
6096   if (CONSTANT_P (x)
6097       && (!targetm.legitimate_constant_p (mode, x)
6098 	  || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6099     {
6100       x = force_const_mem (mode, x);
6101       find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6102 			    opnum, type, ind_levels, 0);
6103     }
6104 
6105   else if (GET_CODE (x) == PLUS
6106 	   && CONSTANT_P (XEXP (x, 1))
6107 	   && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6108 	       || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6109 		   == NO_REGS))
6110     {
6111       rtx tem;
6112 
6113       tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6114       x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6115       find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6116 			    opnum, type, ind_levels, 0);
6117     }
6118 
6119   push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6120 	       mode, VOIDmode, 0, 0, opnum, type);
6121 }
6122 
6123 /* X, a subreg of a pseudo, is a part of an address that needs to be
6124    reloaded, and the pseudo is equivalent to a memory location.
6125 
6126    Attempt to replace the whole subreg by a (possibly narrower or wider)
6127    memory reference.  If this is possible, return this new memory
6128    reference, and push all required address reloads.  Otherwise,
6129    return NULL.
6130 
6131    OPNUM and TYPE identify the purpose of the reload.
6132 
6133    IND_LEVELS says how many levels of indirect addressing are
6134    supported at this point in the address.
6135 
6136    INSN, if nonzero, is the insn in which we do the reload.  It is used
6137    to determine where to put USEs for pseudos that we have to replace with
6138    stack slots.  */
6139 
6140 static rtx
6141 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6142 			     int ind_levels, rtx_insn *insn,
6143 			     int *address_reloaded)
6144 {
6145   machine_mode outer_mode = GET_MODE (x);
6146   machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6147   int regno = REGNO (SUBREG_REG (x));
6148   int reloaded = 0;
6149   rtx tem, orig;
6150   int offset;
6151 
6152   gcc_assert (reg_equiv_memory_loc (regno) != 0);
6153 
6154   /* We cannot replace the subreg with a modified memory reference if:
6155 
6156      - we have a paradoxical subreg that implicitly acts as a zero or
6157        sign extension operation due to LOAD_EXTEND_OP;
6158 
6159      - we have a subreg that is implicitly supposed to act on the full
6160        register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6161 
6162      - the address of the equivalent memory location is mode-dependent;  or
6163 
6164      - we have a paradoxical subreg and the resulting memory is not
6165        sufficiently aligned to allow access in the wider mode.
6166 
6167     In addition, we choose not to perform the replacement for *any*
6168     paradoxical subreg, even if it were possible in principle.  This
6169     is to avoid generating wider memory references than necessary.
6170 
6171     This corresponds to how previous versions of reload used to handle
6172     paradoxical subregs where no address reload was required.  */
6173 
6174   if (paradoxical_subreg_p (x))
6175     return NULL;
6176 
6177   if (WORD_REGISTER_OPERATIONS
6178       && GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6179       && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6180           == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6181     return NULL;
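  /* The check above catches cases like a QImode subreg of an SImode
     pseudo on a WORD_REGISTER_OPERATIONS target: such a subreg is
     supposed to operate on the containing word, so it must not be
     narrowed to a QImode memory reference.  */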
6182 
6183   /* Since we don't attempt to handle paradoxical subregs, we can just
6184      call into simplify_subreg, which will handle all remaining checks
6185      for us.  */
6186   orig = make_memloc (SUBREG_REG (x), regno);
6187   offset = SUBREG_BYTE (x);
6188   tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6189   if (!tem || !MEM_P (tem))
6190     return NULL;
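  /* Illustrative example: if X is (subreg:SI (reg:DI P) 4) and pseudo P
     is equivalent to (mem:DI ADDR), the code above yields
     (mem:SI (plus ADDR (const_int 4))) (modulo simplification of the
     address); the reloads needed for that address are pushed below.  */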
6191 
6192   /* Now push all required address reloads, if any.  */
6193   reloaded = find_reloads_address (GET_MODE (tem), &tem,
6194 				   XEXP (tem, 0), &XEXP (tem, 0),
6195 				   opnum, type, ind_levels, insn);
6196   /* ??? Do we need to handle nonzero offsets somehow?  */
6197   if (!offset && !rtx_equal_p (tem, orig))
6198     push_reg_equiv_alt_mem (regno, tem);
6199 
6200   /* For some processors an address may be valid in the original mode but
6201      not in a smaller mode.  For example, ARM accepts a scaled index register
6202      in SImode but not in HImode.  Note that this is only a problem if the
6203      address in reg_equiv_mem is already invalid in the new mode; other
6204      cases would be fixed by find_reloads_address as usual.
6205 
6206      ??? We attempt to handle such cases here by doing an additional reload
6207      of the full address after the usual processing by find_reloads_address.
6208      Note that this may not work in the general case, but it seems to cover
6209      the cases where this situation currently occurs.  A more general fix
6210      might be to reload the *value* instead of the address, but this would
6211      not be expected by the callers of this routine as-is.
6212 
6213      If find_reloads_address has already completely replaced the address,
6214      there is nothing further to do.  */
6215   if (reloaded == 0
6216       && reg_equiv_mem (regno) != 0
6217       && !strict_memory_address_addr_space_p
6218 		(GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6219 		 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6220     {
6221       push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6222 		   base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6223 				   MEM, SCRATCH),
6224 		   GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6225       reloaded = 1;
6226     }
6227 
6228   /* If this is not a toplevel operand, find_reloads doesn't see this
6229      substitution.  We have to emit a USE of the pseudo so that
6230      delete_output_reload can see it.  */
6231   if (replace_reloads && recog_data.operand[opnum] != x)
6232     /* We mark the USE with QImode so that we recognize it as one that
6233        can be safely deleted at the end of reload.  */
6234     PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6235 	      QImode);
6236 
6237   if (address_reloaded)
6238     *address_reloaded = reloaded;
6239 
6240   return tem;
6241 }
6242 
6243 /* Substitute into the current INSN the registers into which we have reloaded
6244    the things that need reloading.  The array `replacements'
6245    contains the locations of all pointers that must be changed
6246    and says what to replace them with.  */
6249 
6250 void
6251 subst_reloads (rtx_insn *insn)
6252 {
6253   int i;
6254 
6255   for (i = 0; i < n_replacements; i++)
6256     {
6257       struct replacement *r = &replacements[i];
6258       rtx reloadreg = rld[r->what].reg_rtx;
6259       if (reloadreg)
6260 	{
6261 #ifdef DEBUG_RELOAD
6262 	  /* This checking takes a very long time on some platforms
6263 	     causing the gcc.c-torture/compile/limits-fnargs.c test
6264 	     to time out during testing.  See PR 31850.
6265 
6266 	     Internal consistency test.  Check that we don't modify
6267 	     anything in the equivalence arrays.  Whenever something from
6268 	     those arrays needs to be reloaded, it must be unshared before
6269 	     being substituted into; the equivalence must not be modified.
6270 	     Otherwise, if the equivalence is used after that, it will
6271 	     have been modified, and the thing substituted (probably a
6272 	     register) is likely overwritten and not a usable equivalence.  */
6273 	  int check_regno;
6274 
6275 	  for (check_regno = 0; check_regno < max_regno; check_regno++)
6276 	    {
6277 #define CHECK_MODF(ARRAY)						\
6278 	      gcc_assert (!(*reg_equivs)[check_regno].ARRAY		\
6279 			  || !loc_mentioned_in_p (r->where,		\
6280 						  (*reg_equivs)[check_regno].ARRAY))
6281 
6282 	      CHECK_MODF (constant);
6283 	      CHECK_MODF (memory_loc);
6284 	      CHECK_MODF (address);
6285 	      CHECK_MODF (mem);
6286 #undef CHECK_MODF
6287 	    }
6288 #endif /* DEBUG_RELOAD */
6289 
6290 	  /* If we're replacing a LABEL_REF with a register, there must
6291 	     already be an indication (to e.g. flow) which label this
6292 	     register refers to.  */
6293 	  gcc_assert (GET_CODE (*r->where) != LABEL_REF
6294 		      || !JUMP_P (insn)
6295 		      || find_reg_note (insn,
6296 					REG_LABEL_OPERAND,
6297 					XEXP (*r->where, 0))
6298 		      || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6299 
6300 	  /* Encapsulate RELOADREG so its machine mode matches what
6301 	     used to be there.  Note that gen_lowpart_common will
6302 	     do the wrong thing if RELOADREG is multi-word.  RELOADREG
6303 	     will always be a REG here.  */
6304 	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6305 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6306 
6307 	  *r->where = reloadreg;
6308 	}
6309       /* If reload got no reg and isn't optional, something's wrong.  */
6310       else
6311 	gcc_assert (rld[r->what].optional);
6312     }
6313 }
6314 
6315 /* Make a copy of any replacements being done into X and move those
6316    copies to locations in Y, a copy of X.  */
6317 
6318 void
6319 copy_replacements (rtx x, rtx y)
6320 {
6321   copy_replacements_1 (&x, &y, n_replacements);
6322 }
6323 
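/* Recursive worker for copy_replacements.  Walk *PX and *PY in parallel;
   for each of the first ORIG_REPLACEMENTS recorded replacements whose
   location lies within *PX, record an identical replacement at the
   corresponding location within *PY.  */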
6324 static void
6325 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6326 {
6327   int i, j;
6328   rtx x, y;
6329   struct replacement *r;
6330   enum rtx_code code;
6331   const char *fmt;
6332 
6333   for (j = 0; j < orig_replacements; j++)
6334     if (replacements[j].where == px)
6335       {
6336 	r = &replacements[n_replacements++];
6337 	r->where = py;
6338 	r->what = replacements[j].what;
6339 	r->mode = replacements[j].mode;
6340       }
6341 
6342   x = *px;
6343   y = *py;
6344   code = GET_CODE (x);
6345   fmt = GET_RTX_FORMAT (code);
6346 
6347   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6348     {
6349       if (fmt[i] == 'e')
6350 	copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6351       else if (fmt[i] == 'E')
6352 	for (j = XVECLEN (x, i); --j >= 0; )
6353 	  copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6354 			       orig_replacements);
6355     }
6356 }
6357 
6358 /* Change any replacements being done to *X to be done to *Y.  */
6359 
6360 void
6361 move_replacements (rtx *x, rtx *y)
6362 {
6363   int i;
6364 
6365   for (i = 0; i < n_replacements; i++)
6366     if (replacements[i].where == x)
6367       replacements[i].where = y;
6368 }
6369 
6370 /* If LOC was scheduled to be replaced by something, return the replacement.
6371    Otherwise, return *LOC.  */
6372 
6373 rtx
6374 find_replacement (rtx *loc)
6375 {
6376   struct replacement *r;
6377 
6378   for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6379     {
6380       rtx reloadreg = rld[r->what].reg_rtx;
6381 
6382       if (reloadreg && r->where == loc)
6383 	{
6384 	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6385 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6386 
6387 	  return reloadreg;
6388 	}
6389       else if (reloadreg && GET_CODE (*loc) == SUBREG
6390 	       && r->where == &SUBREG_REG (*loc))
6391 	{
6392 	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6393 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6394 
6395 	  return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6396 				      GET_MODE (SUBREG_REG (*loc)),
6397 				      SUBREG_BYTE (*loc));
6398 	}
6399     }
6400 
6401   /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6402      what's inside and make a new rtl if so.  */
6403   if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6404       || GET_CODE (*loc) == MULT)
6405     {
6406       rtx x = find_replacement (&XEXP (*loc, 0));
6407       rtx y = find_replacement (&XEXP (*loc, 1));
6408 
6409       if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6410 	return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6411     }
6412 
6413   return *loc;
6414 }
6415 
6416 /* Return nonzero if register in range [REGNO, ENDREGNO)
6417    appears either explicitly or implicitly in X
6418    other than being stored into (except for earlyclobber operands).
6419 
6420    References contained within the substructure at LOC do not count.
6421    LOC may be zero, meaning don't ignore anything.
6422 
6423    This is similar to refers_to_regno_p in rtlanal.c except that we
6424    look at equivalences for pseudos that didn't get hard registers.  */
6425 
6426 static int
6427 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6428 			      rtx x, rtx *loc)
6429 {
6430   int i;
6431   unsigned int r;
6432   RTX_CODE code;
6433   const char *fmt;
6434 
6435   if (x == 0)
6436     return 0;
6437 
6438  repeat:
6439   code = GET_CODE (x);
6440 
6441   switch (code)
6442     {
6443     case REG:
6444       r = REGNO (x);
6445 
6446       /* If this is a pseudo, a hard register must not have been allocated.
6447 	 X must therefore either be a constant or be in memory.  */
6448       if (r >= FIRST_PSEUDO_REGISTER)
6449 	{
6450 	  if (reg_equiv_memory_loc (r))
6451 	    return refers_to_regno_for_reload_p (regno, endregno,
6452 						 reg_equiv_memory_loc (r),
6453 						 (rtx*) 0);
6454 
6455 	  gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6456 	  return 0;
6457 	}
6458 
6459       return (endregno > r
6460 	      && regno < r + (r < FIRST_PSEUDO_REGISTER
6461 			      ? hard_regno_nregs[r][GET_MODE (x)]
6462 			      : 1));
6463 
6464     case SUBREG:
6465       /* If this is a SUBREG of a hard reg, we can see exactly which
6466 	 registers are being modified.  Otherwise, handle normally.  */
6467       if (REG_P (SUBREG_REG (x))
6468 	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6469 	{
6470 	  unsigned int inner_regno = subreg_regno (x);
6471 	  unsigned int inner_endregno
6472 	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6473 			     ? subreg_nregs (x) : 1);
6474 
6475 	  return endregno > inner_regno && regno < inner_endregno;
6476 	}
6477       break;
6478 
6479     case CLOBBER:
6480     case SET:
6481       if (&SET_DEST (x) != loc
6482 	  /* Note setting a SUBREG counts as referring to the REG it is in for
6483 	     a pseudo but not for hard registers since we can
6484 	     treat each word individually.  */
6485 	  && ((GET_CODE (SET_DEST (x)) == SUBREG
6486 	       && loc != &SUBREG_REG (SET_DEST (x))
6487 	       && REG_P (SUBREG_REG (SET_DEST (x)))
6488 	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6489 	       && refers_to_regno_for_reload_p (regno, endregno,
6490 						SUBREG_REG (SET_DEST (x)),
6491 						loc))
6492 	      /* If the output is an earlyclobber operand, this is
6493 		 a conflict.  */
6494 	      || ((!REG_P (SET_DEST (x))
6495 		   || earlyclobber_operand_p (SET_DEST (x)))
6496 		  && refers_to_regno_for_reload_p (regno, endregno,
6497 						   SET_DEST (x), loc))))
6498 	return 1;
6499 
6500       if (code == CLOBBER || loc == &SET_SRC (x))
6501 	return 0;
6502       x = SET_SRC (x);
6503       goto repeat;
6504 
6505     default:
6506       break;
6507     }
6508 
6509   /* X does not match, so try its subexpressions.  */
6510 
6511   fmt = GET_RTX_FORMAT (code);
6512   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6513     {
6514       if (fmt[i] == 'e' && loc != &XEXP (x, i))
6515 	{
6516 	  if (i == 0)
6517 	    {
6518 	      x = XEXP (x, 0);
6519 	      goto repeat;
6520 	    }
6521 	  else
6522 	    if (refers_to_regno_for_reload_p (regno, endregno,
6523 					      XEXP (x, i), loc))
6524 	      return 1;
6525 	}
6526       else if (fmt[i] == 'E')
6527 	{
6528 	  int j;
6529 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6530 	    if (loc != &XVECEXP (x, i, j)
6531 		&& refers_to_regno_for_reload_p (regno, endregno,
6532 						 XVECEXP (x, i, j), loc))
6533 	      return 1;
6534 	}
6535     }
6536   return 0;
6537 }
6538 
6539 /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
6540    we check if any register number in X conflicts with the relevant register
6541    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
6542    contains a MEM (we don't bother checking for memory addresses that can't
6543    conflict because we expect this to be a rare case).
6544 
6545    This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6546    that we look at equivalences for pseudos that didn't get hard registers.  */
6547 
6548 int
6549 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6550 {
6551   int regno, endregno;
6552 
6553   /* Overly conservative.  */
6554   if (GET_CODE (x) == STRICT_LOW_PART
6555       || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6556     x = XEXP (x, 0);
6557 
6558   /* If either argument is a constant, then modifying X can not affect IN.  */
6559   /* If either argument is a constant, then modifying X cannot affect IN.  */
6560     return 0;
6561   else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6562     return refers_to_mem_for_reload_p (in);
6563   else if (GET_CODE (x) == SUBREG)
6564     {
6565       regno = REGNO (SUBREG_REG (x));
6566       if (regno < FIRST_PSEUDO_REGISTER)
6567 	regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6568 				      GET_MODE (SUBREG_REG (x)),
6569 				      SUBREG_BYTE (x),
6570 				      GET_MODE (x));
6571       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6572 			  ? subreg_nregs (x) : 1);
6573 
6574       return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6575     }
6576   else if (REG_P (x))
6577     {
6578       regno = REGNO (x);
6579 
6580       /* If this is a pseudo, it must not have been assigned a hard register.
6581 	 Therefore, it must either be in memory or be a constant.  */
6582 
6583       if (regno >= FIRST_PSEUDO_REGISTER)
6584 	{
6585 	  if (reg_equiv_memory_loc (regno))
6586 	    return refers_to_mem_for_reload_p (in);
6587 	  gcc_assert (reg_equiv_constant (regno));
6588 	  return 0;
6589 	}
6590 
6591       endregno = END_REGNO (x);
6592 
6593       return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6594     }
6595   else if (MEM_P (x))
6596     return refers_to_mem_for_reload_p (in);
6597   else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6598 	   || GET_CODE (x) == CC0)
6599     return reg_mentioned_p (x, in);
6600   else
6601     {
6602       gcc_assert (GET_CODE (x) == PLUS);
6603 
6604       /* We actually want to know if X is mentioned somewhere inside IN.
6605 	 We must not say that (plus (sp) (const_int 124)) is in
6606 	 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6607 	 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6608 	 into a RELOAD_OTHER on behalf of another RELOAD_OTHER.  */
6609       while (MEM_P (in))
6610 	in = XEXP (in, 0);
6611       if (REG_P (in))
6612 	return 0;
6613       else if (GET_CODE (in) == PLUS)
6614 	return (rtx_equal_p (x, in)
6615 		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6616 		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6617       else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6618 		   || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6619     }
6620 
6621   gcc_unreachable ();
6622 }
6623 
6624 /* Return nonzero if anything in X contains a MEM.  Look also for pseudo
6625    registers.  */
6626 
6627 static int
6628 refers_to_mem_for_reload_p (rtx x)
6629 {
6630   const char *fmt;
6631   int i;
6632 
6633   if (MEM_P (x))
6634     return 1;
6635 
6636   if (REG_P (x))
6637     return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6638 	    && reg_equiv_memory_loc (REGNO (x)));
6639 
6640   fmt = GET_RTX_FORMAT (GET_CODE (x));
6641   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6642     if (fmt[i] == 'e'
6643 	&& (MEM_P (XEXP (x, i))
6644 	    || refers_to_mem_for_reload_p (XEXP (x, i))))
6645       return 1;
6646 
6647   return 0;
6648 }
6649 
6650 /* Check the insns before INSN to see if there is a suitable register
6651    containing the same value as GOAL.
6652    If OTHER is -1, look for a register in class RCLASS.
6653    Otherwise, just see if register number OTHER shares GOAL's value.
6654 
6655    Return an rtx for the register found, or zero if none is found.
6656 
6657    If RELOAD_REG_P is (short *)1,
6658    we reject any hard reg that appears in reload_reg_rtx
6659    because such a hard reg is also needed coming into this insn.
6660 
6661    If RELOAD_REG_P is any other nonzero value,
6662    it is a vector indexed by hard reg number
6663    and we reject any hard reg whose element in the vector is nonnegative
6664    as well as any that appears in reload_reg_rtx.
6665 
6666    If GOAL is zero, then GOALREG is a register number; we look
6667    for an equivalent for that register.
6668 
6669    MODE is the machine mode of the value we want an equivalence for.
6670    If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6671 
6672    This function is used by jump.c as well as in the reload pass.
6673 
6674    If GOAL is the sum of the stack pointer and a constant, we treat it
6675    as if it were a constant except that sp is required to be unchanging.  */
6676 
6677 rtx
6678 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6679 		short *reload_reg_p, int goalreg, machine_mode mode)
6680 {
6681   rtx_insn *p = insn;
6682   rtx goaltry, valtry, value;
6683   rtx_insn *where;
6684   rtx pat;
6685   int regno = -1;
6686   int valueno;
6687   int goal_mem = 0;
6688   int goal_const = 0;
6689   int goal_mem_addr_varies = 0;
6690   int need_stable_sp = 0;
6691   int nregs;
6692   int valuenregs;
6693   int num = 0;
6694 
6695   if (goal == 0)
6696     regno = goalreg;
6697   else if (REG_P (goal))
6698     regno = REGNO (goal);
6699   else if (MEM_P (goal))
6700     {
6701       enum rtx_code code = GET_CODE (XEXP (goal, 0));
6702       if (MEM_VOLATILE_P (goal))
6703 	return 0;
6704       if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6705 	return 0;
6706       /* An address with side effects must be reexecuted.  */
6707       switch (code)
6708 	{
6709 	case POST_INC:
6710 	case PRE_INC:
6711 	case POST_DEC:
6712 	case PRE_DEC:
6713 	case POST_MODIFY:
6714 	case PRE_MODIFY:
6715 	  return 0;
6716 	default:
6717 	  break;
6718 	}
6719       goal_mem = 1;
6720     }
6721   else if (CONSTANT_P (goal))
6722     goal_const = 1;
6723   else if (GET_CODE (goal) == PLUS
6724 	   && XEXP (goal, 0) == stack_pointer_rtx
6725 	   && CONSTANT_P (XEXP (goal, 1)))
6726     goal_const = need_stable_sp = 1;
6727   else if (GET_CODE (goal) == PLUS
6728 	   && XEXP (goal, 0) == frame_pointer_rtx
6729 	   && CONSTANT_P (XEXP (goal, 1)))
6730     goal_const = 1;
6731   else
6732     return 0;
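  /* At this point GOAL has been classified: REGNO >= 0 when GOAL is a
     register (or when GOAL was zero and GOALREG was supplied), GOAL_MEM
     for a memory reference, GOAL_CONST for a constant or a stack/frame
     pointer plus a constant; anything else was rejected above.  */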
6733 
6734   num = 0;
6735   /* Scan insns back from INSN, looking for one that copies
6736      a value into or out of GOAL.
6737      Stop and give up if we reach a label.  */
6738 
6739   while (1)
6740     {
6741       p = PREV_INSN (p);
6742       if (p && DEBUG_INSN_P (p))
6743 	continue;
6744       num++;
6745       if (p == 0 || LABEL_P (p)
6746 	  || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6747 	return 0;
6748 
6749       /* Don't reuse register contents from before a setjmp-type
6750 	 function call; on the second return (from the longjmp) it
6751 	 might have been clobbered by a later reuse.  It doesn't
6752 	 seem worthwhile to go and see whether it is actually
6753 	 reused even if that information were readily available;
6754 	 just don't reuse it across the setjmp call.  */
6755       if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6756 	return 0;
6757 
6758       if (NONJUMP_INSN_P (p)
6759 	  /* If we don't want spill regs ...  */
6760 	  && (! (reload_reg_p != 0
6761 		 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6762 	      /* ... then ignore insns introduced by reload; they aren't
6763 		 useful and can cause results in reload_as_needed to be
6764 		 different from what they were when calculating the need for
6765 		 spills.  If we notice an input-reload insn here, we will
6766 		 reject it below, but it might hide a usable equivalent.
6767 		 That makes bad code.  It may even fail: perhaps no reg was
6768 		 spilled for this insn because it was assumed we would find
6769 		 that equivalent.  */
6770 	      || INSN_UID (p) < reload_first_uid))
6771 	{
6772 	  rtx tem;
6773 	  pat = single_set (p);
6774 
6775 	  /* First check for something that sets some reg equal to GOAL.  */
6776 	  if (pat != 0
6777 	      && ((regno >= 0
6778 		   && true_regnum (SET_SRC (pat)) == regno
6779 		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6780 		  ||
6781 		  (regno >= 0
6782 		   && true_regnum (SET_DEST (pat)) == regno
6783 		   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6784 		  ||
6785 		  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6786 		   /* When looking for stack pointer + const,
6787 		      make sure we don't use a stack adjust.  */
6788 		   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6789 		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6790 		  || (goal_mem
6791 		      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6792 		      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6793 		  || (goal_mem
6794 		      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6795 		      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6796 		  /* If we are looking for a constant,
6797 		     and something equivalent to that constant was copied
6798 		     into a reg, we can use that reg.  */
6799 		  || (goal_const && REG_NOTES (p) != 0
6800 		      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6801 		      && ((rtx_equal_p (XEXP (tem, 0), goal)
6802 			   && (valueno
6803 			       = true_regnum (valtry = SET_DEST (pat))) >= 0)
6804 			  || (REG_P (SET_DEST (pat))
6805 			      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6806 			      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6807 			      && CONST_INT_P (goal)
6808 			      && 0 != (goaltry
6809 				       = operand_subword (XEXP (tem, 0), 0, 0,
6810 							  VOIDmode))
6811 			      && rtx_equal_p (goal, goaltry)
6812 			      && (valtry
6813 				  = operand_subword (SET_DEST (pat), 0, 0,
6814 						     VOIDmode))
6815 			      && (valueno = true_regnum (valtry)) >= 0)))
6816 		  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6817 							  NULL_RTX))
6818 		      && REG_P (SET_DEST (pat))
6819 		      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6820 		      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6821 		      && CONST_INT_P (goal)
6822 		      && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6823 							  VOIDmode))
6824 		      && rtx_equal_p (goal, goaltry)
6825 		      && (valtry
6826 			  = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6827 		      && (valueno = true_regnum (valtry)) >= 0)))
6828 	    {
6829 	      if (other >= 0)
6830 		{
6831 		  if (valueno != other)
6832 		    continue;
6833 		}
6834 	      else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6835 		continue;
6836 	      else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6837 					  mode, valueno))
6838 		continue;
6839 	      value = valtry;
6840 	      where = p;
6841 	      break;
6842 	    }
6843 	}
6844     }
6845 
6846   /* We found a previous insn copying GOAL into a suitable other reg VALUE
6847      (or copying VALUE into GOAL, if GOAL is also a register).
6848      Now verify that VALUE is really valid.  */
6849 
6850   /* VALUENO is the register number of VALUE; a hard register.  */
6851 
6852   /* Don't try to re-use something that is killed in this insn.  We want
6853      to be able to trust REG_UNUSED notes.  */
6854   if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6855     return 0;
6856 
6857   /* If we propose to get the value from the stack pointer or if GOAL is
6858      a MEM based on the stack pointer, we need a stable SP.  */
6859   if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6860       || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6861 							  goal)))
6862     need_stable_sp = 1;
6863 
6864   /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
6865   if (GET_MODE (value) != mode)
6866     return 0;
6867 
6868   /* Reject VALUE if it was loaded from GOAL
6869      and is also a register that appears in the address of GOAL.  */
6870 
6871   if (goal_mem && value == SET_DEST (single_set (where))
6872       && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6873 				       goal, (rtx*) 0))
6874     return 0;
6875 
6876   /* Reject registers that overlap GOAL.  */
6877 
6878   if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6879     nregs = hard_regno_nregs[regno][mode];
6880   else
6881     nregs = 1;
6882   valuenregs = hard_regno_nregs[valueno][mode];
6883 
6884   if (!goal_mem && !goal_const
6885       && regno + nregs > valueno && regno < valueno + valuenregs)
6886     return 0;
6887 
6888   /* Reject VALUE if it is one of the regs reserved for reloads.
6889      Reload1 knows how to reuse them anyway, and it would get
6890      confused if we allocated one without its knowledge.
6891      (Now that insns introduced by reload are ignored above,
6892      this case shouldn't happen, but I'm not positive.)  */
6893 
6894   if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6895     {
6896       int i;
6897       for (i = 0; i < valuenregs; ++i)
6898 	if (reload_reg_p[valueno + i] >= 0)
6899 	  return 0;
6900     }
6901 
6902   /* Reject VALUE if it is a register being used for an input reload
6903      even if it is not one of those reserved.  */
6904 
6905   if (reload_reg_p != 0)
6906     {
6907       int i;
6908       for (i = 0; i < n_reloads; i++)
6909 	if (rld[i].reg_rtx != 0 && rld[i].in)
6910 	  {
6911 	    int regno1 = REGNO (rld[i].reg_rtx);
6912 	    int nregs1 = hard_regno_nregs[regno1]
6913 					 [GET_MODE (rld[i].reg_rtx)];
6914 	    if (regno1 < valueno + valuenregs
6915 		&& regno1 + nregs1 > valueno)
6916 	      return 0;
6917 	  }
6918     }
6919 
6920   if (goal_mem)
6921     /* We must treat the frame pointer as varying here, since it can
6922        vary, e.g. in a nonlocal goto as generated by expand_goto.  */
6923     goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6924 
6925   /* Now verify that the values of GOAL and VALUE remain unaltered
6926      until INSN is reached.  */
6927 
6928   p = insn;
6929   while (1)
6930     {
6931       p = PREV_INSN (p);
6932       if (p == where)
6933 	return value;
6934 
6935       /* Don't trust the conversion past a function call
6936 	 if either of the two is in a call-clobbered register, or memory.  */
6937       if (CALL_P (p))
6938 	{
6939 	  int i;
6940 
6941 	  if (goal_mem || need_stable_sp)
6942 	    return 0;
6943 
6944 	  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6945 	    for (i = 0; i < nregs; ++i)
6946 	      if (call_used_regs[regno + i]
6947 		  || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6948 		return 0;
6949 
6950 	  if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6951 	    for (i = 0; i < valuenregs; ++i)
6952 	      if (call_used_regs[valueno + i]
6953 		  || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6954 		return 0;
6955 	}
6956 
6957       if (INSN_P (p))
6958 	{
6959 	  pat = PATTERN (p);
6960 
6961 	  /* Watch out for unspec_volatile, and volatile asms.  */
6962 	  if (volatile_insn_p (pat))
6963 	    return 0;
6964 
6965 	  /* If this insn P stores in either GOAL or VALUE, return 0.
6966 	     If GOAL is a memory ref and this insn writes memory, return 0.
6967 	     If GOAL is a memory ref and its address is not constant,
6968 	     and this insn P changes a register used in GOAL, return 0.  */
6969 
6970 	  if (GET_CODE (pat) == COND_EXEC)
6971 	    pat = COND_EXEC_CODE (pat);
6972 	  if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6973 	    {
6974 	      rtx dest = SET_DEST (pat);
6975 	      while (GET_CODE (dest) == SUBREG
6976 		     || GET_CODE (dest) == ZERO_EXTRACT
6977 		     || GET_CODE (dest) == STRICT_LOW_PART)
6978 		dest = XEXP (dest, 0);
6979 	      if (REG_P (dest))
6980 		{
6981 		  int xregno = REGNO (dest);
6982 		  int xnregs;
6983 		  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6984 		    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6985 		  else
6986 		    xnregs = 1;
6987 		  if (xregno < regno + nregs && xregno + xnregs > regno)
6988 		    return 0;
6989 		  if (xregno < valueno + valuenregs
6990 		      && xregno + xnregs > valueno)
6991 		    return 0;
6992 		  if (goal_mem_addr_varies
6993 		      && reg_overlap_mentioned_for_reload_p (dest, goal))
6994 		    return 0;
6995 		  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6996 		    return 0;
6997 		}
6998 	      else if (goal_mem && MEM_P (dest)
6999 		       && ! push_operand (dest, GET_MODE (dest)))
7000 		return 0;
7001 	      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7002 		       && reg_equiv_memory_loc (regno) != 0)
7003 		return 0;
7004 	      else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7005 		return 0;
7006 	    }
7007 	  else if (GET_CODE (pat) == PARALLEL)
7008 	    {
7009 	      int i;
7010 	      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7011 		{
7012 		  rtx v1 = XVECEXP (pat, 0, i);
7013 		  if (GET_CODE (v1) == COND_EXEC)
7014 		    v1 = COND_EXEC_CODE (v1);
7015 		  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7016 		    {
7017 		      rtx dest = SET_DEST (v1);
7018 		      while (GET_CODE (dest) == SUBREG
7019 			     || GET_CODE (dest) == ZERO_EXTRACT
7020 			     || GET_CODE (dest) == STRICT_LOW_PART)
7021 			dest = XEXP (dest, 0);
7022 		      if (REG_P (dest))
7023 			{
7024 			  int xregno = REGNO (dest);
7025 			  int xnregs;
7026 			  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7027 			    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7028 			  else
7029 			    xnregs = 1;
7030 			  if (xregno < regno + nregs
7031 			      && xregno + xnregs > regno)
7032 			    return 0;
7033 			  if (xregno < valueno + valuenregs
7034 			      && xregno + xnregs > valueno)
7035 			    return 0;
7036 			  if (goal_mem_addr_varies
7037 			      && reg_overlap_mentioned_for_reload_p (dest,
7038 								     goal))
7039 			    return 0;
7040 			  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7041 			    return 0;
7042 			}
7043 		      else if (goal_mem && MEM_P (dest)
7044 			       && ! push_operand (dest, GET_MODE (dest)))
7045 			return 0;
7046 		      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7047 			       && reg_equiv_memory_loc (regno) != 0)
7048 			return 0;
7049 		      else if (need_stable_sp
7050 			       && push_operand (dest, GET_MODE (dest)))
7051 			return 0;
7052 		    }
7053 		}
7054 	    }
7055 
7056 	  if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7057 	    {
7058 	      rtx link;
7059 
7060 	      for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7061 		   link = XEXP (link, 1))
7062 		{
7063 		  pat = XEXP (link, 0);
7064 		  if (GET_CODE (pat) == CLOBBER)
7065 		    {
7066 		      rtx dest = SET_DEST (pat);
7067 
7068 		      if (REG_P (dest))
7069 			{
7070 			  int xregno = REGNO (dest);
7071 			  int xnregs
7072 			    = hard_regno_nregs[xregno][GET_MODE (dest)];
7073 
7074 			  if (xregno < regno + nregs
7075 			      && xregno + xnregs > regno)
7076 			    return 0;
7077 			  else if (xregno < valueno + valuenregs
7078 				   && xregno + xnregs > valueno)
7079 			    return 0;
7080 			  else if (goal_mem_addr_varies
7081 				   && reg_overlap_mentioned_for_reload_p (dest,
7082 								     goal))
7083 			    return 0;
7084 			}
7085 
7086 		      else if (goal_mem && MEM_P (dest)
7087 			       && ! push_operand (dest, GET_MODE (dest)))
7088 			return 0;
7089 		      else if (need_stable_sp
7090 			       && push_operand (dest, GET_MODE (dest)))
7091 			return 0;
7092 		    }
7093 		}
7094 	    }
7095 
7096 #if AUTO_INC_DEC
7097 	  /* If this insn auto-increments or auto-decrements
7098 	     either regno or valueno, return 0 now.
7099 	     If GOAL is a memory ref and its address is not constant,
7100 	     and this insn P increments a register used in GOAL, return 0.  */
7101 	  {
7102 	    rtx link;
7103 
7104 	    for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7105 	      if (REG_NOTE_KIND (link) == REG_INC
7106 		  && REG_P (XEXP (link, 0)))
7107 		{
7108 		  int incno = REGNO (XEXP (link, 0));
7109 		  if (incno < regno + nregs && incno >= regno)
7110 		    return 0;
7111 		  if (incno < valueno + valuenregs && incno >= valueno)
7112 		    return 0;
7113 		  if (goal_mem_addr_varies
7114 		      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7115 							     goal))
7116 		    return 0;
7117 		}
7118 	  }
7119 #endif
7120 	}
7121     }
7122 }
7123 
7124 /* Find a place where INCED appears in an increment or decrement operator
7125    within X, and return the amount INCED is incremented or decremented by.
7126    The value is always positive.  */
7127 
7128 static int
7129 find_inc_amount (rtx x, rtx inced)
7130 {
7131   enum rtx_code code = GET_CODE (x);
7132   const char *fmt;
7133   int i;
7134 
7135   if (code == MEM)
7136     {
7137       rtx addr = XEXP (x, 0);
7138       if ((GET_CODE (addr) == PRE_DEC
7139 	   || GET_CODE (addr) == POST_DEC
7140 	   || GET_CODE (addr) == PRE_INC
7141 	   || GET_CODE (addr) == POST_INC)
7142 	  && XEXP (addr, 0) == inced)
7143 	return GET_MODE_SIZE (GET_MODE (x));
7144       else if ((GET_CODE (addr) == PRE_MODIFY
7145 		|| GET_CODE (addr) == POST_MODIFY)
7146 	       && GET_CODE (XEXP (addr, 1)) == PLUS
7147 	       && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7148 	       && XEXP (addr, 0) == inced
7149 	       && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7150 	{
7151 	  i = INTVAL (XEXP (XEXP (addr, 1), 1));
7152 	  return i < 0 ? -i : i;
7153 	}
7154     }
7155 
7156   fmt = GET_RTX_FORMAT (code);
7157   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7158     {
7159       if (fmt[i] == 'e')
7160 	{
7161 	  int tem = find_inc_amount (XEXP (x, i), inced);
7162 	  if (tem != 0)
7163 	    return tem;
7164 	}
7165       if (fmt[i] == 'E')
7166 	{
7167 	  int j;
7168 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7169 	    {
7170 	      int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7171 	      if (tem != 0)
7172 		return tem;
7173 	    }
7174 	}
7175     }
7176 
7177   return 0;
7178 }
7179 
7180 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7181    REG_INC note in insn INSN.  REGNO must refer to a hard register.  */
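/* Note: INSN may also be a SET or CLOBBER element of a PARALLEL (see the
   caller regno_clobbered_p); that is why it is passed as a plain rtx and
   why non-insns are simply rejected below.  */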
7182 
7183 static int
7184 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7185 			   rtx insn)
7186 {
7187   rtx link;
7188 
7189   if (!AUTO_INC_DEC)
7190     return 0;
7191 
7192   gcc_assert (insn);
7193 
7194   if (! INSN_P (insn))
7195     return 0;
7196 
7197   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7198     if (REG_NOTE_KIND (link) == REG_INC)
7199       {
7200 	unsigned int test = (int) REGNO (XEXP (link, 0));
7201 	if (test >= regno && test < endregno)
7202 	  return 1;
7203       }
7204   return 0;
7205 }
7206 
7207 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7208    If SETS is 1, also consider SETs.  If SETS is 2, enable checking
7209    If SETS is 1, also consider SETs.  If SETS is 2, also consider
7210    REG_INC notes.  REGNO must refer to a hard register.  */
7211 int
7212 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7213 		   int sets)
7214 {
7215   unsigned int nregs, endregno;
7216 
7217   /* regno must be a hard register.  */
7218   gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7219 
7220   nregs = hard_regno_nregs[regno][mode];
7221   endregno = regno + nregs;
7222 
7223   if ((GET_CODE (PATTERN (insn)) == CLOBBER
7224        || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7225       && REG_P (XEXP (PATTERN (insn), 0)))
7226     {
7227       unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7228 
7229       return test >= regno && test < endregno;
7230     }
7231 
7232   if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7233     return 1;
7234 
7235   if (GET_CODE (PATTERN (insn)) == PARALLEL)
7236     {
7237       int i = XVECLEN (PATTERN (insn), 0) - 1;
7238 
7239       for (; i >= 0; i--)
7240 	{
7241 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7242 	  if ((GET_CODE (elt) == CLOBBER
7243 	       || (sets == 1 && GET_CODE (elt) == SET))
7244 	      && REG_P (XEXP (elt, 0)))
7245 	    {
7246 	      unsigned int test = REGNO (XEXP (elt, 0));
7247 
7248 	      if (test >= regno && test < endregno)
7249 		return 1;
7250 	    }
7251 	  if (sets == 2
7252 	      && reg_inc_found_and_valid_p (regno, endregno, elt))
7253 	    return 1;
7254 	}
7255     }
7256 
7257   return 0;
7258 }
7259 
7260 /* Find the low part, with mode MODE, of a hard regno RELOADREG.  */
7261 rtx
7262 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7263 {
7264   int regno;
7265 
7266   if (GET_MODE (reloadreg) == mode)
7267     return reloadreg;
7268 
7269   regno = REGNO (reloadreg);
7270 
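  /* When words within registers are stored big-endian, the low part of a
     multi-register value lives in the highest-numbered of its hard
     registers, so step REGNO forward by the difference in register
     counts.  */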
7271   if (REG_WORDS_BIG_ENDIAN)
7272     regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7273       - (int) hard_regno_nregs[regno][mode];
7274 
7275   return gen_rtx_REG (mode, regno);
7276 }
7277 
7278 static const char *const reload_when_needed_name[] =
7279 {
7280   "RELOAD_FOR_INPUT",
7281   "RELOAD_FOR_OUTPUT",
7282   "RELOAD_FOR_INSN",
7283   "RELOAD_FOR_INPUT_ADDRESS",
7284   "RELOAD_FOR_INPADDR_ADDRESS",
7285   "RELOAD_FOR_OUTPUT_ADDRESS",
7286   "RELOAD_FOR_OUTADDR_ADDRESS",
7287   "RELOAD_FOR_OPERAND_ADDRESS",
7288   "RELOAD_FOR_OPADDR_ADDR",
7289   "RELOAD_OTHER",
7290   "RELOAD_FOR_OTHER_ADDRESS"
7291 };
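/* Note: the order of the strings above must match enum reload_type
   (see reload.h), since the array is indexed by rld[r].when_needed.  */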
7292 
7293 /* These functions are used to print the variables set by `find_reloads'.  */
7294 
7295 DEBUG_FUNCTION void
7296 debug_reload_to_stream (FILE *f)
7297 {
7298   int r;
7299   const char *prefix;
7300 
7301   if (! f)
7302     f = stderr;
7303   for (r = 0; r < n_reloads; r++)
7304     {
7305       fprintf (f, "Reload %d: ", r);
7306 
7307       if (rld[r].in != 0)
7308 	{
7309 	  fprintf (f, "reload_in (%s) = ",
7310 		   GET_MODE_NAME (rld[r].inmode));
7311 	  print_inline_rtx (f, rld[r].in, 24);
7312 	  fprintf (f, "\n\t");
7313 	}
7314 
7315       if (rld[r].out != 0)
7316 	{
7317 	  fprintf (f, "reload_out (%s) = ",
7318 		   GET_MODE_NAME (rld[r].outmode));
7319 	  print_inline_rtx (f, rld[r].out, 24);
7320 	  fprintf (f, "\n\t");
7321 	}
7322 
7323       fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7324 
7325       fprintf (f, "%s (opnum = %d)",
7326 	       reload_when_needed_name[(int) rld[r].when_needed],
7327 	       rld[r].opnum);
7328 
7329       if (rld[r].optional)
7330 	fprintf (f, ", optional");
7331 
7332       if (rld[r].nongroup)
7333 	fprintf (f, ", nongroup");
7334 
7335       if (rld[r].inc != 0)
7336 	fprintf (f, ", inc by %d", rld[r].inc);
7337 
7338       if (rld[r].nocombine)
7339 	fprintf (f, ", can't combine");
7340 
7341       if (rld[r].secondary_p)
7342 	fprintf (f, ", secondary_reload_p");
7343 
7344       if (rld[r].in_reg != 0)
7345 	{
7346 	  fprintf (f, "\n\treload_in_reg: ");
7347 	  print_inline_rtx (f, rld[r].in_reg, 24);
7348 	}
7349 
7350       if (rld[r].out_reg != 0)
7351 	{
7352 	  fprintf (f, "\n\treload_out_reg: ");
7353 	  print_inline_rtx (f, rld[r].out_reg, 24);
7354 	}
7355 
7356       if (rld[r].reg_rtx != 0)
7357 	{
7358 	  fprintf (f, "\n\treload_reg_rtx: ");
7359 	  print_inline_rtx (f, rld[r].reg_rtx, 24);
7360 	}
7361 
7362       prefix = "\n\t";
7363       if (rld[r].secondary_in_reload != -1)
7364 	{
7365 	  fprintf (f, "%ssecondary_in_reload = %d",
7366 		   prefix, rld[r].secondary_in_reload);
7367 	  prefix = ", ";
7368 	}
7369 
7370       if (rld[r].secondary_out_reload != -1)
7371 	fprintf (f, "%ssecondary_out_reload = %d\n",
7372 		 prefix, rld[r].secondary_out_reload);
7373 
7374       prefix = "\n\t";
7375       if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7376 	{
7377 	  fprintf (f, "%ssecondary_in_icode = %s", prefix,
7378 		   insn_data[rld[r].secondary_in_icode].name);
7379 	  prefix = ", ";
7380 	}
7381 
7382       if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7383 	fprintf (f, "%ssecondary_out_icode = %s", prefix,
7384 		 insn_data[rld[r].secondary_out_icode].name);
7385 
7386       fprintf (f, "\n");
7387     }
7388 }
7389 
7390 DEBUG_FUNCTION void
7391 debug_reload (void)
7392 {
7393   debug_reload_to_stream (stderr);
7394 }
7395