xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/reload.c (revision 5c422d1814c2c024ae90981eeff62cc81ef02338)
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2    Copyright (C) 1987-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains subroutines used only from the file reload1.c.
21    It knows how to scan one insn for operands and values
22    that need to be copied into registers to make valid code.
23    It also finds other operands and values which are valid
24    but for which equivalent values in registers exist and
25    ought to be used instead.
26 
27    Before processing the first insn of the function, call `init_reload'.
28    init_reload actually has to be called earlier anyway.
29 
30    To scan an insn, call `find_reloads'.  This does two things:
31    1. sets up tables describing which values must be reloaded
32    for this insn, and what kind of hard regs they must be reloaded into;
33    2. optionally records the locations where those values appear in
34    the data, so they can be replaced properly later.
35    This is done only if the second arg to `find_reloads' is nonzero.
36 
37    The third arg to `find_reloads' specifies the number of levels
38    of indirect addressing supported by the machine.  If it is zero,
39    indirect addressing is not valid.  If it is one, (MEM (REG n))
40    is valid even if (REG n) did not get a hard register; if it is two,
41    (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42    hard register, and similarly for higher values.
43 
44    Then you must choose the hard regs to reload those pseudo regs into,
45    and generate appropriate load insns before this insn and perhaps
46    also store insns after this insn.  Set up the array `reload_reg_rtx'
47    to contain the REG rtx's for the registers you used.  In some
48    cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49    for certain reloads.  Then that tells you which register to use,
50    so you do not need to allocate one.  But you still do need to add extra
51    instructions to copy the value into and out of that register.
52 
53    Finally you must call `subst_reloads' to substitute the reload reg rtx's
54    into the locations already recorded.
55 
56 NOTE SIDE EFFECTS:
57 
58    find_reloads can alter the operands of the instruction it is called on.
59 
60    1. Two operands of any sort may be interchanged, if they are in a
61    commutative instruction.
62    This happens only if find_reloads thinks the instruction will compile
63    better that way.
64 
65    2. Pseudo-registers that are equivalent to constants are replaced
66    with those constants if they are not in hard registers.
67 
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71 
72 Using a reload register for several reloads in one insn:
73 
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77 
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81 
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload.  */
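
/* As an illustration of the protocol described above, a driver (reload1.c)
   handles each insn roughly as in the sketch below.  This is a minimal
   sketch only: choosing the hard registers and emitting the copy insns
   around the original insn are far more involved in practice.  */
#if 0
static void
process_insn_reloads_sketch (rtx_insn *insn, int ind_levels)
{
  /* 1. Scan the insn.  A nonzero second argument asks find_reloads to
     record the locations that will need to be patched later.  */
  find_reloads (insn, 1, ind_levels, 0, NULL);

  /* 2. Choose a hard reg for every reload that did not already get one
     (find_reloads may have filled in reg_rtx itself), then emit the load
     insns before INSN and any store insns after it.  */
  for (int i = 0; i < n_reloads; i++)
    if (rld[i].reg_rtx == NULL_RTX)
      rld[i].reg_rtx = gen_rtx_REG (rld[i].inmode /* or outmode */,
				    /* chosen hard regno */ 0);

  /* 3. Substitute the chosen reload regs into the recorded locations.  */
  subst_reloads (insn);
}
#endif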
85 
86 #define REG_OK_STRICT
87 
88 /* We do not enable this with CHECKING_P, since it is awfully slow.  */
89 #undef DEBUG_RELOAD
90 
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "memmodel.h"
100 #include "tm_p.h"
101 #include "optabs.h"
102 #include "regs.h"
103 #include "ira.h"
104 #include "recog.h"
105 #include "rtl-error.h"
106 #include "reload.h"
107 #include "addresses.h"
108 #include "function-abi.h"
109 
110 /* True if X is a constant that can be forced into the constant pool.
111    MODE is the mode of the operand, or VOIDmode if not known.  */
112 #define CONST_POOL_OK_P(MODE, X)		\
113   ((MODE) != VOIDmode				\
114    && CONSTANT_P (X)				\
115    && GET_CODE (X) != HIGH			\
116    && !targetm.cannot_force_const_mem (MODE, X))
117 
118 /* True if RCLASS is a non-empty register class that has too few registers
119    to be safely used as a reload target class.  */
120 
121 static inline bool
122 small_register_class_p (reg_class_t rclass)
123 {
124   return (reg_class_size [(int) rclass] == 1
125 	  || (reg_class_size [(int) rclass] >= 1
126 	      && targetm.class_likely_spilled_p (rclass)));
127 }
128 
129 
130 /* All reloads of the current insn are recorded here.  See reload.h for
131    comments.  */
132 int n_reloads;
133 struct reload rld[MAX_RELOADS];
134 
135 /* All the "earlyclobber" operands of the current insn
136    are recorded here.  */
137 int n_earlyclobbers;
138 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139 
140 int reload_n_operands;
141 
142 /* Replacing reloads.
143 
144    If `replace_reloads' is nonzero, then as each reload is recorded
145    an entry is made for it in the table `replacements'.
146    Then later `subst_reloads' can look through that table and
147    perform all the replacements needed.  */
148 
149 /* Nonzero means record the places to replace.  */
150 static int replace_reloads;
151 
152 /* Each replacement is recorded with a structure like this.  */
153 struct replacement
154 {
155   rtx *where;			/* Location to store in */
156   int what;			/* which reload this is for */
157   machine_mode mode;	/* mode it must have */
158 };
159 
160 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
161 
162 /* Number of replacements currently recorded.  */
163 static int n_replacements;
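
/* For instance, with replace_reloads nonzero, recording location LOC for
   reload number RELOADNUM in mode MODE amounts to (a minimal sketch of what
   push_replacement, defined further below, does):

     replacements[n_replacements].where = loc;
     replacements[n_replacements].what = reloadnum;
     replacements[n_replacements].mode = mode;
     n_replacements++;

   subst_reloads later walks this table and stores each chosen reload
   register, in the recorded mode, through the recorded location.  */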
164 
165 /* Used to track what is modified by an operand.  */
166 struct decomposition
167 {
168   int reg_flag;		/* Nonzero if referencing a register.  */
169   int safe;		/* Nonzero if this can't conflict with anything.  */
170   rtx base;		/* Base address for MEM.  */
171   poly_int64_pod start;	/* Starting offset or register number.  */
172   poly_int64_pod end;	/* Ending offset or register number.  */
173 };
174 
175 /* Save MEMs needed to copy from one class of registers to another.  One MEM
176    is used per mode, but normally only one or two modes are ever used.
177 
178    We keep two versions, before and after register elimination.  The one
179    after register elimination is recorded separately for each operand.  This
180    is done in case the address is not valid, to be sure that we reload each
181    one separately.  */
182 
183 static rtx secondary_memlocs[NUM_MACHINE_MODES];
184 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
185 static int secondary_memlocs_elim_used = 0;
186 
187 /* The instruction we are doing reloads for;
188    so we can test whether a register dies in it.  */
189 static rtx_insn *this_insn;
190 
191 /* Nonzero if this instruction is a user-specified asm with operands.  */
192 static int this_insn_is_asm;
193 
194 /* If hard_regs_live_known is nonzero,
195    we can tell which hard regs are currently live,
196    at least enough to succeed in choosing dummy reloads.  */
197 static int hard_regs_live_known;
198 
199 /* Indexed by hard reg number,
200    element is nonnegative if hard reg has been spilled.
201    This vector is passed to `find_reloads' as an argument
202    and is not changed here.  */
203 static short *static_reload_reg_p;
204 
205 /* Set to 1 in subst_reg_equivs if it changes anything.  */
206 static int subst_reg_equivs_changed;
207 
208 /* On return from push_reload, holds the reload-number for the OUT
209    operand, which can differ from the reload-number for the input operand.  */
210 static int output_reloadnum;
211 
212   /* Compare two RTX's.  */
213 #define MATCHES(x, y) \
214  (x == y || (x != 0 && (REG_P (x)				\
215 			? REG_P (y) && REGNO (x) == REGNO (y)	\
216 			: rtx_equal_p (x, y) && ! side_effects_p (x))))
217 
218   /* Indicates if two reload purposes are for similar enough things that we
219      can merge their reloads.  */
220 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
221   ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
222    || ((when1) == (when2) && (op1) == (op2))		\
223    || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
224    || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
225        && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
226    || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
227        && (when2) == RELOAD_FOR_OTHER_ADDRESS))
228 
229   /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
230 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
231   ((when1) != (when2)					\
232    || ! ((op1) == (op2)					\
233 	 || (when1) == RELOAD_FOR_INPUT			\
234 	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
235 	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
236 
237   /* If we are going to reload an address, compute the reload type to
238      use.  */
239 #define ADDR_TYPE(type)					\
240   ((type) == RELOAD_FOR_INPUT_ADDRESS			\
241    ? RELOAD_FOR_INPADDR_ADDRESS				\
242    : ((type) == RELOAD_FOR_OUTPUT_ADDRESS		\
243       ? RELOAD_FOR_OUTADDR_ADDRESS			\
244       : (type)))
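
/* As a worked example, assuming both reloads are for operand 0:

     MERGABLE_RELOADS (RELOAD_FOR_INPUT_ADDRESS, RELOAD_OTHER, 0, 0) is true
     MERGE_TO_OTHER (RELOAD_FOR_INPUT_ADDRESS, RELOAD_OTHER, 0, 0) is true
     ADDR_TYPE (RELOAD_FOR_INPUT_ADDRESS) == RELOAD_FOR_INPADDR_ADDRESS

   i.e. the two reloads may be merged, but the merged reload must then be
   classified as RELOAD_OTHER, and ADDR_TYPE gives the type to use if the
   address within a RELOAD_FOR_INPUT_ADDRESS reload itself needs
   reloading.  */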
245 
246 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
247 				  machine_mode, enum reload_type,
248 				  enum insn_code *, secondary_reload_info *);
249 static enum reg_class find_valid_class (machine_mode, machine_mode,
250 					int, unsigned int);
251 static void push_replacement (rtx *, int, machine_mode);
252 static void dup_replacements (rtx *, rtx *);
253 static void combine_reloads (void);
254 static int find_reusable_reload (rtx *, rtx, enum reg_class,
255 				 enum reload_type, int, int);
256 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
257 			      machine_mode, reg_class_t, int, int);
258 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
259 static struct decomposition decompose (rtx);
260 static int immune_p (rtx, rtx, struct decomposition);
261 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
262 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
263 				rtx_insn *, int *);
264 static rtx make_memloc (rtx, int);
265 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
266 					      addr_space_t, rtx *);
267 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
268 				 int, enum reload_type, int, rtx_insn *);
269 static rtx subst_reg_equivs (rtx, rtx_insn *);
270 static rtx subst_indexed_address (rtx);
271 static void update_auto_inc_notes (rtx_insn *, int, int);
272 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
273 				   enum rtx_code, enum rtx_code, rtx *,
274 				   int, enum reload_type,int, rtx_insn *);
275 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
276 				       machine_mode, int,
277 				       enum reload_type, int);
278 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
279 					int, rtx_insn *, int *);
280 static void copy_replacements_1 (rtx *, rtx *, int);
281 static poly_int64 find_inc_amount (rtx, rtx);
282 static int refers_to_mem_for_reload_p (rtx);
283 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
284 					 rtx, rtx *);
285 
286 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
287    list yet.  */
288 
289 static void
290 push_reg_equiv_alt_mem (int regno, rtx mem)
291 {
292   rtx it;
293 
294   for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
295     if (rtx_equal_p (XEXP (it, 0), mem))
296       return;
297 
298   reg_equiv_alt_mem_list (regno)
299     = alloc_EXPR_LIST (REG_EQUIV, mem,
300 		       reg_equiv_alt_mem_list (regno));
301 }
302 
303 /* Determine if any secondary reloads are needed for loading (if IN_P is
304    nonzero) or storing (if IN_P is zero) X to or from a reload register of
305    register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
306    are needed, push them.
307 
308    Return the reload number of the secondary reload we made, or -1 if
309    we didn't need one.  *PICODE is set to the insn_code to use if we do
310    need a secondary reload.  */
311 
312 static int
313 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
314 		       enum reg_class reload_class,
315 		       machine_mode reload_mode, enum reload_type type,
316 		       enum insn_code *picode, secondary_reload_info *prev_sri)
317 {
318   enum reg_class rclass = NO_REGS;
319   enum reg_class scratch_class;
320   machine_mode mode = reload_mode;
321   enum insn_code icode = CODE_FOR_nothing;
322   enum insn_code t_icode = CODE_FOR_nothing;
323   enum reload_type secondary_type;
324   int s_reload, t_reload = -1;
325   const char *scratch_constraint;
326   secondary_reload_info sri;
327 
328   if (type == RELOAD_FOR_INPUT_ADDRESS
329       || type == RELOAD_FOR_OUTPUT_ADDRESS
330       || type == RELOAD_FOR_INPADDR_ADDRESS
331       || type == RELOAD_FOR_OUTADDR_ADDRESS)
332     secondary_type = type;
333   else
334     secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
335 
336   *picode = CODE_FOR_nothing;
337 
338   /* If X is a paradoxical SUBREG, use the inner value to determine both the
339      mode and object being reloaded.  */
340   if (paradoxical_subreg_p (x))
341     {
342       x = SUBREG_REG (x);
343       reload_mode = GET_MODE (x);
344     }
345 
346   /* If X is a pseudo-register that has an equivalent MEM (actually, if it
347      is still a pseudo-register by now, it *must* have an equivalent MEM
348      but we don't want to assume that), use that equivalent when seeing if
349      a secondary reload is needed since whether or not a reload is needed
350      might be sensitive to the form of the MEM.  */
351 
352   if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
353       && reg_equiv_mem (REGNO (x)))
354     x = reg_equiv_mem (REGNO (x));
355 
356   sri.icode = CODE_FOR_nothing;
357   sri.prev_sri = prev_sri;
358   rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
359 						      reload_mode, &sri);
360   icode = (enum insn_code) sri.icode;
361 
362   /* If we don't need any secondary registers, done.  */
363   if (rclass == NO_REGS && icode == CODE_FOR_nothing)
364     return -1;
365 
366   if (rclass != NO_REGS)
367     t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
368 				      reload_mode, type, &t_icode, &sri);
369 
370   /* If we will be using an insn, the secondary reload is for a
371      scratch register.  */
372 
373   if (icode != CODE_FOR_nothing)
374     {
375       /* If IN_P is nonzero, the reload register will be the output in
376 	 operand 0.  If IN_P is zero, the reload register will be the input
377 	 in operand 1.  Outputs should have an initial "=", which we must
378 	 skip.  */
379 
380       /* ??? It would be useful to be able to handle only two, or more than
381 	 three, operands, but for now we can only handle the case of having
382 	 exactly three: output, input and one temp/scratch.  */
383       gcc_assert (insn_data[(int) icode].n_operands == 3);
384 
385       /* ??? We currently have no way to represent a reload that needs
386 	 an icode to reload from an intermediate tertiary reload register.
387 	 We should probably have a new field in struct reload to tag a
388 	 chain of scratch operand reloads onto.   */
389       gcc_assert (rclass == NO_REGS);
390 
391       scratch_constraint = insn_data[(int) icode].operand[2].constraint;
392       gcc_assert (*scratch_constraint == '=');
393       scratch_constraint++;
394       if (*scratch_constraint == '&')
395 	scratch_constraint++;
396       scratch_class = (reg_class_for_constraint
397 		       (lookup_constraint (scratch_constraint)));
398 
399       rclass = scratch_class;
400       mode = insn_data[(int) icode].operand[2].mode;
401     }
402 
403   /* This case isn't valid, so fail.  Reload is allowed to use the same
404      register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
405      in the case of a secondary register, we actually need two different
406      registers for correct code.  We fail here to prevent the possibility of
407      silently generating incorrect code later.
408 
409      The convention is that secondary input reloads are valid only if the
410      secondary_class is different from class.  If you have such a case, you
411      cannot use secondary reloads, you must work around the problem some
412      other way.
413 
414      Allow this when a reload_in/out pattern is being used.  I.e. assume
415      that the generated code handles this case.  */
416 
417   gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
418 	      || t_icode != CODE_FOR_nothing);
419 
420   /* See if we can reuse an existing secondary reload.  */
421   for (s_reload = 0; s_reload < n_reloads; s_reload++)
422     if (rld[s_reload].secondary_p
423 	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
424 	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
425 	&& ((in_p && rld[s_reload].inmode == mode)
426 	    || (! in_p && rld[s_reload].outmode == mode))
427 	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
428 	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
429 	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
430 	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
431 	&& (small_register_class_p (rclass)
432 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
433 	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
434 			     opnum, rld[s_reload].opnum))
435       {
436 	if (in_p)
437 	  rld[s_reload].inmode = mode;
438 	if (! in_p)
439 	  rld[s_reload].outmode = mode;
440 
441 	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
442 	  rld[s_reload].rclass = rclass;
443 
444 	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
445 	rld[s_reload].optional &= optional;
446 	rld[s_reload].secondary_p = 1;
447 	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
448 			    opnum, rld[s_reload].opnum))
449 	  rld[s_reload].when_needed = RELOAD_OTHER;
450 
451 	break;
452       }
453 
454   if (s_reload == n_reloads)
455     {
456       /* If we need a memory location to copy between the two reload regs,
457 	 set it up now.  Note that we do the input case before making
458 	 the reload and the output case after.  This is due to the
459 	 way reloads are output.  */
460 
461       if (in_p && icode == CODE_FOR_nothing
462 	  && targetm.secondary_memory_needed (mode, rclass, reload_class))
463 	{
464 	  get_secondary_mem (x, reload_mode, opnum, type);
465 
466 	  /* We may have just added new reloads.  Make sure we add
467 	     the new reload at the end.  */
468 	  s_reload = n_reloads;
469 	}
470 
471       /* We need to make a new secondary reload for this register class.  */
472       rld[s_reload].in = rld[s_reload].out = 0;
473       rld[s_reload].rclass = rclass;
474 
475       rld[s_reload].inmode = in_p ? mode : VOIDmode;
476       rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
477       rld[s_reload].reg_rtx = 0;
478       rld[s_reload].optional = optional;
479       rld[s_reload].inc = 0;
480       /* Maybe we could combine these, but it seems too tricky.  */
481       rld[s_reload].nocombine = 1;
482       rld[s_reload].in_reg = 0;
483       rld[s_reload].out_reg = 0;
484       rld[s_reload].opnum = opnum;
485       rld[s_reload].when_needed = secondary_type;
486       rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
487       rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
488       rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
489       rld[s_reload].secondary_out_icode
490 	= ! in_p ? t_icode : CODE_FOR_nothing;
491       rld[s_reload].secondary_p = 1;
492 
493       n_reloads++;
494 
495       if (! in_p && icode == CODE_FOR_nothing
496 	  && targetm.secondary_memory_needed (mode, reload_class, rclass))
497 	get_secondary_mem (x, mode, opnum, type);
498     }
499 
500   *picode = icode;
501   return s_reload;
502 }
503 
504 /* If a secondary reload is needed, return its class.  If both an intermediate
505    register and a scratch register are needed, we return the class of the
506    intermediate register.  */
507 reg_class_t
508 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
509 			rtx x)
510 {
511   enum insn_code icode;
512   secondary_reload_info sri;
513 
514   sri.icode = CODE_FOR_nothing;
515   sri.prev_sri = NULL;
516   rclass
517     = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
518   icode = (enum insn_code) sri.icode;
519 
520   /* If there are no secondary reloads at all, we return NO_REGS.
521      If an intermediate register is needed, we return its class.  */
522   if (icode == CODE_FOR_nothing || rclass != NO_REGS)
523     return rclass;
524 
525   /* No intermediate register is needed, but we have a special reload
526      pattern, which we assume for now needs a scratch register.  */
527   return scratch_reload_class (icode);
528 }
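
/* For example, on a target whose TARGET_SECONDARY_RELOAD hook demands an
   intermediate general register when loading memory into its floating-point
   registers (the class names here are hypothetical), one would expect

     secondary_reload_class (true, FP_REGS, SFmode, some_mem) == GENERAL_REGS
     secondary_reload_class (true, GENERAL_REGS, SFmode, some_mem) == NO_REGS

   and push_secondary_reload above would push one extra reload of class
   GENERAL_REGS in the first case.  */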
529 
530 /* ICODE is the insn_code of a reload pattern.  Check that it has exactly
531    three operands, verify that operand 2 is an output operand, and return
532    its register class.
533    ??? We'd like to be able to handle any pattern with at least 2 operands,
534    for zero or more scratch registers, but that needs more infrastructure.  */
535 enum reg_class
536 scratch_reload_class (enum insn_code icode)
537 {
538   const char *scratch_constraint;
539   enum reg_class rclass;
540 
541   gcc_assert (insn_data[(int) icode].n_operands == 3);
542   scratch_constraint = insn_data[(int) icode].operand[2].constraint;
543   gcc_assert (*scratch_constraint == '=');
544   scratch_constraint++;
545   if (*scratch_constraint == '&')
546     scratch_constraint++;
547   rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
548   gcc_assert (rclass != NO_REGS);
549   return rclass;
550 }
551 
552 /* Return a memory location that will be used to copy X in mode MODE.
553    If we haven't already made a location for this mode in this insn,
554    call find_reloads_address on the location being returned.  */
555 
556 rtx
557 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
558 		   int opnum, enum reload_type type)
559 {
560   rtx loc;
561   int mem_valid;
562 
563   /* By default, if MODE is narrower than a word, widen it to a word.
564      This is required because most machines that require these memory
565      locations do not support short loads and stores from all registers
566      (e.g., FP registers).  */
567 
568   mode = targetm.secondary_memory_needed_mode (mode);
569 
570   /* If we already have made a MEM for this operand in MODE, return it.  */
571   if (secondary_memlocs_elim[(int) mode][opnum] != 0)
572     return secondary_memlocs_elim[(int) mode][opnum];
573 
574   /* If this is the first time we've tried to get a MEM for this mode,
575      allocate a new one.  `something_changed' in reload will get set
576      by noticing that the frame size has changed.  */
577 
578   if (secondary_memlocs[(int) mode] == 0)
579     {
580 #ifdef SECONDARY_MEMORY_NEEDED_RTX
581       secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
582 #else
583       secondary_memlocs[(int) mode]
584 	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
585 #endif
586     }
587 
588   /* Get a version of the address doing any eliminations needed.  If that
589      didn't give us a new MEM, make a new one if it isn't valid.  */
590 
591   loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
592   mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
593 						  MEM_ADDR_SPACE (loc));
594 
595   if (! mem_valid && loc == secondary_memlocs[(int) mode])
596     loc = copy_rtx (loc);
597 
598   /* The only time the call below will do anything is if the stack
599      offset is too large.  In that case IND_LEVELS doesn't matter, so we
600      can just pass a zero.  Adjust the type to be the address of the
601      corresponding object.  If the address was valid, save the eliminated
602      address.  If it wasn't valid, we need to make a reload each time, so
603      don't save it.  */
604 
605   if (! mem_valid)
606     {
607       type =  (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
608 	       : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
609 	       : RELOAD_OTHER);
610 
611       find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
612 			    opnum, type, 0, 0);
613     }
614 
615   secondary_memlocs_elim[(int) mode][opnum] = loc;
616   if (secondary_memlocs_elim_used <= (int)mode)
617     secondary_memlocs_elim_used = (int)mode + 1;
618   return loc;
619 }
620 
621 /* Clear any secondary memory locations we've made.  */
622 
623 void
624 clear_secondary_mem (void)
625 {
626   memset (secondary_memlocs, 0, sizeof secondary_memlocs);
627 }
628 
629 
630 /* Find the largest class which has at least one register valid in
631    mode INNER, and which for every such register, that register number
632    plus N is also valid in OUTER (if in range) and is cheap to move
633    into REGNO.  Such a class must exist.  */
634 
635 static enum reg_class
636 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
637 		  machine_mode inner ATTRIBUTE_UNUSED, int n,
638 		  unsigned int dest_regno ATTRIBUTE_UNUSED)
639 {
640   int best_cost = -1;
641   int rclass;
642   int regno;
643   enum reg_class best_class = NO_REGS;
644   enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
645   unsigned int best_size = 0;
646   int cost;
647 
648   for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
649     {
650       int bad = 0;
651       int good = 0;
652       for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
653 	if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
654 	  {
655 	    if (targetm.hard_regno_mode_ok (regno, inner))
656 	      {
657 		good = 1;
658 		if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
659 		    && !targetm.hard_regno_mode_ok (regno + n, outer))
660 		  bad = 1;
661 	      }
662 	  }
663 
664       if (bad || !good)
665 	continue;
666       cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
667 
668       if ((reg_class_size[rclass] > best_size
669 	   && (best_cost < 0 || best_cost >= cost))
670 	  || best_cost > cost)
671 	{
672 	  best_class = (enum reg_class) rclass;
673 	  best_size = reg_class_size[rclass];
674 	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
675 					  dest_class);
676 	}
677     }
678 
679   gcc_assert (best_size != 0);
680 
681   return best_class;
682 }
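
/* For example, if push_reload below must reload the inner register of
   (subreg:SI (reg:DI R) 4) on a hypothetical 32-bit target where DImode
   occupies two consecutive hard registers, the subreg's regno offset is 1
   and the call is

     find_valid_class (SImode, DImode, 1, REGNO (R));

   i.e. we want the largest class containing a register valid in DImode for
   which the next-higher register, when it is also in the class, is valid in
   SImode and cheap to move into R.  */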
683 
684 /* We are trying to reload a subreg of something that is not a register.
685    Find the largest class which contains only registers valid in
686    mode MODE.  OUTER is the mode of the subreg, DEST_CLASS the class in
687    which we would eventually like to obtain the object.  */
688 
689 static enum reg_class
690 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
691 		    machine_mode mode ATTRIBUTE_UNUSED,
692 		    enum reg_class dest_class ATTRIBUTE_UNUSED)
693 {
694   int best_cost = -1;
695   int rclass;
696   int regno;
697   enum reg_class best_class = NO_REGS;
698   unsigned int best_size = 0;
699   int cost;
700 
701   for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
702     {
703       unsigned int computed_rclass_size = 0;
704 
705       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
706         {
707           if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
708 	      && targetm.hard_regno_mode_ok (regno, mode))
709             computed_rclass_size++;
710         }
711 
712       cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
713 
714       if ((computed_rclass_size > best_size
715 	   && (best_cost < 0 || best_cost >= cost))
716 	  || best_cost > cost)
717 	{
718 	  best_class = (enum reg_class) rclass;
719 	  best_size = computed_rclass_size;
720 	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
721 					  dest_class);
722 	}
723     }
724 
725   gcc_assert (best_size != 0);
726 
727 #ifdef LIMIT_RELOAD_CLASS
728   best_class = LIMIT_RELOAD_CLASS (mode, best_class);
729 #endif
730   return best_class;
731 }
732 
733 /* Return the number of a previously made reload that can be combined with
734    a new one, or n_reloads if none of the existing reloads can be used.
735    OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
736    push_reload; they determine the kind of the new reload that we try to
737    combine.  P_IN points to the corresponding value of IN, which can be
738    modified by this function.
739    DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */
740 
741 static int
742 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
743 		      enum reload_type type, int opnum, int dont_share)
744 {
745   rtx in = *p_in;
746   int i;
747   /* We can't merge two reloads if the output of either one is
748      earlyclobbered.  */
749 
750   if (earlyclobber_operand_p (out))
751     return n_reloads;
752 
753   /* We can use an existing reload if the class is right
754      and at least one of IN and OUT is a match
755      and the other is at worst neutral.
756      (A zero compared against anything is neutral.)
757 
758      For targets with small register classes, don't use existing reloads
759      unless they are for the same thing since that can cause us to need
760      more reload registers than we otherwise would.  */
761 
762   for (i = 0; i < n_reloads; i++)
763     if ((reg_class_subset_p (rclass, rld[i].rclass)
764 	 || reg_class_subset_p (rld[i].rclass, rclass))
765 	/* If the existing reload has a register, it must fit our class.  */
766 	&& (rld[i].reg_rtx == 0
767 	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
768 				  true_regnum (rld[i].reg_rtx)))
769 	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
770 	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
771 	    || (out != 0 && MATCHES (rld[i].out, out)
772 		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
773 	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
774 	&& (small_register_class_p (rclass)
775 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
776 	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
777       return i;
778 
779   /* Reloading a plain reg for input can match a reload to postincrement
780      that reg, since the postincrement's value is the right value.
781      Likewise, it can match a preincrement reload, since we regard
782      the preincrementation as happening before any ref in this insn
783      to that register.  */
784   for (i = 0; i < n_reloads; i++)
785     if ((reg_class_subset_p (rclass, rld[i].rclass)
786 	 || reg_class_subset_p (rld[i].rclass, rclass))
787 	/* If the existing reload has a register, it must fit our
788 	   class.  */
789 	&& (rld[i].reg_rtx == 0
790 	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
791 				  true_regnum (rld[i].reg_rtx)))
792 	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
793 	&& ((REG_P (in)
794 	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
795 	     && MATCHES (XEXP (rld[i].in, 0), in))
796 	    || (REG_P (rld[i].in)
797 		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
798 		&& MATCHES (XEXP (in, 0), rld[i].in)))
799 	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
800 	&& (small_register_class_p (rclass)
801 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
802 	&& MERGABLE_RELOADS (type, rld[i].when_needed,
803 			     opnum, rld[i].opnum))
804       {
805 	/* Make sure reload_in ultimately has the increment,
806 	   not the plain register.  */
807 	if (REG_P (in))
808 	  *p_in = rld[i].in;
809 	return i;
810       }
811   return n_reloads;
812 }
813 
814 /* Return true if:
815 
816    (a) (subreg:OUTER_MODE REG ...) represents a word or subword subreg
817        of a multiword value; and
818 
819    (b) the number of *words* in REG does not match the number of *registers*
820        in REG.  */
821 
822 static bool
823 complex_word_subreg_p (machine_mode outer_mode, rtx reg)
824 {
825   machine_mode inner_mode = GET_MODE (reg);
826   poly_uint64 reg_words = REG_NREGS (reg) * UNITS_PER_WORD;
827   return (known_le (GET_MODE_SIZE (outer_mode), UNITS_PER_WORD)
828 	  && maybe_gt (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
829 	  && !known_equal_after_align_up (GET_MODE_SIZE (inner_mode),
830 					  reg_words, UNITS_PER_WORD));
831 }
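
/* For instance, on a hypothetical 32-bit target whose floating-point
   registers hold a DFmode value in a single hard register,
   complex_word_subreg_p (SImode, (reg:DF f0)) is true: the outer mode fits
   in one word and the inner value spans two words, yet REG_NREGS is only 1,
   so the words of the value do not map one-to-one onto hard registers.  */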
832 
833 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
834    expression.  MODE is the mode that X will be used in.  OUTPUT is true if
835    the function is invoked for the output part of an enclosing reload.  */
836 
837 static bool
838 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
839 {
840   rtx inner;
841   int regno;
842 
843   /* Only SUBREGs are problematical.  */
844   if (GET_CODE (x) != SUBREG)
845     return false;
846 
847   inner = SUBREG_REG (x);
848 
849   /* If INNER is a constant or PLUS, then INNER will need reloading.  */
850   if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
851     return true;
852 
853   /* If INNER is not a register, then INNER will not need reloading.  */
854   if (!REG_P (inner))
855     return false;
856 
857   regno = REGNO (inner);
858 
859   /* If INNER is not a hard register, then INNER will not need reloading
860      unless it's a mode dependent memory reference.  */
861   if (regno >= FIRST_PSEUDO_REGISTER)
862     return !output
863 	   && reg_equiv_mem (regno) != 0
864 	   && mode_dependent_address_p (XEXP (reg_equiv_mem (regno), 0),
865 					MEM_ADDR_SPACE (reg_equiv_mem (regno)));
866 
867   /* If INNER is not ok for MODE, then INNER will need reloading.  */
868   if (!targetm.hard_regno_mode_ok (subreg_regno (x), mode))
869     return true;
870 
871   /* If this is for an output, and the outer part is a word or smaller,
872      INNER is larger than a word and the number of registers in INNER is
873      not the same as the number of words in INNER, then INNER will need
874      reloading (with an in-out reload).  */
875   return output && complex_word_subreg_p (mode, inner);
876 }
877 
878 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
879    requiring an extra reload register.  The caller has already found that
880    IN contains some reference to REGNO, so check that we can produce the
881    new value in a single step.  E.g. if we have
882    (set (reg r13) (plus (reg r13) (const int 1))), and there is an
883    instruction that adds one to a register, this should succeed.
884    However, if we have something like
885    (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
886    needs to be loaded into a register first, we need a separate reload
887    register.
888    Such PLUS reloads are generated by find_reloads_address_part.
889    The out-of-range PLUS expressions are usually introduced in the instruction
890    patterns by register elimination and substituting pseudos without a home
891    by their function-invariant equivalences.  */
892 static int
893 can_reload_into (rtx in, int regno, machine_mode mode)
894 {
895   rtx dst;
896   rtx_insn *test_insn;
897   int r = 0;
898   struct recog_data_d save_recog_data;
899 
900   /* For matching constraints, we often get notional input reloads where
901      we want to use the original register as the reload register.  I.e.
902      technically this is a non-optional input-output reload, but IN is
903      already a valid register, and has been chosen as the reload register.
904      Speed this up, since it trivially works.  */
905   if (REG_P (in))
906     return 1;
907 
908   /* To test MEMs properly, we'd have to take into account all the reloads
909      that are already scheduled, which can become quite complicated.
910      And since we've already handled address reloads for this MEM, it
911      should always succeed anyway.  */
912   if (MEM_P (in))
913     return 1;
914 
915   /* If we can make a simple SET insn that does the job, everything should
916      be fine.  */
917   dst =  gen_rtx_REG (mode, regno);
918   test_insn = make_insn_raw (gen_rtx_SET (dst, in));
919   save_recog_data = recog_data;
920   if (recog_memoized (test_insn) >= 0)
921     {
922       extract_insn (test_insn);
923       r = constrain_operands (1, get_enabled_alternatives (test_insn));
924     }
925   recog_data = save_recog_data;
926   return r;
927 }
928 
929 /* Record one reload that needs to be performed.
930    IN is an rtx saying where the data are to be found before this instruction.
931    OUT says where they must be stored after the instruction.
932    (IN is zero for data not read, and OUT is zero for data not written.)
933    INLOC and OUTLOC point to the places in the instructions where
934    IN and OUT were found.
935    If IN and OUT are both nonzero, it means the same register must be used
936    to reload both IN and OUT.
937 
938    RCLASS is a register class required for the reloaded data.
939    INMODE is the machine mode that the instruction requires
940    for the reg that replaces IN and OUTMODE is likewise for OUT.
941 
942    If IN is zero, then OUT's location and mode should be passed as
943    INLOC and INMODE.
944 
945    STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
946 
947    OPTIONAL nonzero means this reload does not need to be performed:
948    it can be discarded if that is more convenient.
949 
950    OPNUM and TYPE say what the purpose of this reload is.
951 
952    The return value is the reload-number for this reload.
953 
954    If both IN and OUT are nonzero, in some rare cases we might
955    want to make two separate reloads.  (Actually we never do this now.)
956    Therefore, the reload-number for OUT is stored in
957    output_reloadnum when we return; the return value applies to IN.
958    Usually (presently always), when IN and OUT are nonzero,
959    the two reload-numbers are equal, but the caller should be careful to
960    distinguish them.  */
961 
962 int
963 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
964 	     enum reg_class rclass, machine_mode inmode,
965 	     machine_mode outmode, int strict_low, int optional,
966 	     int opnum, enum reload_type type)
967 {
968   int i;
969   int dont_share = 0;
970   int dont_remove_subreg = 0;
971 #ifdef LIMIT_RELOAD_CLASS
972   rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
973 #endif
974   int secondary_in_reload = -1, secondary_out_reload = -1;
975   enum insn_code secondary_in_icode = CODE_FOR_nothing;
976   enum insn_code secondary_out_icode = CODE_FOR_nothing;
977   enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
978   subreg_in_class = NO_REGS;
979 
980   /* INMODE and/or OUTMODE could be VOIDmode if no mode
981      has been specified for the operand.  In that case,
982      use the operand's mode as the mode to reload.  */
983   if (inmode == VOIDmode && in != 0)
984     inmode = GET_MODE (in);
985   if (outmode == VOIDmode && out != 0)
986     outmode = GET_MODE (out);
987 
988   /* If find_reloads and friends have so far failed to replace a pseudo
989      with its reg_equiv_constant, something went wrong
990      beforehand.
991      Note that it can't simply be done here if we missed it earlier
992      since the constant might need to be pushed into the literal pool
993      and the resulting memref would probably need further
994      reloading.  */
995   if (in != 0 && REG_P (in))
996     {
997       int regno = REGNO (in);
998 
999       gcc_assert (regno < FIRST_PSEUDO_REGISTER
1000 		  || reg_renumber[regno] >= 0
1001 		  || reg_equiv_constant (regno) == NULL_RTX);
1002     }
1003 
1004   /* reg_equiv_constant only contains constants which are obviously
1005      not appropriate as a destination.  So if we would need to replace
1006      the destination pseudo with a constant we are in real
1007      trouble.  */
1008   if (out != 0 && REG_P (out))
1009     {
1010       int regno = REGNO (out);
1011 
1012       gcc_assert (regno < FIRST_PSEUDO_REGISTER
1013 		  || reg_renumber[regno] >= 0
1014 		  || reg_equiv_constant (regno) == NULL_RTX);
1015     }
1016 
1017   /* If we have a read-write operand with an address side-effect,
1018      change either IN or OUT so the side-effect happens only once.  */
1019   if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1020     switch (GET_CODE (XEXP (in, 0)))
1021       {
1022       case POST_INC: case POST_DEC:   case POST_MODIFY:
1023 	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1024 	break;
1025 
1026       case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1027 	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1028 	break;
1029 
1030       default:
1031 	break;
1032       }
1033 
1034   /* If we are reloading a (SUBREG constant ...), really reload just the
1035      inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
1036      If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1037      a pseudo and hence will become a MEM) with M1 wider than M2 and the
1038      register is a pseudo, also reload the inside expression.
1039      For machines that extend byte loads, do this for any SUBREG of a pseudo
1040      where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1041      M2 is an integral mode that gets extended when loaded.
1042      Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1043      where either M1 is not valid for R or M2 is wider than a word but we
1044      only need one register to store an M2-sized quantity in R.
1045      (However, if OUT is nonzero, we need to reload the reg *and*
1046      the subreg, so do nothing here, and let following statement handle it.)
1047 
1048      Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1049      we can't handle it here because CONST_INT does not indicate a mode.
1050 
1051      Similarly, we must reload the inside expression if we have a
1052      STRICT_LOW_PART (presumably, in == out in this case).
1053 
1054      Also reload the inner expression if it does not require a secondary
1055      reload but the SUBREG does.
1056 
1057      Finally, reload the inner expression if it is a register that is in
1058      the class whose registers cannot be referenced in a different size
1059      and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
1060      cannot reload just the inside since we might end up with the wrong
1061      register class.  But if it is inside a STRICT_LOW_PART, we have
1062      no choice, so we hope we do get the right register class there.  */
1063 
1064   scalar_int_mode inner_mode;
1065   if (in != 0 && GET_CODE (in) == SUBREG
1066       && (subreg_lowpart_p (in) || strict_low)
1067       && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
1068 					inmode, rclass)
1069       && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1070       && (CONSTANT_P (SUBREG_REG (in))
1071 	  || GET_CODE (SUBREG_REG (in)) == PLUS
1072 	  || strict_low
1073 	  || (((REG_P (SUBREG_REG (in))
1074 		&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1075 	       || MEM_P (SUBREG_REG (in)))
1076 	      && (paradoxical_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1077 		  || (known_le (GET_MODE_SIZE (inmode), UNITS_PER_WORD)
1078 		      && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (in)),
1079 						 &inner_mode)
1080 		      && GET_MODE_SIZE (inner_mode) <= UNITS_PER_WORD
1081 		      && paradoxical_subreg_p (inmode, inner_mode)
1082 		      && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
1083 		  || (WORD_REGISTER_OPERATIONS
1084 		      && partial_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1085 		      && (known_equal_after_align_down
1086 			  (GET_MODE_SIZE (inmode) - 1,
1087 			   GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1,
1088 			   UNITS_PER_WORD)))))
1089 	  || (REG_P (SUBREG_REG (in))
1090 	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1091 	      /* The case where out is nonzero
1092 		 is handled differently in the following statement.  */
1093 	      && (out == 0 || subreg_lowpart_p (in))
1094 	      && (complex_word_subreg_p (inmode, SUBREG_REG (in))
1095 		  || !targetm.hard_regno_mode_ok (subreg_regno (in), inmode)))
1096 	  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1097 	      && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1098 					  SUBREG_REG (in))
1099 		  == NO_REGS))
1100 	  || (REG_P (SUBREG_REG (in))
1101 	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1102 	      && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (in)),
1103 					 GET_MODE (SUBREG_REG (in)), inmode))))
1104     {
1105 #ifdef LIMIT_RELOAD_CLASS
1106       in_subreg_loc = inloc;
1107 #endif
1108       inloc = &SUBREG_REG (in);
1109       in = *inloc;
1110 
1111       if (!WORD_REGISTER_OPERATIONS
1112 	  && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1113 	  && MEM_P (in))
1114 	/* This is supposed to happen only for paradoxical subregs made by
1115 	   combine.c.  (SUBREG (MEM)) isn't supposed to occur other ways.  */
1116 	gcc_assert (known_le (GET_MODE_SIZE (GET_MODE (in)),
1117 			      GET_MODE_SIZE (inmode)));
1118 
1119       inmode = GET_MODE (in);
1120     }
1121 
1122   /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1123      where M1 is not valid for R if it was not handled by the code above.
1124 
1125      Similar issue for (SUBREG constant ...) if it was not handled by the
1126      code above.  This can happen if SUBREG_BYTE != 0.
1127 
1128      However, we must reload the inner reg *as well as* the subreg in
1129      that case.  */
1130 
1131   if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1132     {
1133       if (REG_P (SUBREG_REG (in)) && HARD_REGISTER_P (SUBREG_REG (in)))
1134 	subreg_in_class
1135 	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1136 			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
1137 						   GET_MODE (SUBREG_REG (in)),
1138 						   SUBREG_BYTE (in),
1139 						   GET_MODE (in)),
1140 			      REGNO (SUBREG_REG (in)));
1141 #if 1 // XXXMRG
1142       else if (REG_P (SUBREG_REG (in))
1143                || GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1144 #else
1145       else if (CONSTANT_P (SUBREG_REG (in))
1146                || GET_CODE (SUBREG_REG (in)) == PLUS)
1147 #endif
1148 	subreg_in_class = find_valid_class_1 (inmode,
1149 					      GET_MODE (SUBREG_REG (in)),
1150 					      rclass);
1151 
1152       /* This relies on the fact that emit_reload_insns outputs the
1153 	 instructions for input reloads of type RELOAD_OTHER in the same
1154 	 order as the reloads.  Thus if the outer reload is also of type
1155 	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1156 	 output before the outer reload.  */
1157       push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1158 		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1159       dont_remove_subreg = 1;
1160     }
1161 
1162   /* Similarly for paradoxical and problematical SUBREGs on the output.
1163      Note that there is no reason we need worry about the previous value
1164      of SUBREG_REG (out); even if wider than out, storing in a subreg is
1165      entitled to clobber it all (except in the case of a word mode subreg
1166    or of a STRICT_LOW_PART; in that latter case the constraint should
1167      label it input-output.)  */
1168   if (out != 0 && GET_CODE (out) == SUBREG
1169       && (subreg_lowpart_p (out) || strict_low)
1170       && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (out)),
1171 					outmode, rclass)
1172       && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1173       && (CONSTANT_P (SUBREG_REG (out))
1174 	  || strict_low
1175 	  || (((REG_P (SUBREG_REG (out))
1176 		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1177 	       || MEM_P (SUBREG_REG (out)))
1178 	      && (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1179 		  || (WORD_REGISTER_OPERATIONS
1180 		      && partial_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1181 		      && (known_equal_after_align_down
1182 			  (GET_MODE_SIZE (outmode) - 1,
1183 			   GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1,
1184 			   UNITS_PER_WORD)))))
1185 	  || (REG_P (SUBREG_REG (out))
1186 	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1187 	      /* The case of a word mode subreg
1188 		 is handled differently in the following statement.  */
1189 	      && ! (known_le (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1190 		    && maybe_gt (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))),
1191 				 UNITS_PER_WORD))
1192 	      && !targetm.hard_regno_mode_ok (subreg_regno (out), outmode))
1193 	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1194 	      && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1195 					  SUBREG_REG (out))
1196 		  == NO_REGS))
1197 	  || (REG_P (SUBREG_REG (out))
1198 	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1199 	      && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1200 					 GET_MODE (SUBREG_REG (out)),
1201 					 outmode))))
1202     {
1203 #ifdef LIMIT_RELOAD_CLASS
1204       out_subreg_loc = outloc;
1205 #endif
1206       outloc = &SUBREG_REG (out);
1207       out = *outloc;
1208       gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1209 		  || known_le (GET_MODE_SIZE (GET_MODE (out)),
1210 			       GET_MODE_SIZE (outmode)));
1211       outmode = GET_MODE (out);
1212     }
1213 
1214   /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1215      where either M1 is not valid for R or M2 is wider than a word but we
1216      only need one register to store an M2-sized quantity in R.
1217 
1218      However, we must reload the inner reg *as well as* the subreg in
1219      that case and the inner reg is an in-out reload.  */
1220 
1221   if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1222     {
1223       enum reg_class in_out_class
1224 	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1225 			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
1226 						 GET_MODE (SUBREG_REG (out)),
1227 						 SUBREG_BYTE (out),
1228 						 GET_MODE (out)),
1229 			    REGNO (SUBREG_REG (out)));
1230 
1231       /* This relies on the fact that emit_reload_insns outputs the
1232 	 instructions for output reloads of type RELOAD_OTHER in reverse
1233 	 order of the reloads.  Thus if the outer reload is also of type
1234 	 RELOAD_OTHER, we are guaranteed that this inner reload will be
1235 	 output after the outer reload.  */
1236       push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1237 		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1238 		   0, 0, opnum, RELOAD_OTHER);
1239       dont_remove_subreg = 1;
1240     }
1241 
1242   /* If IN appears in OUT, we can't share any input-only reload for IN.  */
1243   if (in != 0 && out != 0 && MEM_P (out)
1244       && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1245       && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1246     dont_share = 1;
1247 
1248   /* If IN is a SUBREG of a hard register, make a new REG.  This
1249      simplifies some of the cases below.  */
1250 
1251   if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1252       && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1253       && ! dont_remove_subreg)
1254     in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1255 
1256   /* Similarly for OUT.  */
1257   if (out != 0 && GET_CODE (out) == SUBREG
1258       && REG_P (SUBREG_REG (out))
1259       && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1260       && ! dont_remove_subreg)
1261     out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1262 
1263   /* Narrow down the class of register wanted if that is
1264      desirable on this machine for efficiency.  */
1265   {
1266     reg_class_t preferred_class = rclass;
1267 
1268     if (in != 0)
1269       preferred_class = targetm.preferred_reload_class (in, rclass);
1270 
1271     /* Output reloads may need analogous treatment, different in detail.  */
1272     if (out != 0)
1273       preferred_class
1274 	= targetm.preferred_output_reload_class (out, preferred_class);
1275 
1276     /* Discard what the target said if we cannot do it.  */
1277     if (preferred_class != NO_REGS
1278 	|| (optional && type == RELOAD_FOR_OUTPUT))
1279       rclass = (enum reg_class) preferred_class;
1280   }
1281 
1282   /* Make sure we use a class that can handle the actual pseudo
1283      inside any subreg.  For example, on the 386, QImode regs
1284      can appear within SImode subregs.  Although GENERAL_REGS
1285      can handle SImode, QImode needs a smaller class.  */
1286 #ifdef LIMIT_RELOAD_CLASS
1287   if (in_subreg_loc)
1288     rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1289   else if (in != 0 && GET_CODE (in) == SUBREG)
1290     rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1291 
1292   if (out_subreg_loc)
1293     rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1294   if (out != 0 && GET_CODE (out) == SUBREG)
1295     rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1296 #endif
1297 
1298   /* Verify that this class is at least possible for the mode that
1299      is specified.  */
1300   if (this_insn_is_asm)
1301     {
1302       machine_mode mode;
1303       if (paradoxical_subreg_p (inmode, outmode))
1304 	mode = inmode;
1305       else
1306 	mode = outmode;
1307       if (mode == VOIDmode)
1308 	{
1309 	  error_for_asm (this_insn, "cannot reload integer constant "
1310 			 "operand in %<asm%>");
1311 	  mode = word_mode;
1312 	  if (in != 0)
1313 	    inmode = word_mode;
1314 	  if (out != 0)
1315 	    outmode = word_mode;
1316 	}
1317       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1318 	if (targetm.hard_regno_mode_ok (i, mode)
1319 	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1320 	  break;
1321       if (i == FIRST_PSEUDO_REGISTER)
1322 	{
1323 	  error_for_asm (this_insn, "impossible register constraint "
1324 			 "in %<asm%>");
1325 	  /* Avoid further trouble with this insn.  */
1326 	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1327 	  /* We used to continue here setting class to ALL_REGS, but it triggers
1328 	     a sanity check on i386 for:
1329 	     void foo(long double d)
1330 	     {
1331 	       asm("" :: "a" (d));
1332 	     }
1333 	     Returning zero here ought to be safe as we take care in
1334 	     find_reloads to not process the reloads when the instruction was
1335 	     replaced by a USE.  */
1336 
1337 	  return 0;
1338 	}
1339     }
1340 
1341   /* Optional output reloads are always OK even if we have no register class,
1342      since the function of these reloads is only to have spill_reg_store etc.
1343      set, so that the storing insn can be deleted later.  */
1344   gcc_assert (rclass != NO_REGS
1345 	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1346 
1347   i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1348 
1349   if (i == n_reloads)
1350     {
1351       /* See if we need a secondary reload register to move between CLASS
1352 	 and IN or CLASS and OUT.  Get the icode and push any required reloads
1353 	 needed for each of them if so.  */
1354 
1355       if (in != 0)
1356 	secondary_in_reload
1357 	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1358 				   &secondary_in_icode, NULL);
1359       if (out != 0 && GET_CODE (out) != SCRATCH)
1360 	secondary_out_reload
1361 	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1362 				   type, &secondary_out_icode, NULL);
1363 
1364       /* We found no existing reload suitable for re-use.
1365 	 So add an additional reload.  */
1366 
1367       if (subreg_in_class == NO_REGS
1368 	  && in != 0
1369 	  && (REG_P (in)
1370 	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1371 	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1372 	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1373       /* If a memory location is needed for the copy, make one.  */
1374       if (subreg_in_class != NO_REGS
1375 	  && targetm.secondary_memory_needed (inmode, subreg_in_class, rclass))
1376 	get_secondary_mem (in, inmode, opnum, type);
1377 
1378       i = n_reloads;
1379       rld[i].in = in;
1380       rld[i].out = out;
1381       rld[i].rclass = rclass;
1382       rld[i].inmode = inmode;
1383       rld[i].outmode = outmode;
1384       rld[i].reg_rtx = 0;
1385       rld[i].optional = optional;
1386       rld[i].inc = 0;
1387       rld[i].nocombine = 0;
1388       rld[i].in_reg = inloc ? *inloc : 0;
1389       rld[i].out_reg = outloc ? *outloc : 0;
1390       rld[i].opnum = opnum;
1391       rld[i].when_needed = type;
1392       rld[i].secondary_in_reload = secondary_in_reload;
1393       rld[i].secondary_out_reload = secondary_out_reload;
1394       rld[i].secondary_in_icode = secondary_in_icode;
1395       rld[i].secondary_out_icode = secondary_out_icode;
1396       rld[i].secondary_p = 0;
1397 
1398       n_reloads++;
1399 
1400       if (out != 0
1401           && (REG_P (out)
1402 	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1403 	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1404 	  && (targetm.secondary_memory_needed
1405 	      (outmode, rclass, REGNO_REG_CLASS (reg_or_subregno (out)))))
1406 	get_secondary_mem (out, outmode, opnum, type);
1407     }
1408   else
1409     {
1410       /* We are reusing an existing reload,
1411 	 but we may have additional information for it.
1412 	 For example, we may now have both IN and OUT
1413 	 while the old one may have just one of them.  */
1414 
1415       /* The modes can be different.  If they are, we want to reload in
1416 	 the larger mode, so that the value is valid for both modes.  */
1417       if (inmode != VOIDmode
1418 	  && partial_subreg_p (rld[i].inmode, inmode))
1419 	rld[i].inmode = inmode;
1420       if (outmode != VOIDmode
1421 	  && partial_subreg_p (rld[i].outmode, outmode))
1422 	rld[i].outmode = outmode;
1423       if (in != 0)
1424 	{
1425 	  rtx in_reg = inloc ? *inloc : 0;
1426 	  /* If we merge reloads for two distinct rtl expressions that
1427 	     are identical in content, there might be duplicate address
1428 	     reloads.  Remove the extra set now, so that if we later find
1429 	     that we can inherit this reload, we can get rid of the
1430 	     address reloads altogether.
1431 
1432 	     Do not do this if both reloads are optional since the result
1433 	     would be an optional reload which could potentially leave
1434 	     unresolved address replacements.
1435 
1436 	     It is not sufficient to call transfer_replacements since
1437 	     choose_reload_regs will remove the replacements for address
1438 	     reloads of inherited reloads which results in the same
1439 	     problem.  */
1440 	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1441 	      && ! (rld[i].optional && optional))
1442 	    {
1443 	      /* We must keep the address reload with the lower operand
1444 		 number alive.  */
1445 	      if (opnum > rld[i].opnum)
1446 		{
1447 		  remove_address_replacements (in);
1448 		  in = rld[i].in;
1449 		  in_reg = rld[i].in_reg;
1450 		}
1451 	      else
1452 		remove_address_replacements (rld[i].in);
1453 	    }
1454 	  /* When emitting reloads we don't look only at the in- and
1455 	     outmode, but also directly at the operands (in and out).
1456 	     So we can't simply overwrite them with whatever we have found
1457 	     for this (to-be-merged) reload, we have to "merge" that too.
1458 	     Reusing another reload already verified that we deal with the
1459 	     same operands, just possibly in different modes.  So we
1460 	     overwrite the operands only when the new mode is larger.
1461 	     See also PR33613.  */
1462 	  if (!rld[i].in
1463 	      || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
1464 	    rld[i].in = in;
1465 	  if (!rld[i].in_reg
1466 	      || (in_reg
1467 		  && partial_subreg_p (GET_MODE (rld[i].in_reg),
1468 				       GET_MODE (in_reg))))
1469 	    rld[i].in_reg = in_reg;
1470 	}
1471       if (out != 0)
1472 	{
1473 	  if (!rld[i].out
1474 	      || (out
1475 		  && partial_subreg_p (GET_MODE (rld[i].out),
1476 				       GET_MODE (out))))
1477 	    rld[i].out = out;
1478 	  if (outloc
1479 	      && (!rld[i].out_reg
1480 		  || partial_subreg_p (GET_MODE (rld[i].out_reg),
1481 				       GET_MODE (*outloc))))
1482 	    rld[i].out_reg = *outloc;
1483 	}
1484       if (reg_class_subset_p (rclass, rld[i].rclass))
1485 	rld[i].rclass = rclass;
1486       rld[i].optional &= optional;
1487       if (MERGE_TO_OTHER (type, rld[i].when_needed,
1488 			  opnum, rld[i].opnum))
1489 	rld[i].when_needed = RELOAD_OTHER;
1490       rld[i].opnum = MIN (rld[i].opnum, opnum);
1491     }
1492 
1493   /* If the ostensible rtx being reloaded differs from the rtx found
1494      in the location to substitute, this reload is not safe to combine
1495      because we cannot reliably tell whether it appears in the insn.  */
1496 
1497   if (in != 0 && in != *inloc)
1498     rld[i].nocombine = 1;
1499 
1500 #if 0
1501   /* This was replaced by changes in find_reloads_address_1 and the new
1502      function inc_for_reload, which go with a new meaning of reload_inc.  */
1503 
1504   /* If this is an IN/OUT reload in an insn that sets the CC,
1505      it must be for an autoincrement.  It doesn't work to store
1506      the incremented value after the insn because that would clobber the CC.
1507      So we must do the increment of the value reloaded from,
1508      increment it, store it back, then decrement again.  */
1509   if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1510     {
1511       out = 0;
1512       rld[i].out = 0;
1513       rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1514       /* If we did not find a nonzero amount-to-increment-by,
1515 	 that contradicts the belief that IN is being incremented
1516 	 in an address in this insn.  */
1517       gcc_assert (rld[i].inc != 0);
1518     }
1519 #endif
1520 
1521   /* If we will replace IN and OUT with the reload-reg,
1522      record where they are located so that substitution need
1523      not do a tree walk.  */
1524 
1525   if (replace_reloads)
1526     {
1527       if (inloc != 0)
1528 	{
1529 	  struct replacement *r = &replacements[n_replacements++];
1530 	  r->what = i;
1531 	  r->where = inloc;
1532 	  r->mode = inmode;
1533 	}
1534       if (outloc != 0 && outloc != inloc)
1535 	{
1536 	  struct replacement *r = &replacements[n_replacements++];
1537 	  r->what = i;
1538 	  r->where = outloc;
1539 	  r->mode = outmode;
1540 	}
1541     }
1542 
1543   /* If this reload is just being introduced and it has both
1544      an incoming quantity and an outgoing quantity that are
1545      supposed to be made to match, see if either one of the two
1546      can serve as the place to reload into.
1547 
1548      If one of them is acceptable, set rld[i].reg_rtx
1549      to that one.  */
1550 
1551   if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1552     {
1553       rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1554 					  inmode, outmode,
1555 					  rld[i].rclass, i,
1556 					  earlyclobber_operand_p (out));
1557 
1558       /* If the outgoing register already contains the same value
1559 	 as the incoming one, we can dispense with loading it.
1560 	 The easiest way to tell the caller that is to give a phony
1561 	 value for the incoming operand (same as outgoing one).  */
1562       if (rld[i].reg_rtx == out
1563 	  && (REG_P (in) || CONSTANT_P (in))
1564 	  && find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1565 			     static_reload_reg_p, i, inmode) != 0)
1566 	rld[i].in = out;
1567     }
1568 
1569   /* If this is an input reload and the operand contains a register that
1570      dies in this insn and is used nowhere else, see if it is the right class
1571      to be used for this reload.  Use it if so.  (This occurs most commonly
1572      in the case of paradoxical SUBREGs and in-out reloads).  We cannot do
1573      this if it is also an output reload that mentions the register unless
1574      the output is a SUBREG that clobbers an entire register.
1575 
1576      Note that the operand might be one of the spill regs, if it is a
1577      pseudo reg and we are in a block where spilling has not taken place.
1578      But if there is no spilling in this block, that is OK.
1579      An explicitly used hard reg cannot be a spill reg.  */
1580 
1581   if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1582     {
1583       rtx note;
1584       int regno;
1585       machine_mode rel_mode = inmode;
1586 
1587       if (out && partial_subreg_p (rel_mode, outmode))
1588 	rel_mode = outmode;
1589 
1590       for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1591 	if (REG_NOTE_KIND (note) == REG_DEAD
1592 	    && REG_P (XEXP (note, 0))
1593 	    && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1594 	    && reg_mentioned_p (XEXP (note, 0), in)
1595 	    /* Check that a former pseudo is valid; see find_dummy_reload.  */
1596 	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1597 		|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1598 				    ORIGINAL_REGNO (XEXP (note, 0)))
1599 		    && REG_NREGS (XEXP (note, 0)) == 1))
1600 	    && ! refers_to_regno_for_reload_p (regno,
1601 					       end_hard_regno (rel_mode,
1602 							       regno),
1603 					       PATTERN (this_insn), inloc)
1604 	    && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1605 	    /* If this is also an output reload, IN cannot be used as
1606 	       the reload register if it is set in this insn unless IN
1607 	       is also OUT.  */
1608 	    && (out == 0 || in == out
1609 		|| ! hard_reg_set_here_p (regno,
1610 					  end_hard_regno (rel_mode, regno),
1611 					  PATTERN (this_insn)))
1612 	    /* ??? Why is this code so different from the previous?
1613 	       Is there any simple coherent way to describe the two together?
1614 	       What's going on here?  */
1615 	    && (in != out
1616 		|| (GET_CODE (in) == SUBREG
1617 		    && (known_equal_after_align_up
1618 			(GET_MODE_SIZE (GET_MODE (in)),
1619 			 GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))),
1620 			 UNITS_PER_WORD))))
1621 	    /* Make sure the operand fits in the reg that dies.  */
1622 	    && known_le (GET_MODE_SIZE (rel_mode),
1623 			 GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1624 	    && targetm.hard_regno_mode_ok (regno, inmode)
1625 	    && targetm.hard_regno_mode_ok (regno, outmode))
1626 	  {
1627 	    unsigned int offs;
1628 	    unsigned int nregs = MAX (hard_regno_nregs (regno, inmode),
1629 				      hard_regno_nregs (regno, outmode));
1630 
1631 	    for (offs = 0; offs < nregs; offs++)
1632 	      if (fixed_regs[regno + offs]
1633 		  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1634 					  regno + offs))
1635 		break;
1636 
1637 	    if (offs == nregs
1638 		&& (! (refers_to_regno_for_reload_p
1639 		       (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1640 		    || can_reload_into (in, regno, inmode)))
1641 	      {
1642 		rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1643 		break;
1644 	      }
1645 	  }
1646     }
1647 
1648   if (out)
1649     output_reloadnum = i;
1650 
1651   return i;
1652 }
1653 
1654 /* Record an additional place we must replace a value
1655    for which we have already recorded a reload.
1656    RELOADNUM is the value returned by push_reload
1657    when the reload was recorded.
1658    This is used in insn patterns that use match_dup.  */
1659 
1660 static void
1661 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1662 {
1663   if (replace_reloads)
1664     {
1665       struct replacement *r = &replacements[n_replacements++];
1666       r->what = reloadnum;
1667       r->where = loc;
1668       r->mode = mode;
1669     }
1670 }
1671 
1672 /* Duplicate any replacement we have recorded to apply at
1673    location ORIG_LOC to also be performed at DUP_LOC.
1674    This is used in insn patterns that use match_dup.  */
1675 
1676 static void
1677 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1678 {
1679   int i, n = n_replacements;
1680 
1681   for (i = 0; i < n; i++)
1682     {
1683       struct replacement *r = &replacements[i];
1684       if (r->where == orig_loc)
1685 	push_replacement (dup_loc, r->what, r->mode);
1686     }
1687 }
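
/* A hedged illustration (not from the original sources): if an insn pattern
   writes operand 0 and also reads it through a (match_dup 0), the reload for
   the operand is recorded once, and dup_replacements is then called on the
   match_dup location so that the later substitution rewrites both places
   with the same reload register.  */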
1688 
1689 /* Transfer all replacements that used to be in reload FROM to be in
1690    reload TO.  */
1691 
1692 void
1693 transfer_replacements (int to, int from)
1694 {
1695   int i;
1696 
1697   for (i = 0; i < n_replacements; i++)
1698     if (replacements[i].what == from)
1699       replacements[i].what = to;
1700 }
1701 
1702 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1703    or a subpart of it.  If we have any replacements registered for IN_RTX,
1704    cancel the reloads that were supposed to load them.
1705    Return nonzero if we canceled any reloads.  */
1706 int
1707 remove_address_replacements (rtx in_rtx)
1708 {
1709   int i, j;
1710   char reload_flags[MAX_RELOADS];
1711   int something_changed = 0;
1712 
1713   memset (reload_flags, 0, sizeof reload_flags);
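  /* In the loop below, bit 0 of reload_flags[R] records that some
     replacement belonging to reload R lies inside IN_RTX, and bit 1 records
     that some replacement of R survives elsewhere; only reloads whose flags
     end up as exactly 1 are cancelled.  */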
1714   for (i = 0, j = 0; i < n_replacements; i++)
1715     {
1716       if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1717 	reload_flags[replacements[i].what] |= 1;
1718       else
1719 	{
1720 	  replacements[j++] = replacements[i];
1721 	  reload_flags[replacements[i].what] |= 2;
1722 	}
1723     }
1724   /* Note that the following store must be done before the recursive calls.  */
1725   n_replacements = j;
1726 
1727   for (i = n_reloads - 1; i >= 0; i--)
1728     {
1729       if (reload_flags[i] == 1)
1730 	{
1731 	  deallocate_reload_reg (i);
1732 	  remove_address_replacements (rld[i].in);
1733 	  rld[i].in = 0;
1734 	  something_changed = 1;
1735 	}
1736     }
1737   return something_changed;
1738 }
1739 
1740 /* If there is only one output reload, and it is not for an earlyclobber
1741    operand, try to combine it with a (logically unrelated) input reload
1742    to reduce the number of reload registers needed.
1743 
1744    This is safe if the input reload does not appear in
1745    the value being output-reloaded, because this implies
1746    it is not needed any more once the original insn completes.
1747 
1748    If that doesn't work, see if we can use any of the registers that
1749    die in this insn as a reload register.  We can if it is of the right
1750    class and does not appear in the value being output-reloaded.  */
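
/* For illustration (a sketch, not from the original sources): in
   (set (reg:SI 70) (plus:SI (reg:SI 71) (const_int 4))), where pseudos 70
   and 71 both failed to get hard registers, the input reload for (reg:SI 71)
   and the output reload for (reg:SI 70) can share a single reload register:
   it is loaded from 71's stack slot before the insn, receives the sum, and
   is stored to 70's slot afterwards.  This is safe because (reg:SI 71) does
   not appear in the value being output-reloaded.  */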
1751 
1752 static void
1753 combine_reloads (void)
1754 {
1755   int i, regno;
1756   int output_reload = -1;
1757   int secondary_out = -1;
1758   rtx note;
1759 
1760   /* Find the output reload; return unless there is exactly one
1761      and that one is mandatory.  */
1762 
1763   for (i = 0; i < n_reloads; i++)
1764     if (rld[i].out != 0)
1765       {
1766 	if (output_reload >= 0)
1767 	  return;
1768 	output_reload = i;
1769       }
1770 
1771   if (output_reload < 0 || rld[output_reload].optional)
1772     return;
1773 
1774   /* An input-output reload isn't combinable.  */
1775 
1776   if (rld[output_reload].in != 0)
1777     return;
1778 
1779   /* If this reload is for an earlyclobber operand, we can't do anything.  */
1780   if (earlyclobber_operand_p (rld[output_reload].out))
1781     return;
1782 
1783   /* If there is a reload for part of the address of this operand, we would
1784      need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
1785      its life to the point where doing this combine would not lower the
1786      number of spill registers needed.  */
1787   for (i = 0; i < n_reloads; i++)
1788     if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1789 	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1790 	&& rld[i].opnum == rld[output_reload].opnum)
1791       return;
1792 
1793   /* Check each input reload; can we combine it?  */
1794 
1795   for (i = 0; i < n_reloads; i++)
1796     if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1797 	/* Life span of this reload must not extend past main insn.  */
1798 	&& rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1799 	&& rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1800 	&& rld[i].when_needed != RELOAD_OTHER
1801 	&& (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1802 	    == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1803 				       [(int) rld[output_reload].outmode])
1804 	&& known_eq (rld[i].inc, 0)
1805 	&& rld[i].reg_rtx == 0
1806 	/* Don't combine two reloads with different secondary
1807 	   memory locations.  */
1808 	&& (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1809 	    || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1810 	    || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1811 			    secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1812 	&& (targetm.small_register_classes_for_mode_p (VOIDmode)
1813 	    ? (rld[i].rclass == rld[output_reload].rclass)
1814 	    : (reg_class_subset_p (rld[i].rclass,
1815 				   rld[output_reload].rclass)
1816 	       || reg_class_subset_p (rld[output_reload].rclass,
1817 				      rld[i].rclass)))
1818 	&& (MATCHES (rld[i].in, rld[output_reload].out)
1819 	    /* Args reversed because the first arg seems to be
1820 	       the one that we imagine being modified
1821 	       while the second is the one that might be affected.  */
1822 	    || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1823 						      rld[i].in)
1824 		/* However, if the input is a register that appears inside
1825 		   the output, then we also can't share.
1826 		   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1827 		   If the same reload reg is used for both reg 69 and the
1828 		   result to be stored in memory, then that result
1829 		   will clobber the address of the memory ref.  */
1830 		&& ! (REG_P (rld[i].in)
1831 		      && reg_overlap_mentioned_for_reload_p (rld[i].in,
1832 							     rld[output_reload].out))))
1833 	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1834 					 rld[i].when_needed != RELOAD_FOR_INPUT)
1835 	&& (reg_class_size[(int) rld[i].rclass]
1836 	    || targetm.small_register_classes_for_mode_p (VOIDmode))
1837 	/* We will allow making things slightly worse by combining an
1838 	   input and an output, but no worse than that.  */
1839 	&& (rld[i].when_needed == RELOAD_FOR_INPUT
1840 	    || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1841       {
1842 	int j;
1843 
1844 	/* We have found a reload to combine with!  */
1845 	rld[i].out = rld[output_reload].out;
1846 	rld[i].out_reg = rld[output_reload].out_reg;
1847 	rld[i].outmode = rld[output_reload].outmode;
1848 	/* Mark the old output reload as inoperative.  */
1849 	rld[output_reload].out = 0;
1850 	/* The combined reload is needed for the entire insn.  */
1851 	rld[i].when_needed = RELOAD_OTHER;
1852 	/* If the output reload had a secondary reload, copy it.  */
1853 	if (rld[output_reload].secondary_out_reload != -1)
1854 	  {
1855 	    rld[i].secondary_out_reload
1856 	      = rld[output_reload].secondary_out_reload;
1857 	    rld[i].secondary_out_icode
1858 	      = rld[output_reload].secondary_out_icode;
1859 	  }
1860 
1861 	/* Copy any secondary MEM.  */
1862 	if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1863 	  secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1864 	    = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1865 	/* If required, minimize the register class.  */
1866 	if (reg_class_subset_p (rld[output_reload].rclass,
1867 				rld[i].rclass))
1868 	  rld[i].rclass = rld[output_reload].rclass;
1869 
1870 	/* Transfer all replacements from the old reload to the combined.  */
1871 	for (j = 0; j < n_replacements; j++)
1872 	  if (replacements[j].what == output_reload)
1873 	    replacements[j].what = i;
1874 
1875 	return;
1876       }
1877 
1878   /* If this insn has only one operand that is modified or written (assumed
1879      to be the first), it must be the one corresponding to this reload.  It
1880      is safe to use anything that dies in this insn for that output provided
1881      that it does not occur in the output (we already know it isn't an
1882      earlyclobber).  If this is an asm insn, give up.  */
1883 
1884   if (INSN_CODE (this_insn) == -1)
1885     return;
1886 
1887   for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1888     if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1889 	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1890       return;
1891 
1892   /* See if some hard register that dies in this insn and is not used in
1893      the output is the right class.  Only works if the register we pick
1894      up can fully hold our output reload.  */
1895   for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1896     if (REG_NOTE_KIND (note) == REG_DEAD
1897 	&& REG_P (XEXP (note, 0))
1898 	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1899 						rld[output_reload].out)
1900 	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1901 	&& targetm.hard_regno_mode_ok (regno, rld[output_reload].outmode)
1902 	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1903 			      regno)
1904 	&& (hard_regno_nregs (regno, rld[output_reload].outmode)
1905 	    <= REG_NREGS (XEXP (note, 0)))
1906 	/* Ensure that a secondary or tertiary reload for this output
1907 	   won't want this register.  */
1908 	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1909 	    || (!(TEST_HARD_REG_BIT
1910 		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1911 		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1912 		    || !(TEST_HARD_REG_BIT
1913 			 (reg_class_contents[(int) rld[secondary_out].rclass],
1914 			  regno)))))
1915 	&& !fixed_regs[regno]
1916 	/* Check that a former pseudo is valid; see find_dummy_reload.  */
1917 	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1918 	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1919 			       ORIGINAL_REGNO (XEXP (note, 0)))
1920 		&& REG_NREGS (XEXP (note, 0)) == 1)))
1921       {
1922 	rld[output_reload].reg_rtx
1923 	  = gen_rtx_REG (rld[output_reload].outmode, regno);
1924 	return;
1925       }
1926 }
1927 
1928 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1929    See if one of IN and OUT is a register that may be used;
1930    this is desirable since a spill-register won't be needed.
1931    If so, return the register rtx that proves acceptable.
1932 
1933    INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1934    RCLASS is the register class required for the reload.
1935 
1936    If FOR_REAL is >= 0, it is the number of the reload,
1937    and in some cases when it can be discovered that OUT doesn't need
1938    to be computed, clear out rld[FOR_REAL].out.
1939 
1940    If FOR_REAL is -1, this should not be done, because this call
1941    is just to see if a register can be found, not to find and install it.
1942 
1943    EARLYCLOBBER is nonzero if OUT is an earlyclobber operand.  This
1944    puts an additional constraint on being able to use IN for OUT since
1945    IN must not appear elsewhere in the insn (it is assumed that IN itself
1946    is safe from the earlyclobber).  */
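
/* A hedged sketch (not from the original sources): if OUT is hard register
   3, reg 3 is in RCLASS and not fixed, and the insn does not reference reg 3
   anywhere except as this operand pair, then (reg:SI 3) itself is returned
   and no separate spill register is needed; IN is simply loaded into reg 3
   before the insn.  */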
1947 
1948 static rtx
1949 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1950 		   machine_mode inmode, machine_mode outmode,
1951 		   reg_class_t rclass, int for_real, int earlyclobber)
1952 {
1953   rtx in = real_in;
1954   rtx out = real_out;
1955   int in_offset = 0;
1956   int out_offset = 0;
1957   rtx value = 0;
1958 
1959   /* If operands exceed a word, we can't use either of them
1960      unless they have the same size.  */
1961   if (maybe_ne (GET_MODE_SIZE (outmode), GET_MODE_SIZE (inmode))
1962       && (maybe_gt (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1963 	  || maybe_gt (GET_MODE_SIZE (inmode), UNITS_PER_WORD)))
1964     return 0;
1965 
1966   /* Note that {in,out}_offset are needed only when 'in' or 'out'
1967      respectively refers to a hard register.  */
1968 
1969   /* Find the inside of any subregs.  */
1970   while (GET_CODE (out) == SUBREG)
1971     {
1972       if (REG_P (SUBREG_REG (out))
1973 	  && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1974 	out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1975 					   GET_MODE (SUBREG_REG (out)),
1976 					   SUBREG_BYTE (out),
1977 					   GET_MODE (out));
1978       out = SUBREG_REG (out);
1979     }
1980   while (GET_CODE (in) == SUBREG)
1981     {
1982       if (REG_P (SUBREG_REG (in))
1983 	  && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1984 	in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1985 					  GET_MODE (SUBREG_REG (in)),
1986 					  SUBREG_BYTE (in),
1987 					  GET_MODE (in));
1988       in = SUBREG_REG (in);
1989     }
1990 
1991   /* Narrow down the reg class, the same way push_reload will;
1992      otherwise we might find a dummy now, but push_reload won't.  */
1993   {
1994     reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1995     if (preferred_class != NO_REGS)
1996       rclass = (enum reg_class) preferred_class;
1997   }
1998 
1999   /* See if OUT will do.  */
2000   if (REG_P (out)
2001       && REGNO (out) < FIRST_PSEUDO_REGISTER)
2002     {
2003       unsigned int regno = REGNO (out) + out_offset;
2004       unsigned int nwords = hard_regno_nregs (regno, outmode);
2005       rtx saved_rtx;
2006 
2007       /* When we consider whether the insn uses OUT,
2008 	 ignore references within IN.  They don't prevent us
2009 	 from copying IN into OUT, because those refs would
2010 	 move into the insn that reloads IN.
2011 
2012 	 However, we only ignore IN in its role as this reload.
2013 	 If the insn uses IN elsewhere and it contains OUT,
2014 	 that counts.  We can't be sure it's the "same" operand
2015 	 so it might not go through this reload.
2016 
2017          We also need to avoid using OUT if it, or part of it, is a
2018          fixed register.  Modifying such registers, even transiently,
2019          may have undefined effects on the machine, such as modifying
2020          the stack pointer.  */
2021       saved_rtx = *inloc;
2022       *inloc = const0_rtx;
2023 
2024       if (regno < FIRST_PSEUDO_REGISTER
2025 	  && targetm.hard_regno_mode_ok (regno, outmode)
2026 	  && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2027 					     PATTERN (this_insn), outloc))
2028 	{
2029 	  unsigned int i;
2030 
2031 	  for (i = 0; i < nwords; i++)
2032 	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2033 				     regno + i)
2034 		|| fixed_regs[regno + i])
2035 	      break;
2036 
2037 	  if (i == nwords)
2038 	    {
2039 	      if (REG_P (real_out))
2040 		value = real_out;
2041 	      else
2042 		value = gen_rtx_REG (outmode, regno);
2043 	    }
2044 	}
2045 
2046       *inloc = saved_rtx;
2047     }
2048 
2049   /* Consider using IN if OUT was not acceptable
2050      or if OUT dies in this insn (like the quotient in a divmod insn).
2051      We can't use IN unless it dies in this insn,
2052      which means we must know accurately which hard regs are live.
2053      Also, the result can't go in IN if IN is used within OUT,
2054      or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
2055   if (hard_regs_live_known
2056       && REG_P (in)
2057       && REGNO (in) < FIRST_PSEUDO_REGISTER
2058       && (value == 0
2059 	  || find_reg_note (this_insn, REG_UNUSED, real_out))
2060       && find_reg_note (this_insn, REG_DEAD, real_in)
2061       && !fixed_regs[REGNO (in)]
2062       && targetm.hard_regno_mode_ok (REGNO (in),
2063 				     /* The only case where out and real_out
2064 					might have different modes is where
2065 					real_out is a subreg, and in that
2066 					case, out has a real mode.  */
2067 				     (GET_MODE (out) != VOIDmode
2068 				      ? GET_MODE (out) : outmode))
2069       && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2070 	  /* However only do this if we can be sure that this input
2071 	     operand doesn't correspond with an uninitialized pseudo.
2072 	     global can assign some hardreg to it that is the same as
2073 	     the one assigned to a different, also live pseudo (as it
2074 	     can ignore the conflict).  We must never introduce writes
2075 	     to such hardregs, as they would clobber the other live
2076 	     pseudo.  See PR 20973.  */
2077 	  || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2078 			     ORIGINAL_REGNO (in))
2079 	      /* Similarly, only do this if we can be sure that the death
2080 		 note is still valid.  global can assign some hardreg to
2081 		 the pseudo referenced in the note and simultaneously a
2082 		 subword of this hardreg to a different, also live pseudo,
2083 		 because only another subword of the hardreg is actually
2084 		 used in the insn.  This cannot happen if the pseudo has
2085 		 been assigned exactly one hardreg.  See PR 33732.  */
2086 	      && REG_NREGS (in) == 1)))
2087     {
2088       unsigned int regno = REGNO (in) + in_offset;
2089       unsigned int nwords = hard_regno_nregs (regno, inmode);
2090 
2091       if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2092 	  && ! hard_reg_set_here_p (regno, regno + nwords,
2093 				    PATTERN (this_insn))
2094 	  && (! earlyclobber
2095 	      || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2096 						 PATTERN (this_insn), inloc)))
2097 	{
2098 	  unsigned int i;
2099 
2100 	  for (i = 0; i < nwords; i++)
2101 	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2102 				     regno + i))
2103 	      break;
2104 
2105 	  if (i == nwords)
2106 	    {
2107 	      /* If we were going to use OUT as the reload reg
2108 		 and changed our mind, it means OUT is a dummy that
2109 		 dies here.  So don't bother copying value to it.  */
2110 	      if (for_real >= 0 && value == real_out)
2111 		rld[for_real].out = 0;
2112 	      if (REG_P (real_in))
2113 		value = real_in;
2114 	      else
2115 		value = gen_rtx_REG (inmode, regno);
2116 	    }
2117 	}
2118     }
2119 
2120   return value;
2121 }
2122 
2123 /* This page contains subroutines used mainly for determining
2124    whether the IN or an OUT of a reload can serve as the
2125    reload register.  */
2126 
2127 /* Return 1 if X is an operand of an insn that is being earlyclobbered.  */
2128 
2129 int
2130 earlyclobber_operand_p (rtx x)
2131 {
2132   int i;
2133 
2134   for (i = 0; i < n_earlyclobbers; i++)
2135     if (reload_earlyclobbers[i] == x)
2136       return 1;
2137 
2138   return 0;
2139 }
2140 
2141 /* Return 1 if expression X alters a hard reg in the range
2142    from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2143    either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2144    X should be the body of an instruction.  */
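
/* For example (a sketch, assuming SImode occupies one hard register here):
   with X = (set (reg:SI 2) (reg:SI 5)), hard_reg_set_here_p (2, 3, X)
   returns 1, while hard_reg_set_here_p (5, 6, X) returns 0, since only the
   SET_DEST is altered.  */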
2145 
2146 static int
2147 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2148 {
2149   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2150     {
2151       rtx op0 = SET_DEST (x);
2152 
2153       while (GET_CODE (op0) == SUBREG)
2154 	op0 = SUBREG_REG (op0);
2155       if (REG_P (op0))
2156 	{
2157 	  unsigned int r = REGNO (op0);
2158 
2159 	  /* See if this reg overlaps range under consideration.  */
2160 	  if (r < end_regno
2161 	      && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2162 	    return 1;
2163 	}
2164     }
2165   else if (GET_CODE (x) == PARALLEL)
2166     {
2167       int i = XVECLEN (x, 0) - 1;
2168 
2169       for (; i >= 0; i--)
2170 	if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2171 	  return 1;
2172     }
2173 
2174   return 0;
2175 }
2176 
2177 /* Return 1 if ADDR is a valid memory address for mode MODE
2178    in address space AS, and check that each pseudo reg has the
2179    proper kind of hard reg.  */
2180 
2181 int
2182 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2183 				    rtx addr, addr_space_t as)
2184 {
2185 #ifdef GO_IF_LEGITIMATE_ADDRESS
2186   gcc_assert (ADDR_SPACE_GENERIC_P (as));
2187   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2188   return 0;
2189 
2190  win:
2191   return 1;
2192 #else
2193   return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2194 #endif
2195 }
2196 
2197 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2198    if they are the same hard reg, and has special hacks for
2199    autoincrement and autodecrement.
2200    This is specifically intended for find_reloads to use
2201    in determining whether two operands match.
2202    X is the operand whose number is the lower of the two.
2203 
2204    The value is 2 if Y contains a pre-increment that matches
2205    a non-incrementing address in X.  */
2206 
2207 /* ??? To be completely correct, we should arrange to pass
2208    for X the output operand and for Y the input operand.
2209    For now, we assume that the output operand has the lower number
2210    because that is natural in (SET output (... input ...)).  */
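
/* For example (a sketch, not from the original sources): with
   X = (mem:SI (reg:SI 2)) and Y = (mem:SI (pre_inc:SI (reg:SI 2))),
   operands_match_p returns 2: the addresses match apart from the
   pre-increment on the input side, and callers that see 2 know they must
   handle that increment specially.  */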
2211 
2212 int
2213 operands_match_p (rtx x, rtx y)
2214 {
2215   int i;
2216   RTX_CODE code = GET_CODE (x);
2217   const char *fmt;
2218   int success_2;
2219 
2220   if (x == y)
2221     return 1;
2222   if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2223       && (REG_P (y) || (GET_CODE (y) == SUBREG
2224 				  && REG_P (SUBREG_REG (y)))))
2225     {
2226       int j;
2227 
2228       if (code == SUBREG)
2229 	{
2230 	  i = REGNO (SUBREG_REG (x));
2231 	  if (i >= FIRST_PSEUDO_REGISTER)
2232 	    goto slow;
2233 	  i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2234 				    GET_MODE (SUBREG_REG (x)),
2235 				    SUBREG_BYTE (x),
2236 				    GET_MODE (x));
2237 	}
2238       else
2239 	i = REGNO (x);
2240 
2241       if (GET_CODE (y) == SUBREG)
2242 	{
2243 	  j = REGNO (SUBREG_REG (y));
2244 	  if (j >= FIRST_PSEUDO_REGISTER)
2245 	    goto slow;
2246 	  j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2247 				    GET_MODE (SUBREG_REG (y)),
2248 				    SUBREG_BYTE (y),
2249 				    GET_MODE (y));
2250 	}
2251       else
2252 	j = REGNO (y);
2253 
2254       /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2255 	 multiple hard register group of scalar integer registers, so that
2256 	 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2257 	 register.  */
2258       scalar_int_mode xmode;
2259       if (REG_WORDS_BIG_ENDIAN
2260 	  && is_a <scalar_int_mode> (GET_MODE (x), &xmode)
2261 	  && GET_MODE_SIZE (xmode) > UNITS_PER_WORD
2262 	  && i < FIRST_PSEUDO_REGISTER)
2263 	i += hard_regno_nregs (i, xmode) - 1;
2264       scalar_int_mode ymode;
2265       if (REG_WORDS_BIG_ENDIAN
2266 	  && is_a <scalar_int_mode> (GET_MODE (y), &ymode)
2267 	  && GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2268 	  && j < FIRST_PSEUDO_REGISTER)
2269 	j += hard_regno_nregs (j, ymode) - 1;
2270 
2271       return i == j;
2272     }
2273   /* If two operands must match, because they are really a single
2274      operand of an assembler insn, then two postincrements are invalid
2275      because the assembler insn would increment only once.
2276      On the other hand, a postincrement matches ordinary indexing
2277      if the postincrement is the output operand.  */
2278   if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2279     return operands_match_p (XEXP (x, 0), y);
2280   /* Two preincrements are invalid
2281      because the assembler insn would increment only once.
2282      On the other hand, a preincrement matches ordinary indexing
2283      if the preincrement is the input operand.
2284      In this case, return 2, since some callers need to do special
2285      things when this happens.  */
2286   if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2287       || GET_CODE (y) == PRE_MODIFY)
2288     return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2289 
2290  slow:
2291 
2292   /* Now we have disposed of all the cases in which different rtx codes
2293      can match.  */
2294   if (code != GET_CODE (y))
2295     return 0;
2296 
2297   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2298   if (GET_MODE (x) != GET_MODE (y))
2299     return 0;
2300 
2301   /* MEMs referring to different address space are not equivalent.  */
2302   if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2303     return 0;
2304 
2305   switch (code)
2306     {
2307     CASE_CONST_UNIQUE:
2308       return 0;
2309 
2310     case CONST_VECTOR:
2311       if (!same_vector_encodings_p (x, y))
2312 	return false;
2313       break;
2314 
2315     case LABEL_REF:
2316       return label_ref_label (x) == label_ref_label (y);
2317     case SYMBOL_REF:
2318       return XSTR (x, 0) == XSTR (y, 0);
2319 
2320     default:
2321       break;
2322     }
2323 
2324   /* Compare the elements.  If any pair of corresponding elements
2325      fail to match, return 0 for the whole thing.  */
2326 
2327   success_2 = 0;
2328   fmt = GET_RTX_FORMAT (code);
2329   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2330     {
2331       int val, j;
2332       switch (fmt[i])
2333 	{
2334 	case 'w':
2335 	  if (XWINT (x, i) != XWINT (y, i))
2336 	    return 0;
2337 	  break;
2338 
2339 	case 'i':
2340 	  if (XINT (x, i) != XINT (y, i))
2341 	    return 0;
2342 	  break;
2343 
2344 	case 'p':
2345 	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2346 	    return 0;
2347 	  break;
2348 
2349 	case 'e':
2350 	  val = operands_match_p (XEXP (x, i), XEXP (y, i));
2351 	  if (val == 0)
2352 	    return 0;
2353 	  /* If any subexpression returns 2,
2354 	     we should return 2 if we are successful.  */
2355 	  if (val == 2)
2356 	    success_2 = 1;
2357 	  break;
2358 
2359 	case '0':
2360 	  break;
2361 
2362 	case 'E':
2363 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2364 	    return 0;
2365 	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2366 	    {
2367 	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2368 	      if (val == 0)
2369 		return 0;
2370 	      if (val == 2)
2371 		success_2 = 1;
2372 	    }
2373 	  break;
2374 
2375 	  /* It is believed that rtx's at this level will never
2376 	     contain anything but integers and other rtx's,
2377 	     except for within LABEL_REFs and SYMBOL_REFs.  */
2378 	default:
2379 	  gcc_unreachable ();
2380 	}
2381     }
2382   return 1 + success_2;
2383 }
2384 
2385 /* Describe the range of registers or memory referenced by X.
2386    If X is a register, set REG_FLAG and put the first register
2387    number into START and the last plus one into END.
2388    If X is a memory reference, put a base address into BASE
2389    and a range of integer offsets into START and END.
2390    If X is pushing on the stack, we can assume it causes no trouble,
2391    so we set the SAFE field.  */
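
/* For example (a sketch, assuming a 4-byte SImode): decomposing
   (mem:SI (plus:SI (reg:SI fp) (const_int 8))) yields base = (reg:SI fp),
   start = 8 and end = 12, while decomposing a hard register sets reg_flag
   and uses register numbers for start and end instead.  */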
2392 
2393 static struct decomposition
2394 decompose (rtx x)
2395 {
2396   struct decomposition val;
2397   int all_const = 0, regno;
2398 
2399   memset (&val, 0, sizeof (val));
2400 
2401   switch (GET_CODE (x))
2402     {
2403     case MEM:
2404       {
2405 	rtx base = NULL_RTX, offset = 0;
2406 	rtx addr = XEXP (x, 0);
2407 
2408 	if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2409 	    || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2410 	  {
2411 	    val.base = XEXP (addr, 0);
2412 	    val.start = -GET_MODE_SIZE (GET_MODE (x));
2413 	    val.end = GET_MODE_SIZE (GET_MODE (x));
2414 	    val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2415 	    return val;
2416 	  }
2417 
2418 	if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2419 	  {
2420 	    if (GET_CODE (XEXP (addr, 1)) == PLUS
2421 		&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2422 		&& CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2423 	      {
2424 		val.base  = XEXP (addr, 0);
2425 		val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2426 		val.end   = INTVAL (XEXP (XEXP (addr, 1), 1));
2427 		val.safe  = REGNO (val.base) == STACK_POINTER_REGNUM;
2428 		return val;
2429 	      }
2430 	  }
2431 
2432 	if (GET_CODE (addr) == CONST)
2433 	  {
2434 	    addr = XEXP (addr, 0);
2435 	    all_const = 1;
2436 	  }
2437 	if (GET_CODE (addr) == PLUS)
2438 	  {
2439 	    if (CONSTANT_P (XEXP (addr, 0)))
2440 	      {
2441 		base = XEXP (addr, 1);
2442 		offset = XEXP (addr, 0);
2443 	      }
2444 	    else if (CONSTANT_P (XEXP (addr, 1)))
2445 	      {
2446 		base = XEXP (addr, 0);
2447 		offset = XEXP (addr, 1);
2448 	      }
2449 	  }
2450 
2451 	if (offset == 0)
2452 	  {
2453 	    base = addr;
2454 	    offset = const0_rtx;
2455 	  }
2456 	if (GET_CODE (offset) == CONST)
2457 	  offset = XEXP (offset, 0);
2458 	if (GET_CODE (offset) == PLUS)
2459 	  {
2460 	    if (CONST_INT_P (XEXP (offset, 0)))
2461 	      {
2462 		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2463 		offset = XEXP (offset, 0);
2464 	      }
2465 	    else if (CONST_INT_P (XEXP (offset, 1)))
2466 	      {
2467 		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2468 		offset = XEXP (offset, 1);
2469 	      }
2470 	    else
2471 	      {
2472 		base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2473 		offset = const0_rtx;
2474 	      }
2475 	  }
2476 	else if (!CONST_INT_P (offset))
2477 	  {
2478 	    base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2479 	    offset = const0_rtx;
2480 	  }
2481 
2482 	if (all_const && GET_CODE (base) == PLUS)
2483 	  base = gen_rtx_CONST (GET_MODE (base), base);
2484 
2485 	gcc_assert (CONST_INT_P (offset));
2486 
2487 	val.start = INTVAL (offset);
2488 	val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2489 	val.base = base;
2490       }
2491       break;
2492 
2493     case REG:
2494       val.reg_flag = 1;
2495       regno = true_regnum (x);
2496       if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2497 	{
2498 	  /* A pseudo with no hard reg.  */
2499 	  val.start = REGNO (x);
2500 	  val.end = val.start + 1;
2501 	}
2502       else
2503 	{
2504 	  /* A hard reg.  */
2505 	  val.start = regno;
2506 	  val.end = end_hard_regno (GET_MODE (x), regno);
2507 	}
2508       break;
2509 
2510     case SUBREG:
2511       if (!REG_P (SUBREG_REG (x)))
2512 	/* This could be more precise, but it's good enough.  */
2513 	return decompose (SUBREG_REG (x));
2514       regno = true_regnum (x);
2515       if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2516 	return decompose (SUBREG_REG (x));
2517 
2518       /* A hard reg.  */
2519       val.reg_flag = 1;
2520       val.start = regno;
2521       val.end = regno + subreg_nregs (x);
2522       break;
2523 
2524     case SCRATCH:
2525       /* This hasn't been assigned yet, so it can't conflict yet.  */
2526       val.safe = 1;
2527       break;
2528 
2529     default:
2530       gcc_assert (CONSTANT_P (x));
2531       val.safe = 1;
2532       break;
2533     }
2534   return val;
2535 }
2536 
2537 /* Return 1 if altering Y will not modify the value of X.
2538    Y is also described by YDATA, which should be decompose (Y).  */
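
/* For example (a sketch, assuming 4-byte accesses): two stack slots
   (mem:SI (plus (reg sp) (const_int 4))) and
   (mem:SI (plus (reg sp) (const_int 8))) share the same base, and their
   offset ranges [4,8) and [8,12) do not overlap, so each is immune to a
   store into the other.  */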
2539 
2540 static int
2541 immune_p (rtx x, rtx y, struct decomposition ydata)
2542 {
2543   struct decomposition xdata;
2544 
2545   if (ydata.reg_flag)
2546     /* In this case the decomposition structure contains register
2547        numbers rather than byte offsets.  */
2548     return !refers_to_regno_for_reload_p (ydata.start.to_constant (),
2549 					  ydata.end.to_constant (),
2550 					  x, (rtx *) 0);
2551   if (ydata.safe)
2552     return 1;
2553 
2554   gcc_assert (MEM_P (y));
2555   /* If Y is memory and X is not, Y can't affect X.  */
2556   if (!MEM_P (x))
2557     return 1;
2558 
2559   xdata = decompose (x);
2560 
2561   if (! rtx_equal_p (xdata.base, ydata.base))
2562     {
2563       /* If bases are distinct symbolic constants, there is no overlap.  */
2564       if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2565 	return 1;
2566       /* Constants and stack slots never overlap.  */
2567       if (CONSTANT_P (xdata.base)
2568 	  && (ydata.base == frame_pointer_rtx
2569 	      || ydata.base == hard_frame_pointer_rtx
2570 	      || ydata.base == stack_pointer_rtx))
2571 	return 1;
2572       if (CONSTANT_P (ydata.base)
2573 	  && (xdata.base == frame_pointer_rtx
2574 	      || xdata.base == hard_frame_pointer_rtx
2575 	      || xdata.base == stack_pointer_rtx))
2576 	return 1;
2577       /* If either base is variable, we don't know anything.  */
2578       return 0;
2579     }
2580 
2581   return known_ge (xdata.start, ydata.end) || known_ge (ydata.start, xdata.end);
2582 }
2583 
2584 /* Similar, but calls decompose.  */
2585 
2586 int
2587 safe_from_earlyclobber (rtx op, rtx clobber)
2588 {
2589   struct decomposition early_data;
2590 
2591   early_data = decompose (clobber);
2592   return immune_p (op, clobber, early_data);
2593 }
2594 
2595 /* Main entry point of this file: search the body of INSN
2596    for values that need reloading and record them with push_reload.
2597    REPLACE nonzero means record also where the values occur
2598    so that subst_reloads can be used.
2599 
2600    IND_LEVELS says how many levels of indirection are supported by this
2601    machine; a value of zero means that a memory reference is not a valid
2602    memory address.
2603 
2604    LIVE_KNOWN says we have valid information about which hard
2605    regs are live at each point in the program; this is true when
2606    we are called from global_alloc but false when stupid register
2607    allocation has been done.
2608 
2609    RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2610    which is nonnegative if the reg has been commandeered for reloading into.
2611    It is copied into STATIC_RELOAD_REG_P and referenced from there
2612    by various subroutines.
2613 
2614    Return TRUE if some operands need to be changed, because of swapping
2615    commutative operands, reg_equiv_address substitution, or whatever.  */
2616 
2617 int
2618 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2619 	      short *reload_reg_p)
2620 {
2621   int insn_code_number;
2622   int i, j;
2623   int noperands;
2624   /* These start out as the constraints for the insn
2625      and they are chewed up as we consider alternatives.  */
2626   const char *constraints[MAX_RECOG_OPERANDS];
2627   /* These are the preferred classes for an operand, or NO_REGS if it isn't
2628      a register.  */
2629   enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2630   char pref_or_nothing[MAX_RECOG_OPERANDS];
2631   /* Nonzero for a MEM operand whose entire address needs a reload.
2632      May be -1 to indicate the entire address may or may not need a reload.  */
2633   int address_reloaded[MAX_RECOG_OPERANDS];
2634   /* Nonzero for an address operand that needs to be completely reloaded.
2635      May be -1 to indicate the entire operand may or may not need a reload.  */
2636   int address_operand_reloaded[MAX_RECOG_OPERANDS];
2637   /* Value of enum reload_type to use for operand.  */
2638   enum reload_type operand_type[MAX_RECOG_OPERANDS];
2639   /* Value of enum reload_type to use within address of operand.  */
2640   enum reload_type address_type[MAX_RECOG_OPERANDS];
2641   /* Save the usage of each operand.  */
2642   enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2643   int no_input_reloads = 0, no_output_reloads = 0;
2644   int n_alternatives;
2645   reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2646   char this_alternative_match_win[MAX_RECOG_OPERANDS];
2647   char this_alternative_win[MAX_RECOG_OPERANDS];
2648   char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2649   char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2650   int this_alternative_matches[MAX_RECOG_OPERANDS];
2651   reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2652   int this_alternative_number;
2653   int goal_alternative_number = 0;
2654   int operand_reloadnum[MAX_RECOG_OPERANDS];
2655   int goal_alternative_matches[MAX_RECOG_OPERANDS];
2656   int goal_alternative_matched[MAX_RECOG_OPERANDS];
2657   char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2658   char goal_alternative_win[MAX_RECOG_OPERANDS];
2659   char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2660   char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2661   int goal_alternative_swapped;
2662   int best;
2663   int commutative;
2664   char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2665   rtx substed_operand[MAX_RECOG_OPERANDS];
2666   rtx body = PATTERN (insn);
2667   rtx set = single_set (insn);
2668   int goal_earlyclobber = 0, this_earlyclobber;
2669   machine_mode operand_mode[MAX_RECOG_OPERANDS];
2670   int retval = 0;
2671 
2672   this_insn = insn;
2673   n_reloads = 0;
2674   n_replacements = 0;
2675   n_earlyclobbers = 0;
2676   replace_reloads = replace;
2677   hard_regs_live_known = live_known;
2678   static_reload_reg_p = reload_reg_p;
2679 
2680   /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2681      neither are insns that SET cc0.  Insns that use CC0 are not allowed
2682      to have any input reloads.  */
2683   if (JUMP_P (insn) || CALL_P (insn))
2684     no_output_reloads = 1;
2685 
2686   if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2687     no_input_reloads = 1;
2688   if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2689     no_output_reloads = 1;
2690 
2691   /* The eliminated forms of any secondary memory locations are per-insn, so
2692      clear them out here.  */
2693 
2694   if (secondary_memlocs_elim_used)
2695     {
2696       memset (secondary_memlocs_elim, 0,
2697 	      sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2698       secondary_memlocs_elim_used = 0;
2699     }
2700 
2701   /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2702      is cheap to move between them.  If it is not, there may not be an insn
2703      to do the copy, so we may need a reload.  */
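  /* (A cost of 2 conventionally corresponds to a single move instruction in
     the register-move cost model, so a higher cost is treated as a sign that
     a direct copy may not be available.)  */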
2704   if (GET_CODE (body) == SET
2705       && REG_P (SET_DEST (body))
2706       && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2707       && REG_P (SET_SRC (body))
2708       && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2709       && register_move_cost (GET_MODE (SET_SRC (body)),
2710 			     REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2711 			     REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2712     return 0;
2713 
2714   extract_insn (insn);
2715 
2716   noperands = reload_n_operands = recog_data.n_operands;
2717   n_alternatives = recog_data.n_alternatives;
2718 
2719   /* Just return "no reloads" if insn has no operands with constraints.  */
2720   if (noperands == 0 || n_alternatives == 0)
2721     return 0;
2722 
2723   insn_code_number = INSN_CODE (insn);
2724   this_insn_is_asm = insn_code_number < 0;
2725 
2726   memcpy (operand_mode, recog_data.operand_mode,
2727 	  noperands * sizeof (machine_mode));
2728   memcpy (constraints, recog_data.constraints,
2729 	  noperands * sizeof (const char *));
2730 
2731   commutative = -1;
2732 
2733   /* If we will need to know, later, whether some pair of operands
2734      are the same, we must compare them now and save the result.
2735      Reloading the base and index registers will clobber them
2736      and afterward they will fail to match.  */
2737 
2738   for (i = 0; i < noperands; i++)
2739     {
2740       const char *p;
2741       int c;
2742       char *end;
2743 
2744       substed_operand[i] = recog_data.operand[i];
2745       p = constraints[i];
2746 
2747       modified[i] = RELOAD_READ;
2748 
2749       /* Scan this operand's constraint to see if it is an output operand,
2750 	 an in-out operand, is commutative, or should match another.  */
2751 
2752       while ((c = *p))
2753 	{
2754 	  p += CONSTRAINT_LEN (c, p);
2755 	  switch (c)
2756 	    {
2757 	    case '=':
2758 	      modified[i] = RELOAD_WRITE;
2759 	      break;
2760 	    case '+':
2761 	      modified[i] = RELOAD_READ_WRITE;
2762 	      break;
2763 	    case '%':
2764 	      {
2765 		/* The last operand should not be marked commutative.  */
2766 		gcc_assert (i != noperands - 1);
2767 
2768 		/* We currently only support one commutative pair of
2769 		   operands.  Some existing asm code currently uses more
2770 		   than one pair.  Previously, that would usually work,
2771 		   but sometimes it would crash the compiler.  We
2772 		   continue supporting that case as well as we can by
2773 		   silently ignoring all but the first pair.  In the
2774 		   future we may handle it correctly.  */
2775 		if (commutative < 0)
2776 		  commutative = i;
2777 		else
2778 		  gcc_assert (this_insn_is_asm);
2779 	      }
2780 	      break;
2781 	    /* Use of ISDIGIT is tempting here, but it may get expensive because
2782 	       of locale support we don't want.  */
2783 	    case '0': case '1': case '2': case '3': case '4':
2784 	    case '5': case '6': case '7': case '8': case '9':
2785 	      {
2786 		c = strtoul (p - 1, &end, 10);
2787 		p = end;
2788 
2789 		operands_match[c][i]
2790 		  = operands_match_p (recog_data.operand[c],
2791 				      recog_data.operand[i]);
2792 
2793 		/* An operand may not match itself.  */
2794 		gcc_assert (c != i);
2795 
2796 		/* If C can be commuted with C+1, and C might need to match I,
2797 		   then C+1 might also need to match I.  */
2798 		if (commutative >= 0)
2799 		  {
2800 		    if (c == commutative || c == commutative + 1)
2801 		      {
2802 			int other = c + (c == commutative ? 1 : -1);
2803 			operands_match[other][i]
2804 			  = operands_match_p (recog_data.operand[other],
2805 					      recog_data.operand[i]);
2806 		      }
2807 		    if (i == commutative || i == commutative + 1)
2808 		      {
2809 			int other = i + (i == commutative ? 1 : -1);
2810 			operands_match[c][other]
2811 			  = operands_match_p (recog_data.operand[c],
2812 					      recog_data.operand[other]);
2813 		      }
2814 		    /* Note that C is supposed to be less than I.
2815 		       No need to consider altering both C and I because in
2816 		       that case we would alter one into the other.  */
2817 		  }
2818 	      }
2819 	    }
2820 	}
2821     }
2822 
2823   /* Examine each operand that is a memory reference or memory address
2824      and reload parts of the addresses into index registers.
2825      Also here any references to pseudo regs that didn't get hard regs
2826      but are equivalent to constants get replaced in the insn itself
2827      with those constants.  Nobody will ever see them again.
2828 
2829      Finally, set up the preferred classes of each operand.  */
2830 
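  /* For example, a MEM whose address uses a pseudo that did not get a
     hard register has that address fixed up by find_reloads_address
     below, and a pseudo known to be equivalent to a constant is simply
     replaced by that constant.  */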
2831   for (i = 0; i < noperands; i++)
2832     {
2833       RTX_CODE code = GET_CODE (recog_data.operand[i]);
2834 
2835       address_reloaded[i] = 0;
2836       address_operand_reloaded[i] = 0;
2837       operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2838 			 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2839 			 : RELOAD_OTHER);
2840       address_type[i]
2841 	= (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2842 	   : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2843 	   : RELOAD_OTHER);
2844 
2845       if (*constraints[i] == 0)
2846 	/* Ignore things like match_operator operands.  */
2847 	;
2848       else if (insn_extra_address_constraint
2849 	       (lookup_constraint (constraints[i])))
2850 	{
2851 	  address_operand_reloaded[i]
2852 	    = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2853 				    recog_data.operand[i],
2854 				    recog_data.operand_loc[i],
2855 				    i, operand_type[i], ind_levels, insn);
2856 
2857 	  /* If we now have a simple operand where we used to have a
2858 	     PLUS or MULT, re-recognize and try again.  */
2859 	  if ((OBJECT_P (*recog_data.operand_loc[i])
2860 	       || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2861 	      && (GET_CODE (recog_data.operand[i]) == MULT
2862 		  || GET_CODE (recog_data.operand[i]) == PLUS))
2863 	    {
2864 	      INSN_CODE (insn) = -1;
2865 	      retval = find_reloads (insn, replace, ind_levels, live_known,
2866 				     reload_reg_p);
2867 	      return retval;
2868 	    }
2869 
2870 	  recog_data.operand[i] = *recog_data.operand_loc[i];
2871 	  substed_operand[i] = recog_data.operand[i];
2872 
2873 	  /* Address operands are reloaded in their existing mode,
2874 	     no matter what is specified in the machine description.  */
2875 	  operand_mode[i] = GET_MODE (recog_data.operand[i]);
2876 
2877 	  /* If the address is a single CONST_INT, pick the address mode
2878 	     instead; otherwise we will later not know in which mode
2879 	     the reload should be performed.  */
2880 	  if (operand_mode[i] == VOIDmode)
2881 	    operand_mode[i] = Pmode;
2882 
2883 	}
2884       else if (code == MEM)
2885 	{
2886 	  address_reloaded[i]
2887 	    = find_reloads_address (GET_MODE (recog_data.operand[i]),
2888 				    recog_data.operand_loc[i],
2889 				    XEXP (recog_data.operand[i], 0),
2890 				    &XEXP (recog_data.operand[i], 0),
2891 				    i, address_type[i], ind_levels, insn);
2892 	  recog_data.operand[i] = *recog_data.operand_loc[i];
2893 	  substed_operand[i] = recog_data.operand[i];
2894 	}
2895       else if (code == SUBREG)
2896 	{
2897 	  rtx reg = SUBREG_REG (recog_data.operand[i]);
2898 	  rtx op
2899 	    = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2900 				   ind_levels,
2901 				   set != 0
2902 				   && &SET_DEST (set) == recog_data.operand_loc[i],
2903 				   insn,
2904 				   &address_reloaded[i]);
2905 
2906 	  /* If we made a MEM to load (a part of) the stack slot of a pseudo
2907 	     that didn't get a hard register, emit a USE with a REG_EQUAL
2908 	     note in front so that we might inherit a previous, possibly
2909 	     wider reload.  */
2910 
2911 	  if (replace
2912 	      && MEM_P (op)
2913 	      && REG_P (reg)
2914 	      && known_ge (GET_MODE_SIZE (GET_MODE (reg)),
2915 			   GET_MODE_SIZE (GET_MODE (op)))
2916 	      && reg_equiv_constant (REGNO (reg)) == 0)
2917 	    set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2918 						   insn),
2919 				 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2920 
2921 	  substed_operand[i] = recog_data.operand[i] = op;
2922 	}
2923       else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2924 	/* We can get a PLUS as an "operand" as a result of register
2925 	   elimination.  See eliminate_regs and gen_reload.  We handle
2926 	   a unary operator by reloading the operand.  */
2927 	substed_operand[i] = recog_data.operand[i]
2928 	  = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2929 				 ind_levels, 0, insn,
2930 				 &address_reloaded[i]);
2931       else if (code == REG)
2932 	{
2933 	  /* This is equivalent to calling find_reloads_toplev.
2934 	     The code is duplicated for speed.
2935 	     When we find a pseudo always equivalent to a constant,
2936 	     we replace it by the constant.  We must be sure, however,
2937 	     that we don't try to replace it in the insn in which it
2938 	     is being set.  */
2939 	  int regno = REGNO (recog_data.operand[i]);
2940 	  if (reg_equiv_constant (regno) != 0
2941 	      && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2942 	    {
2943 	      /* Record the existing mode so that the check if constants are
2944 		 allowed will work when operand_mode isn't specified.  */
2945 
2946 	      if (operand_mode[i] == VOIDmode)
2947 		operand_mode[i] = GET_MODE (recog_data.operand[i]);
2948 
2949 	      substed_operand[i] = recog_data.operand[i]
2950 		= reg_equiv_constant (regno);
2951 	    }
2952 	  if (reg_equiv_memory_loc (regno) != 0
2953 	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2954 	    /* We need not give a valid is_set_dest argument since the case
2955 	       of a constant equivalence was checked above.  */
2956 	    substed_operand[i] = recog_data.operand[i]
2957 	      = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2958 				     ind_levels, 0, insn,
2959 				     &address_reloaded[i]);
2960 	}
2961       /* If the operand is still a register (we didn't replace it with an
2962 	 equivalent), get the preferred class to reload it into.  */
2963       code = GET_CODE (recog_data.operand[i]);
2964       preferred_class[i]
2965 	= ((code == REG && REGNO (recog_data.operand[i])
2966 	    >= FIRST_PSEUDO_REGISTER)
2967 	   ? reg_preferred_class (REGNO (recog_data.operand[i]))
2968 	   : NO_REGS);
2969       pref_or_nothing[i]
2970 	= (code == REG
2971 	   && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2972 	   && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2973     }
2974 
2975   /* If this is simply a copy from operand 1 to operand 0, merge the
2976      preferred classes for the operands.  */
2977   if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2978       && recog_data.operand[1] == SET_SRC (set))
2979     {
2980       preferred_class[0] = preferred_class[1]
2981 	= reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2982       pref_or_nothing[0] |= pref_or_nothing[1];
2983       pref_or_nothing[1] |= pref_or_nothing[0];
2984     }
2985 
2986   /* Now see what we need for pseudo-regs that didn't get hard regs
2987      or got the wrong kind of hard reg.  For this, we must consider
2988      all the operands together against the register constraints.  */
2989 
2990   best = MAX_RECOG_OPERANDS * 2 + 600;
2991 
2992   goal_alternative_swapped = 0;
2993 
2994   /* The constraints are made of several alternatives.
2995      Each operand's constraint looks like foo,bar,... with commas
2996      separating the alternatives.  The first alternatives for all
2997      operands go together, the second alternatives go together, etc.
2998 
2999      First loop over alternatives.  */
3000 
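  /* For example, with two operands whose constraints are "=r,m" and
     "rm,r", alternative 0 wants operand 0 in a register and operand 1 in
     a register or memory, while alternative 1 wants operand 0 in memory
     and operand 1 in a register.  Each alternative is scored below and
     the cheapest becomes the reload goal.  */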
3001   alternative_mask enabled = get_enabled_alternatives (insn);
3002   for (this_alternative_number = 0;
3003        this_alternative_number < n_alternatives;
3004        this_alternative_number++)
3005     {
3006       int swapped;
3007 
3008       if (!TEST_BIT (enabled, this_alternative_number))
3009 	{
3010 	  int i;
3011 
3012 	  for (i = 0; i < recog_data.n_operands; i++)
3013 	    constraints[i] = skip_alternative (constraints[i]);
3014 
3015 	  continue;
3016 	}
3017 
3018       /* If insn is commutative (it's safe to exchange a certain pair
3019 	 of operands) then we need to try each alternative twice, the
3020 	 second time matching those two operands as if we had
3021 	 exchanged them.  To do this, really exchange them in
3022 	 operands.  */
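      /* For example, a "%r" constraint on the first input of an addition
	 declares that input and the following operand commutative, so the
	 SWAPPED == 1 pass below re-scores the alternative with
	 recog_data.operand[commutative] and [commutative + 1]
	 interchanged.  */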
3023       for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3024 	{
3025 	  /* Loop over operands for one constraint alternative.  */
3026 	  /* LOSERS counts those that don't fit this alternative
3027 	     and would require loading.  */
3028 	  int losers = 0;
3029 	  /* BAD is set to 1 if some operand can't fit this alternative
3030 	     even after reloading.  */
3031 	  int bad = 0;
3032 	  /* REJECT is a count of how undesirable this alternative says it is
3033 	     if any reloading is required.  If the alternative matches exactly
3034 	     then REJECT is ignored, but otherwise it gets this much
3035 	     counted against it in addition to the reloading needed.  Each
3036 	     ? counts three times here since we want the disparaging caused by
3037 	     a bad register class to only count 1/3 as much.  */
3038 	  int reject = 0;
3039 
3040 	  if (swapped)
3041 	    {
3042 	      recog_data.operand[commutative] = substed_operand[commutative + 1];
3043 	      recog_data.operand[commutative + 1] = substed_operand[commutative];
3044 	      /* Swap the duplicates too.  */
3045 	      for (i = 0; i < recog_data.n_dups; i++)
3046 		if (recog_data.dup_num[i] == commutative
3047 		    || recog_data.dup_num[i] == commutative + 1)
3048 		  *recog_data.dup_loc[i]
3049 		    = recog_data.operand[(int) recog_data.dup_num[i]];
3050 
3051 	      std::swap (preferred_class[commutative],
3052 			 preferred_class[commutative + 1]);
3053 	      std::swap (pref_or_nothing[commutative],
3054 			 pref_or_nothing[commutative + 1]);
3055 	      std::swap (address_reloaded[commutative],
3056 			 address_reloaded[commutative + 1]);
3057 	    }
3058 
3059 	  this_earlyclobber = 0;
3060 
3061 	  for (i = 0; i < noperands; i++)
3062 	    {
3063 	      const char *p = constraints[i];
3064 	      char *end;
3065 	      int len;
3066 	      int win = 0;
3067 	      int did_match = 0;
3068 	      /* 0 => this operand can be reloaded somehow for this alternative.  */
3069 	      int badop = 1;
3070 	      /* 0 => this operand can be reloaded if the alternative allows regs.  */
3071 	      int winreg = 0;
3072 	      int c;
3073 	      int m;
3074 	      rtx operand = recog_data.operand[i];
3075 	      int offset = 0;
3076 	      /* Nonzero means this is a MEM that must be reloaded into a reg
3077 		 regardless of what the constraint says.  */
3078 	      int force_reload = 0;
3079 	      int offmemok = 0;
3080 	      /* Nonzero if a constant forced into memory would be OK for this
3081 		 operand.  */
3082 	      int constmemok = 0;
3083 	      int earlyclobber = 0;
3084 	      enum constraint_num cn;
3085 	      enum reg_class cl;
3086 
3087 	      /* If the predicate accepts a unary operator, it means that
3088 		 we need to reload the operand, but do not do this for
3089 		 match_operator and friends.  */
3090 	      if (UNARY_P (operand) && *p != 0)
3091 		operand = XEXP (operand, 0);
3092 
3093 	      /* If the operand is a SUBREG, extract
3094 		 the REG or MEM (or maybe even a constant) within.
3095 		 (Constants can occur as a result of reg_equiv_constant.)  */
3096 
3097 	      while (GET_CODE (operand) == SUBREG)
3098 		{
3099 		  /* Offset only matters when operand is a REG and
3100 		     it is a hard reg.  This is because it is passed
3101 		     to reg_fits_class_p only if operand is a REG, and
3102 		     reg_fits_class_p returns 0 for all pseudos.  */
3103 		  if (REG_P (SUBREG_REG (operand))
3104 		      && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3105 		    {
3106 		      if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3107 						 GET_MODE (SUBREG_REG (operand)),
3108 						 SUBREG_BYTE (operand),
3109 						 GET_MODE (operand)) < 0)
3110 			force_reload = 1;
3111 		      offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3112 						     GET_MODE (SUBREG_REG (operand)),
3113 						     SUBREG_BYTE (operand),
3114 						     GET_MODE (operand));
3115 		    }
3116 		  operand = SUBREG_REG (operand);
3117 		  /* Force reload if this is a constant or PLUS or if there may
3118 		     be a problem accessing OPERAND in the outer mode.  */
3119 		  scalar_int_mode inner_mode;
3120 		  if (CONSTANT_P (operand)
3121 		      || GET_CODE (operand) == PLUS
3122 		      /* We must force a reload of paradoxical SUBREGs
3123 			 of a MEM because the alignment of the inner value
3124 			 may not be enough to do the outer reference.  On
3125 			 big-endian machines, it may also reference outside
3126 			 the object.
3127 
3128 			 On machines that extend byte operations, if we have a
3129 			 SUBREG where both the inner and outer modes are no wider
3130 			 than a word and the inner mode is narrower, is integral,
3131 			 and gets extended when loaded from memory, combine.c has
3132 			 made assumptions about the behavior of the machine in such
3133 			 register accesses.  If the data is, in fact, in memory, we
3134 			 must always load using the size assumed to be in the
3135 			 register and let the insn do the different-sized
3136 			 accesses.
3137 
3138 			 This is doubly true if WORD_REGISTER_OPERATIONS.  In
3139 			 this case eliminate_regs has left non-paradoxical
3140 			 subregs for push_reload to see.  Make sure it does
3141 			 by forcing the reload.
3142 
3143 			 ??? When is it right at this stage to have a subreg
3144 			 of a mem that is _not_ to be handled specially?  IMO
3145 			 those should have been reduced to just a mem.  */
3146 		      || ((MEM_P (operand)
3147 			   || (REG_P (operand)
3148 			       && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3149 			  && (WORD_REGISTER_OPERATIONS
3150 			      || (((maybe_lt
3151 				    (GET_MODE_BITSIZE (GET_MODE (operand)),
3152 				     BIGGEST_ALIGNMENT))
3153 				   && (paradoxical_subreg_p
3154 				       (operand_mode[i], GET_MODE (operand)))))
3155 			      || BYTES_BIG_ENDIAN
3156 			      || (known_le (GET_MODE_SIZE (operand_mode[i]),
3157 					    UNITS_PER_WORD)
3158 				  && (is_a <scalar_int_mode>
3159 				      (GET_MODE (operand), &inner_mode))
3160 				  && (GET_MODE_SIZE (inner_mode)
3161 				      <= UNITS_PER_WORD)
3162 				  && paradoxical_subreg_p (operand_mode[i],
3163 							   inner_mode)
3164 				  && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3165 		      )
3166 		    force_reload = 1;
3167 		}
3168 
3169 	      this_alternative[i] = NO_REGS;
3170 	      this_alternative_win[i] = 0;
3171 	      this_alternative_match_win[i] = 0;
3172 	      this_alternative_offmemok[i] = 0;
3173 	      this_alternative_earlyclobber[i] = 0;
3174 	      this_alternative_matches[i] = -1;
3175 
3176 	      /* An empty constraint or empty alternative
3177 		 allows anything which matched the pattern.  */
3178 	      if (*p == 0 || *p == ',')
3179 		win = 1, badop = 0;
3180 
3181 	      /* Scan this alternative's specs for this operand;
3182 		 set WIN if the operand fits any letter in this alternative.
3183 		 Otherwise, clear BADOP if this operand could
3184 		 fit some letter after reloads,
3185 		 or set WINREG if this operand could fit after reloads
3186 		 provided the constraint allows some registers.  */
3187 
3188 	      do
3189 		switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3190 		  {
3191 		  case '\0':
3192 		    len = 0;
3193 		    break;
3194 		  case ',':
3195 		    c = '\0';
3196 		    break;
3197 
3198 		  case '?':
3199 		    reject += 6;
3200 		    break;
3201 
3202 		  case '!':
3203 		    reject = 600;
3204 		    break;
3205 
3206 		  case '#':
3207 		    /* Ignore rest of this alternative as far as
3208 		       reloading is concerned.  */
3209 		    do
3210 		      p++;
3211 		    while (*p && *p != ',');
3212 		    len = 0;
3213 		    break;
3214 
3215 		  case '0':  case '1':  case '2':  case '3':  case '4':
3216 		  case '5':  case '6':  case '7':  case '8':  case '9':
3217 		    m = strtoul (p, &end, 10);
3218 		    p = end;
3219 		    len = 0;
3220 
3221 		    this_alternative_matches[i] = m;
3222 		    /* We are supposed to match a previous operand.
3223 		       If we do, we win if that one did.
3224 		       If we do not, count both of the operands as losers.
3225 		       (This is too conservative, since most of the time
3226 		       only a single reload insn will be needed to make
3227 		       the two operands win.  As a result, this alternative
3228 		       may be rejected when it is actually desirable.)  */
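		    /* For example, with operand 0 constrained "=r" and
		       operand 1 constrained "0", we reach this point with
		       I == 1 and M == 0; operand 1 matches only if it is
		       the same rtx as operand 0, and it wins only if
		       operand 0 itself won.  */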
3229 		    if ((swapped && (m != commutative || i != commutative + 1))
3230 			/* If we are matching as if two operands were swapped,
3231 			   also pretend that operands_match had been computed
3232 			   with swapped.
3233 			   But if I is the second of those and C is the first,
3234 			   don't exchange them, because operands_match is valid
3235 			   only on one side of its diagonal.  */
3236 			? (operands_match
3237 			   [(m == commutative || m == commutative + 1)
3238 			    ? 2 * commutative + 1 - m : m]
3239 			   [(i == commutative || i == commutative + 1)
3240 			    ? 2 * commutative + 1 - i : i])
3241 			: operands_match[m][i])
3242 		      {
3243 			/* If we are matching a non-offsettable address where an
3244 			   offsettable address was expected, then we must reject
3245 			   this combination, because we can't reload it.  */
3246 			if (this_alternative_offmemok[m]
3247 			    && MEM_P (recog_data.operand[m])
3248 			    && this_alternative[m] == NO_REGS
3249 			    && ! this_alternative_win[m])
3250 			  bad = 1;
3251 
3252 			did_match = this_alternative_win[m];
3253 		      }
3254 		    else
3255 		      {
3256 			/* Operands don't match.  */
3257 			rtx value;
3258 			int loc1, loc2;
3259 			/* Retroactively mark the operand we had to match
3260 			   as a loser, if it wasn't already.  */
3261 			if (this_alternative_win[m])
3262 			  losers++;
3263 			this_alternative_win[m] = 0;
3264 			if (this_alternative[m] == NO_REGS)
3265 			  bad = 1;
3266 			/* But count the pair only once in the total badness of
3267 			   this alternative, if the pair can be a dummy reload.
3268 			   The pointers in operand_loc are not swapped; swap
3269 			   them by hand if necessary.  */
3270 			if (swapped && i == commutative)
3271 			  loc1 = commutative + 1;
3272 			else if (swapped && i == commutative + 1)
3273 			  loc1 = commutative;
3274 			else
3275 			  loc1 = i;
3276 			if (swapped && m == commutative)
3277 			  loc2 = commutative + 1;
3278 			else if (swapped && m == commutative + 1)
3279 			  loc2 = commutative;
3280 			else
3281 			  loc2 = m;
3282 			value
3283 			  = find_dummy_reload (recog_data.operand[i],
3284 					       recog_data.operand[m],
3285 					       recog_data.operand_loc[loc1],
3286 					       recog_data.operand_loc[loc2],
3287 					       operand_mode[i], operand_mode[m],
3288 					       this_alternative[m], -1,
3289 					       this_alternative_earlyclobber[m]);
3290 
3291 			if (value != 0)
3292 			  losers--;
3293 		      }
3294 		    /* This can be fixed with reloads if the operand
3295 		       we are supposed to match can be fixed with reloads.  */
3296 		    badop = 0;
3297 		    this_alternative[i] = this_alternative[m];
3298 
3299 		    /* If we have to reload this operand and some previous
3300 		       operand also had to match the same thing as this
3301 		       operand, we don't know how to do that.  So reject this
3302 		       alternative.  */
3303 		    if (! did_match || force_reload)
3304 		      for (j = 0; j < i; j++)
3305 			if (this_alternative_matches[j]
3306 			    == this_alternative_matches[i])
3307 			  {
3308 			    badop = 1;
3309 			    break;
3310 			  }
3311 		    break;
3312 
3313 		  case 'p':
3314 		    /* All necessary reloads for an address_operand
3315 		       were handled in find_reloads_address.  */
3316 		    this_alternative[i]
3317 		      = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3318 					ADDRESS, SCRATCH);
3319 		    win = 1;
3320 		    badop = 0;
3321 		    break;
3322 
3323 		  case TARGET_MEM_CONSTRAINT:
3324 		    if (force_reload)
3325 		      break;
3326 		    if (MEM_P (operand)
3327 			|| (REG_P (operand)
3328 			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3329 			    && reg_renumber[REGNO (operand)] < 0))
3330 		      win = 1;
3331 		    if (CONST_POOL_OK_P (operand_mode[i], operand))
3332 		      badop = 0;
3333 		    constmemok = 1;
3334 		    break;
3335 
3336 		  case '<':
3337 		    if (MEM_P (operand)
3338 			&& ! address_reloaded[i]
3339 			&& (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3340 			    || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3341 		      win = 1;
3342 		    break;
3343 
3344 		  case '>':
3345 		    if (MEM_P (operand)
3346 			&& ! address_reloaded[i]
3347 			&& (GET_CODE (XEXP (operand, 0)) == PRE_INC
3348 			    || GET_CODE (XEXP (operand, 0)) == POST_INC))
3349 		      win = 1;
3350 		    break;
3351 
3352 		    /* Memory operand whose address is not offsettable.  */
3353 		  case 'V':
3354 		    if (force_reload)
3355 		      break;
3356 		    if (MEM_P (operand)
3357 			&& ! (ind_levels ? offsettable_memref_p (operand)
3358 			      : offsettable_nonstrict_memref_p (operand))
3359 			/* Certain mem addresses will become offsettable
3360 			   after they themselves are reloaded.  This is important;
3361 			   we don't want our own handling of unoffsettables
3362 			   to override the handling of reg_equiv_address.  */
3363 			&& !(REG_P (XEXP (operand, 0))
3364 			     && (ind_levels == 0
3365 				 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3366 		      win = 1;
3367 		    break;
3368 
3369 		    /* Memory operand whose address is offsettable.  */
3370 		  case 'o':
3371 		    if (force_reload)
3372 		      break;
3373 		    if ((MEM_P (operand)
3374 			 /* If IND_LEVELS, find_reloads_address won't reload a
3375 			    pseudo that didn't get a hard reg, so we have to
3376 			    reject that case.  */
3377 			 && ((ind_levels ? offsettable_memref_p (operand)
3378 			      : offsettable_nonstrict_memref_p (operand))
3379 			     /* A reloaded address is offsettable because it is now
3380 				just a simple register indirect.  */
3381 			     || address_reloaded[i] == 1))
3382 			|| (REG_P (operand)
3383 			    && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3384 			    && reg_renumber[REGNO (operand)] < 0
3385 			    /* If reg_equiv_address is nonzero, we will be
3386 			       loading it into a register; hence it will be
3387 			       offsettable, but we cannot say that reg_equiv_mem
3388 			       is offsettable without checking.  */
3389 			    && ((reg_equiv_mem (REGNO (operand)) != 0
3390 				 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3391 				|| (reg_equiv_address (REGNO (operand)) != 0))))
3392 		      win = 1;
3393 		    if (CONST_POOL_OK_P (operand_mode[i], operand)
3394 			|| MEM_P (operand))
3395 		      badop = 0;
3396 		    constmemok = 1;
3397 		    offmemok = 1;
3398 		    break;
3399 
3400 		  case '&':
3401 		    /* Output operand that is stored before the need for the
3402 		       input operands (and their index registers) is over.  */
3403 		    earlyclobber = 1, this_earlyclobber = 1;
3404 		    break;
3405 
3406 		  case 'X':
3407 		    force_reload = 0;
3408 		    win = 1;
3409 		    break;
3410 
3411 		  case 'g':
3412 		    if (! force_reload
3413 			/* A PLUS is never a valid operand, but reload can make
3414 			   it from a register when eliminating registers.  */
3415 			&& GET_CODE (operand) != PLUS
3416 			/* A SCRATCH is not a valid operand.  */
3417 			&& GET_CODE (operand) != SCRATCH
3418 			&& (! CONSTANT_P (operand)
3419 			    || ! flag_pic
3420 			    || LEGITIMATE_PIC_OPERAND_P (operand))
3421 			&& (GENERAL_REGS == ALL_REGS
3422 			    || !REG_P (operand)
3423 			    || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3424 				&& reg_renumber[REGNO (operand)] < 0)))
3425 		      win = 1;
3426 		    cl = GENERAL_REGS;
3427 		    goto reg;
3428 
3429 		  default:
3430 		    cn = lookup_constraint (p);
3431 		    switch (get_constraint_type (cn))
3432 		      {
3433 		      case CT_REGISTER:
3434 			cl = reg_class_for_constraint (cn);
3435 			if (cl != NO_REGS)
3436 			  goto reg;
3437 			break;
3438 
3439 		      case CT_CONST_INT:
3440 			if (CONST_INT_P (operand)
3441 			    && (insn_const_int_ok_for_constraint
3442 				(INTVAL (operand), cn)))
3443 			  win = true;
3444 			break;
3445 
3446 		      case CT_MEMORY:
3447 			if (force_reload)
3448 			  break;
3449 			if (constraint_satisfied_p (operand, cn))
3450 			  win = 1;
3451 			/* If the address was already reloaded,
3452 			   we win as well.  */
3453 			else if (MEM_P (operand) && address_reloaded[i] == 1)
3454 			  win = 1;
3455 			/* Likewise if the address will be reloaded because
3456 			   reg_equiv_address is nonzero.  For reg_equiv_mem
3457 			   we have to check.  */
3458 			else if (REG_P (operand)
3459 				 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3460 				 && reg_renumber[REGNO (operand)] < 0
3461 				 && ((reg_equiv_mem (REGNO (operand)) != 0
3462 				      && (constraint_satisfied_p
3463 					  (reg_equiv_mem (REGNO (operand)),
3464 					   cn)))
3465 				     || (reg_equiv_address (REGNO (operand))
3466 					 != 0)))
3467 			  win = 1;
3468 
3469 			/* If we didn't already win, we can reload
3470 			   constants via force_const_mem, and other
3471 			   MEMs by reloading the address like for 'o'.  */
3472 			if (CONST_POOL_OK_P (operand_mode[i], operand)
3473 			    || MEM_P (operand))
3474 			  badop = 0;
3475 			constmemok = 1;
3476 			offmemok = 1;
3477 			break;
3478 
3479 		      case CT_SPECIAL_MEMORY:
3480 			if (force_reload)
3481 			  break;
3482 			if (constraint_satisfied_p (operand, cn))
3483 			  win = 1;
3484 			/* Likewise if the address will be reloaded because
3485 			   reg_equiv_address is nonzero.  For reg_equiv_mem
3486 			   we have to check.  */
3487 			else if (REG_P (operand)
3488 				 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3489 				 && reg_renumber[REGNO (operand)] < 0
3490 				 && reg_equiv_mem (REGNO (operand)) != 0
3491 				 && (constraint_satisfied_p
3492 				     (reg_equiv_mem (REGNO (operand)), cn)))
3493 			  win = 1;
3494 			break;
3495 
3496 		      case CT_ADDRESS:
3497 			if (constraint_satisfied_p (operand, cn))
3498 			  win = 1;
3499 
3500 			/* If we didn't already win, we can reload
3501 			   the address into a base register.  */
3502 			this_alternative[i]
3503 			  = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3504 					    ADDRESS, SCRATCH);
3505 			badop = 0;
3506 			break;
3507 
3508 		      case CT_FIXED_FORM:
3509 			if (constraint_satisfied_p (operand, cn))
3510 			  win = 1;
3511 			break;
3512 		      }
3513 		    break;
3514 
3515 		  reg:
3516 		    this_alternative[i]
3517 		      = reg_class_subunion[this_alternative[i]][cl];
3518 		    if (GET_MODE (operand) == BLKmode)
3519 		      break;
3520 		    winreg = 1;
3521 		    if (REG_P (operand)
3522 			&& reg_fits_class_p (operand, this_alternative[i],
3523 					     offset, GET_MODE (recog_data.operand[i])))
3524 		      win = 1;
3525 		    break;
3526 		  }
3527 	      while ((p += len), c);
3528 
3529 	      if (swapped == (commutative >= 0 ? 1 : 0))
3530 		constraints[i] = p;
3531 
3532 	      /* If this operand could be handled with a reg,
3533 		 and some reg is allowed, then this operand can be handled.  */
3534 	      if (winreg && this_alternative[i] != NO_REGS
3535 		  && (win || !class_only_fixed_regs[this_alternative[i]]))
3536 		badop = 0;
3537 
3538 	      /* Record which operands fit this alternative.  */
3539 	      this_alternative_earlyclobber[i] = earlyclobber;
3540 	      if (win && ! force_reload)
3541 		this_alternative_win[i] = 1;
3542 	      else if (did_match && ! force_reload)
3543 		this_alternative_match_win[i] = 1;
3544 	      else
3545 		{
3546 		  int const_to_mem = 0;
3547 
3548 		  this_alternative_offmemok[i] = offmemok;
3549 		  losers++;
3550 		  if (badop)
3551 		    bad = 1;
3552 		  /* Alternative loses if it has no regs for a reg operand.  */
3553 		  if (REG_P (operand)
3554 		      && this_alternative[i] == NO_REGS
3555 		      && this_alternative_matches[i] < 0)
3556 		    bad = 1;
3557 
3558 		  /* If this is a constant that is reloaded into the desired
3559 		     class by copying it to memory first, count that as another
3560 		     reload.  This is consistent with other code and is
3561 		     required to avoid choosing another alternative when
3562 		     the constant is moved into memory by this function on
3563 		     an early reload pass.  Note that the test here is
3564 		     precisely the same as in the code below that calls
3565 		     force_const_mem.  */
3566 		  if (CONST_POOL_OK_P (operand_mode[i], operand)
3567 		      && ((targetm.preferred_reload_class (operand,
3568 							   this_alternative[i])
3569 			   == NO_REGS)
3570 			  || no_input_reloads))
3571 		    {
3572 		      const_to_mem = 1;
3573 		      if (this_alternative[i] != NO_REGS)
3574 			losers++;
3575 		    }
3576 
3577 		  /* Alternative loses if it requires a type of reload not
3578 		     permitted for this insn.  We can always reload SCRATCH
3579 		     and objects with a REG_UNUSED note.  */
3580 		  if (GET_CODE (operand) != SCRATCH
3581 		      && modified[i] != RELOAD_READ && no_output_reloads
3582 		      && ! find_reg_note (insn, REG_UNUSED, operand))
3583 		    bad = 1;
3584 		  else if (modified[i] != RELOAD_WRITE && no_input_reloads
3585 			   && ! const_to_mem)
3586 		    bad = 1;
3587 
3588 		  /* If we can't reload this value at all, reject this
3589 		     alternative.  Note that we could also lose due to
3590 		     LIMIT_RELOAD_CLASS, but we don't check that
3591 		     here.  */
3592 
3593 		  if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3594 		    {
3595 		      if (targetm.preferred_reload_class (operand,
3596 							  this_alternative[i])
3597 			  == NO_REGS)
3598 			reject = 600;
3599 
3600 		      if (operand_type[i] == RELOAD_FOR_OUTPUT
3601 			  && (targetm.preferred_output_reload_class (operand,
3602 							    this_alternative[i])
3603 			      == NO_REGS))
3604 			reject = 600;
3605 		    }
3606 
3607 		  /* We prefer to reload pseudos over reloading other things,
3608 		     since such reloads may be able to be eliminated later.
3609 		     If we are reloading a SCRATCH, we won't be generating any
3610 		     insns, just using a register, so it is also preferred.
3611 		     So bump REJECT in other cases.  Don't do this in the
3612 		     case where we are forcing a constant into memory and
3613 		     it will then win since we don't want to have a different
3614 		     alternative match then.  */
3615 		  if (! (REG_P (operand)
3616 			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3617 		      && GET_CODE (operand) != SCRATCH
3618 		      && ! (const_to_mem && constmemok))
3619 		    reject += 2;
3620 
3621 		  /* Input reloads can be inherited more often than output
3622 		     reloads can be removed, so penalize output reloads.  */
3623 		  if (operand_type[i] != RELOAD_FOR_INPUT
3624 		      && GET_CODE (operand) != SCRATCH)
3625 		    reject++;
3626 		}
3627 
3628 	      /* If this operand is a pseudo register that didn't get
3629 		 a hard reg and this alternative accepts some
3630 		 register, see if the class that we want is a subset
3631 		 of the preferred class for this register.  If not,
3632 		 but it intersects that class, use the preferred class
3633 		 instead.  If it does not intersect the preferred
3634 		 class, show that usage of this alternative should be
3635 		 discouraged; it will be discouraged more still if the
3636 		 register is `preferred or nothing'.  We do this
3637 		 because it increases the chance of reusing our spill
3638 		 register in a later insn and avoiding a pair of
3639 		 memory stores and loads.
3640 
3641 		 Don't bother with this if this alternative will
3642 		 accept this operand.
3643 
3644 		 Don't do this for a multiword operand, since it is
3645 		 only a small win and has the risk of requiring more
3646 		 spill registers, which could cause a large loss.
3647 
3648 		 Don't do this if the preferred class has only one
3649 		 register because we might otherwise exhaust the
3650 		 class.  */
3651 
3652 	      if (! win && ! did_match
3653 		  && this_alternative[i] != NO_REGS
3654 		  && known_le (GET_MODE_SIZE (operand_mode[i]), UNITS_PER_WORD)
3655 		  && reg_class_size [(int) preferred_class[i]] > 0
3656 		  && ! small_register_class_p (preferred_class[i]))
3657 		{
3658 		  if (! reg_class_subset_p (this_alternative[i],
3659 					    preferred_class[i]))
3660 		    {
3661 		      /* Since we don't have a way of forming the intersection,
3662 			 we just do something special if the preferred class
3663 			 is a subset of the class we have; that's the most
3664 			 common case anyway.  */
3665 		      if (reg_class_subset_p (preferred_class[i],
3666 					      this_alternative[i]))
3667 			this_alternative[i] = preferred_class[i];
3668 		      else
3669 			reject += (2 + 2 * pref_or_nothing[i]);
3670 		    }
3671 		}
3672 	    }
3673 
3674 	  /* Now see if any output operands that are marked "earlyclobber"
3675 	     in this alternative conflict with any input operands
3676 	     or any memory addresses.  */
3677 
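	  /* For example, if operand 0 is an earlyclobber output ("=&r")
	     and operand 1 is an input ("r") not constrained to match it,
	     storing operand 0 early could clobber the register still
	     holding operand 1, so one of the two must be reloaded into a
	     separate register.  */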
3678 	  for (i = 0; i < noperands; i++)
3679 	    if (this_alternative_earlyclobber[i]
3680 		&& (this_alternative_win[i] || this_alternative_match_win[i]))
3681 	      {
3682 		struct decomposition early_data;
3683 
3684 		early_data = decompose (recog_data.operand[i]);
3685 
3686 		gcc_assert (modified[i] != RELOAD_READ);
3687 
3688 		if (this_alternative[i] == NO_REGS)
3689 		  {
3690 		    this_alternative_earlyclobber[i] = 0;
3691 		    gcc_assert (this_insn_is_asm);
3692 		    error_for_asm (this_insn,
3693 			      "%<&%> constraint used with no register class");
3694 		  }
3695 
3696 		for (j = 0; j < noperands; j++)
3697 		  /* Is this an input operand or a memory ref?  */
3698 		  if ((MEM_P (recog_data.operand[j])
3699 		       || modified[j] != RELOAD_WRITE)
3700 		      && j != i
3701 		      /* Ignore things like match_operator operands.  */
3702 		      && !recog_data.is_operator[j]
3703 		      /* Don't count an input operand that is constrained to match
3704 			 the early clobber operand.  */
3705 		      && ! (this_alternative_matches[j] == i
3706 			    && rtx_equal_p (recog_data.operand[i],
3707 					    recog_data.operand[j]))
3708 		      /* Is it altered by storing the earlyclobber operand?  */
3709 		      && !immune_p (recog_data.operand[j], recog_data.operand[i],
3710 				    early_data))
3711 		    {
3712 		      /* If the output is in a non-empty few-regs class,
3713 			 it's costly to reload it, so reload the input instead.  */
3714 		      if (small_register_class_p (this_alternative[i])
3715 			  && (REG_P (recog_data.operand[j])
3716 			      || GET_CODE (recog_data.operand[j]) == SUBREG))
3717 			{
3718 			  losers++;
3719 			  this_alternative_win[j] = 0;
3720 			  this_alternative_match_win[j] = 0;
3721 			}
3722 		      else
3723 			break;
3724 		    }
3725 		/* If an earlyclobber operand conflicts with something,
3726 		   it must be reloaded, so request this and count the cost.  */
3727 		if (j != noperands)
3728 		  {
3729 		    losers++;
3730 		    this_alternative_win[i] = 0;
3731 		    this_alternative_match_win[j] = 0;
3732 		    for (j = 0; j < noperands; j++)
3733 		      if (this_alternative_matches[j] == i
3734 			  && this_alternative_match_win[j])
3735 			{
3736 			  this_alternative_win[j] = 0;
3737 			  this_alternative_match_win[j] = 0;
3738 			  losers++;
3739 			}
3740 		  }
3741 	      }
3742 
3743 	  /* If one alternative accepts all the operands, no reload required,
3744 	     choose that alternative; don't consider the remaining ones.  */
3745 	  if (losers == 0)
3746 	    {
3747 	      /* Unswap these so that they are never swapped at `finish'.  */
3748 	      if (swapped)
3749 		{
3750 		  recog_data.operand[commutative] = substed_operand[commutative];
3751 		  recog_data.operand[commutative + 1]
3752 		    = substed_operand[commutative + 1];
3753 		}
3754 	      for (i = 0; i < noperands; i++)
3755 		{
3756 		  goal_alternative_win[i] = this_alternative_win[i];
3757 		  goal_alternative_match_win[i] = this_alternative_match_win[i];
3758 		  goal_alternative[i] = this_alternative[i];
3759 		  goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3760 		  goal_alternative_matches[i] = this_alternative_matches[i];
3761 		  goal_alternative_earlyclobber[i]
3762 		    = this_alternative_earlyclobber[i];
3763 		}
3764 	      goal_alternative_number = this_alternative_number;
3765 	      goal_alternative_swapped = swapped;
3766 	      goal_earlyclobber = this_earlyclobber;
3767 	      goto finish;
3768 	    }
3769 
3770 	  /* REJECT, set by the ! and ? constraint characters and when a register
3771 	     would be reloaded into a non-preferred class, discourages the use of
3772 	     this alternative for a reload goal.  REJECT is incremented by six
3773 	     for each ? and two for each non-preferred class.  */
3774 	  losers = losers * 6 + reject;
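	  /* For example, an alternative needing one reload plus a single
	     "?" scores 1 * 6 + 6 = 12, the same as an alternative needing
	     two reloads and no "?"; BEST is only replaced by a strictly
	     smaller score, so the earlier of two equally scored
	     alternatives is kept.  */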
3775 
3776 	  /* If this alternative can be made to work by reloading,
3777 	     and it needs less reloading than the others checked so far,
3778 	     record it as the chosen goal for reloading.  */
3779 	  if (! bad)
3780 	    {
3781 	      if (best > losers)
3782 		{
3783 		  for (i = 0; i < noperands; i++)
3784 		    {
3785 		      goal_alternative[i] = this_alternative[i];
3786 		      goal_alternative_win[i] = this_alternative_win[i];
3787 		      goal_alternative_match_win[i]
3788 			= this_alternative_match_win[i];
3789 		      goal_alternative_offmemok[i]
3790 			= this_alternative_offmemok[i];
3791 		      goal_alternative_matches[i] = this_alternative_matches[i];
3792 		      goal_alternative_earlyclobber[i]
3793 			= this_alternative_earlyclobber[i];
3794 		    }
3795 		  goal_alternative_swapped = swapped;
3796 		  best = losers;
3797 		  goal_alternative_number = this_alternative_number;
3798 		  goal_earlyclobber = this_earlyclobber;
3799 		}
3800 	    }
3801 
3802 	  if (swapped)
3803 	    {
3804 	      /* If the commutative operands have been swapped, swap
3805 		 them back in order to check the next alternative.  */
3806 	      recog_data.operand[commutative] = substed_operand[commutative];
3807 	      recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3808 	      /* Unswap the duplicates too.  */
3809 	      for (i = 0; i < recog_data.n_dups; i++)
3810 		if (recog_data.dup_num[i] == commutative
3811 		    || recog_data.dup_num[i] == commutative + 1)
3812 		  *recog_data.dup_loc[i]
3813 		    = recog_data.operand[(int) recog_data.dup_num[i]];
3814 
3815 	      /* Unswap the operand related information as well.  */
3816 	      std::swap (preferred_class[commutative],
3817 			 preferred_class[commutative + 1]);
3818 	      std::swap (pref_or_nothing[commutative],
3819 			 pref_or_nothing[commutative + 1]);
3820 	      std::swap (address_reloaded[commutative],
3821 			 address_reloaded[commutative + 1]);
3822 	    }
3823 	}
3824     }
3825 
3826   /* The operands don't meet the constraints.
3827      goal_alternative describes the alternative
3828      that we could reach by reloading the fewest operands.
3829      Reload so as to fit it.  */
3830 
3831   if (best == MAX_RECOG_OPERANDS * 2 + 600)
3832     {
3833       /* No alternative works with reloads??  */
3834       if (insn_code_number >= 0)
3835 	fatal_insn ("unable to generate reloads for:", insn);
3836       error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3837       /* Avoid further trouble with this insn.  */
3838       PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3839       n_reloads = 0;
3840       return 0;
3841     }
3842 
3843   /* Jump to `finish' from above if all operands are valid already.
3844      In that case, goal_alternative_win is all 1.  */
3845  finish:
3846 
3847   /* Right now, for any pair of operands I and J that are required to match,
3848      with I < J,
3849      goal_alternative_matches[J] is I.
3850      Set up goal_alternative_matched as the inverse function:
3851      goal_alternative_matched[I] = J.  */
3852 
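  /* For example, if operand 1 had to match operand 0 (constraint "0"),
     goal_alternative_matches[1] is 0; if operand 1 still needs a reload,
     the loop below sets goal_alternative_matched[0] to 1.  */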
3853   for (i = 0; i < noperands; i++)
3854     goal_alternative_matched[i] = -1;
3855 
3856   for (i = 0; i < noperands; i++)
3857     if (! goal_alternative_win[i]
3858 	&& goal_alternative_matches[i] >= 0)
3859       goal_alternative_matched[goal_alternative_matches[i]] = i;
3860 
3861   for (i = 0; i < noperands; i++)
3862     goal_alternative_win[i] |= goal_alternative_match_win[i];
3863 
3864   /* If the best alternative is with operands 1 and 2 swapped,
3865      consider them swapped before reporting the reloads.  Update the
3866      operand numbers of any reloads already pushed.  */
3867 
3868   if (goal_alternative_swapped)
3869     {
3870       std::swap (substed_operand[commutative],
3871 		 substed_operand[commutative + 1]);
3872       std::swap (recog_data.operand[commutative],
3873 		 recog_data.operand[commutative + 1]);
3874       std::swap (*recog_data.operand_loc[commutative],
3875 		 *recog_data.operand_loc[commutative + 1]);
3876 
3877       for (i = 0; i < recog_data.n_dups; i++)
3878 	if (recog_data.dup_num[i] == commutative
3879 	    || recog_data.dup_num[i] == commutative + 1)
3880 	  *recog_data.dup_loc[i]
3881 	    = recog_data.operand[(int) recog_data.dup_num[i]];
3882 
3883       for (i = 0; i < n_reloads; i++)
3884 	{
3885 	  if (rld[i].opnum == commutative)
3886 	    rld[i].opnum = commutative + 1;
3887 	  else if (rld[i].opnum == commutative + 1)
3888 	    rld[i].opnum = commutative;
3889 	}
3890     }
3891 
3892   for (i = 0; i < noperands; i++)
3893     {
3894       operand_reloadnum[i] = -1;
3895 
3896       /* If this is an earlyclobber operand, we need to widen the scope.
3897 	 The reload must remain valid from the start of the insn being
3898 	 reloaded until after the operand is stored into its destination.
3899 	 We approximate this with RELOAD_OTHER even though we know that we
3900 	 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3901 
3902 	 One special case that is worth checking is when we have an
3903 	 output that is earlyclobber but isn't used past the insn (typically
3904 	 a SCRATCH).  In this case, we need only have the reload live
3905 	 through the insn itself, but not for any of our input or output
3906 	 reloads.
3907 	 But we must not accidentally narrow the scope of an existing
3908 	 RELOAD_OTHER reload - leave these alone.
3909 
3910 	 In any case, anything needed to address this operand can remain
3911 	 however it was previously categorized.  */
3912 
3913       if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3914 	operand_type[i]
3915 	  = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3916 	     ? RELOAD_FOR_INSN : RELOAD_OTHER);
3917     }
3918 
3919   /* Any constants that aren't allowed and can't be reloaded
3920      into registers are here changed into memory references.  */
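  /* For example, a CONST_DOUBLE operand that cannot be reloaded into the
     chosen alternative's register class (or for which input reloads are
     not allowed) is placed in the constant pool by force_const_mem below,
     and the operand becomes a MEM referring to that constant-pool
     entry.  */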
3921   for (i = 0; i < noperands; i++)
3922     if (! goal_alternative_win[i])
3923       {
3924 	rtx op = recog_data.operand[i];
3925 	rtx subreg = NULL_RTX;
3926 	rtx plus = NULL_RTX;
3927 	machine_mode mode = operand_mode[i];
3928 
3929 	/* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3930 	   push_reload so we have to let them pass here.  */
3931 	if (GET_CODE (op) == SUBREG)
3932 	  {
3933 	    subreg = op;
3934 	    op = SUBREG_REG (op);
3935 	    mode = GET_MODE (op);
3936 	  }
3937 
3938 	if (GET_CODE (op) == PLUS)
3939 	  {
3940 	    plus = op;
3941 	    op = XEXP (op, 1);
3942 	  }
3943 
3944 	if (CONST_POOL_OK_P (mode, op)
3945 	    && ((targetm.preferred_reload_class (op, goal_alternative[i])
3946 		 == NO_REGS)
3947 		|| no_input_reloads))
3948 	  {
3949 	    int this_address_reloaded;
3950 	    rtx tem = force_const_mem (mode, op);
3951 
3952 	    /* If we stripped a SUBREG or a PLUS above add it back.  */
3953 	    if (plus != NULL_RTX)
3954 	      tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3955 
3956 	    if (subreg != NULL_RTX)
3957 	      tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3958 
3959 	    this_address_reloaded = 0;
3960 	    substed_operand[i] = recog_data.operand[i]
3961 	      = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3962 				     0, insn, &this_address_reloaded);
3963 
3964 	    /* If the alternative accepts constant pool refs directly
3965 	       there will be no reload needed at all.  */
3966 	    if (plus == NULL_RTX
3967 		&& subreg == NULL_RTX
3968 		&& alternative_allows_const_pool_ref (this_address_reloaded != 1
3969 						      ? substed_operand[i]
3970 						      : NULL,
3971 						      recog_data.constraints[i],
3972 						      goal_alternative_number))
3973 	      goal_alternative_win[i] = 1;
3974 	  }
3975       }
3976 
3977   /* Record the values of the earlyclobber operands for the caller.  */
3978   if (goal_earlyclobber)
3979     for (i = 0; i < noperands; i++)
3980       if (goal_alternative_earlyclobber[i])
3981 	reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3982 
3983   /* Now record reloads for all the operands that need them.  */
3984   for (i = 0; i < noperands; i++)
3985     if (! goal_alternative_win[i])
3986       {
3987 	/* Operands that match previous ones have already been handled.  */
3988 	if (goal_alternative_matches[i] >= 0)
3989 	  ;
3990 	/* Handle an operand with a nonoffsettable address
3991 	   appearing where an offsettable address will do
3992 	   by reloading the address into a base register.
3993 
3994 	   ??? We can also do this when the operand is a register and
3995 	   reg_equiv_mem is not offsettable, but this is a bit tricky,
3996 	   so we don't bother with it.  It may not be worth doing.  */
3997 	else if (goal_alternative_matched[i] == -1
3998 		 && goal_alternative_offmemok[i]
3999 		 && MEM_P (recog_data.operand[i]))
4000 	  {
4001 	    /* If the address to be reloaded is a VOIDmode constant,
4002 	       use the default address mode as the mode of the reload register,
4003 	       as would have been done by find_reloads_address.  */
4004 	    addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4005 	    machine_mode address_mode;
4006 
4007 	    address_mode = get_address_mode (recog_data.operand[i]);
4008 	    operand_reloadnum[i]
4009 	      = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4010 			     &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4011 			     base_reg_class (VOIDmode, as, MEM, SCRATCH),
4012 			     address_mode,
4013 			     VOIDmode, 0, 0, i, RELOAD_OTHER);
4014 	    rld[operand_reloadnum[i]].inc
4015 	      = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4016 
4017 	    /* If this operand is an output, we will have made any
4018 	       reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4019 	       now we are treating part of the operand as an input, so
4020 	       we must change these to RELOAD_FOR_OTHER_ADDRESS.  */
4021 
4022 	    if (modified[i] == RELOAD_WRITE)
4023 	      {
4024 		for (j = 0; j < n_reloads; j++)
4025 		  {
4026 		    if (rld[j].opnum == i)
4027 		      {
4028 			if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4029 			  rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4030 			else if (rld[j].when_needed
4031 				 == RELOAD_FOR_OUTADDR_ADDRESS)
4032 			  rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4033 		      }
4034 		  }
4035 	      }
4036 	  }
4037 	else if (goal_alternative_matched[i] == -1)
4038 	  {
4039 	    operand_reloadnum[i]
4040 	      = push_reload ((modified[i] != RELOAD_WRITE
4041 			      ? recog_data.operand[i] : 0),
4042 			     (modified[i] != RELOAD_READ
4043 			      ? recog_data.operand[i] : 0),
4044 			     (modified[i] != RELOAD_WRITE
4045 			      ? recog_data.operand_loc[i] : 0),
4046 			     (modified[i] != RELOAD_READ
4047 			      ? recog_data.operand_loc[i] : 0),
4048 			     (enum reg_class) goal_alternative[i],
4049 			     (modified[i] == RELOAD_WRITE
4050 			      ? VOIDmode : operand_mode[i]),
4051 			     (modified[i] == RELOAD_READ
4052 			      ? VOIDmode : operand_mode[i]),
4053 			     (insn_code_number < 0 ? 0
4054 			      : insn_data[insn_code_number].operand[i].strict_low),
4055 			     0, i, operand_type[i]);
4056 	  }
4057 	/* In a matching pair of operands, one must be input only
4058 	   and the other must be output only.
4059 	   Pass the input operand as IN and the other as OUT.  */
4060 	else if (modified[i] == RELOAD_READ
4061 		 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4062 	  {
4063 	    operand_reloadnum[i]
4064 	      = push_reload (recog_data.operand[i],
4065 			     recog_data.operand[goal_alternative_matched[i]],
4066 			     recog_data.operand_loc[i],
4067 			     recog_data.operand_loc[goal_alternative_matched[i]],
4068 			     (enum reg_class) goal_alternative[i],
4069 			     operand_mode[i],
4070 			     operand_mode[goal_alternative_matched[i]],
4071 			     0, 0, i, RELOAD_OTHER);
4072 	    operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4073 	  }
4074 	else if (modified[i] == RELOAD_WRITE
4075 		 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4076 	  {
4077 	    operand_reloadnum[goal_alternative_matched[i]]
4078 	      = push_reload (recog_data.operand[goal_alternative_matched[i]],
4079 			     recog_data.operand[i],
4080 			     recog_data.operand_loc[goal_alternative_matched[i]],
4081 			     recog_data.operand_loc[i],
4082 			     (enum reg_class) goal_alternative[i],
4083 			     operand_mode[goal_alternative_matched[i]],
4084 			     operand_mode[i],
4085 			     0, 0, i, RELOAD_OTHER);
4086 	    operand_reloadnum[i] = output_reloadnum;
4087 	  }
4088 	else
4089 	  {
4090 	    gcc_assert (insn_code_number < 0);
4091 	    error_for_asm (insn, "inconsistent operand constraints "
4092 			   "in an %<asm%>");
4093 	    /* Avoid further trouble with this insn.  */
4094 	    PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4095 	    n_reloads = 0;
4096 	    return 0;
4097 	  }
4098       }
4099     else if (goal_alternative_matched[i] < 0
4100 	     && goal_alternative_matches[i] < 0
4101 	     && address_operand_reloaded[i] != 1
4102 	     && optimize)
4103       {
4104 	/* For each non-matching operand that's a MEM or a pseudo-register
4105 	   that didn't get a hard register, make an optional reload.
4106 	   This may get done even if the insn needs no reloads otherwise.  */
4107 
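	/* For example, a pseudo that stayed in its stack slot gets an
	   optional reload recorded here; it is performed only if a
	   suitable register turns out to be available (for instance by
	   inheritance) and is otherwise discarded.  */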
4108 	rtx operand = recog_data.operand[i];
4109 
4110 	while (GET_CODE (operand) == SUBREG)
4111 	  operand = SUBREG_REG (operand);
4112 	if ((MEM_P (operand)
4113 	     || (REG_P (operand)
4114 		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4115 	    /* If this is only for an output, the optional reload would not
4116 	       actually cause us to use a register now, just note that
4117 	       something is stored here.  */
4118 	    && (goal_alternative[i] != NO_REGS
4119 		|| modified[i] == RELOAD_WRITE)
4120 	    && ! no_input_reloads
4121 	    /* An optional output reload might allow us to delete INSN later.
4122 	       We mustn't make in-out reloads on insns for which output
4123 	       reloads are not permitted.
4124 	       If this is an asm, we can't delete it; we must not even call
4125 	       push_reload for an optional output reload in this case,
4126 	       because we can't be sure that the constraint allows a register,
4127 	       and push_reload verifies the constraints for asms.  */
4128 	    && (modified[i] == RELOAD_READ
4129 		|| (! no_output_reloads && ! this_insn_is_asm)))
4130 	  operand_reloadnum[i]
4131 	    = push_reload ((modified[i] != RELOAD_WRITE
4132 			    ? recog_data.operand[i] : 0),
4133 			   (modified[i] != RELOAD_READ
4134 			    ? recog_data.operand[i] : 0),
4135 			   (modified[i] != RELOAD_WRITE
4136 			    ? recog_data.operand_loc[i] : 0),
4137 			   (modified[i] != RELOAD_READ
4138 			    ? recog_data.operand_loc[i] : 0),
4139 			   (enum reg_class) goal_alternative[i],
4140 			   (modified[i] == RELOAD_WRITE
4141 			    ? VOIDmode : operand_mode[i]),
4142 			   (modified[i] == RELOAD_READ
4143 			    ? VOIDmode : operand_mode[i]),
4144 			   (insn_code_number < 0 ? 0
4145 			    : insn_data[insn_code_number].operand[i].strict_low),
4146 			   1, i, operand_type[i]);
4147 	/* If a memory reference remains (either as a MEM or a pseudo that
4148 	   did not get a hard register), yet we can't make an optional
4149 	   reload, check if this is actually a pseudo register reference;
4150 	   we then need to emit a USE and/or a CLOBBER so that reload
4151 	   inheritance will do the right thing.  */
4152 	else if (replace
4153 		 && (MEM_P (operand)
4154 		     || (REG_P (operand)
4155 			 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4156 			 && reg_renumber [REGNO (operand)] < 0)))
4157 	  {
4158 	    operand = *recog_data.operand_loc[i];
4159 
4160 	    while (GET_CODE (operand) == SUBREG)
4161 	      operand = SUBREG_REG (operand);
4162 	    if (REG_P (operand))
4163 	      {
4164 		if (modified[i] != RELOAD_WRITE)
4165 		  /* We mark the USE with QImode so that we recognize
4166 		     it as one that can be safely deleted at the end
4167 		     of reload.  */
4168 		  PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4169 					      insn), QImode);
4170 		if (modified[i] != RELOAD_READ)
4171 		  emit_insn_after (gen_clobber (operand), insn);
4172 	      }
4173 	  }
4174       }
4175     else if (goal_alternative_matches[i] >= 0
4176 	     && goal_alternative_win[goal_alternative_matches[i]]
4177 	     && modified[i] == RELOAD_READ
4178 	     && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4179 	     && ! no_input_reloads && ! no_output_reloads
4180 	     && optimize)
4181       {
4182 	/* Similarly, make an optional reload for a pair of matching
4183 	   objects that are in MEM or a pseudo that didn't get a hard reg.  */
4184 
4185 	rtx operand = recog_data.operand[i];
4186 
4187 	while (GET_CODE (operand) == SUBREG)
4188 	  operand = SUBREG_REG (operand);
4189 	if ((MEM_P (operand)
4190 	     || (REG_P (operand)
4191 		 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4192 	    && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4193 	  operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4194 	    = push_reload (recog_data.operand[goal_alternative_matches[i]],
4195 			   recog_data.operand[i],
4196 			   recog_data.operand_loc[goal_alternative_matches[i]],
4197 			   recog_data.operand_loc[i],
4198 			   (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4199 			   operand_mode[goal_alternative_matches[i]],
4200 			   operand_mode[i],
4201 			   0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4202       }
4203 
4204   /* Perform whatever substitutions on the operands we are supposed
4205      to make due to commutativity or replacement of registers
4206      with equivalent constants or memory slots.  */
4207 
4208   for (i = 0; i < noperands; i++)
4209     {
4210       /* We only do this on the last pass through reload, because it is
4211 	 possible for some data (like reg_equiv_address) to be changed during
4212 	 later passes.  Moreover, we lose the opportunity to get a useful
4213 	 reload_{in,out}_reg when we do these replacements.  */
4214 
4215       if (replace)
4216 	{
4217 	  rtx substitution = substed_operand[i];
4218 
4219 	  *recog_data.operand_loc[i] = substitution;
4220 
4221 	  /* If we're replacing an operand with a LABEL_REF, we need to
4222 	     make sure that there's a REG_LABEL_OPERAND note attached to
4223 	     this instruction.  */
4224 	  if (GET_CODE (substitution) == LABEL_REF
4225 	      && !find_reg_note (insn, REG_LABEL_OPERAND,
4226 				 label_ref_label (substitution))
4227 	      /* For a JUMP_P, if it was a branch target it must have
4228 		 already been recorded as such.  */
4229 	      && (!JUMP_P (insn)
4230 		  || !label_is_jump_target_p (label_ref_label (substitution),
4231 					      insn)))
4232 	    {
4233 	      add_reg_note (insn, REG_LABEL_OPERAND,
4234 			    label_ref_label (substitution));
4235 	      if (LABEL_P (label_ref_label (substitution)))
4236 		++LABEL_NUSES (label_ref_label (substitution));
4237 	    }
4238 
4239 	}
4240       else
4241 	retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4242     }
4243 
4244   /* If this insn pattern contains any MATCH_DUP's, make sure that
4245      they will be substituted if the operands they match are substituted.
4246      Also do now any substitutions we already did on the operands.
4247 
4248      Don't do this if we aren't making replacements because we might be
4249      propagating things allocated by frame pointer elimination into places
4250      it doesn't expect.  */
4251 
4252   if (insn_code_number >= 0 && replace)
4253     for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4254       {
4255 	int opno = recog_data.dup_num[i];
4256 	*recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4257 	dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4258       }
4259 
4260 #if 0
4261   /* This loses because reloading of prior insns can invalidate the equivalence
4262      (or at least find_equiv_reg isn't smart enough to find it any more),
4263      causing this insn to need more reload regs than it needed before.
4264      It may be too late to make the reload regs available.
4265      Now this optimization is done safely in choose_reload_regs.  */
4266 
4267   /* For each reload of a reg into some other class of reg,
4268      search for an existing equivalent reg (same value now) in the right class.
4269      We can use it as long as we don't need to change its contents.  */
4270   for (i = 0; i < n_reloads; i++)
4271     if (rld[i].reg_rtx == 0
4272 	&& rld[i].in != 0
4273 	&& REG_P (rld[i].in)
4274 	&& rld[i].out == 0)
4275       {
4276 	rld[i].reg_rtx
4277 	  = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4278 			    static_reload_reg_p, 0, rld[i].inmode);
4279 	/* Prevent generation of insn to load the value
4280 	   because the one we found already has the value.  */
4281 	if (rld[i].reg_rtx)
4282 	  rld[i].in = rld[i].reg_rtx;
4283       }
4284 #endif
4285 
4286   /* If we detected an error and replaced the asm instruction with a USE,
4287      forget about the reloads.  */
4288   if (GET_CODE (PATTERN (insn)) == USE
4289       && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4290     n_reloads = 0;
4291 
4292   /* Perhaps an output reload can be combined with another
4293      to reduce needs by one.  */
4294   if (!goal_earlyclobber)
4295     combine_reloads ();
4296 
4297   /* If we have a pair of reloads for parts of an address, they are reloading
4298      the same object, the operands themselves were not reloaded, and they
4299      are for two operands that are supposed to match, then merge the reloads
4300      and change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS.  */
4301 
4302   for (i = 0; i < n_reloads; i++)
4303     {
4304       int k;
4305 
4306       for (j = i + 1; j < n_reloads; j++)
4307 	if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4308 	     || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4309 	     || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4310 	     || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4311 	    && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4312 		|| rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4313 		|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4314 		|| rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4315 	    && rtx_equal_p (rld[i].in, rld[j].in)
4316 	    && (operand_reloadnum[rld[i].opnum] < 0
4317 		|| rld[operand_reloadnum[rld[i].opnum]].optional)
4318 	    && (operand_reloadnum[rld[j].opnum] < 0
4319 		|| rld[operand_reloadnum[rld[j].opnum]].optional)
4320 	    && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4321 		|| (goal_alternative_matches[rld[j].opnum]
4322 		    == rld[i].opnum)))
4323 	  {
4324 	    for (k = 0; k < n_replacements; k++)
4325 	      if (replacements[k].what == j)
4326 		replacements[k].what = i;
4327 
4328 	    if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4329 		|| rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4330 	      rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4331 	    else
4332 	      rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4333 	    rld[j].in = 0;
4334 	  }
4335     }
4336 
4337   /* Scan all the reloads and update their type.
4338      If a reload is for the address of an operand and we didn't reload
4339      that operand, change the type.  Similarly, change the operand number
4340      of a reload when two operands match.  If a reload is optional, treat it
4341      as though the operand isn't reloaded.
4342 
4343      ??? This latter case is somewhat odd because if we do the optional
4344      reload, it means the object is hanging around.  Thus we need only
4345      do the address reload if the optional reload was NOT done.
4346 
4347      Change secondary reloads to be the address type of their operand, not
4348      the normal type.
4349 
4350      If an operand's reload is now RELOAD_OTHER, change any
4351      RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4352      RELOAD_FOR_OTHER_ADDRESS.  */
4353 
4354   for (i = 0; i < n_reloads; i++)
4355     {
4356       if (rld[i].secondary_p
4357 	  && rld[i].when_needed == operand_type[rld[i].opnum])
4358 	rld[i].when_needed = address_type[rld[i].opnum];
4359 
4360       if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4361 	   || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4362 	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4363 	   || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4364 	  && (operand_reloadnum[rld[i].opnum] < 0
4365 	      || rld[operand_reloadnum[rld[i].opnum]].optional))
4366 	{
4367 	  /* If we have a secondary reload to go along with this reload,
4368 	     change its type to RELOAD_FOR_OPADDR_ADDR.  */
4369 
4370 	  if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4371 	       || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4372 	      && rld[i].secondary_in_reload != -1)
4373 	    {
4374 	      int secondary_in_reload = rld[i].secondary_in_reload;
4375 
4376 	      rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4377 
4378 	      /* If there's a tertiary reload we have to change it also.  */
4379 	      if (secondary_in_reload > 0
4380 		  && rld[secondary_in_reload].secondary_in_reload != -1)
4381 		rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4382 		  = RELOAD_FOR_OPADDR_ADDR;
4383 	    }
4384 
4385 	  if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4386 	       || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4387 	      && rld[i].secondary_out_reload != -1)
4388 	    {
4389 	      int secondary_out_reload = rld[i].secondary_out_reload;
4390 
4391 	      rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4392 
4393 	      /* If there's a tertiary reload we have to change it also.  */
4394 	      if (secondary_out_reload
4395 		  && rld[secondary_out_reload].secondary_out_reload != -1)
4396 		rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4397 		  = RELOAD_FOR_OPADDR_ADDR;
4398 	    }
4399 
4400 	  if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4401 	      || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4402 	    rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4403 	  else
4404 	    rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4405 	}
4406 
4407       if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4408 	   || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4409 	  && operand_reloadnum[rld[i].opnum] >= 0
4410 	  && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4411 	      == RELOAD_OTHER))
4412 	rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4413 
4414       if (goal_alternative_matches[rld[i].opnum] >= 0)
4415 	rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4416     }
4417 
4418   /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4419      If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4420      reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4421 
4422      choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4423      conflict with RELOAD_FOR_OPERAND_ADDRESS reloads.  This is true for a
4424      single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4425      However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4426      then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4427      RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4428      This is complicated by the fact that a single operand can have more
4429      than one RELOAD_FOR_OPERAND_ADDRESS reload.  It is very difficult to fix
4430      choose_reload_regs without affecting code quality, and cases that
4431      actually fail are extremely rare, so it turns out to be better to fix
4432      the problem here by not generating cases that choose_reload_regs will
4433      fail for.  */
4434   /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4435      RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4436      a single operand.
4437      We can reduce the register pressure by exploiting that a
4438      RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4439      does not conflict with any of them, if it is only used for the first of
4440      the RELOAD_FOR_X_ADDRESS reloads.  */
4441   {
4442     int first_op_addr_num = -2;
4443     int first_inpaddr_num[MAX_RECOG_OPERANDS];
4444     int first_outpaddr_num[MAX_RECOG_OPERANDS];
4445     int need_change = 0;
4446     /* We use first_op_addr_num and the contents of the above arrays
4447        first as flags: -2 means no instance encountered, -1 means exactly
4448        one instance encountered.
4449        If more than one instance has been encountered, we store the reload
4450        number of the first reload of the kind in question; reload numbers
4451        are known to be non-negative.  */
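    /* The "++ ... >= 0" tests below implement that scheme: the first
       instance of a kind moves its counter from -2 to -1, and any further
       instance makes it non-negative, at which point the counter is
       overwritten with the reload number just seen.  Since the scan runs
       from the last reload down to the first, the counter ends up holding
       the number of the first reload of that kind.  */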
4452     for (i = 0; i < noperands; i++)
4453       first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4454     for (i = n_reloads - 1; i >= 0; i--)
4455       {
4456 	switch (rld[i].when_needed)
4457 	  {
4458 	  case RELOAD_FOR_OPERAND_ADDRESS:
4459 	    if (++first_op_addr_num >= 0)
4460 	      {
4461 		first_op_addr_num = i;
4462 		need_change = 1;
4463 	      }
4464 	    break;
4465 	  case RELOAD_FOR_INPUT_ADDRESS:
4466 	    if (++first_inpaddr_num[rld[i].opnum] >= 0)
4467 	      {
4468 		first_inpaddr_num[rld[i].opnum] = i;
4469 		need_change = 1;
4470 	      }
4471 	    break;
4472 	  case RELOAD_FOR_OUTPUT_ADDRESS:
4473 	    if (++first_outpaddr_num[rld[i].opnum] >= 0)
4474 	      {
4475 		first_outpaddr_num[rld[i].opnum] = i;
4476 		need_change = 1;
4477 	      }
4478 	    break;
4479 	  default:
4480 	    break;
4481 	  }
4482       }
4483 
4484     if (need_change)
4485       {
4486 	for (i = 0; i < n_reloads; i++)
4487 	  {
4488 	    int first_num;
4489 	    enum reload_type type;
4490 
4491 	    switch (rld[i].when_needed)
4492 	      {
4493 	      case RELOAD_FOR_OPADDR_ADDR:
4494 		first_num = first_op_addr_num;
4495 		type = RELOAD_FOR_OPERAND_ADDRESS;
4496 		break;
4497 	      case RELOAD_FOR_INPADDR_ADDRESS:
4498 		first_num = first_inpaddr_num[rld[i].opnum];
4499 		type = RELOAD_FOR_INPUT_ADDRESS;
4500 		break;
4501 	      case RELOAD_FOR_OUTADDR_ADDRESS:
4502 		first_num = first_outpaddr_num[rld[i].opnum];
4503 		type = RELOAD_FOR_OUTPUT_ADDRESS;
4504 		break;
4505 	      default:
4506 		continue;
4507 	      }
4508 	    if (first_num < 0)
4509 	      continue;
4510 	    else if (i > first_num)
4511 	      rld[i].when_needed = type;
4512 	    else
4513 	      {
4514 		/* Check if the only TYPE reload that uses reload I is
4515 		   reload FIRST_NUM.  */
4516 		for (j = n_reloads - 1; j > first_num; j--)
4517 		  {
4518 		    if (rld[j].when_needed == type
4519 			&& (rld[i].secondary_p
4520 			    ? rld[j].secondary_in_reload == i
4521 			    : reg_mentioned_p (rld[i].in, rld[j].in)))
4522 		      {
4523 			rld[i].when_needed = type;
4524 			break;
4525 		      }
4526 		  }
4527 	      }
4528 	  }
4529       }
4530   }
4531 
4532 #if NB_FIX_VAX_BACKEND
4533   /*
4534    * Scan the reloads again looking for a case where there is
4535    * precisely one RELOAD_FOR_OPERAND_ADDRESS reload and one
4536    * RELOAD_FOR_OPADDR_ADDR reload BUT they are for different
4537    * operands.  choose_reload_regs assumes that the
4538    * RELOAD_FOR_OPADDR_ADDR and RELOAD_FOR_OPERAND_ADDRESS reloads are
4539    * a pair operating on the same operand and will choose the same
4540    * register for both, which is not what is wanted.
4541    */
4542   {
4543     int n_operand_address_reloads = 0,
4544 	n_opaddr_addr_reloads = 0;
4545     int reloadnum_for_operand_address_reload = -1,
4546 	reloadnum_for_opaddr_addr_reload = -1;
4547 
4548     for (i = 0; i < n_reloads; i++)
4549       {
4550 	switch (rld[i].when_needed)
4551 	  {
4552 	  case RELOAD_FOR_OPADDR_ADDR:
4553 	    n_opaddr_addr_reloads++;
4554 	    reloadnum_for_opaddr_addr_reload = i;
4555 	    break;
4556 	  case RELOAD_FOR_OPERAND_ADDRESS:
4557 	    n_operand_address_reloads++;
4558 	    reloadnum_for_operand_address_reload = i;
4559 	    break;
4560 	  default:
4561 	    break;
4562 	  }
4563       }
4564 
4565     if (n_operand_address_reloads == 1
4566 	&& n_opaddr_addr_reloads == 1
4567 	&& rld[reloadnum_for_opaddr_addr_reload].opnum
4568 	   != rld[reloadnum_for_operand_address_reload].opnum)
4569       {
4570 	rld[reloadnum_for_opaddr_addr_reload].when_needed
4571 	  = RELOAD_FOR_OPERAND_ADDRESS;
4572       }
4573   }
4574 #endif
4575 
4576   /* See if we have any reloads that are now allowed to be merged
4577      because we've changed when the reload is needed to
4578      RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS.  Only
4579      check for the most common cases.  */
4580 
4581   for (i = 0; i < n_reloads; i++)
4582     if (rld[i].in != 0 && rld[i].out == 0
4583 	&& (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4584 	    || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4585 	    || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4586       for (j = 0; j < n_reloads; j++)
4587 	if (i != j && rld[j].in != 0 && rld[j].out == 0
4588 	    && rld[j].when_needed == rld[i].when_needed
4589 	    && MATCHES (rld[i].in, rld[j].in)
4590 	    && rld[i].rclass == rld[j].rclass
4591 	    && !rld[i].nocombine && !rld[j].nocombine
4592 	    && rld[i].reg_rtx == rld[j].reg_rtx)
4593 	  {
4594 	    rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4595 	    transfer_replacements (i, j);
4596 	    rld[j].in = 0;
4597 	  }
4598 
4599   /* If we made any reloads for addresses, see if they violate a
4600      "no input reloads" requirement for this insn.  But loads that we
4601      do after the insn (such as for output addresses) are fine.  */
4602   if (HAVE_cc0 && no_input_reloads)
4603     for (i = 0; i < n_reloads; i++)
4604       gcc_assert (rld[i].in == 0
4605 		  || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4606 		  || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4607 
4608   /* Compute reload_mode and reload_nregs.  */
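  /* The reload register must be wide enough to hold both the incoming and
     the outgoing value, so use the wider of inmode and outmode; VOIDmode
     counts as narrower than any real mode here.  */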
4609   for (i = 0; i < n_reloads; i++)
4610     {
4611       rld[i].mode = rld[i].inmode;
4612       if (rld[i].mode == VOIDmode
4613 	  || partial_subreg_p (rld[i].mode, rld[i].outmode))
4614 	rld[i].mode = rld[i].outmode;
4615 
4616       rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4617     }
4618 
4619   /* Special case a simple move with an input reload and a
4620      destination of a hard reg: if the hard reg is ok, use it.  */
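  /* For example, in (set (reg:SI 0) (reg:SI 123)) where pseudo 123 needs
     an input reload, hard register 0 can itself serve as the reload
     register, provided it is in the reload's class, is valid in the
     reload's mode and is not an elimination target.  */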
4621   for (i = 0; i < n_reloads; i++)
4622     if (rld[i].when_needed == RELOAD_FOR_INPUT
4623 	&& GET_CODE (PATTERN (insn)) == SET
4624 	&& REG_P (SET_DEST (PATTERN (insn)))
4625 	&& (SET_SRC (PATTERN (insn)) == rld[i].in
4626 	    || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4627 	&& !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4628       {
4629 	rtx dest = SET_DEST (PATTERN (insn));
4630 	unsigned int regno = REGNO (dest);
4631 
4632 	if (regno < FIRST_PSEUDO_REGISTER
4633 	    && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4634 	    && targetm.hard_regno_mode_ok (regno, rld[i].mode))
4635 	  {
4636 	    int nr = hard_regno_nregs (regno, rld[i].mode);
4637 	    int ok = 1, nri;
4638 
4639 	    for (nri = 1; nri < nr; nri ++)
4640 	      if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4641 		{
4642 		  ok = 0;
4643 		  break;
4644 		}
4645 
4646 	    if (ok)
4647 	      rld[i].reg_rtx = dest;
4648 	  }
4649       }
4650 
4651   return retval;
4652 }
4653 
4654 /* Return true if alternative number ALTNUM in constraint-string
4655    CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4656    MEM gives the reference if its address hasn't been fully reloaded,
4657    otherwise it is NULL.  */
4658 
4659 static bool
4660 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4661 				   const char *constraint, int altnum)
4662 {
4663   int c;
4664 
4665   /* Skip alternatives before the one requested.  */
4666   while (altnum > 0)
4667     {
4668       while (*constraint++ != ',')
4669 	;
4670       altnum--;
4671     }
4672   /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4673      If one of them is present, this alternative accepts the result of
4674      passing a constant-pool reference through find_reloads_toplev.
4675 
4676      The same is true of extra memory constraints if the address
4677      was reloaded into a register.  However, the target may elect
4678      to disallow the original constant address, forcing it to be
4679      reloaded into a register instead.  */
4680   for (; (c = *constraint) && c != ',' && c != '#';
4681        constraint += CONSTRAINT_LEN (c, constraint))
4682     {
4683       enum constraint_num cn = lookup_constraint (constraint);
4684       if (insn_extra_memory_constraint (cn)
4685 	  && (mem == NULL || constraint_satisfied_p (mem, cn)))
4686 	return true;
4687     }
4688   return false;
4689 }
4690 
4691 /* Scan X for memory references and scan the addresses for reloading.
4692    Also checks for references to "constant" regs that we want to eliminate
4693    and replaces them with the values they stand for.
4694    We may alter X destructively if it contains a reference to such.
4695    If X is just a constant reg, we return the equivalent value
4696    instead of X.
4697 
4698    IND_LEVELS says how many levels of indirect addressing this machine
4699    supports.
4700 
4701    OPNUM and TYPE identify the purpose of the reload.
4702 
4703    IS_SET_DEST is true if X is the destination of a SET, which is not
4704    appropriate to be replaced by a constant.
4705 
4706    INSN, if nonzero, is the insn in which we do the reload.  It is used
4707    to determine if we may generate output reloads, and where to put USEs
4708    for pseudos that we have to replace with stack slots.
4709 
4710    ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4711    result of find_reloads_address.  */
4712 
4713 static rtx
4714 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4715 		     int ind_levels, int is_set_dest, rtx_insn *insn,
4716 		     int *address_reloaded)
4717 {
4718   RTX_CODE code = GET_CODE (x);
4719 
4720   const char *fmt = GET_RTX_FORMAT (code);
4721   int i;
4722   int copied;
4723 
4724   if (code == REG)
4725     {
4726       /* This code is duplicated for speed in find_reloads.  */
4727       int regno = REGNO (x);
4728       if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4729 	x = reg_equiv_constant (regno);
4730 #if 0
4731       /*  This creates (subreg (mem...)) which would cause an unnecessary
4732 	  reload of the mem.  */
4733       else if (reg_equiv_mem (regno) != 0)
4734 	x = reg_equiv_mem (regno);
4735 #endif
4736       else if (reg_equiv_memory_loc (regno)
4737 	       && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4738 	{
4739 	  rtx mem = make_memloc (x, regno);
4740 	  if (reg_equiv_address (regno)
4741 	      || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4742 	    {
4743 	      /* If this is not a toplevel operand, find_reloads doesn't see
4744 		 this substitution.  We have to emit a USE of the pseudo so
4745 		 that delete_output_reload can see it.  */
4746 	      if (replace_reloads && recog_data.operand[opnum] != x)
4747 		/* We mark the USE with QImode so that we recognize it
4748 		   as one that can be safely deleted at the end of
4749 		   reload.  */
4750 		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4751 			  QImode);
4752 	      x = mem;
4753 	      i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4754 					opnum, type, ind_levels, insn);
4755 	      if (!rtx_equal_p (x, mem))
4756 		push_reg_equiv_alt_mem (regno, x);
4757 	      if (address_reloaded)
4758 		*address_reloaded = i;
4759 	    }
4760 	}
4761       return x;
4762     }
4763   if (code == MEM)
4764     {
4765       rtx tem = x;
4766 
4767       i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4768 				opnum, type, ind_levels, insn);
4769       if (address_reloaded)
4770 	*address_reloaded = i;
4771 
4772       return tem;
4773     }
4774 
4775   if (code == SUBREG && REG_P (SUBREG_REG (x)))
4776     {
4777       /* Check for SUBREG containing a REG that's equivalent to a
4778 	 constant.  If the constant has a known value, truncate it
4779 	 right now.  Similarly if we are extracting a single-word of a
4780 	 right now.  Similarly if we are extracting a single word of a
4781 	 to be substituted normally.  push_reload will strip the
4782 	 subreg later.  The constant must not be VOIDmode, because we
4783 	 will lose the mode of the register (this should never happen
4784 	 because one of the cases above should handle it).  */
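      /* For example, on a little-endian target, (subreg:QI (reg:SI N) 0)
	 where pseudo N is equivalent to (const_int 300) is replaced here
	 by (const_int 44).  */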
4785 
4786       int regno = REGNO (SUBREG_REG (x));
4787       rtx tem;
4788 
4789       if (regno >= FIRST_PSEUDO_REGISTER
4790 	  && reg_renumber[regno] < 0
4791 	  && reg_equiv_constant (regno) != 0)
4792 	{
4793 	  tem =
4794 	    simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4795 				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4796 	  gcc_assert (tem);
4797 	  if (CONSTANT_P (tem)
4798 	      && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4799 	    {
4800 	      tem = force_const_mem (GET_MODE (x), tem);
4801 	      i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4802 					&XEXP (tem, 0), opnum, type,
4803 					ind_levels, insn);
4804 	      if (address_reloaded)
4805 		*address_reloaded = i;
4806 	    }
4807 	  return tem;
4808 	}
4809 
4810       /* If the subreg contains a reg that will be converted to a mem,
4811 	 attempt to convert the whole subreg to a (narrower or wider)
4812 	 memory reference instead.  If this succeeds, we're done --
4813 	 otherwise fall through to check whether the inner reg still
4814 	 needs address reloads anyway.  */
4815 
4816       if (regno >= FIRST_PSEUDO_REGISTER
4817 	  && reg_equiv_memory_loc (regno) != 0)
4818 	{
4819 	  tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4820 					     insn, address_reloaded);
4821 	  if (tem)
4822 	    return tem;
4823 	}
4824     }
4825 
4826   for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4827     {
4828       if (fmt[i] == 'e')
4829 	{
4830 	  rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4831 					      ind_levels, is_set_dest, insn,
4832 					      address_reloaded);
4833 	  /* If we have replaced a reg with its equivalent memory loc -
4834 	     that can still be handled here e.g. if it's in a paradoxical
4835 	     subreg - we must make the change in a copy, rather than using
4836 	     a destructive change.  This way, find_reloads can still elect
4837 	     not to do the change.  */
4838 	  if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4839 	    {
4840 	      x = shallow_copy_rtx (x);
4841 	      copied = 1;
4842 	    }
4843 	  XEXP (x, i) = new_part;
4844 	}
4845     }
4846   return x;
4847 }
4848 
4849 /* Return a mem ref for the memory equivalent of reg REGNO.
4850    This mem ref is not shared with anything.  */
4851 
4852 static rtx
4853 make_memloc (rtx ad, int regno)
4854 {
4855   /* We must rerun eliminate_regs, in case the elimination
4856      offsets have changed.  */
4857   rtx tem
4858     = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4859 	    0);
4860 
4861   /* If TEM might contain a pseudo, we must copy it to avoid
4862      modifying it when we do the substitution for the reload.  */
4863   if (rtx_varies_p (tem, 0))
4864     tem = copy_rtx (tem);
4865 
4866   tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4867   tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4868 
4869   /* Copy the result if it's still the same as the equivalence, to avoid
4870      modifying it when we do the substitution for the reload.  */
4871   if (tem == reg_equiv_memory_loc (regno))
4872     tem = copy_rtx (tem);
4873   return tem;
4874 }
4875 
4876 /* Returns true if AD could be turned into a valid memory reference
4877    to mode MODE in address space AS by reloading the part pointed to
4878    by PART into a register.  */
4879 
4880 static int
4881 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4882 				   addr_space_t as, rtx *part)
4883 {
4884   int retv;
4885   rtx tem = *part;
4886   rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4887 
4888   *part = reg;
4889   retv = memory_address_addr_space_p (mode, ad, as);
4890   *part = tem;
4891 
4892   return retv;
4893 }
4894 
4895 /* Record all reloads needed for handling memory address AD
4896    which appears in *LOC in a memory reference to mode MODE
4897    which itself is found in location *MEMREFLOC.
4898    Note that we take shortcuts assuming that no multi-reg machine mode
4899    occurs as part of an address.
4900 
4901    OPNUM and TYPE specify the purpose of this reload.
4902 
4903    IND_LEVELS says how many levels of indirect addressing this machine
4904    supports.
4905 
4906    INSN, if nonzero, is the insn in which we do the reload.  It is used
4907    to determine if we may generate output reloads, and where to put USEs
4908    for pseudos that we have to replace with stack slots.
4909 
4910    Value is one if this address is reloaded or replaced as a whole; it is
4911    zero if the top level of this address was not reloaded or replaced, and
4912    it is -1 if it may or may not have been reloaded or replaced.
4913 
4914    Note that there is no verification that the address will be valid after
4915    this routine does its work.  Instead, we rely on the fact that the address
4916    was valid when reload started.  So we need only undo things that reload
4917    could have broken.  These are wrong register types, pseudos not allocated
4918    to a hard register, and frame pointer elimination.  */
4919 
4920 static int
4921 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4922 		      rtx *loc, int opnum, enum reload_type type,
4923 		      int ind_levels, rtx_insn *insn)
4924 {
4925   addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4926 			     : ADDR_SPACE_GENERIC;
4927   int regno;
4928   int removed_and = 0;
4929   int op_index;
4930   rtx tem;
4931 
4932   /* If the address is a register, see if it is a legitimate address and
4933      reload if not.  We first handle the cases where we need not reload
4934      or where we must reload in a non-standard way.  */
4935 
4936   if (REG_P (ad))
4937     {
4938       regno = REGNO (ad);
4939 
4940       if (reg_equiv_constant (regno) != 0)
4941 	{
4942 	  find_reloads_address_part (reg_equiv_constant (regno), loc,
4943 				     base_reg_class (mode, as, MEM, SCRATCH),
4944 				     GET_MODE (ad), opnum, type, ind_levels);
4945 	  return 1;
4946 	}
4947 
4948       tem = reg_equiv_memory_loc (regno);
4949       if (tem != 0)
4950 	{
4951 	  if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4952 	    {
4953 	      tem = make_memloc (ad, regno);
4954 	      if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4955 							XEXP (tem, 0),
4956 							MEM_ADDR_SPACE (tem)))
4957 		{
4958 		  rtx orig = tem;
4959 
4960 		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4961 					&XEXP (tem, 0), opnum,
4962 					ADDR_TYPE (type), ind_levels, insn);
4963 		  if (!rtx_equal_p (tem, orig))
4964 		    push_reg_equiv_alt_mem (regno, tem);
4965 		}
4966 	      /* We can avoid a reload if the register's equivalent memory
4967 		 expression is valid as an indirect memory address.
4968 		 But not all addresses are valid in a mem used as an indirect
4969 		 address: only reg or reg+constant.  */
4970 
4971 	      if (ind_levels > 0
4972 		  && strict_memory_address_addr_space_p (mode, tem, as)
4973 		  && (REG_P (XEXP (tem, 0))
4974 		      || (GET_CODE (XEXP (tem, 0)) == PLUS
4975 			  && REG_P (XEXP (XEXP (tem, 0), 0))
4976 			  && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4977 		{
4978 		  /* TEM is not the same as what we'll be replacing the
4979 		     pseudo with after reload, so put a USE in front of INSN
4980 		     in the final reload pass.  */
4981 		  if (replace_reloads
4982 		      && num_not_at_initial_offset
4983 		      && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4984 		    {
4985 		      *loc = tem;
4986 		      /* We mark the USE with QImode so that we
4987 			 recognize it as one that can be safely
4988 			 deleted at the end of reload.  */
4989 		      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4990 						  insn), QImode);
4991 
4992 		      /* This doesn't really count as replacing the address
4993 			 as a whole, since it is still a memory access.  */
4994 		    }
4995 		  return 0;
4996 		}
4997 	      ad = tem;
4998 	    }
4999 	}
5000 
5001       /* The only remaining case where we can avoid a reload is if this is a
5002 	 hard register that is valid as a base register and which is not the
5003 	 subject of a CLOBBER in this insn.  */
5004 
5005       else if (regno < FIRST_PSEUDO_REGISTER
5006 	       && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
5007 	       && ! regno_clobbered_p (regno, this_insn, mode, 0))
5008 	return 0;
5009 
5010       /* If we do not have one of the cases above, we must do the reload.  */
5011       push_reload (ad, NULL_RTX, loc, (rtx*) 0,
5012 		   base_reg_class (mode, as, MEM, SCRATCH),
5013 		   GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
5014       return 1;
5015     }
5016 
5017   if (strict_memory_address_addr_space_p (mode, ad, as))
5018     {
5019       /* The address appears valid, so reloads are not needed.
5020 	 But the address may contain an eliminable register.
5021 	 This can happen because a machine with indirect addressing
5022 	 may consider a pseudo register by itself a valid address even when
5023 	 it has failed to get a hard reg.
5024 	 So do a tree-walk to find and eliminate all such regs.  */
5025 
5026       /* But first quickly dispose of a common case.  */
5027       if (GET_CODE (ad) == PLUS
5028 	  && CONST_INT_P (XEXP (ad, 1))
5029 	  && REG_P (XEXP (ad, 0))
5030 	  && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
5031 	return 0;
5032 
5033       subst_reg_equivs_changed = 0;
5034       *loc = subst_reg_equivs (ad, insn);
5035 
5036       if (! subst_reg_equivs_changed)
5037 	return 0;
5038 
5039       /* Check result for validity after substitution.  */
5040       if (strict_memory_address_addr_space_p (mode, ad, as))
5041 	return 0;
5042     }
5043 
5044 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5045   do
5046     {
5047       if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5048 	{
5049 	  LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5050 				     ind_levels, win);
5051 	}
5052       break;
5053     win:
5054       *memrefloc = copy_rtx (*memrefloc);
5055       XEXP (*memrefloc, 0) = ad;
5056       move_replacements (&ad, &XEXP (*memrefloc, 0));
5057       return -1;
5058     }
5059   while (0);
5060 #endif
5061 
5062   /* The address is not valid.  We have to figure out why.  First see if
5063      we have an outer AND and remove it if so.  Then analyze what's inside.  */
5064 
5065   if (GET_CODE (ad) == AND)
5066     {
5067       removed_and = 1;
5068       loc = &XEXP (ad, 0);
5069       ad = *loc;
5070     }
5071 
5072   /* One possibility for why the address is invalid is that it is itself
5073      a MEM.  This can happen when the frame pointer is being eliminated, a
5074      pseudo is not allocated to a hard register, and the offset between the
5075      frame and stack pointers is not its initial value.  In that case the
5076      pseudo will have been replaced by a MEM referring to the
5077      stack pointer.  */
5078   if (MEM_P (ad))
5079     {
5080       /* First ensure that the address in this MEM is valid.  Then, unless
5081 	 indirect addresses are valid, reload the MEM into a register.  */
5082       tem = ad;
5083       find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5084 			    opnum, ADDR_TYPE (type),
5085 			    ind_levels == 0 ? 0 : ind_levels - 1, insn);
5086 
5087       /* If tem was changed, then we must create a new memory reference to
5088 	 hold it and store it back into memrefloc.  */
5089       if (tem != ad && memrefloc)
5090 	{
5091 	  *memrefloc = copy_rtx (*memrefloc);
5092 	  copy_replacements (tem, XEXP (*memrefloc, 0));
5093 	  loc = &XEXP (*memrefloc, 0);
5094 	  if (removed_and)
5095 	    loc = &XEXP (*loc, 0);
5096 	}
5097 
5098       /* Check cases similar to the indirect-address cases above, except
5099 	 that we can allow pseudos and a MEM since they should have been
5100 	 taken care of above.  */
5101 
5102       if (ind_levels == 0
5103 	  || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5104 	  || MEM_P (XEXP (tem, 0))
5105 	  || ! (REG_P (XEXP (tem, 0))
5106 		|| (GET_CODE (XEXP (tem, 0)) == PLUS
5107 		    && REG_P (XEXP (XEXP (tem, 0), 0))
5108 		    && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5109 	{
5110 	  /* Must use TEM here, not AD, since it is the one that will
5111 	     have any subexpressions reloaded, if needed.  */
5112 	  push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5113 		       base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5114 		       VOIDmode, 0,
5115 		       0, opnum, type);
5116 	  return ! removed_and;
5117 	}
5118       else
5119 	return 0;
5120     }
5121 
5122   /* If we have address of a stack slot but it's not valid because the
5123      displacement is too large, compute the sum in a register.
5124      Handle all base registers here, not just fp/ap/sp, because on some
5125      targets (namely SH) we can also get too large displacements from
5126      big-endian corrections.  */
5127   else if (GET_CODE (ad) == PLUS
5128 	   && REG_P (XEXP (ad, 0))
5129 	   && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5130 	   && CONST_INT_P (XEXP (ad, 1))
5131 	   && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5132 				    CONST_INT)
5133 	       /* Similarly, if we were to reload the base register and the
5134 		  mem+offset address is still invalid, then we want to reload
5135 		  the whole address, not just the base register.  */
5136 	       || ! maybe_memory_address_addr_space_p
5137 		     (mode, ad, as, &(XEXP (ad, 0)))))
5138 
5139     {
5140       /* Unshare the MEM rtx so we can safely alter it.  */
5141       if (memrefloc)
5142 	{
5143 	  *memrefloc = copy_rtx (*memrefloc);
5144 	  loc = &XEXP (*memrefloc, 0);
5145 	  if (removed_and)
5146 	    loc = &XEXP (*loc, 0);
5147 	}
5148 
5149       if (double_reg_address_ok[mode]
5150 	  && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5151 				  PLUS, CONST_INT))
5152 	{
5153 	  /* Unshare the sum as well.  */
5154 	  *loc = ad = copy_rtx (ad);
5155 
5156 	  /* Reload the displacement into an index reg.
5157 	     We assume the frame pointer or arg pointer is a base reg.  */
5158 	  find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5159 				     INDEX_REG_CLASS, GET_MODE (ad), opnum,
5160 				     type, ind_levels);
5161 	  return 0;
5162 	}
5163       else
5164 	{
5165 	  /* If the sum of two regs is not necessarily valid,
5166 	     reload the sum into a base reg.
5167 	     That will at least work.  */
5168 	  find_reloads_address_part (ad, loc,
5169 				     base_reg_class (mode, as, MEM, SCRATCH),
5170 				     GET_MODE (ad), opnum, type, ind_levels);
5171 	}
5172       return ! removed_and;
5173     }
5174 
5175   /* If we have an indexed stack slot, there are three possible reasons why
5176      it might be invalid: The index might need to be reloaded, the address
5177      might have been made by frame pointer elimination and hence have a
5178      constant out of range, or both reasons might apply.
5179 
5180      We can easily check for an index needing reload, but even if that is the
5181      case, we might also have an invalid constant.  To avoid making the
5182      conservative assumption and requiring two reloads, we see if this address
5183      is valid when not interpreted strictly.  If it is, the only problem is
5184      that the index needs a reload and find_reloads_address_1 will take care
5185      of it.
5186 
5187      Handle all base registers here, not just fp/ap/sp, because on some
5188      targets (namely SPARC) we can also get invalid addresses from preventive
5189      subreg big-endian corrections made by find_reloads_toplev.  We
5190      can also get expressions involving LO_SUM (rather than PLUS) from
5191      find_reloads_subreg_address.
5192 
5193      If we decide to do something, it must be that `double_reg_address_ok'
5194      is true.  We generate a reload of the base register + constant and
5195      rework the sum so that the reload register will be added to the index.
5196      This is safe because we know the address isn't shared.
5197 
5198      We check for the base register as both the first and second operand of
5199      the innermost PLUS and/or LO_SUM.  */
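  /* For example, (plus (plus (reg fp) (reg idx)) (const_int 4000)) whose
     constant is out of range can be rewritten below as
     (plus (reg R) (reg idx)), where the new base reload register R is
     loaded with fp + 4000.  */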
5200 
5201   for (op_index = 0; op_index < 2; ++op_index)
5202     {
5203       rtx operand, addend;
5204       enum rtx_code inner_code;
5205 
5206       if (GET_CODE (ad) != PLUS)
5207 	continue;
5208 
5209       inner_code = GET_CODE (XEXP (ad, 0));
5210       if (!(GET_CODE (ad) == PLUS
5211 	    && CONST_INT_P (XEXP (ad, 1))
5212 	    && (inner_code == PLUS || inner_code == LO_SUM)))
5213 	continue;
5214 
5215       operand = XEXP (XEXP (ad, 0), op_index);
5216       if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5217 	continue;
5218 
5219       addend = XEXP (XEXP (ad, 0), 1 - op_index);
5220 
5221       if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5222 				GET_CODE (addend))
5223 	   || operand == frame_pointer_rtx
5224 	   || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5225 	       && operand == hard_frame_pointer_rtx)
5226 	   || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5227 	       && operand == arg_pointer_rtx)
5228 	   || operand == stack_pointer_rtx)
5229 	  && ! maybe_memory_address_addr_space_p
5230 		(mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5231 	{
5232 	  rtx offset_reg;
5233 	  enum reg_class cls;
5234 
5235 	  offset_reg = plus_constant (GET_MODE (ad), operand,
5236 				      INTVAL (XEXP (ad, 1)));
5237 
5238 	  /* Form the adjusted address.  */
5239 	  if (GET_CODE (XEXP (ad, 0)) == PLUS)
5240 	    ad = gen_rtx_PLUS (GET_MODE (ad),
5241 			       op_index == 0 ? offset_reg : addend,
5242 			       op_index == 0 ? addend : offset_reg);
5243 	  else
5244 	    ad = gen_rtx_LO_SUM (GET_MODE (ad),
5245 				 op_index == 0 ? offset_reg : addend,
5246 				 op_index == 0 ? addend : offset_reg);
5247 	  *loc = ad;
5248 
5249 	  cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5250 	  find_reloads_address_part (XEXP (ad, op_index),
5251 				     &XEXP (ad, op_index), cls,
5252 				     GET_MODE (ad), opnum, type, ind_levels);
5253 	  find_reloads_address_1 (mode, as,
5254 				  XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5255 				  GET_CODE (XEXP (ad, op_index)),
5256 				  &XEXP (ad, 1 - op_index), opnum,
5257 				  type, 0, insn);
5258 
5259 	  return 0;
5260 	}
5261     }
5262 
5263   /* See if address becomes valid when an eliminable register
5264      in a sum is replaced.  */
5265 
5266   tem = ad;
5267   if (GET_CODE (ad) == PLUS)
5268     tem = subst_indexed_address (ad);
5269   if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5270     {
5271       /* Ok, we win that way.  Replace any additional eliminable
5272 	 registers.  */
5273 
5274       subst_reg_equivs_changed = 0;
5275       tem = subst_reg_equivs (tem, insn);
5276 
5277       /* Make sure that didn't make the address invalid again.  */
5278 
5279       if (! subst_reg_equivs_changed
5280 	  || strict_memory_address_addr_space_p (mode, tem, as))
5281 	{
5282 	  *loc = tem;
5283 	  return 0;
5284 	}
5285     }
5286 
5287   /* If constants aren't valid addresses, reload the constant address
5288      into a register.  */
5289   if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5290     {
5291       machine_mode address_mode = GET_MODE (ad);
5292       if (address_mode == VOIDmode)
5293 	address_mode = targetm.addr_space.address_mode (as);
5294 
5295       /* If AD is an address in the constant pool, the MEM rtx may be shared.
5296 	 Unshare it so we can safely alter it.  */
5297       if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5298 	  && CONSTANT_POOL_ADDRESS_P (ad))
5299 	{
5300 	  *memrefloc = copy_rtx (*memrefloc);
5301 	  loc = &XEXP (*memrefloc, 0);
5302 	  if (removed_and)
5303 	    loc = &XEXP (*loc, 0);
5304 	}
5305 
5306       find_reloads_address_part (ad, loc,
5307 				 base_reg_class (mode, as, MEM, SCRATCH),
5308 				 address_mode, opnum, type, ind_levels);
5309       return ! removed_and;
5310     }
5311 
5312   return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5313 				 opnum, type, ind_levels, insn);
5314 }
5315 
5316 /* Find all pseudo regs appearing in AD
5317    that are eliminable in favor of equivalent values
5318    and do not have hard regs; replace them by their equivalents.
5319    INSN, if nonzero, is the insn in which we do the reload.  We put USEs in
5320    front of it for pseudos that we have to replace with stack slots.  */
5321 
5322 static rtx
5323 subst_reg_equivs (rtx ad, rtx_insn *insn)
5324 {
5325   RTX_CODE code = GET_CODE (ad);
5326   int i;
5327   const char *fmt;
5328 
5329   switch (code)
5330     {
5331     case HIGH:
5332     case CONST:
5333     CASE_CONST_ANY:
5334     case SYMBOL_REF:
5335     case LABEL_REF:
5336     case PC:
5337     case CC0:
5338       return ad;
5339 
5340     case REG:
5341       {
5342 	int regno = REGNO (ad);
5343 
5344 	if (reg_equiv_constant (regno) != 0)
5345 	  {
5346 	    subst_reg_equivs_changed = 1;
5347 	    return reg_equiv_constant (regno);
5348 	  }
5349 	if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5350 	  {
5351 	    rtx mem = make_memloc (ad, regno);
5352 	    if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5353 	      {
5354 		subst_reg_equivs_changed = 1;
5355 		/* We mark the USE with QImode so that we recognize it
5356 		   as one that can be safely deleted at the end of
5357 		   reload.  */
5358 		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5359 			  QImode);
5360 		return mem;
5361 	      }
5362 	  }
5363       }
5364       return ad;
5365 
5366     case PLUS:
5367       /* Quickly dispose of a common case.  */
5368       if (XEXP (ad, 0) == frame_pointer_rtx
5369 	  && CONST_INT_P (XEXP (ad, 1)))
5370 	return ad;
5371       break;
5372 
5373     default:
5374       break;
5375     }
5376 
5377   fmt = GET_RTX_FORMAT (code);
5378   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5379     if (fmt[i] == 'e')
5380       XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5381   return ad;
5382 }
5383 
5384 /* Compute the sum of X and Y, making canonicalizations assumed in an
5385    address, namely: sum constant integers, surround the sum of two
5386    constants with a CONST, put the constant as the second operand, and
5387    group the constant on the outermost sum.
5388 
5389    This routine assumes both inputs are already in canonical form.  */
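/* For example, form_sum (Pmode, (plus (reg A) (const_int 4)), (const_int 8))
   yields (plus (reg A) (const_int 12)), while summing (symbol_ref "x") with
   (const_int 8) yields (const (plus (symbol_ref "x") (const_int 8))).  */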
5390 
5391 rtx
5392 form_sum (machine_mode mode, rtx x, rtx y)
5393 {
5394   rtx tem;
5395 
5396   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5397   gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5398 
5399   if (CONST_INT_P (x))
5400     return plus_constant (mode, y, INTVAL (x));
5401   else if (CONST_INT_P (y))
5402     return plus_constant (mode, x, INTVAL (y));
5403   else if (CONSTANT_P (x))
5404     tem = x, x = y, y = tem;
5405 
5406   if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5407     return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5408 
5409   /* Note that if the operands of Y are specified in the opposite
5410      order in the recursive calls below, infinite recursion will occur.  */
5411   if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5412     return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5413 
5414   /* If both constant, encapsulate sum.  Otherwise, just form sum.  A
5415      constant will have been placed second.  */
5416   if (CONSTANT_P (x) && CONSTANT_P (y))
5417     {
5418       if (GET_CODE (x) == CONST)
5419 	x = XEXP (x, 0);
5420       if (GET_CODE (y) == CONST)
5421 	y = XEXP (y, 0);
5422 
5423       return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5424     }
5425 
5426   return gen_rtx_PLUS (mode, x, y);
5427 }
5428 
5429 /* If ADDR is a sum containing a pseudo register that should be
5430    replaced with a constant (from reg_equiv_constant),
5431    return the result of doing so, and also apply the associative
5432    law so that the result is more likely to be a valid address.
5433    (But it is not guaranteed to be one.)
5434 
5435    Note that at most one register is replaced, even if more are
5436    replaceable.  Also, we try to put the result into a canonical form
5437    so it is more likely to be a valid address.
5438 
5439    In all other cases, return ADDR.  */
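/* For example, if pseudo 117 did not get a hard register and is equivalent
   to (symbol_ref "a"), then (plus (reg 117) (const_int 4)) becomes
   (const (plus (symbol_ref "a") (const_int 4))).  */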
5440 
5441 static rtx
5442 subst_indexed_address (rtx addr)
5443 {
5444   rtx op0 = 0, op1 = 0, op2 = 0;
5445   rtx tem;
5446   int regno;
5447 
5448   if (GET_CODE (addr) == PLUS)
5449     {
5450       /* Try to find a register to replace.  */
5451       op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5452       if (REG_P (op0)
5453 	  && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5454 	  && reg_renumber[regno] < 0
5455 	  && reg_equiv_constant (regno) != 0)
5456 	op0 = reg_equiv_constant (regno);
5457       else if (REG_P (op1)
5458 	       && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5459 	       && reg_renumber[regno] < 0
5460 	       && reg_equiv_constant (regno) != 0)
5461 	op1 = reg_equiv_constant (regno);
5462       else if (GET_CODE (op0) == PLUS
5463 	       && (tem = subst_indexed_address (op0)) != op0)
5464 	op0 = tem;
5465       else if (GET_CODE (op1) == PLUS
5466 	       && (tem = subst_indexed_address (op1)) != op1)
5467 	op1 = tem;
5468       else
5469 	return addr;
5470 
5471       /* Pick out up to three things to add.  */
5472       if (GET_CODE (op1) == PLUS)
5473 	op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5474       else if (GET_CODE (op0) == PLUS)
5475 	op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5476 
5477       /* Compute the sum.  */
5478       if (op2 != 0)
5479 	op1 = form_sum (GET_MODE (addr), op1, op2);
5480       if (op1 != 0)
5481 	op0 = form_sum (GET_MODE (addr), op0, op1);
5482 
5483       return op0;
5484     }
5485   return addr;
5486 }
5487 
5488 /* Update the REG_INC notes for an insn.  It updates all REG_INC
5489    notes for the instruction which refer to REGNO so that they
5490    refer to the reload number.
5491 
5492    INSN is the insn for which any REG_INC notes need updating.
5493 
5494    REGNO is the register number which has been reloaded.
5495 
5496    RELOADNUM is the reload number.  */
5497 
5498 static void
5499 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5500 		       int reloadnum ATTRIBUTE_UNUSED)
5501 {
5502   if (!AUTO_INC_DEC)
5503     return;
5504 
5505   for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5506     if (REG_NOTE_KIND (link) == REG_INC
5507         && (int) REGNO (XEXP (link, 0)) == regno)
5508       push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5509 }
5510 
5511 /* Record the pseudo registers we must reload into hard registers in a
5512    subexpression of a would-be memory address, X referring to a value
5513    in mode MODE.  (This function is not called if the address we find
5514    is strictly valid.)
5515 
5516    CONTEXT = 1 means we are considering regs as index regs,
5517    = 0 means we are considering them as base regs.
5518    OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5519    or an autoinc code.
5520    If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5521    is the code of the index part of the address.  Otherwise, pass SCRATCH
5522    for this argument.
5523    OPNUM and TYPE specify the purpose of any reloads made.
5524 
5525    IND_LEVELS says how many levels of indirect addressing are
5526    supported at this point in the address.
5527 
5528    INSN, if nonzero, is the insn in which we do the reload.  It is used
5529    to determine if we may generate output reloads.
5530 
5531    We return nonzero if X, as a whole, is reloaded or replaced.  */
5532 
5533 /* Note that we take shortcuts assuming that no multi-reg machine mode
5534    occurs as part of an address.
5535    Also, this is not fully machine-customizable; it works for machines
5536    such as VAXen and 68000's and 32000's, but other possible machines
5537    could have addressing modes that this does not handle right.
5538    If you add push_reload calls here, you need to make sure gen_reload
5539    handles those cases gracefully.  */
5540 
5541 static int
5542 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5543 			rtx x, int context,
5544 			enum rtx_code outer_code, enum rtx_code index_code,
5545 			rtx *loc, int opnum, enum reload_type type,
5546 			int ind_levels, rtx_insn *insn)
5547 {
5548 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX)	\
5549   ((CONTEXT) == 0							\
5550    ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX)		\
5551    : REGNO_OK_FOR_INDEX_P (REGNO))
5552 
5553   enum reg_class context_reg_class;
5554   RTX_CODE code = GET_CODE (x);
5555   bool reloaded_inner_of_autoinc = false;
5556 
5557   if (context == 1)
5558     context_reg_class = INDEX_REG_CLASS;
5559   else
5560     context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5561 
5562   switch (code)
5563     {
5564     case PLUS:
5565       {
5566 	rtx orig_op0 = XEXP (x, 0);
5567 	rtx orig_op1 = XEXP (x, 1);
5568 	RTX_CODE code0 = GET_CODE (orig_op0);
5569 	RTX_CODE code1 = GET_CODE (orig_op1);
5570 	rtx op0 = orig_op0;
5571 	rtx op1 = orig_op1;
5572 
5573 	if (GET_CODE (op0) == SUBREG)
5574 	  {
5575 	    op0 = SUBREG_REG (op0);
5576 	    code0 = GET_CODE (op0);
5577 	    if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5578 	      op0 = gen_rtx_REG (word_mode,
5579 				 (REGNO (op0) +
5580 				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5581 						       GET_MODE (SUBREG_REG (orig_op0)),
5582 						       SUBREG_BYTE (orig_op0),
5583 						       GET_MODE (orig_op0))));
5584 	  }
5585 
5586 	if (GET_CODE (op1) == SUBREG)
5587 	  {
5588 	    op1 = SUBREG_REG (op1);
5589 	    code1 = GET_CODE (op1);
5590 	    if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5591 	      /* ??? Why is this given op1's mode and above for
5592 		 ??? op0 SUBREGs we use word_mode?  */
5593 	      op1 = gen_rtx_REG (GET_MODE (op1),
5594 				 (REGNO (op1) +
5595 				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5596 						       GET_MODE (SUBREG_REG (orig_op1)),
5597 						       SUBREG_BYTE (orig_op1),
5598 						       GET_MODE (orig_op1))));
5599 	  }
5600 	/* Plus in the index register may be created only as a result of
5601 	   register rematerialization for expression like &localvar*4.  Reload it.
5602 	   register rematerialization for an expression like &localvar*4.  Reload it.
5603 	   but it is probably not worthwhile to do so.  */
5604 	if (context == 1)
5605 	  {
5606 	    find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5607 				  opnum, ADDR_TYPE (type), ind_levels, insn);
5608 	    push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5609 			 context_reg_class,
5610 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5611 	    return 1;
5612 	  }
5613 
5614 	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5615 	    || code0 == ZERO_EXTEND || code1 == MEM)
5616 	  {
5617 	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5618 				    &XEXP (x, 0), opnum, type, ind_levels,
5619 				    insn);
5620 	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5621 				    &XEXP (x, 1), opnum, type, ind_levels,
5622 				    insn);
5623 	  }
5624 
5625 	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5626 		 || code1 == ZERO_EXTEND || code0 == MEM)
5627 	  {
5628 	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5629 				    &XEXP (x, 0), opnum, type, ind_levels,
5630 				    insn);
5631 	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5632 				    &XEXP (x, 1), opnum, type, ind_levels,
5633 				    insn);
5634 	  }
5635 
5636 	else if (code0 == CONST_INT || code0 == CONST
5637 		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5638 	  find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5639 				  &XEXP (x, 1), opnum, type, ind_levels,
5640 				  insn);
5641 
5642 	else if (code1 == CONST_INT || code1 == CONST
5643 		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5644 	  find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5645 				  &XEXP (x, 0), opnum, type, ind_levels,
5646 				  insn);
5647 
5648 	else if (code0 == REG && code1 == REG)
5649 	  {
5650 	    if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5651 		&& regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5652 	      return 0;
5653 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5654 		     && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5655 	      return 0;
5656 	    else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5657 	      find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5658 				      &XEXP (x, 1), opnum, type, ind_levels,
5659 				      insn);
5660 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5661 	      find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5662 				      &XEXP (x, 0), opnum, type, ind_levels,
5663 				      insn);
5664 	    else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5665 	      find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5666 				      &XEXP (x, 0), opnum, type, ind_levels,
5667 				      insn);
5668 	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5669 	      find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5670 				      &XEXP (x, 1), opnum, type, ind_levels,
5671 				      insn);
5672 	    else
5673 	      {
5674 		find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5675 					&XEXP (x, 0), opnum, type, ind_levels,
5676 					insn);
5677 		find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5678 					&XEXP (x, 1), opnum, type, ind_levels,
5679 					insn);
5680 	      }
5681 	  }
5682 
5683 	else if (code0 == REG)
5684 	  {
5685 	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5686 				    &XEXP (x, 0), opnum, type, ind_levels,
5687 				    insn);
5688 	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5689 				    &XEXP (x, 1), opnum, type, ind_levels,
5690 				    insn);
5691 	  }
5692 
5693 	else if (code1 == REG)
5694 	  {
5695 	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5696 				    &XEXP (x, 1), opnum, type, ind_levels,
5697 				    insn);
5698 	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5699 				    &XEXP (x, 0), opnum, type, ind_levels,
5700 				    insn);
5701 	  }
5702       }
5703 
5704       return 0;
5705 
5706     case POST_MODIFY:
5707     case PRE_MODIFY:
5708       {
5709 	rtx op0 = XEXP (x, 0);
5710 	rtx op1 = XEXP (x, 1);
5711 	enum rtx_code index_code;
5712 	int regno;
5713 	int reloadnum;
5714 
5715 	if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5716 	  return 0;
5717 
5718 	/* Currently, we only support {PRE,POST}_MODIFY constructs
5719 	   where a base register is {inc,dec}remented by the contents
5720 	   of another register or by a constant value.  Thus, these
5721 	   operands must match.  */
5722 	gcc_assert (op0 == XEXP (op1, 0));
5723 
5724 	/* Require index register (or constant).  Let's just handle the
5725 	   register case in the meantime... If the target allows
5726 	   auto-modify by a constant then we could try replacing a pseudo
5727 	   register with its equivalent constant where applicable.
5728 
5729 	   We also handle the case where the register was eliminated
5730 	   resulting in a PLUS subexpression.
5731 
5732 	   If we later decide to reload the whole PRE_MODIFY or
5733 	   POST_MODIFY, inc_for_reload might clobber the reload register
5734 	   before reading the index.  The index register might therefore
5735 	   need to live longer than a TYPE reload normally would, so be
5736 	   conservative and class it as RELOAD_OTHER.  */
5737 	if ((REG_P (XEXP (op1, 1))
5738 	     && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5739 	    || GET_CODE (XEXP (op1, 1)) == PLUS)
5740 	  find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5741 				  &XEXP (op1, 1), opnum, RELOAD_OTHER,
5742 				  ind_levels, insn);
5743 
5744 	gcc_assert (REG_P (XEXP (op1, 0)));
5745 
5746 	regno = REGNO (XEXP (op1, 0));
5747 	index_code = GET_CODE (XEXP (op1, 1));
5748 
5749 	/* A register that is incremented cannot be constant!  */
5750 	gcc_assert (regno < FIRST_PSEUDO_REGISTER
5751 		    || reg_equiv_constant (regno) == 0);
5752 
5753 	/* Handle a register that is equivalent to a memory location
5754 	    which cannot be addressed directly.  */
5755 	if (reg_equiv_memory_loc (regno) != 0
5756 	    && (reg_equiv_address (regno) != 0
5757 		|| num_not_at_initial_offset))
5758 	  {
5759 	    rtx tem = make_memloc (XEXP (x, 0), regno);
5760 
5761 	    if (reg_equiv_address (regno)
5762 		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5763 	      {
5764 		rtx orig = tem;
5765 
5766 		/* First reload the memory location's address.
5767 		    We can't use ADDR_TYPE (type) here, because we need to
5768 		    write back the value after reading it, hence we actually
5769 		    need two registers.  */
5770 		find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5771 				      &XEXP (tem, 0), opnum,
5772 				      RELOAD_OTHER,
5773 				      ind_levels, insn);
5774 
5775 		if (!rtx_equal_p (tem, orig))
5776 		  push_reg_equiv_alt_mem (regno, tem);
5777 
5778 		/* Then reload the memory location into a base
5779 		   register.  */
5780 		reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5781 					 &XEXP (op1, 0),
5782 					 base_reg_class (mode, as,
5783 							 code, index_code),
5784 					 GET_MODE (x), GET_MODE (x), 0,
5785 					 0, opnum, RELOAD_OTHER);
5786 
5787 		update_auto_inc_notes (this_insn, regno, reloadnum);
5788 		return 0;
5789 	      }
5790 	  }
5791 
5792 	if (reg_renumber[regno] >= 0)
5793 	  regno = reg_renumber[regno];
5794 
5795 	/* We require a base register here...  */
5796 	if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5797 	  {
5798 	    reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5799 				     &XEXP (op1, 0), &XEXP (x, 0),
5800 				     base_reg_class (mode, as,
5801 						     code, index_code),
5802 				     GET_MODE (x), GET_MODE (x), 0, 0,
5803 				     opnum, RELOAD_OTHER);
5804 
5805 	    update_auto_inc_notes (this_insn, regno, reloadnum);
5806 	    return 0;
5807 	  }
5808       }
5809       return 0;
5810 
5811     case POST_INC:
5812     case POST_DEC:
5813     case PRE_INC:
5814     case PRE_DEC:
5815       if (REG_P (XEXP (x, 0)))
5816 	{
5817 	  int regno = REGNO (XEXP (x, 0));
5818 	  int value = 0;
5819 	  rtx x_orig = x;
5820 
5821 	  /* A register that is incremented cannot be constant!  */
5822 	  gcc_assert (regno < FIRST_PSEUDO_REGISTER
5823 		      || reg_equiv_constant (regno) == 0);
5824 
5825 	  /* Handle a register that is equivalent to a memory location
5826 	     which cannot be addressed directly.  */
5827 	  if (reg_equiv_memory_loc (regno) != 0
5828 	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5829 	    {
5830 	      rtx tem = make_memloc (XEXP (x, 0), regno);
5831 	      if (reg_equiv_address (regno)
5832 		  || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5833 		{
5834 		  rtx orig = tem;
5835 
5836 		  /* First reload the memory location's address.
5837 		     We can't use ADDR_TYPE (type) here, because we need to
5838 		     write back the value after reading it, hence we actually
5839 		     need two registers.  */
5840 		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5841 					&XEXP (tem, 0), opnum, type,
5842 					ind_levels, insn);
5843 		  reloaded_inner_of_autoinc = true;
5844 		  if (!rtx_equal_p (tem, orig))
5845 		    push_reg_equiv_alt_mem (regno, tem);
5846 		  /* Put this inside a new increment-expression.  */
5847 		  x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5848 		  /* Proceed to reload that, as if it contained a register.  */
5849 		}
5850 	    }
5851 
5852 	  /* If we have a hard register that is ok in this incdec context,
5853 	     don't make a reload.  If the register isn't nice enough for
5854 	     autoincdec, we can reload it.  But if an autoincrement of a
5855 	     register that we have just verified as acceptable here is still
5856 	     not "valid" at the outer level, then no autoincrement can be
5857 	     "valid".  If that is true and something made an autoincrement anyway,
5858 	     this must be a special context where one is allowed.
5859 	     (For example, a "push" instruction.)
5860 	     We can't improve this address, so leave it alone.  */
5861 
5862 	  /* Otherwise, reload the autoincrement into a suitable hard reg
5863 	     and record how much to increment by.  */
5864 
5865 	  if (reg_renumber[regno] >= 0)
5866 	    regno = reg_renumber[regno];
5867 	  if (regno >= FIRST_PSEUDO_REGISTER
5868 	      || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5869 				      index_code))
5870 	    {
5871 	      int reloadnum;
5872 
5873 	      /* If we can output the register afterwards, do so; this
5874 		 saves the extra update.
5875 		 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5876 		 CALL_INSN - and it does not set CC0.
5877 		 But don't do this if we cannot directly address the
5878 		 memory location, since this will make it harder to
5879 		 reuse address reloads, and increases register pressure.
5880 		 Also don't do this if we can probably update x directly.  */
5881 	      rtx equiv = (MEM_P (XEXP (x, 0))
5882 			   ? XEXP (x, 0)
5883 			   : reg_equiv_mem (regno));
5884 	      enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5885 	      if (insn && NONJUMP_INSN_P (insn)
5886 #if HAVE_cc0
5887 		  && ! sets_cc0_p (PATTERN (insn))
5888 #endif
5889 		  && (regno < FIRST_PSEUDO_REGISTER
5890 		      || (equiv
5891 			  && memory_operand (equiv, GET_MODE (equiv))
5892 			  && ! (icode != CODE_FOR_nothing
5893 				&& insn_operand_matches (icode, 0, equiv)
5894 				&& insn_operand_matches (icode, 1, equiv))))
5895 		  /* Using RELOAD_OTHER means we emit this and the reload we
5896 		     made earlier in the wrong order.  */
5897 		  && !reloaded_inner_of_autoinc)
5898 		{
5899 		  /* We use the original pseudo for loc, so that
5900 		     emit_reload_insns() knows which pseudo this
5901 		     reload refers to and updates the pseudo rtx, not
5902 		     its equivalent memory location, as well as the
5903 		     corresponding entry in reg_last_reload_reg.  */
5904 		  loc = &XEXP (x_orig, 0);
5905 		  x = XEXP (x, 0);
5906 		  reloadnum
5907 		    = push_reload (x, x, loc, loc,
5908 				   context_reg_class,
5909 				   GET_MODE (x), GET_MODE (x), 0, 0,
5910 				   opnum, RELOAD_OTHER);
5911 		}
5912 	      else
5913 		{
5914 		  reloadnum
5915 		    = push_reload (x, x, loc, (rtx*) 0,
5916 				   context_reg_class,
5917 				   GET_MODE (x), GET_MODE (x), 0, 0,
5918 				   opnum, type);
5919 		  rld[reloadnum].inc
5920 		    = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5921 
5922 		  value = 1;
5923 		}
5924 
5925 	      update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5926 				     reloadnum);
5927 	    }
5928 	  return value;
5929 	}
5930       return 0;
5931 
5932     case TRUNCATE:
5933     case SIGN_EXTEND:
5934     case ZERO_EXTEND:
5935       /* Look for parts to reload in the inner expression and reload them
5936 	 too, in addition to this operation.  Reloading all inner parts in
5937 	 addition to this one shouldn't be necessary, but at this point,
5938 	 we don't know if we can possibly omit any part that *can* be
5939 	 reloaded.  Targets that are better off reloading just either part
5940 	 (or perhaps even a different part of an outer expression), should
5941 	 define LEGITIMIZE_RELOAD_ADDRESS.  */
5942       find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5943 			      context, code, SCRATCH, &XEXP (x, 0), opnum,
5944 			      type, ind_levels, insn);
5945       push_reload (x, NULL_RTX, loc, (rtx*) 0,
5946 		   context_reg_class,
5947 		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5948       return 1;
5949 
5950     case MEM:
5951       /* This is probably the result of a substitution, by eliminate_regs, of
5952 	 an equivalent address for a pseudo that was not allocated to a hard
5953 	 register.  Verify that the specified address is valid and reload it
5954 	 into a register.
5955 
5956 	 Since we know we are going to reload this item, don't decrement for
5957 	 the indirection level.
5958 
5959 	 Note that this is actually conservative:  it would be slightly more
5960 	 efficient to use the value of SPILL_INDIRECT_LEVELS from
5961 	 reload1.c here.  */
5962 
5963       find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5964 			    opnum, ADDR_TYPE (type), ind_levels, insn);
5965       push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5966 		   context_reg_class,
5967 		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5968       return 1;
5969 
5970     case REG:
5971       {
5972 	int regno = REGNO (x);
5973 
5974 	if (reg_equiv_constant (regno) != 0)
5975 	  {
5976 	    find_reloads_address_part (reg_equiv_constant (regno), loc,
5977 				       context_reg_class,
5978 				       GET_MODE (x), opnum, type, ind_levels);
5979 	    return 1;
5980 	  }
5981 
5982 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5983 	 that feeds this insn.  */
5984 	if (reg_equiv_mem (regno) != 0)
5985 	  {
5986 	    push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5987 			 context_reg_class,
5988 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5989 	    return 1;
5990 	  }
5991 #endif
5992 
5993 	if (reg_equiv_memory_loc (regno)
5994 	    && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5995 	  {
5996 	    rtx tem = make_memloc (x, regno);
5997 	    if (reg_equiv_address (regno) != 0
5998 		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5999 	      {
6000 		x = tem;
6001 		find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
6002 				      &XEXP (x, 0), opnum, ADDR_TYPE (type),
6003 				      ind_levels, insn);
6004 		if (!rtx_equal_p (x, tem))
6005 		  push_reg_equiv_alt_mem (regno, x);
6006 	      }
6007 	  }
6008 
6009 	if (reg_renumber[regno] >= 0)
6010 	  regno = reg_renumber[regno];
6011 
6012 	if (regno >= FIRST_PSEUDO_REGISTER
6013 	    || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6014 				    index_code))
6015 	  {
6016 	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
6017 			 context_reg_class,
6018 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6019 	    return 1;
6020 	  }
6021 
6022 	/* If a register appearing in an address is the subject of a CLOBBER
6023 	   in this insn, reload it into some other register to be safe.
6024 	   The CLOBBER is supposed to make the register unavailable
6025 	   from before this insn to after it.  */
6026 	if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
6027 	  {
6028 	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
6029 			 context_reg_class,
6030 			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6031 	    return 1;
6032 	  }
6033       }
6034       return 0;
6035 
6036     case SUBREG:
6037       if (REG_P (SUBREG_REG (x)))
6038 	{
6039 	  /* If this is a SUBREG of a hard register and the resulting register
6040 	     is of the wrong class, reload the whole SUBREG.  This avoids
6041 	     needless copies if SUBREG_REG is multi-word.  */
6042 	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6043 	    {
6044 	      int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6045 
6046 	      if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6047 				       index_code))
6048 		{
6049 		  push_reload (x, NULL_RTX, loc, (rtx*) 0,
6050 			       context_reg_class,
6051 			       GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6052 		  return 1;
6053 		}
6054 	    }
6055 	  /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6056 	     is larger than the class size, then reload the whole SUBREG.  */
6057 	  else
6058 	    {
6059 	      enum reg_class rclass = context_reg_class;
6060 	      if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6061 		  > reg_class_size[(int) rclass])
6062 		{
6063 		  /* If the inner register will be replaced by a memory
6064 		     reference, we can do this only if we can replace the
6065 		     whole subreg by a (narrower) memory reference.  If
6066 		     this is not possible, fall through and reload just
6067 		     the inner register (including address reloads).  */
6068 		  if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6069 		    {
6070 		      rtx tem = find_reloads_subreg_address (x, opnum,
6071 							     ADDR_TYPE (type),
6072 							     ind_levels, insn,
6073 							     NULL);
6074 		      if (tem)
6075 			{
6076 			  push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6077 				       GET_MODE (tem), VOIDmode, 0, 0,
6078 				       opnum, type);
6079 			  return 1;
6080 			}
6081 		    }
6082 		  else
6083 		    {
6084 		      push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6085 				   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6086 		      return 1;
6087 		    }
6088 		}
6089 	    }
6090 	}
6091       break;
6092 
6093     default:
6094       break;
6095     }
6096 
6097   {
6098     const char *fmt = GET_RTX_FORMAT (code);
6099     int i;
6100 
6101     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6102       {
6103 	if (fmt[i] == 'e')
6104 	  /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6105 	     we get here.  */
6106 	  find_reloads_address_1 (mode, as, XEXP (x, i), context,
6107 				  code, SCRATCH, &XEXP (x, i),
6108 				  opnum, type, ind_levels, insn);
6109       }
6110   }
6111 
6112 #undef REG_OK_FOR_CONTEXT
6113   return 0;
6114 }
6115 
6116 /* X, which is found at *LOC, is a part of an address that needs to be
6117    reloaded into a register of class RCLASS.  If X is a constant, or if
6118    X is a PLUS that contains a constant, check that the constant is a
6119    legitimate operand and that we are supposed to be able to load
6120    it into the register.
6121 
6122    If not, force the constant into memory and reload the MEM instead.
6123 
6124    MODE is the mode to use, in case X is an integer constant.
6125 
6126    OPNUM and TYPE describe the purpose of any reloads made.
6127 
6128    IND_LEVELS says how many levels of indirect addressing this machine
6129    supports.  */
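
/* Illustrative note (editorial addition, not part of the original source;
   the target behaviour and the constant are hypothetical):  on a machine
   where a wide immediate is not a legitimate constant operand, an address
   part such as

       (plus:SI (reg:SI 3) (const_int 305419896))

   has its constant forced into the constant pool by force_const_mem, the
   new (mem ...) is substituted for the bare constant, and the reload is
   then pushed on the rewritten expression.  */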
6130 
6131 static void
6132 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6133 			   machine_mode mode, int opnum,
6134 			   enum reload_type type, int ind_levels)
6135 {
6136   if (CONSTANT_P (x)
6137       && (!targetm.legitimate_constant_p (mode, x)
6138 	  || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6139     {
6140       x = force_const_mem (mode, x);
6141       find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6142 			    opnum, type, ind_levels, 0);
6143     }
6144 
6145   else if (GET_CODE (x) == PLUS
6146 	   && CONSTANT_P (XEXP (x, 1))
6147 	   && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6148 	       || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6149 		   == NO_REGS))
6150     {
6151       rtx tem;
6152 
6153       tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6154       x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6155       find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6156 			    opnum, type, ind_levels, 0);
6157     }
6158 
6159   push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6160 	       mode, VOIDmode, 0, 0, opnum, type);
6161 }
6162 
6163 /* X, a subreg of a pseudo, is a part of an address that needs to be
6164    reloaded, and the pseudo is equivalent to a memory location.
6165 
6166    Attempt to replace the whole subreg by a (possibly narrower or wider)
6167    memory reference.  If this is possible, return this new memory
6168    reference, and push all required address reloads.  Otherwise,
6169    return NULL.
6170 
6171    OPNUM and TYPE identify the purpose of the reload.
6172 
6173    IND_LEVELS says how many levels of indirect addressing are
6174    supported at this point in the address.
6175 
6176    INSN, if nonzero, is the insn in which we do the reload.  It is used
6177    to determine where to put USEs for pseudos that we have to replace with
6178    stack slots.  */
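
/* Illustrative note (editorial addition, not part of the original source;
   the pseudo number and stack offset are made up):  if pseudo 117 received
   no hard register and is equivalent to (mem:SI (plus (reg fp) (const_int -8))),
   then (subreg:HI (reg:SI 117) 0) appearing in an address can, on a
   little-endian target, be replaced by (mem:HI (plus (reg fp) (const_int -8))),
   with any reloads the narrower address needs pushed by find_reloads_address.  */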
6179 
6180 static rtx
6181 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6182 			     int ind_levels, rtx_insn *insn,
6183 			     int *address_reloaded)
6184 {
6185   machine_mode outer_mode = GET_MODE (x);
6186   machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6187   int regno = REGNO (SUBREG_REG (x));
6188   int reloaded = 0;
6189   rtx tem, orig;
6190   poly_int64 offset;
6191 
6192   gcc_assert (reg_equiv_memory_loc (regno) != 0);
6193 
6194   /* We cannot replace the subreg with a modified memory reference if:
6195 
6196      - we have a paradoxical subreg that implicitly acts as a zero or
6197        sign extension operation due to LOAD_EXTEND_OP;
6198 
6199      - we have a subreg that is implicitly supposed to act on the full
6200        register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6201 
6202      - the address of the equivalent memory location is mode-dependent;  or
6203 
6204      - we have a paradoxical subreg and the resulting memory is not
6205        sufficiently aligned to allow access in the wider mode.
6206 
6207     In addition, we choose not to perform the replacement for *any*
6208     paradoxical subreg, even if it were possible in principle.  This
6209     is to avoid generating wider memory references than necessary.
6210 
6211     This corresponds to how previous versions of reload used to handle
6212     paradoxical subregs where no address reload was required.  */
6213 
6214   if (paradoxical_subreg_p (x))
6215     return NULL;
6216 
6217   if (WORD_REGISTER_OPERATIONS
6218       && partial_subreg_p (outer_mode, inner_mode)
6219       && known_equal_after_align_down (GET_MODE_SIZE (outer_mode) - 1,
6220 				       GET_MODE_SIZE (inner_mode) - 1,
6221 				       UNITS_PER_WORD))
6222     return NULL;
6223 
6224   /* Since we don't attempt to handle paradoxical subregs, we can just
6225      call into simplify_subreg, which will handle all remaining checks
6226      for us.  */
6227   orig = make_memloc (SUBREG_REG (x), regno);
6228   offset = SUBREG_BYTE (x);
6229   tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6230   if (!tem || !MEM_P (tem))
6231     return NULL;
6232 
6233   /* Now push all required address reloads, if any.  */
6234   reloaded = find_reloads_address (GET_MODE (tem), &tem,
6235 				   XEXP (tem, 0), &XEXP (tem, 0),
6236 				   opnum, type, ind_levels, insn);
6237   /* ??? Do we need to handle nonzero offsets somehow?  */
6238   if (known_eq (offset, 0) && !rtx_equal_p (tem, orig))
6239     push_reg_equiv_alt_mem (regno, tem);
6240 
6241   /* For some processors an address may be valid in the original mode but
6242      not in a smaller mode.  For example, ARM accepts a scaled index register
6243      in SImode but not in HImode.  Note that this is only a problem if the
6244      address in reg_equiv_mem is already invalid in the new mode; other
6245      cases would be fixed by find_reloads_address as usual.
6246 
6247      ??? We attempt to handle such cases here by doing an additional reload
6248      of the full address after the usual processing by find_reloads_address.
6249      Note that this may not work in the general case, but it seems to cover
6250      the cases where this situation currently occurs.  A more general fix
6251      might be to reload the *value* instead of the address, but this would
6252      not be expected by the callers of this routine as-is.
6253 
6254      If find_reloads_address has already completely replaced the address, there
6255      is nothing further to do.  */
6256   if (reloaded == 0
6257       && reg_equiv_mem (regno) != 0
6258       && !strict_memory_address_addr_space_p
6259 		(GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6260 		 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6261     {
6262       push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6263 		   base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6264 				   MEM, SCRATCH),
6265 		   GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6266       reloaded = 1;
6267     }
6268 
6269   /* If this is not a toplevel operand, find_reloads doesn't see this
6270      substitution.  We have to emit a USE of the pseudo so that
6271      delete_output_reload can see it.  */
6272   if (replace_reloads && recog_data.operand[opnum] != x)
6273     /* We mark the USE with QImode so that we recognize it as one that
6274        can be safely deleted at the end of reload.  */
6275     PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6276 	      QImode);
6277 
6278   if (address_reloaded)
6279     *address_reloaded = reloaded;
6280 
6281   return tem;
6282 }
6283 
6284 /* Substitute into the current INSN the registers into which we have reloaded
6285    the things that need reloading.  The array `replacements'
6286    contains the locations of all pointers that must be changed
6287    and says what to replace them with.
6288 
6289    The substitutions are made in place; nothing is returned.  */
6290 
6291 void
6292 subst_reloads (rtx_insn *insn)
6293 {
6294   int i;
6295 
6296   for (i = 0; i < n_replacements; i++)
6297     {
6298       struct replacement *r = &replacements[i];
6299       rtx reloadreg = rld[r->what].reg_rtx;
6300       if (reloadreg)
6301 	{
6302 #ifdef DEBUG_RELOAD
6303 	  /* This checking takes a very long time on some platforms
6304 	     causing the gcc.c-torture/compile/limits-fnargs.c test
6305 	     to time out during testing.  See PR 31850.
6306 
6307 	     Internal consistency test.  Check that we don't modify
6308 	     anything in the equivalence arrays.  Whenever something from
6309 	     those arrays needs to be reloaded, it must be unshared before
6310 	     being substituted into; the equivalence must not be modified.
6311 	     Otherwise, if the equivalence is used after that, it will
6312 	     have been modified, and the thing substituted (probably a
6313 	     register) is likely overwritten and not a usable equivalence.  */
6314 	  int check_regno;
6315 
6316 	  for (check_regno = 0; check_regno < max_regno; check_regno++)
6317 	    {
6318 #define CHECK_MODF(ARRAY)						\
6319 	      gcc_assert (!(*reg_equivs)[check_regno].ARRAY		\
6320 			  || !loc_mentioned_in_p (r->where,		\
6321 						  (*reg_equivs)[check_regno].ARRAY))
6322 
6323 	      CHECK_MODF (constant);
6324 	      CHECK_MODF (memory_loc);
6325 	      CHECK_MODF (address);
6326 	      CHECK_MODF (mem);
6327 #undef CHECK_MODF
6328 	    }
6329 #endif /* DEBUG_RELOAD */
6330 
6331 	  /* If we're replacing a LABEL_REF with a register, there must
6332 	     already be an indication (to e.g. flow) which label this
6333 	     register refers to.  */
6334 	  gcc_assert (GET_CODE (*r->where) != LABEL_REF
6335 		      || !JUMP_P (insn)
6336 		      || find_reg_note (insn,
6337 					REG_LABEL_OPERAND,
6338 					XEXP (*r->where, 0))
6339 		      || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6340 
6341 	  /* Encapsulate RELOADREG so its machine mode matches what
6342 	     used to be there.  Note that gen_lowpart_common will
6343 	     do the wrong thing if RELOADREG is multi-word.  RELOADREG
6344 	     will always be a REG here.  */
6345 	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6346 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6347 
6348 	  *r->where = reloadreg;
6349 	}
6350       /* If reload got no reg and isn't optional, something's wrong.  */
6351       else
6352 	gcc_assert (rld[r->what].optional);
6353     }
6354 }
6355 
6356 /* Make a copy of any replacements being done into X and move those
6357    copies to locations in Y, a copy of X.  */
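
/* Illustrative note (editorial addition, not part of the original source):
   if a replacement was recorded for &XEXP (x, 0) and Y is a fresh copy of
   X (e.g. from copy_rtx), copy_replacements (x, y) records a second
   replacement for &XEXP (y, 0) naming the same reload, so a later
   subst_reloads updates both copies.  */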
6358 
6359 void
6360 copy_replacements (rtx x, rtx y)
6361 {
6362   copy_replacements_1 (&x, &y, n_replacements);
6363 }
6364 
6365 static void
6366 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6367 {
6368   int i, j;
6369   rtx x, y;
6370   struct replacement *r;
6371   enum rtx_code code;
6372   const char *fmt;
6373 
6374   for (j = 0; j < orig_replacements; j++)
6375     if (replacements[j].where == px)
6376       {
6377 	r = &replacements[n_replacements++];
6378 	r->where = py;
6379 	r->what = replacements[j].what;
6380 	r->mode = replacements[j].mode;
6381       }
6382 
6383   x = *px;
6384   y = *py;
6385   code = GET_CODE (x);
6386   fmt = GET_RTX_FORMAT (code);
6387 
6388   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6389     {
6390       if (fmt[i] == 'e')
6391 	copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6392       else if (fmt[i] == 'E')
6393 	for (j = XVECLEN (x, i); --j >= 0; )
6394 	  copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6395 			       orig_replacements);
6396     }
6397 }
6398 
6399 /* Change any replacements being done to *X to be done to *Y.  */
6400 
6401 void
6402 move_replacements (rtx *x, rtx *y)
6403 {
6404   int i;
6405 
6406   for (i = 0; i < n_replacements; i++)
6407     if (replacements[i].where == x)
6408       replacements[i].where = y;
6409 }
6410 
6411 /* If LOC was scheduled to be replaced by something, return the replacement.
6412    Otherwise, return *LOC.  */
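
/* Illustrative note (editorial addition, not part of the original source):
   backends typically call this from reload-time expanders or output
   routines, e.g. find_replacement (&XEXP (mem, 0)) to obtain the register
   that will stand for a reloaded address once subst_reloads has run.  */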
6413 
6414 rtx
6415 find_replacement (rtx *loc)
6416 {
6417   struct replacement *r;
6418 
6419   for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6420     {
6421       rtx reloadreg = rld[r->what].reg_rtx;
6422 
6423       if (reloadreg && r->where == loc)
6424 	{
6425 	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6426 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6427 
6428 	  return reloadreg;
6429 	}
6430       else if (reloadreg && GET_CODE (*loc) == SUBREG
6431 	       && r->where == &SUBREG_REG (*loc))
6432 	{
6433 	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6434 	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6435 
6436 	  return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6437 				      GET_MODE (SUBREG_REG (*loc)),
6438 				      SUBREG_BYTE (*loc));
6439 	}
6440     }
6441 
6442   /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6443      what's inside and make a new rtl if so.  */
6444   if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6445       || GET_CODE (*loc) == MULT)
6446     {
6447       rtx x = find_replacement (&XEXP (*loc, 0));
6448       rtx y = find_replacement (&XEXP (*loc, 1));
6449 
6450       if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6451 	return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6452     }
6453 
6454   return *loc;
6455 }
6456 
6457 /* Return nonzero if register in range [REGNO, ENDREGNO)
6458    appears either explicitly or implicitly in X
6459    other than being stored into (except for earlyclobber operands).
6460 
6461    References contained within the substructure at LOC do not count.
6462    LOC may be zero, meaning don't ignore anything.
6463 
6464    This is similar to refers_to_regno_p in rtlanal.c except that we
6465    look at equivalences for pseudos that didn't get hard registers.  */
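
/* Illustrative note (editorial addition, not part of the original source;
   the register numbers are made up):  if pseudo 200 received no hard
   register and is equivalent to (mem:SI (plus (reg:SI 6) (const_int 16))),
   then asking whether hard register 6 is referred to in (reg:SI 200)
   answers yes, because the equivalence is scanned in place of the pseudo.  */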
6466 
6467 static int
6468 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6469 			      rtx x, rtx *loc)
6470 {
6471   int i;
6472   unsigned int r;
6473   RTX_CODE code;
6474   const char *fmt;
6475 
6476   if (x == 0)
6477     return 0;
6478 
6479  repeat:
6480   code = GET_CODE (x);
6481 
6482   switch (code)
6483     {
6484     case REG:
6485       r = REGNO (x);
6486 
6487       /* If this is a pseudo, a hard register must not have been allocated.
6488 	 X must therefore either be a constant or be in memory.  */
6489       if (r >= FIRST_PSEUDO_REGISTER)
6490 	{
6491 	  if (reg_equiv_memory_loc (r))
6492 	    return refers_to_regno_for_reload_p (regno, endregno,
6493 						 reg_equiv_memory_loc (r),
6494 						 (rtx*) 0);
6495 
6496 	  gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6497 	  return 0;
6498 	}
6499 
6500       return endregno > r && regno < END_REGNO (x);
6501 
6502     case SUBREG:
6503       /* If this is a SUBREG of a hard reg, we can see exactly which
6504 	 registers are being modified.  Otherwise, handle normally.  */
6505       if (REG_P (SUBREG_REG (x))
6506 	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6507 	{
6508 	  unsigned int inner_regno = subreg_regno (x);
6509 	  unsigned int inner_endregno
6510 	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6511 			     ? subreg_nregs (x) : 1);
6512 
6513 	  return endregno > inner_regno && regno < inner_endregno;
6514 	}
6515       break;
6516 
6517     case CLOBBER:
6518     case SET:
6519       if (&SET_DEST (x) != loc
6520 	  /* Note setting a SUBREG counts as referring to the REG it is in for
6521 	     a pseudo but not for hard registers since we can
6522 	     treat each word individually.  */
6523 	  && ((GET_CODE (SET_DEST (x)) == SUBREG
6524 	       && loc != &SUBREG_REG (SET_DEST (x))
6525 	       && REG_P (SUBREG_REG (SET_DEST (x)))
6526 	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6527 	       && refers_to_regno_for_reload_p (regno, endregno,
6528 						SUBREG_REG (SET_DEST (x)),
6529 						loc))
6530 	      /* If the output is an earlyclobber operand, this is
6531 		 a conflict.  */
6532 	      || ((!REG_P (SET_DEST (x))
6533 		   || earlyclobber_operand_p (SET_DEST (x)))
6534 		  && refers_to_regno_for_reload_p (regno, endregno,
6535 						   SET_DEST (x), loc))))
6536 	return 1;
6537 
6538       if (code == CLOBBER || loc == &SET_SRC (x))
6539 	return 0;
6540       x = SET_SRC (x);
6541       goto repeat;
6542 
6543     default:
6544       break;
6545     }
6546 
6547   /* X does not match, so try its subexpressions.  */
6548 
6549   fmt = GET_RTX_FORMAT (code);
6550   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6551     {
6552       if (fmt[i] == 'e' && loc != &XEXP (x, i))
6553 	{
6554 	  if (i == 0)
6555 	    {
6556 	      x = XEXP (x, 0);
6557 	      goto repeat;
6558 	    }
6559 	  else
6560 	    if (refers_to_regno_for_reload_p (regno, endregno,
6561 					      XEXP (x, i), loc))
6562 	      return 1;
6563 	}
6564       else if (fmt[i] == 'E')
6565 	{
6566 	  int j;
6567 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6568 	    if (loc != &XVECEXP (x, i, j)
6569 		&& refers_to_regno_for_reload_p (regno, endregno,
6570 						 XVECEXP (x, i, j), loc))
6571 	      return 1;
6572 	}
6573     }
6574   return 0;
6575 }
6576 
6577 /* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
6578    we check if any register number in X conflicts with the relevant register
6579    numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
6580    contains a MEM (we don't bother checking for memory addresses that can't
6581    conflict because we expect this to be a rare case).
6582 
6583    This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6584    that we look at equivalences for pseudos that didn't get hard registers.  */
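
/* Illustrative note (editorial addition, not part of the original source):
   a pseudo in X that received no hard register is handled through its
   equivalence; if it is equivalent to a memory location, the question
   degenerates to whether IN mentions any MEM at all, which is deliberately
   conservative.  */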
6585 
6586 int
6587 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6588 {
6589   int regno, endregno;
6590 
6591   /* Overly conservative.  */
6592   if (GET_CODE (x) == STRICT_LOW_PART
6593       || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6594     x = XEXP (x, 0);
6595 
6596   /* If either argument is a constant, then modifying X cannot affect IN.  */
6597   if (CONSTANT_P (x) || CONSTANT_P (in))
6598     return 0;
6599   else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6600     return refers_to_mem_for_reload_p (in);
6601   else if (GET_CODE (x) == SUBREG)
6602     {
6603       regno = REGNO (SUBREG_REG (x));
6604       if (regno < FIRST_PSEUDO_REGISTER)
6605 	regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6606 				      GET_MODE (SUBREG_REG (x)),
6607 				      SUBREG_BYTE (x),
6608 				      GET_MODE (x));
6609       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6610 			  ? subreg_nregs (x) : 1);
6611 
6612       return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6613     }
6614   else if (REG_P (x))
6615     {
6616       regno = REGNO (x);
6617 
6618       /* If this is a pseudo, it must not have been assigned a hard register.
6619 	 Therefore, it must either be in memory or be a constant.  */
6620 
6621       if (regno >= FIRST_PSEUDO_REGISTER)
6622 	{
6623 	  if (reg_equiv_memory_loc (regno))
6624 	    return refers_to_mem_for_reload_p (in);
6625 	  gcc_assert (reg_equiv_constant (regno));
6626 	  return 0;
6627 	}
6628 
6629       endregno = END_REGNO (x);
6630 
6631       return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6632     }
6633   else if (MEM_P (x))
6634     return refers_to_mem_for_reload_p (in);
6635   else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6636 	   || GET_CODE (x) == CC0)
6637     return reg_mentioned_p (x, in);
6638   else
6639     {
6640       gcc_assert (GET_CODE (x) == PLUS);
6641 
6642       /* We actually want to know if X is mentioned somewhere inside IN.
6643 	 We must not say that (plus (sp) (const_int 124)) is in
6644 	 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6645 	 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6646 	 into a RELOAD_OTHER on behalf of another RELOAD_OTHER.  */
6647       while (MEM_P (in))
6648 	in = XEXP (in, 0);
6649       if (REG_P (in))
6650 	return 0;
6651       else if (GET_CODE (in) == PLUS)
6652 	return (rtx_equal_p (x, in)
6653 		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6654 		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6655       else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6656 		   || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6657     }
6658 
6659   gcc_unreachable ();
6660 }
6661 
6662 /* Return nonzero if anything in X contains a MEM.  Look also for pseudo
6663    registers.  */
6664 
6665 static int
6666 refers_to_mem_for_reload_p (rtx x)
6667 {
6668   const char *fmt;
6669   int i;
6670 
6671   if (MEM_P (x))
6672     return 1;
6673 
6674   if (REG_P (x))
6675     return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6676 	    && reg_equiv_memory_loc (REGNO (x)));
6677 
6678   fmt = GET_RTX_FORMAT (GET_CODE (x));
6679   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6680     if (fmt[i] == 'e'
6681 	&& (MEM_P (XEXP (x, i))
6682 	    || refers_to_mem_for_reload_p (XEXP (x, i))))
6683       return 1;
6684 
6685   return 0;
6686 }
6687 
6688 /* Check the insns before INSN to see if there is a suitable register
6689    containing the same value as GOAL.
6690    If OTHER is -1, look for a register in class RCLASS.
6691    Otherwise, just see if register number OTHER shares GOAL's value.
6692 
6693    Return an rtx for the register found, or zero if none is found.
6694 
6695    If RELOAD_REG_P is (short *)1,
6696    we reject any hard reg that appears in reload_reg_rtx
6697    because such a hard reg is also needed coming into this insn.
6698 
6699    If RELOAD_REG_P is any other nonzero value,
6700    it is a vector indexed by hard reg number
6701    and we reject any hard reg whose element in the vector is nonnegative
6702    as well as any that appears in reload_reg_rtx.
6703 
6704    If GOAL is zero, then GOALREG is a register number; we look
6705    for an equivalent for that register.
6706 
6707    MODE is the machine mode of the value we want an equivalence for.
6708    If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6709 
6710    This function is used by jump.c as well as in the reload pass.
6711 
6712    If GOAL is the sum of the stack pointer and a constant, we treat it
6713    as if it were a constant except that sp is required to be unchanging.  */
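
/* Illustrative note (editorial addition, not part of the original source;
   the hard register number is made up):  if a preceding insn did
   (set (reg:SI 2) (mem:SI (plus (reg fp) (const_int -4)))) and neither
   register 2 nor that stack slot has been modified since, then asking for
   an equivalent of the MEM in a class containing register 2 can return
   (reg:SI 2), letting the reload reuse it instead of emitting a new load.  */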
6714 
6715 rtx
6716 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6717 		short *reload_reg_p, int goalreg, machine_mode mode)
6718 {
6719   rtx_insn *p = insn;
6720   rtx goaltry, valtry, value;
6721   rtx_insn *where;
6722   rtx pat;
6723   int regno = -1;
6724   int valueno;
6725   int goal_mem = 0;
6726   int goal_const = 0;
6727   int goal_mem_addr_varies = 0;
6728   int need_stable_sp = 0;
6729   int nregs;
6730   int valuenregs;
6731   int num = 0;
6732 
6733   if (goal == 0)
6734     regno = goalreg;
6735   else if (REG_P (goal))
6736     regno = REGNO (goal);
6737   else if (MEM_P (goal))
6738     {
6739       enum rtx_code code = GET_CODE (XEXP (goal, 0));
6740       if (MEM_VOLATILE_P (goal))
6741 	return 0;
6742       if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6743 	return 0;
6744       /* An address with side effects must be reexecuted.  */
6745       switch (code)
6746 	{
6747 	case POST_INC:
6748 	case PRE_INC:
6749 	case POST_DEC:
6750 	case PRE_DEC:
6751 	case POST_MODIFY:
6752 	case PRE_MODIFY:
6753 	  return 0;
6754 	default:
6755 	  break;
6756 	}
6757       goal_mem = 1;
6758     }
6759   else if (CONSTANT_P (goal))
6760     goal_const = 1;
6761   else if (GET_CODE (goal) == PLUS
6762 	   && XEXP (goal, 0) == stack_pointer_rtx
6763 	   && CONSTANT_P (XEXP (goal, 1)))
6764     goal_const = need_stable_sp = 1;
6765   else if (GET_CODE (goal) == PLUS
6766 	   && XEXP (goal, 0) == frame_pointer_rtx
6767 	   && CONSTANT_P (XEXP (goal, 1)))
6768     goal_const = 1;
6769   else
6770     return 0;
6771 
6772   num = 0;
6773   /* Scan insns back from INSN, looking for one that copies
6774      a value into or out of GOAL.
6775      Stop and give up if we reach a label.  */
6776 
6777   while (1)
6778     {
6779       p = PREV_INSN (p);
6780       if (p && DEBUG_INSN_P (p))
6781 	continue;
6782       num++;
6783       if (p == 0 || LABEL_P (p)
6784 	  || num > param_max_reload_search_insns)
6785 	return 0;
6786 
6787       /* Don't reuse register contents from before a setjmp-type
6788 	 function call; on the second return (from the longjmp) it
6789 	 might have been clobbered by a later reuse.  It doesn't
6790 	 seem worthwhile to go and check whether it really is
6791 	 reused, even if that information would be readily available;
6792 	 just don't reuse it across the setjmp call.  */
6793       if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6794 	return 0;
6795 
6796       if (NONJUMP_INSN_P (p)
6797 	  /* If we don't want spill regs ...  */
6798 	  && (! (reload_reg_p != 0
6799 		 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6800 	      /* ... then ignore insns introduced by reload; they aren't
6801 		 useful and can cause results in reload_as_needed to be
6802 		 different from what they were when calculating the need for
6803 		 spills.  If we notice an input-reload insn here, we will
6804 		 reject it below, but it might hide a usable equivalent.
6805 		 That makes bad code.  It may even fail: perhaps no reg was
6806 		 spilled for this insn because it was assumed we would find
6807 		 that equivalent.  */
6808 	      || INSN_UID (p) < reload_first_uid))
6809 	{
6810 	  rtx tem;
6811 	  pat = single_set (p);
6812 
6813 	  /* First check for something that sets some reg equal to GOAL.  */
6814 	  if (pat != 0
6815 	      && ((regno >= 0
6816 		   && true_regnum (SET_SRC (pat)) == regno
6817 		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6818 		  ||
6819 		  (regno >= 0
6820 		   && true_regnum (SET_DEST (pat)) == regno
6821 		   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6822 		  ||
6823 		  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6824 		   /* When looking for stack pointer + const,
6825 		      make sure we don't use a stack adjust.  */
6826 		   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6827 		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6828 		  || (goal_mem
6829 		      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6830 		      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6831 		  || (goal_mem
6832 		      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6833 		      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6834 		  /* If we are looking for a constant,
6835 		     and something equivalent to that constant was copied
6836 		     into a reg, we can use that reg.  */
6837 		  || (goal_const && REG_NOTES (p) != 0
6838 		      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6839 		      && ((rtx_equal_p (XEXP (tem, 0), goal)
6840 			   && (valueno
6841 			       = true_regnum (valtry = SET_DEST (pat))) >= 0)
6842 			  || (REG_P (SET_DEST (pat))
6843 			      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6844 			      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6845 			      && CONST_INT_P (goal)
6846 			      && (goaltry = operand_subword (XEXP (tem, 0), 0,
6847 							     0, VOIDmode)) != 0
6848 			      && rtx_equal_p (goal, goaltry)
6849 			      && (valtry
6850 				  = operand_subword (SET_DEST (pat), 0, 0,
6851 						     VOIDmode))
6852 			      && (valueno = true_regnum (valtry)) >= 0)))
6853 		  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6854 							  NULL_RTX))
6855 		      && REG_P (SET_DEST (pat))
6856 		      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6857 		      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6858 		      && CONST_INT_P (goal)
6859 		      && (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6860 						     VOIDmode)) != 0
6861 		      && rtx_equal_p (goal, goaltry)
6862 		      && (valtry
6863 			  = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6864 		      && (valueno = true_regnum (valtry)) >= 0)))
6865 	    {
6866 	      if (other >= 0)
6867 		{
6868 		  if (valueno != other)
6869 		    continue;
6870 		}
6871 	      else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6872 		continue;
6873 	      else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6874 					  mode, valueno))
6875 		continue;
6876 	      value = valtry;
6877 	      where = p;
6878 	      break;
6879 	    }
6880 	}
6881     }
6882 
6883   /* We found a previous insn copying GOAL into a suitable other reg VALUE
6884      (or copying VALUE into GOAL, if GOAL is also a register).
6885      Now verify that VALUE is really valid.  */
6886 
6887   /* VALUENO is the register number of VALUE; a hard register.  */
6888 
6889   /* Don't try to re-use something that is killed in this insn.  We want
6890      to be able to trust REG_UNUSED notes.  */
6891   if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6892     return 0;
6893 
6894   /* If we propose to get the value from the stack pointer or if GOAL is
6895      a MEM based on the stack pointer, we need a stable SP.  */
6896   if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6897       || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6898 							  goal)))
6899     need_stable_sp = 1;
6900 
6901   /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
6902   if (GET_MODE (value) != mode)
6903     return 0;
6904 
6905   /* Reject VALUE if it was loaded from GOAL
6906      and is also a register that appears in the address of GOAL.  */
6907 
6908   if (goal_mem && value == SET_DEST (single_set (where))
6909       && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6910 				       goal, (rtx*) 0))
6911     return 0;
6912 
6913   /* Reject registers that overlap GOAL.  */
6914 
6915   if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6916     nregs = hard_regno_nregs (regno, mode);
6917   else
6918     nregs = 1;
6919   valuenregs = hard_regno_nregs (valueno, mode);
6920 
6921   if (!goal_mem && !goal_const
6922       && regno + nregs > valueno && regno < valueno + valuenregs)
6923     return 0;
6924 
6925   /* Reject VALUE if it is one of the regs reserved for reloads.
6926      Reload1 knows how to reuse them anyway, and it would get
6927      confused if we allocated one without its knowledge.
6928      (Now that insns introduced by reload are ignored above,
6929      this case shouldn't happen, but I'm not positive.)  */
6930 
6931   if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6932     {
6933       int i;
6934       for (i = 0; i < valuenregs; ++i)
6935 	if (reload_reg_p[valueno + i] >= 0)
6936 	  return 0;
6937     }
6938 
6939   /* Reject VALUE if it is a register being used for an input reload
6940      even if it is not one of those reserved.  */
6941 
6942   if (reload_reg_p != 0)
6943     {
6944       int i;
6945       for (i = 0; i < n_reloads; i++)
6946 	if (rld[i].reg_rtx != 0
6947 	    && rld[i].in
6948 	    && (int) REGNO (rld[i].reg_rtx) < valueno + valuenregs
6949 	    && (int) END_REGNO (rld[i].reg_rtx) > valueno)
6950 	  return 0;
6951     }
6952 
6953   if (goal_mem)
6954     /* We must treat frame pointer as varying here,
6955        since it can vary--in a nonlocal goto as generated by expand_goto.  */
6956     goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6957 
6958   /* Now verify that the values of GOAL and VALUE remain unaltered
6959      until INSN is reached.  */
6960 
6961   p = insn;
6962   while (1)
6963     {
6964       p = PREV_INSN (p);
6965       if (p == where)
6966 	return value;
6967 
6968       /* Don't trust the conversion past a function call
6969 	 if either of the two is in a call-clobbered register, or memory.  */
6970       if (CALL_P (p))
6971 	{
6972 	  if (goal_mem || need_stable_sp)
6973 	    return 0;
6974 
6975 	  function_abi callee_abi = insn_callee_abi (p);
6976 	  if (regno >= 0
6977 	      && regno < FIRST_PSEUDO_REGISTER
6978 	      && callee_abi.clobbers_reg_p (mode, regno))
6979 	    return 0;
6980 
6981 	  if (valueno >= 0
6982 	      && valueno < FIRST_PSEUDO_REGISTER
6983 	      && callee_abi.clobbers_reg_p (mode, valueno))
6984 	    return 0;
6985 	}
6986 
6987       if (INSN_P (p))
6988 	{
6989 	  pat = PATTERN (p);
6990 
6991 	  /* Watch out for unspec_volatile, and volatile asms.  */
6992 	  if (volatile_insn_p (pat))
6993 	    return 0;
6994 
6995 	  /* If this insn P stores in either GOAL or VALUE, return 0.
6996 	     If GOAL is a memory ref and this insn writes memory, return 0.
6997 	     If GOAL is a memory ref and its address is not constant,
6998 	     and this insn P changes a register used in GOAL, return 0.  */
6999 
7000 	  if (GET_CODE (pat) == COND_EXEC)
7001 	    pat = COND_EXEC_CODE (pat);
7002 	  if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
7003 	    {
7004 	      rtx dest = SET_DEST (pat);
7005 	      while (GET_CODE (dest) == SUBREG
7006 		     || GET_CODE (dest) == ZERO_EXTRACT
7007 		     || GET_CODE (dest) == STRICT_LOW_PART)
7008 		dest = XEXP (dest, 0);
7009 	      if (REG_P (dest))
7010 		{
7011 		  int xregno = REGNO (dest);
7012 		  int end_xregno = END_REGNO (dest);
7013 		  if (xregno < regno + nregs && end_xregno > regno)
7014 		    return 0;
7015 		  if (xregno < valueno + valuenregs
7016 		      && end_xregno > valueno)
7017 		    return 0;
7018 		  if (goal_mem_addr_varies
7019 		      && reg_overlap_mentioned_for_reload_p (dest, goal))
7020 		    return 0;
7021 		  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7022 		    return 0;
7023 		}
7024 	      else if (goal_mem && MEM_P (dest)
7025 		       && ! push_operand (dest, GET_MODE (dest)))
7026 		return 0;
7027 	      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7028 		       && reg_equiv_memory_loc (regno) != 0)
7029 		return 0;
7030 	      else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7031 		return 0;
7032 	    }
7033 	  else if (GET_CODE (pat) == PARALLEL)
7034 	    {
7035 	      int i;
7036 	      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7037 		{
7038 		  rtx v1 = XVECEXP (pat, 0, i);
7039 		  if (GET_CODE (v1) == COND_EXEC)
7040 		    v1 = COND_EXEC_CODE (v1);
7041 		  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7042 		    {
7043 		      rtx dest = SET_DEST (v1);
7044 		      while (GET_CODE (dest) == SUBREG
7045 			     || GET_CODE (dest) == ZERO_EXTRACT
7046 			     || GET_CODE (dest) == STRICT_LOW_PART)
7047 			dest = XEXP (dest, 0);
7048 		      if (REG_P (dest))
7049 			{
7050 			  int xregno = REGNO (dest);
7051 			  int end_xregno = END_REGNO (dest);
7052 			  if (xregno < regno + nregs
7053 			      && end_xregno > regno)
7054 			    return 0;
7055 			  if (xregno < valueno + valuenregs
7056 			      && end_xregno > valueno)
7057 			    return 0;
7058 			  if (goal_mem_addr_varies
7059 			      && reg_overlap_mentioned_for_reload_p (dest,
7060 								     goal))
7061 			    return 0;
7062 			  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7063 			    return 0;
7064 			}
7065 		      else if (goal_mem && MEM_P (dest)
7066 			       && ! push_operand (dest, GET_MODE (dest)))
7067 			return 0;
7068 		      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7069 			       && reg_equiv_memory_loc (regno) != 0)
7070 			return 0;
7071 		      else if (need_stable_sp
7072 			       && push_operand (dest, GET_MODE (dest)))
7073 			return 0;
7074 		    }
7075 		}
7076 	    }
7077 
7078 	  if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7079 	    {
7080 	      rtx link;
7081 
7082 	      for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7083 		   link = XEXP (link, 1))
7084 		{
7085 		  pat = XEXP (link, 0);
7086 		  if (GET_CODE (pat) == CLOBBER)
7087 		    {
7088 		      rtx dest = SET_DEST (pat);
7089 
7090 		      if (REG_P (dest))
7091 			{
7092 			  int xregno = REGNO (dest);
7093 			  int end_xregno = END_REGNO (dest);
7094 
7095 			  if (xregno < regno + nregs
7096 			      && end_xregno > regno)
7097 			    return 0;
7098 			  else if (xregno < valueno + valuenregs
7099 				   && end_xregno > valueno)
7100 			    return 0;
7101 			  else if (goal_mem_addr_varies
7102 				   && reg_overlap_mentioned_for_reload_p (dest,
7103 								     goal))
7104 			    return 0;
7105 			}
7106 
7107 		      else if (goal_mem && MEM_P (dest)
7108 			       && ! push_operand (dest, GET_MODE (dest)))
7109 			return 0;
7110 		      else if (need_stable_sp
7111 			       && push_operand (dest, GET_MODE (dest)))
7112 			return 0;
7113 		    }
7114 		}
7115 	    }
7116 
7117 #if AUTO_INC_DEC
7118 	  /* If this insn auto-increments or auto-decrements
7119 	     either regno or valueno, return 0 now.
7120 	     If GOAL is a memory ref and its address is not constant,
7121 	     and this insn P increments a register used in GOAL, return 0.  */
7122 	  {
7123 	    rtx link;
7124 
7125 	    for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7126 	      if (REG_NOTE_KIND (link) == REG_INC
7127 		  && REG_P (XEXP (link, 0)))
7128 		{
7129 		  int incno = REGNO (XEXP (link, 0));
7130 		  if (incno < regno + nregs && incno >= regno)
7131 		    return 0;
7132 		  if (incno < valueno + valuenregs && incno >= valueno)
7133 		    return 0;
7134 		  if (goal_mem_addr_varies
7135 		      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7136 							     goal))
7137 		    return 0;
7138 		}
7139 	  }
7140 #endif
7141 	}
7142     }
7143 }
7144 
7145 /* Find a place where INCED appears in an increment or decrement operator
7146    within X, and return the amount INCED is incremented or decremented by.
7147    The value is always positive.  */
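
/* Illustrative note (editorial addition, not part of the original source):
   for (mem:SI (post_inc:SI (reg:SI 5))) the amount is 4, the size of
   SImode; for (mem:QI (pre_modify:SI (reg:SI 5)
			(plus:SI (reg:SI 5) (const_int -12)))) it is 12,
   since the amount is reported as a positive value.  */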
7148 
7149 static poly_int64
7150 find_inc_amount (rtx x, rtx inced)
7151 {
7152   enum rtx_code code = GET_CODE (x);
7153   const char *fmt;
7154   int i;
7155 
7156   if (code == MEM)
7157     {
7158       rtx addr = XEXP (x, 0);
7159       if ((GET_CODE (addr) == PRE_DEC
7160 	   || GET_CODE (addr) == POST_DEC
7161 	   || GET_CODE (addr) == PRE_INC
7162 	   || GET_CODE (addr) == POST_INC)
7163 	  && XEXP (addr, 0) == inced)
7164 	return GET_MODE_SIZE (GET_MODE (x));
7165       else if ((GET_CODE (addr) == PRE_MODIFY
7166 		|| GET_CODE (addr) == POST_MODIFY)
7167 	       && GET_CODE (XEXP (addr, 1)) == PLUS
7168 	       && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7169 	       && XEXP (addr, 0) == inced
7170 	       && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7171 	{
7172 	  i = INTVAL (XEXP (XEXP (addr, 1), 1));
7173 	  return i < 0 ? -i : i;
7174 	}
7175     }
7176 
7177   fmt = GET_RTX_FORMAT (code);
7178   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7179     {
7180       if (fmt[i] == 'e')
7181 	{
7182 	  poly_int64 tem = find_inc_amount (XEXP (x, i), inced);
7183 	  if (maybe_ne (tem, 0))
7184 	    return tem;
7185 	}
7186       if (fmt[i] == 'E')
7187 	{
7188 	  int j;
7189 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7190 	    {
7191 	      poly_int64 tem = find_inc_amount (XVECEXP (x, i, j), inced);
7192 	      if (maybe_ne (tem, 0))
7193 		return tem;
7194 	    }
7195 	}
7196     }
7197 
7198   return 0;
7199 }
7200 
7201 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7202    REG_INC note in insn INSN.  REGNO must refer to a hard register.  */
7203 
7204 static int
7205 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7206 			   rtx insn)
7207 {
7208   rtx link;
7209 
7210   if (!AUTO_INC_DEC)
7211     return 0;
7212 
7213   gcc_assert (insn);
7214 
7215   if (! INSN_P (insn))
7216     return 0;
7217 
7218   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7219     if (REG_NOTE_KIND (link) == REG_INC)
7220       {
7221 	unsigned int test = REGNO (XEXP (link, 0));
7222 	if (test >= regno && test < endregno)
7223 	  return 1;
7224       }
7225   return 0;
7226 }
7227 
7228 /* Return 1 if any hard register spanned by (REGNO, MODE) is the subject of
7229    a clobber in insn INSN.  If SETS is 1, also consider SETs; if SETS is 2,
7230    also check for REG_INC notes.  REGNO must refer to a hard register.  */
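/* Editor's example (hypothetical insn; assumes SImode fits in one hard reg):
   for (parallel [(set (reg:SI 0) ...) (clobber (reg:SI 3))]),
   regno_clobbered_p (3, insn, SImode, 0) returns 1 because of the CLOBBER,
   and regno_clobbered_p (0, insn, SImode, 1) returns 1 because SETS == 1
   also counts SETs.  */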
7231 
7232 int
7233 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7234 		   int sets)
7235 {
7236   /* regno must be a hard register.  */
7237   gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7238 
7239   unsigned int endregno = end_hard_regno (mode, regno);
7240 
7241   if ((GET_CODE (PATTERN (insn)) == CLOBBER
7242        || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7243       && REG_P (XEXP (PATTERN (insn), 0)))
7244     {
7245       unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7246 
7247       return test >= regno && test < endregno;
7248     }
7249 
7250   if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7251     return 1;
7252 
7253   if (GET_CODE (PATTERN (insn)) == PARALLEL)
7254     {
7255       int i = XVECLEN (PATTERN (insn), 0) - 1;
7256 
7257       for (; i >= 0; i--)
7258 	{
7259 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7260 	  if ((GET_CODE (elt) == CLOBBER
7261 	       || (sets == 1 && GET_CODE (elt) == SET))
7262 	      && REG_P (XEXP (elt, 0)))
7263 	    {
7264 	      unsigned int test = REGNO (XEXP (elt, 0));
7265 
7266 	      if (test >= regno && test < endregno)
7267 		return 1;
7268 	    }
7269 	  if (sets == 2
7270 	      && reg_inc_found_and_valid_p (regno, endregno, elt))
7271 	    return 1;
7272 	}
7273     }
7274 
7275   return 0;
7276 }
7277 
7278 /* Find the low part, with mode MODE, of the hard register RELOADREG.  */
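/* Worked example (editor's sketch, hypothetical target): if RELOADREG is
   (reg:DI 10) occupying two word-sized hard registers and MODE is SImode
   needing only one, then with REG_WORDS_BIG_ENDIAN the low part lives in the
   higher-numbered register and the result is (reg:SI 11); otherwise it is
   (reg:SI 10).  */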
7279 rtx
7280 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7281 {
7282   int regno;
7283 
7284   if (GET_MODE (reloadreg) == mode)
7285     return reloadreg;
7286 
7287   regno = REGNO (reloadreg);
7288 
7289   if (REG_WORDS_BIG_ENDIAN)
7290     regno += ((int) REG_NREGS (reloadreg)
7291 	      - (int) hard_regno_nregs (regno, mode));
7292 
7293   return gen_rtx_REG (mode, regno);
7294 }
7295 
7296 static const char *const reload_when_needed_name[] =
7297 {
7298   "RELOAD_FOR_INPUT",
7299   "RELOAD_FOR_OUTPUT",
7300   "RELOAD_FOR_INSN",
7301   "RELOAD_FOR_INPUT_ADDRESS",
7302   "RELOAD_FOR_INPADDR_ADDRESS",
7303   "RELOAD_FOR_OUTPUT_ADDRESS",
7304   "RELOAD_FOR_OUTADDR_ADDRESS",
7305   "RELOAD_FOR_OPERAND_ADDRESS",
7306   "RELOAD_FOR_OPADDR_ADDR",
7307   "RELOAD_OTHER",
7308   "RELOAD_FOR_OTHER_ADDRESS"
7309 };
7310 
7311 /* These functions are used to print the variables set by `find_reloads'.  */
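/* Roughly, the output for one reload looks like this (editor's illustration;
   the registers, class, and flags shown are hypothetical and target-specific):

     Reload 0: reload_in (SI) = (reg:SI 58)
	GENERAL_REGS, RELOAD_FOR_INPUT (opnum = 1)
	reload_reg_rtx: (reg:SI 0 ax)
*/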
7312 
7313 DEBUG_FUNCTION void
7314 debug_reload_to_stream (FILE *f)
7315 {
7316   int r;
7317   const char *prefix;
7318 
7319   if (! f)
7320     f = stderr;
7321   for (r = 0; r < n_reloads; r++)
7322     {
7323       fprintf (f, "Reload %d: ", r);
7324 
7325       if (rld[r].in != 0)
7326 	{
7327 	  fprintf (f, "reload_in (%s) = ",
7328 		   GET_MODE_NAME (rld[r].inmode));
7329 	  print_inline_rtx (f, rld[r].in, 24);
7330 	  fprintf (f, "\n\t");
7331 	}
7332 
7333       if (rld[r].out != 0)
7334 	{
7335 	  fprintf (f, "reload_out (%s) = ",
7336 		   GET_MODE_NAME (rld[r].outmode));
7337 	  print_inline_rtx (f, rld[r].out, 24);
7338 	  fprintf (f, "\n\t");
7339 	}
7340 
7341       fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7342 
7343       fprintf (f, "%s (opnum = %d)",
7344 	       reload_when_needed_name[(int) rld[r].when_needed],
7345 	       rld[r].opnum);
7346 
7347       if (rld[r].optional)
7348 	fprintf (f, ", optional");
7349 
7350       if (rld[r].nongroup)
7351 	fprintf (f, ", nongroup");
7352 
7353       if (maybe_ne (rld[r].inc, 0))
7354 	{
7355 	  fprintf (f, ", inc by ");
7356 	  print_dec (rld[r].inc, f, SIGNED);
7357 	}
7358 
7359       if (rld[r].nocombine)
7360 	fprintf (f, ", can't combine");
7361 
7362       if (rld[r].secondary_p)
7363 	fprintf (f, ", secondary_reload_p");
7364 
7365       if (rld[r].in_reg != 0)
7366 	{
7367 	  fprintf (f, "\n\treload_in_reg: ");
7368 	  print_inline_rtx (f, rld[r].in_reg, 24);
7369 	}
7370 
7371       if (rld[r].out_reg != 0)
7372 	{
7373 	  fprintf (f, "\n\treload_out_reg: ");
7374 	  print_inline_rtx (f, rld[r].out_reg, 24);
7375 	}
7376 
7377       if (rld[r].reg_rtx != 0)
7378 	{
7379 	  fprintf (f, "\n\treload_reg_rtx: ");
7380 	  print_inline_rtx (f, rld[r].reg_rtx, 24);
7381 	}
7382 
7383       prefix = "\n\t";
7384       if (rld[r].secondary_in_reload != -1)
7385 	{
7386 	  fprintf (f, "%ssecondary_in_reload = %d",
7387 		   prefix, rld[r].secondary_in_reload);
7388 	  prefix = ", ";
7389 	}
7390 
7391       if (rld[r].secondary_out_reload != -1)
7392 	fprintf (f, "%ssecondary_out_reload = %d\n",
7393 		 prefix, rld[r].secondary_out_reload);
7394 
7395       prefix = "\n\t";
7396       if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7397 	{
7398 	  fprintf (f, "%ssecondary_in_icode = %s", prefix,
7399 		   insn_data[rld[r].secondary_in_icode].name);
7400 	  prefix = ", ";
7401 	}
7402 
7403       if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7404 	fprintf (f, "%ssecondary_out_icode = %s", prefix,
7405 		 insn_data[rld[r].secondary_out_icode].name);
7406 
7407       fprintf (f, "\n");
7408     }
7409 }
7410 
7411 DEBUG_FUNCTION void
7412 debug_reload (void)
7413 {
7414   debug_reload_to_stream (stderr);
7415 }
7416