xref: /dflybsd-src/contrib/gcc-8.0/gcc/reload1.c (revision 38fd149817dfbff97799f62fcb70be98c4e32523)
1*38fd1498Szrj /* Reload pseudo regs into hard regs for insns that require hard regs.
2*38fd1498Szrj    Copyright (C) 1987-2018 Free Software Foundation, Inc.
3*38fd1498Szrj 
4*38fd1498Szrj This file is part of GCC.
5*38fd1498Szrj 
6*38fd1498Szrj GCC is free software; you can redistribute it and/or modify it under
7*38fd1498Szrj the terms of the GNU General Public License as published by the Free
8*38fd1498Szrj Software Foundation; either version 3, or (at your option) any later
9*38fd1498Szrj version.
10*38fd1498Szrj 
11*38fd1498Szrj GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12*38fd1498Szrj WARRANTY; without even the implied warranty of MERCHANTABILITY or
13*38fd1498Szrj FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14*38fd1498Szrj for more details.
15*38fd1498Szrj 
16*38fd1498Szrj You should have received a copy of the GNU General Public License
17*38fd1498Szrj along with GCC; see the file COPYING3.  If not see
18*38fd1498Szrj <http://www.gnu.org/licenses/>.  */
19*38fd1498Szrj 
20*38fd1498Szrj #include "config.h"
21*38fd1498Szrj #include "system.h"
22*38fd1498Szrj #include "coretypes.h"
23*38fd1498Szrj #include "backend.h"
24*38fd1498Szrj #include "target.h"
25*38fd1498Szrj #include "rtl.h"
26*38fd1498Szrj #include "tree.h"
27*38fd1498Szrj #include "predict.h"
28*38fd1498Szrj #include "df.h"
29*38fd1498Szrj #include "memmodel.h"
30*38fd1498Szrj #include "tm_p.h"
31*38fd1498Szrj #include "optabs.h"
32*38fd1498Szrj #include "regs.h"
33*38fd1498Szrj #include "ira.h"
34*38fd1498Szrj #include "recog.h"
35*38fd1498Szrj 
36*38fd1498Szrj #include "rtl-error.h"
37*38fd1498Szrj #include "expr.h"
38*38fd1498Szrj #include "addresses.h"
39*38fd1498Szrj #include "cfgrtl.h"
40*38fd1498Szrj #include "cfgbuild.h"
41*38fd1498Szrj #include "reload.h"
42*38fd1498Szrj #include "except.h"
43*38fd1498Szrj #include "dumpfile.h"
44*38fd1498Szrj #include "rtl-iter.h"
45*38fd1498Szrj 
46*38fd1498Szrj /* This file contains the reload pass of the compiler, which is
47*38fd1498Szrj    run after register allocation has been done.  It checks that
48*38fd1498Szrj    each insn is valid (operands required to be in registers really
49*38fd1498Szrj    are in registers of the proper class) and fixes up invalid ones
50*38fd1498Szrj    by copying values temporarily into registers for the insns
51*38fd1498Szrj    that need them.
52*38fd1498Szrj 
53*38fd1498Szrj    The results of register allocation are described by the vector
54*38fd1498Szrj    reg_renumber; the insns still contain pseudo regs, but reg_renumber
55*38fd1498Szrj    can be used to find which hard reg, if any, a pseudo reg is in.
56*38fd1498Szrj 
57*38fd1498Szrj    The technique we always use is to free up a few hard regs that are
58*38fd1498Szrj    called ``reload regs'', and for each place where a pseudo reg
59*38fd1498Szrj    must be in a hard reg, copy it temporarily into one of the reload regs.
60*38fd1498Szrj 
61*38fd1498Szrj    Reload regs are allocated locally for every instruction that needs
62*38fd1498Szrj    reloads.  When there are pseudos which are allocated to a register that
63*38fd1498Szrj    has been chosen as a reload reg, such pseudos must be ``spilled''.
64*38fd1498Szrj    This means that they go to other hard regs, or to stack slots if no other
65*38fd1498Szrj    available hard regs can be found.  Spilling can invalidate more
66*38fd1498Szrj    insns, creating additional need for reloads, so we must keep checking
67*38fd1498Szrj    until the process stabilizes.
68*38fd1498Szrj 
69*38fd1498Szrj    For machines with different classes of registers, we must keep track
70*38fd1498Szrj    of the register class needed for each reload, and make sure that
71*38fd1498Szrj    we allocate enough reload registers of each class.
72*38fd1498Szrj 
73*38fd1498Szrj    The file reload.c contains the code that checks one insn for
74*38fd1498Szrj    validity and reports the reloads that it needs.  This file
75*38fd1498Szrj    is in charge of scanning the entire rtl code, accumulating the
76*38fd1498Szrj    reload needs, spilling, assigning reload registers to use for
77*38fd1498Szrj    fixing up each insn, and generating the new insns to copy values
78*38fd1498Szrj    into the reload registers.  */
79*38fd1498Szrj 
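/* Illustrative sketch of the basic transformation (the register numbers
   and stack offset are invented for the example, not taken from a real
   dump): suppose pseudo 117 received no hard register and lives in a
   stack slot, and the insn

	(set (reg:SI 117) (plus:SI (reg:SI 117) (const_int 4)))

   needs its operands in registers.  Reload chooses a reload register,
   say hard reg 3, and emits an input reload, the insn rewritten to use
   the reload reg, and an output reload:

	(set (reg:SI 3) (mem:SI (plus:SI (reg:SI fp) (const_int -8))))
	(set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 4)))
	(set (mem:SI (plus:SI (reg:SI fp) (const_int -8))) (reg:SI 3))

   Any pseudo currently allocated to hard reg 3 would be spilled as
   described above.  */
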
80*38fd1498Szrj struct target_reload default_target_reload;
81*38fd1498Szrj #if SWITCHABLE_TARGET
82*38fd1498Szrj struct target_reload *this_target_reload = &default_target_reload;
83*38fd1498Szrj #endif
84*38fd1498Szrj 
85*38fd1498Szrj #define spill_indirect_levels			\
86*38fd1498Szrj   (this_target_reload->x_spill_indirect_levels)
87*38fd1498Szrj 
88*38fd1498Szrj /* During reload_as_needed, element N contains a REG rtx for the hard reg
89*38fd1498Szrj    into which reg N has been reloaded (perhaps for a previous insn).  */
90*38fd1498Szrj static rtx *reg_last_reload_reg;
91*38fd1498Szrj 
92*38fd1498Szrj /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
93*38fd1498Szrj    for an output reload that stores into reg N.  */
94*38fd1498Szrj static regset_head reg_has_output_reload;
95*38fd1498Szrj 
96*38fd1498Szrj /* Indicates which hard regs are reload-registers for an output reload
97*38fd1498Szrj    in the current insn.  */
98*38fd1498Szrj static HARD_REG_SET reg_is_output_reload;
99*38fd1498Szrj 
100*38fd1498Szrj /* Widest mode in which each pseudo reg is referred to (via subreg).  */
101*38fd1498Szrj static machine_mode *reg_max_ref_mode;
102*38fd1498Szrj 
103*38fd1498Szrj /* Vector to remember old contents of reg_renumber before spilling.  */
104*38fd1498Szrj static short *reg_old_renumber;
105*38fd1498Szrj 
106*38fd1498Szrj /* During reload_as_needed, element N contains the last pseudo regno reloaded
107*38fd1498Szrj    into hard register N.  If that pseudo reg occupied more than one register,
108*38fd1498Szrj    reg_reloaded_contents points to that pseudo for each spill register in
109*38fd1498Szrj    use; all of these must remain set for an inheritance to occur.  */
110*38fd1498Szrj static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
111*38fd1498Szrj 
112*38fd1498Szrj /* During reload_as_needed, element N contains the insn for which
113*38fd1498Szrj    hard register N was last used.   Its contents are significant only
114*38fd1498Szrj    when reg_reloaded_valid is set for this register.  */
115*38fd1498Szrj static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
116*38fd1498Szrj 
117*38fd1498Szrj /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
118*38fd1498Szrj static HARD_REG_SET reg_reloaded_valid;
119*38fd1498Szrj /* Indicate if the register was dead at the end of the reload.
120*38fd1498Szrj    This is only valid if reg_reloaded_contents is set and valid.  */
121*38fd1498Szrj static HARD_REG_SET reg_reloaded_dead;
122*38fd1498Szrj 
123*38fd1498Szrj /* Indicate whether the register's current value is one that is not
124*38fd1498Szrj    safe to retain across a call, even for registers that are normally
125*38fd1498Szrj    call-saved.  This is only meaningful for members of reg_reloaded_valid.  */
126*38fd1498Szrj static HARD_REG_SET reg_reloaded_call_part_clobbered;
127*38fd1498Szrj 
128*38fd1498Szrj /* Number of spill-regs so far; number of valid elements of spill_regs.  */
129*38fd1498Szrj static int n_spills;
130*38fd1498Szrj 
131*38fd1498Szrj /* In parallel with spill_regs, contains REG rtx's for those regs.
132*38fd1498Szrj    Holds the last rtx used for any given reg, or 0 if it has never
133*38fd1498Szrj    been used for spilling yet.  This rtx is reused, provided it has
134*38fd1498Szrj    the proper mode.  */
135*38fd1498Szrj static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
136*38fd1498Szrj 
137*38fd1498Szrj /* In parallel with spill_regs, contains nonzero for a spill reg
138*38fd1498Szrj    that was stored after the last time it was used.
139*38fd1498Szrj    The precise value is the insn generated to do the store.  */
140*38fd1498Szrj static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
141*38fd1498Szrj 
142*38fd1498Szrj /* This is the register that was stored with spill_reg_store.  This is a
143*38fd1498Szrj    copy of reload_out / reload_out_reg when the value was stored; if
144*38fd1498Szrj    reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
145*38fd1498Szrj static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
146*38fd1498Szrj 
147*38fd1498Szrj /* This table is the inverse mapping of spill_regs:
148*38fd1498Szrj    indexed by hard reg number,
149*38fd1498Szrj    it contains the position of that reg in spill_regs,
150*38fd1498Szrj    or -1 for something that is not in spill_regs.
151*38fd1498Szrj 
152*38fd1498Szrj    ?!?  This is no longer accurate.  */
153*38fd1498Szrj static short spill_reg_order[FIRST_PSEUDO_REGISTER];
154*38fd1498Szrj 
155*38fd1498Szrj /* This reg set indicates registers that can't be used as spill registers for
156*38fd1498Szrj    the currently processed insn.  These are the hard registers which are live
157*38fd1498Szrj    during the insn, but not allocated to pseudos, as well as fixed
158*38fd1498Szrj    registers.  */
159*38fd1498Szrj static HARD_REG_SET bad_spill_regs;
160*38fd1498Szrj 
161*38fd1498Szrj /* These are the hard registers that can't be used as spill register for any
162*38fd1498Szrj    insn.  This includes registers used for user variables and registers that
163*38fd1498Szrj    we can't eliminate.  A register that appears in this set also can't be used
164*38fd1498Szrj    to retry register allocation.  */
165*38fd1498Szrj static HARD_REG_SET bad_spill_regs_global;
166*38fd1498Szrj 
167*38fd1498Szrj /* Describes order of use of registers for reloading
168*38fd1498Szrj    of spilled pseudo-registers.  `n_spills' is the number of
169*38fd1498Szrj    elements that are actually valid; new ones are added at the end.
170*38fd1498Szrj 
171*38fd1498Szrj    Both spill_regs and spill_reg_order are used on two occasions:
172*38fd1498Szrj    once during find_reload_regs, where they keep track of the spill registers
173*38fd1498Szrj    for a single insn, but also during reload_as_needed where they show all
174*38fd1498Szrj    the registers ever used by reload.  For the latter case, the information
175*38fd1498Szrj    is calculated during finish_spills.  */
176*38fd1498Szrj static short spill_regs[FIRST_PSEUDO_REGISTER];
177*38fd1498Szrj 
178*38fd1498Szrj /* This vector of reg sets indicates, for each pseudo, which hard registers
179*38fd1498Szrj    may not be used for retrying global allocation because the register was
180*38fd1498Szrj    formerly spilled from one of them.  If we allowed reallocating a pseudo to
181*38fd1498Szrj    a register that it was already allocated to, reload might not
182*38fd1498Szrj    terminate.  */
183*38fd1498Szrj static HARD_REG_SET *pseudo_previous_regs;
184*38fd1498Szrj 
185*38fd1498Szrj /* This vector of reg sets indicates, for each pseudo, which hard
186*38fd1498Szrj    registers may not be used for retrying global allocation because they
187*38fd1498Szrj    are used as spill registers during one of the insns in which the
188*38fd1498Szrj    pseudo is live.  */
189*38fd1498Szrj static HARD_REG_SET *pseudo_forbidden_regs;
190*38fd1498Szrj 
191*38fd1498Szrj /* All hard regs that have been used as spill registers for any insn are
192*38fd1498Szrj    marked in this set.  */
193*38fd1498Szrj static HARD_REG_SET used_spill_regs;
194*38fd1498Szrj 
195*38fd1498Szrj /* Index of last register assigned as a spill register.  We allocate in
196*38fd1498Szrj    a round-robin fashion.  */
197*38fd1498Szrj static int last_spill_reg;
198*38fd1498Szrj 
199*38fd1498Szrj /* Record the stack slot for each spilled hard register.  */
200*38fd1498Szrj static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
201*38fd1498Szrj 
202*38fd1498Szrj /* Width allocated so far for that stack slot.  */
203*38fd1498Szrj static poly_uint64_pod spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
204*38fd1498Szrj 
205*38fd1498Szrj /* Record which pseudos needed to be spilled.  */
206*38fd1498Szrj static regset_head spilled_pseudos;
207*38fd1498Szrj 
208*38fd1498Szrj /* Record which pseudos changed their allocation in finish_spills.  */
209*38fd1498Szrj static regset_head changed_allocation_pseudos;
210*38fd1498Szrj 
211*38fd1498Szrj /* Used for communication between order_regs_for_reload and count_pseudo.
212*38fd1498Szrj    Used to avoid counting one pseudo twice.  */
213*38fd1498Szrj static regset_head pseudos_counted;
214*38fd1498Szrj 
215*38fd1498Szrj /* First uid used by insns created by reload in this function.
216*38fd1498Szrj    Used in find_equiv_reg.  */
217*38fd1498Szrj int reload_first_uid;
218*38fd1498Szrj 
219*38fd1498Szrj /* Flag set by local-alloc or global-alloc if anything is live in
220*38fd1498Szrj    a call-clobbered reg across calls.  */
221*38fd1498Szrj int caller_save_needed;
222*38fd1498Szrj 
223*38fd1498Szrj /* Set to 1 while reload_as_needed is operating.
224*38fd1498Szrj    Required by some machines to handle any generated moves differently.  */
225*38fd1498Szrj int reload_in_progress = 0;
226*38fd1498Szrj 
227*38fd1498Szrj /* This obstack is used for allocation of rtl during register elimination.
228*38fd1498Szrj    The allocated storage can be freed once find_reloads has processed the
229*38fd1498Szrj    insn.  */
230*38fd1498Szrj static struct obstack reload_obstack;
231*38fd1498Szrj 
232*38fd1498Szrj /* Points to the beginning of the reload_obstack.  All insn_chain structures
233*38fd1498Szrj    are allocated first.  */
234*38fd1498Szrj static char *reload_startobj;
235*38fd1498Szrj 
236*38fd1498Szrj /* The point after all insn_chain structures.  Used to quickly deallocate
237*38fd1498Szrj    memory allocated in copy_reloads during calculate_needs_all_insns.  */
238*38fd1498Szrj static char *reload_firstobj;
239*38fd1498Szrj 
240*38fd1498Szrj /* This points before all local rtl generated by register elimination.
241*38fd1498Szrj    Used to quickly free all memory after processing one insn.  */
242*38fd1498Szrj static char *reload_insn_firstobj;
243*38fd1498Szrj 
244*38fd1498Szrj /* List of insn_chain instructions, one for every insn that reload needs to
245*38fd1498Szrj    examine.  */
246*38fd1498Szrj struct insn_chain *reload_insn_chain;
247*38fd1498Szrj 
248*38fd1498Szrj /* TRUE if we potentially left dead insns in the insn stream and want to
249*38fd1498Szrj    run DCE immediately after reload, FALSE otherwise.  */
250*38fd1498Szrj static bool need_dce;
251*38fd1498Szrj 
252*38fd1498Szrj /* List of all insns needing reloads.  */
253*38fd1498Szrj static struct insn_chain *insns_need_reload;
254*38fd1498Szrj 
255*38fd1498Szrj /* This structure is used to record information about register eliminations.
256*38fd1498Szrj    Each array entry describes one possible way of eliminating a register
257*38fd1498Szrj    in favor of another.   If there is more than one way of eliminating a
258*38fd1498Szrj    particular register, the most preferred should be specified first.  */
259*38fd1498Szrj 
260*38fd1498Szrj struct elim_table
261*38fd1498Szrj {
262*38fd1498Szrj   int from;			/* Register number to be eliminated.  */
263*38fd1498Szrj   int to;			/* Register number used as replacement.  */
264*38fd1498Szrj   poly_int64_pod initial_offset; /* Initial difference between values.  */
265*38fd1498Szrj   int can_eliminate;		/* Nonzero if this elimination can be done.  */
266*38fd1498Szrj   int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
267*38fd1498Szrj 				   target hook in previous scan over insns
268*38fd1498Szrj 				   made by reload.  */
269*38fd1498Szrj   poly_int64_pod offset;	/* Current offset between the two regs.  */
270*38fd1498Szrj   poly_int64_pod previous_offset; /* Offset at end of previous insn.  */
271*38fd1498Szrj   int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
272*38fd1498Szrj   rtx from_rtx;			/* REG rtx for the register to be eliminated.
273*38fd1498Szrj 				   We cannot simply compare the number since
274*38fd1498Szrj 				   we might then spuriously replace a hard
275*38fd1498Szrj 				   register corresponding to a pseudo
276*38fd1498Szrj 				   assigned to the reg to be eliminated.  */
277*38fd1498Szrj   rtx to_rtx;			/* REG rtx for the replacement.  */
278*38fd1498Szrj };
279*38fd1498Szrj 
280*38fd1498Szrj static struct elim_table *reg_eliminate = 0;
281*38fd1498Szrj 
282*38fd1498Szrj /* This is an intermediate structure to initialize the table.  It has
283*38fd1498Szrj    exactly the members provided by ELIMINABLE_REGS.  */
284*38fd1498Szrj static const struct elim_table_1
285*38fd1498Szrj {
286*38fd1498Szrj   const int from;
287*38fd1498Szrj   const int to;
288*38fd1498Szrj } reg_eliminate_1[] =
289*38fd1498Szrj 
290*38fd1498Szrj   ELIMINABLE_REGS;
291*38fd1498Szrj 
292*38fd1498Szrj #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
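/* For illustration only (a sketch, not copied from any particular target
   header): a target's ELIMINABLE_REGS is an initializer list of
   {from, to} pairs, with the most preferred replacement listed first,
   e.g.

	#define ELIMINABLE_REGS					\
	 {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },	\
	  { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },	\
	  { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },	\
	  { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   which would make NUM_ELIMINABLE_REGS equal 4 here.  */
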
293*38fd1498Szrj 
294*38fd1498Szrj /* Record the number of pending eliminations that have an offset not equal
295*38fd1498Szrj    to their initial offset.  If nonzero, we use a new copy of each
296*38fd1498Szrj    replacement result in any insns encountered.  */
297*38fd1498Szrj int num_not_at_initial_offset;
298*38fd1498Szrj 
299*38fd1498Szrj /* Count the number of registers that we may be able to eliminate.  */
300*38fd1498Szrj static int num_eliminable;
301*38fd1498Szrj /* And the number of registers that are equivalent to a constant that
302*38fd1498Szrj    can be eliminated to frame_pointer / arg_pointer + constant.  */
303*38fd1498Szrj static int num_eliminable_invariants;
304*38fd1498Szrj 
305*38fd1498Szrj /* For each label, we record the offset of each elimination.  If we reach
306*38fd1498Szrj    a label by more than one path and an offset differs, we cannot do the
307*38fd1498Szrj    elimination.  This information is indexed by the difference of the
308*38fd1498Szrj    number of the label and the first label number.  We can't offset the
309*38fd1498Szrj    pointer itself as this can cause problems on machines with segmented
310*38fd1498Szrj    memory.  The first table is an array of flags that records whether we
311*38fd1498Szrj    have yet encountered a label and the second table is an array of arrays,
312*38fd1498Szrj    one entry in the latter array for each elimination.  */
313*38fd1498Szrj 
314*38fd1498Szrj static int first_label_num;
315*38fd1498Szrj static char *offsets_known_at;
316*38fd1498Szrj static poly_int64_pod (*offsets_at)[NUM_ELIMINABLE_REGS];
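/* Concretely, for a CODE_LABEL L the "seen" flag lives at
   offsets_known_at[CODE_LABEL_NUMBER (L) - first_label_num] and the
   recorded offset for elimination I at
   offsets_at[CODE_LABEL_NUMBER (L) - first_label_num][I]
   (indexing spelled out from the description above).  */
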
317*38fd1498Szrj 
318*38fd1498Szrj vec<reg_equivs_t, va_gc> *reg_equivs;
319*38fd1498Szrj 
320*38fd1498Szrj /* Stack of addresses where an rtx has been changed.  We can undo the
321*38fd1498Szrj    changes by popping items off the stack and restoring the original
322*38fd1498Szrj    value at each location.
323*38fd1498Szrj 
324*38fd1498Szrj    We use this simplistic undo capability rather than copy_rtx as copy_rtx
325*38fd1498Szrj    will not make a deep copy of a normally sharable rtx, such as
326*38fd1498Szrj    (const (plus (symbol_ref) (const_int))).  If such an expression appears
327*38fd1498Szrj    as R1 in gen_reload_chain_without_interm_reg_p, then a shared
328*38fd1498Szrj    rtx expression would be changed.  See PR 42431.  */
329*38fd1498Szrj 
330*38fd1498Szrj typedef rtx *rtx_p;
331*38fd1498Szrj static vec<rtx_p> substitute_stack;
332*38fd1498Szrj 
333*38fd1498Szrj /* Number of labels in the current function.  */
334*38fd1498Szrj 
335*38fd1498Szrj static int num_labels;
336*38fd1498Szrj 
337*38fd1498Szrj static void replace_pseudos_in (rtx *, machine_mode, rtx);
338*38fd1498Szrj static void maybe_fix_stack_asms (void);
339*38fd1498Szrj static void copy_reloads (struct insn_chain *);
340*38fd1498Szrj static void calculate_needs_all_insns (int);
341*38fd1498Szrj static int find_reg (struct insn_chain *, int);
342*38fd1498Szrj static void find_reload_regs (struct insn_chain *);
343*38fd1498Szrj static void select_reload_regs (void);
344*38fd1498Szrj static void delete_caller_save_insns (void);
345*38fd1498Szrj 
346*38fd1498Szrj static void spill_failure (rtx_insn *, enum reg_class);
347*38fd1498Szrj static void count_spilled_pseudo (int, int, int);
348*38fd1498Szrj static void delete_dead_insn (rtx_insn *);
349*38fd1498Szrj static void alter_reg (int, int, bool);
350*38fd1498Szrj static void set_label_offsets (rtx, rtx_insn *, int);
351*38fd1498Szrj static void check_eliminable_occurrences (rtx);
352*38fd1498Szrj static void elimination_effects (rtx, machine_mode);
353*38fd1498Szrj static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
354*38fd1498Szrj static int eliminate_regs_in_insn (rtx_insn *, int);
355*38fd1498Szrj static void update_eliminable_offsets (void);
356*38fd1498Szrj static void mark_not_eliminable (rtx, const_rtx, void *);
357*38fd1498Szrj static void set_initial_elim_offsets (void);
358*38fd1498Szrj static bool verify_initial_elim_offsets (void);
359*38fd1498Szrj static void set_initial_label_offsets (void);
360*38fd1498Szrj static void set_offsets_for_label (rtx_insn *);
361*38fd1498Szrj static void init_eliminable_invariants (rtx_insn *, bool);
362*38fd1498Szrj static void init_elim_table (void);
363*38fd1498Szrj static void free_reg_equiv (void);
364*38fd1498Szrj static void update_eliminables (HARD_REG_SET *);
365*38fd1498Szrj static bool update_eliminables_and_spill (void);
366*38fd1498Szrj static void elimination_costs_in_insn (rtx_insn *);
367*38fd1498Szrj static void spill_hard_reg (unsigned int, int);
368*38fd1498Szrj static int finish_spills (int);
369*38fd1498Szrj static void scan_paradoxical_subregs (rtx);
370*38fd1498Szrj static void count_pseudo (int);
371*38fd1498Szrj static void order_regs_for_reload (struct insn_chain *);
372*38fd1498Szrj static void reload_as_needed (int);
373*38fd1498Szrj static void forget_old_reloads_1 (rtx, const_rtx, void *);
374*38fd1498Szrj static void forget_marked_reloads (regset);
375*38fd1498Szrj static int reload_reg_class_lower (const void *, const void *);
376*38fd1498Szrj static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
377*38fd1498Szrj 				    machine_mode);
378*38fd1498Szrj static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
379*38fd1498Szrj 				     machine_mode);
380*38fd1498Szrj static int reload_reg_free_p (unsigned int, int, enum reload_type);
381*38fd1498Szrj static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
382*38fd1498Szrj 					rtx, rtx, int, int);
383*38fd1498Szrj static int free_for_value_p (int, machine_mode, int, enum reload_type,
384*38fd1498Szrj 			     rtx, rtx, int, int);
385*38fd1498Szrj static int allocate_reload_reg (struct insn_chain *, int, int);
386*38fd1498Szrj static int conflicts_with_override (rtx);
387*38fd1498Szrj static void failed_reload (rtx_insn *, int);
388*38fd1498Szrj static int set_reload_reg (int, int);
389*38fd1498Szrj static void choose_reload_regs_init (struct insn_chain *, rtx *);
390*38fd1498Szrj static void choose_reload_regs (struct insn_chain *);
391*38fd1498Szrj static void emit_input_reload_insns (struct insn_chain *, struct reload *,
392*38fd1498Szrj 				     rtx, int);
393*38fd1498Szrj static void emit_output_reload_insns (struct insn_chain *, struct reload *,
394*38fd1498Szrj 				      int);
395*38fd1498Szrj static void do_input_reload (struct insn_chain *, struct reload *, int);
396*38fd1498Szrj static void do_output_reload (struct insn_chain *, struct reload *, int);
397*38fd1498Szrj static void emit_reload_insns (struct insn_chain *);
398*38fd1498Szrj static void delete_output_reload (rtx_insn *, int, int, rtx);
399*38fd1498Szrj static void delete_address_reloads (rtx_insn *, rtx_insn *);
400*38fd1498Szrj static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
401*38fd1498Szrj static void inc_for_reload (rtx, rtx, rtx, poly_int64);
402*38fd1498Szrj static void add_auto_inc_notes (rtx_insn *, rtx);
403*38fd1498Szrj static void substitute (rtx *, const_rtx, rtx);
404*38fd1498Szrj static bool gen_reload_chain_without_interm_reg_p (int, int);
405*38fd1498Szrj static int reloads_conflict (int, int);
406*38fd1498Szrj static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
407*38fd1498Szrj static rtx_insn *emit_insn_if_valid_for_reload (rtx);
408*38fd1498Szrj 
409*38fd1498Szrj /* Initialize the reload pass.  This is called at the beginning of compilation
410*38fd1498Szrj    and may be called again if the target is reinitialized.  */
411*38fd1498Szrj 
412*38fd1498Szrj void
413*38fd1498Szrj init_reload (void)
414*38fd1498Szrj {
415*38fd1498Szrj   int i;
416*38fd1498Szrj 
417*38fd1498Szrj   /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
418*38fd1498Szrj      Set spill_indirect_levels to the number of levels such addressing is
419*38fd1498Szrj      permitted, zero if it is not permitted at all.  */
420*38fd1498Szrj 
421*38fd1498Szrj   rtx tem
422*38fd1498Szrj     = gen_rtx_MEM (Pmode,
423*38fd1498Szrj 		   gen_rtx_PLUS (Pmode,
424*38fd1498Szrj 				 gen_rtx_REG (Pmode,
425*38fd1498Szrj 					      LAST_VIRTUAL_REGISTER + 1),
426*38fd1498Szrj 				 gen_int_mode (4, Pmode)));
427*38fd1498Szrj   spill_indirect_levels = 0;
428*38fd1498Szrj 
429*38fd1498Szrj   while (memory_address_p (QImode, tem))
430*38fd1498Szrj     {
431*38fd1498Szrj       spill_indirect_levels++;
432*38fd1498Szrj       tem = gen_rtx_MEM (Pmode, tem);
433*38fd1498Szrj     }
434*38fd1498Szrj 
435*38fd1498Szrj   /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */
436*38fd1498Szrj 
437*38fd1498Szrj   tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
438*38fd1498Szrj   indirect_symref_ok = memory_address_p (QImode, tem);
439*38fd1498Szrj 
440*38fd1498Szrj   /* See if reg+reg is a valid (and offsettable) address.  */
441*38fd1498Szrj 
442*38fd1498Szrj   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
443*38fd1498Szrj     {
444*38fd1498Szrj       tem = gen_rtx_PLUS (Pmode,
445*38fd1498Szrj 			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
446*38fd1498Szrj 			  gen_rtx_REG (Pmode, i));
447*38fd1498Szrj 
448*38fd1498Szrj       /* This way, we make sure that reg+reg is an offsettable address.  */
449*38fd1498Szrj       tem = plus_constant (Pmode, tem, 4);
450*38fd1498Szrj 
451*38fd1498Szrj       for (int mode = 0; mode < MAX_MACHINE_MODE; mode++)
452*38fd1498Szrj 	if (!double_reg_address_ok[mode]
453*38fd1498Szrj 	    && memory_address_p ((enum machine_mode)mode, tem))
454*38fd1498Szrj 	  double_reg_address_ok[mode] = 1;
455*38fd1498Szrj     }
456*38fd1498Szrj 
457*38fd1498Szrj   /* Initialize obstack for our rtl allocation.  */
458*38fd1498Szrj   if (reload_startobj == NULL)
459*38fd1498Szrj     {
460*38fd1498Szrj       gcc_obstack_init (&reload_obstack);
461*38fd1498Szrj       reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
462*38fd1498Szrj     }
463*38fd1498Szrj 
464*38fd1498Szrj   INIT_REG_SET (&spilled_pseudos);
465*38fd1498Szrj   INIT_REG_SET (&changed_allocation_pseudos);
466*38fd1498Szrj   INIT_REG_SET (&pseudos_counted);
467*38fd1498Szrj }
468*38fd1498Szrj 
469*38fd1498Szrj /* List of insn chains that are currently unused.  */
470*38fd1498Szrj static struct insn_chain *unused_insn_chains = 0;
471*38fd1498Szrj 
472*38fd1498Szrj /* Allocate an empty insn_chain structure.  */
473*38fd1498Szrj struct insn_chain *
474*38fd1498Szrj new_insn_chain (void)
475*38fd1498Szrj {
476*38fd1498Szrj   struct insn_chain *c;
477*38fd1498Szrj 
478*38fd1498Szrj   if (unused_insn_chains == 0)
479*38fd1498Szrj     {
480*38fd1498Szrj       c = XOBNEW (&reload_obstack, struct insn_chain);
481*38fd1498Szrj       INIT_REG_SET (&c->live_throughout);
482*38fd1498Szrj       INIT_REG_SET (&c->dead_or_set);
483*38fd1498Szrj     }
484*38fd1498Szrj   else
485*38fd1498Szrj     {
486*38fd1498Szrj       c = unused_insn_chains;
487*38fd1498Szrj       unused_insn_chains = c->next;
488*38fd1498Szrj     }
489*38fd1498Szrj   c->is_caller_save_insn = 0;
490*38fd1498Szrj   c->need_operand_change = 0;
491*38fd1498Szrj   c->need_reload = 0;
492*38fd1498Szrj   c->need_elim = 0;
493*38fd1498Szrj   return c;
494*38fd1498Szrj }
495*38fd1498Szrj 
496*38fd1498Szrj /* Small utility function to set all regs in hard reg set TO which are
497*38fd1498Szrj    allocated to pseudos in regset FROM.  */
498*38fd1498Szrj 
499*38fd1498Szrj void
500*38fd1498Szrj compute_use_by_pseudos (HARD_REG_SET *to, regset from)
501*38fd1498Szrj {
502*38fd1498Szrj   unsigned int regno;
503*38fd1498Szrj   reg_set_iterator rsi;
504*38fd1498Szrj 
505*38fd1498Szrj   EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
506*38fd1498Szrj     {
507*38fd1498Szrj       int r = reg_renumber[regno];
508*38fd1498Szrj 
509*38fd1498Szrj       if (r < 0)
510*38fd1498Szrj 	{
511*38fd1498Szrj 	  /* reload_combine uses the information from DF_LIVE_IN,
512*38fd1498Szrj 	     which might still contain registers that have not
513*38fd1498Szrj 	     actually been allocated since they have an
514*38fd1498Szrj 	     equivalence.  */
515*38fd1498Szrj 	  gcc_assert (ira_conflicts_p || reload_completed);
516*38fd1498Szrj 	}
517*38fd1498Szrj       else
518*38fd1498Szrj 	add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
519*38fd1498Szrj     }
520*38fd1498Szrj }
521*38fd1498Szrj 
522*38fd1498Szrj /* Replace all pseudos found in LOC with their corresponding
523*38fd1498Szrj    equivalences.  */
524*38fd1498Szrj 
525*38fd1498Szrj static void
526*38fd1498Szrj replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
527*38fd1498Szrj {
528*38fd1498Szrj   rtx x = *loc;
529*38fd1498Szrj   enum rtx_code code;
530*38fd1498Szrj   const char *fmt;
531*38fd1498Szrj   int i, j;
532*38fd1498Szrj 
533*38fd1498Szrj   if (! x)
534*38fd1498Szrj     return;
535*38fd1498Szrj 
536*38fd1498Szrj   code = GET_CODE (x);
537*38fd1498Szrj   if (code == REG)
538*38fd1498Szrj     {
539*38fd1498Szrj       unsigned int regno = REGNO (x);
540*38fd1498Szrj 
541*38fd1498Szrj       if (regno < FIRST_PSEUDO_REGISTER)
542*38fd1498Szrj 	return;
543*38fd1498Szrj 
544*38fd1498Szrj       x = eliminate_regs_1 (x, mem_mode, usage, true, false);
545*38fd1498Szrj       if (x != *loc)
546*38fd1498Szrj 	{
547*38fd1498Szrj 	  *loc = x;
548*38fd1498Szrj 	  replace_pseudos_in (loc, mem_mode, usage);
549*38fd1498Szrj 	  return;
550*38fd1498Szrj 	}
551*38fd1498Szrj 
552*38fd1498Szrj       if (reg_equiv_constant (regno))
553*38fd1498Szrj 	*loc = reg_equiv_constant (regno);
554*38fd1498Szrj       else if (reg_equiv_invariant (regno))
555*38fd1498Szrj 	*loc = reg_equiv_invariant (regno);
556*38fd1498Szrj       else if (reg_equiv_mem (regno))
557*38fd1498Szrj 	*loc = reg_equiv_mem (regno);
558*38fd1498Szrj       else if (reg_equiv_address (regno))
559*38fd1498Szrj 	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
560*38fd1498Szrj       else
561*38fd1498Szrj 	{
562*38fd1498Szrj 	  gcc_assert (!REG_P (regno_reg_rtx[regno])
563*38fd1498Szrj 		      || REGNO (regno_reg_rtx[regno]) != regno);
564*38fd1498Szrj 	  *loc = regno_reg_rtx[regno];
565*38fd1498Szrj 	}
566*38fd1498Szrj 
567*38fd1498Szrj       return;
568*38fd1498Szrj     }
569*38fd1498Szrj   else if (code == MEM)
570*38fd1498Szrj     {
571*38fd1498Szrj       replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
572*38fd1498Szrj       return;
573*38fd1498Szrj     }
574*38fd1498Szrj 
575*38fd1498Szrj   /* Process each of our operands recursively.  */
576*38fd1498Szrj   fmt = GET_RTX_FORMAT (code);
577*38fd1498Szrj   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
578*38fd1498Szrj     if (*fmt == 'e')
579*38fd1498Szrj       replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
580*38fd1498Szrj     else if (*fmt == 'E')
581*38fd1498Szrj       for (j = 0; j < XVECLEN (x, i); j++)
582*38fd1498Szrj 	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
583*38fd1498Szrj }
584*38fd1498Szrj 
585*38fd1498Szrj /* Determine if the current function has an exception receiver block
586*38fd1498Szrj    that reaches the exit block via non-exceptional edges.  */
587*38fd1498Szrj 
588*38fd1498Szrj static bool
589*38fd1498Szrj has_nonexceptional_receiver (void)
590*38fd1498Szrj {
591*38fd1498Szrj   edge e;
592*38fd1498Szrj   edge_iterator ei;
593*38fd1498Szrj   basic_block *tos, *worklist, bb;
594*38fd1498Szrj 
595*38fd1498Szrj   /* If we're not optimizing, then just err on the safe side.  */
596*38fd1498Szrj   if (!optimize)
597*38fd1498Szrj     return true;
598*38fd1498Szrj 
599*38fd1498Szrj   /* First determine which blocks can reach exit via normal paths.  */
600*38fd1498Szrj   tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
601*38fd1498Szrj 
602*38fd1498Szrj   FOR_EACH_BB_FN (bb, cfun)
603*38fd1498Szrj     bb->flags &= ~BB_REACHABLE;
604*38fd1498Szrj 
605*38fd1498Szrj   /* Place the exit block on our worklist.  */
606*38fd1498Szrj   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
607*38fd1498Szrj   *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
608*38fd1498Szrj 
609*38fd1498Szrj   /* Iterate: find everything reachable from what we've already seen.  */
610*38fd1498Szrj   while (tos != worklist)
611*38fd1498Szrj     {
612*38fd1498Szrj       bb = *--tos;
613*38fd1498Szrj 
614*38fd1498Szrj       FOR_EACH_EDGE (e, ei, bb->preds)
615*38fd1498Szrj 	if (!(e->flags & EDGE_ABNORMAL))
616*38fd1498Szrj 	  {
617*38fd1498Szrj 	    basic_block src = e->src;
618*38fd1498Szrj 
619*38fd1498Szrj 	    if (!(src->flags & BB_REACHABLE))
620*38fd1498Szrj 	      {
621*38fd1498Szrj 		src->flags |= BB_REACHABLE;
622*38fd1498Szrj 		*tos++ = src;
623*38fd1498Szrj 	      }
624*38fd1498Szrj 	  }
625*38fd1498Szrj     }
626*38fd1498Szrj   free (worklist);
627*38fd1498Szrj 
628*38fd1498Szrj   /* Now see if there's a reachable block with an exceptional incoming
629*38fd1498Szrj      edge.  */
630*38fd1498Szrj   FOR_EACH_BB_FN (bb, cfun)
631*38fd1498Szrj     if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
632*38fd1498Szrj       return true;
633*38fd1498Szrj 
634*38fd1498Szrj   /* No exceptional block reached exit unexceptionally.  */
635*38fd1498Szrj   return false;
636*38fd1498Szrj }
637*38fd1498Szrj 
638*38fd1498Szrj /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
639*38fd1498Szrj    zero elements) to max_reg_num () elements.
640*38fd1498Szrj 
641*38fd1498Szrj    Initialize all new entries to zero.  */
642*38fd1498Szrj void
643*38fd1498Szrj grow_reg_equivs (void)
644*38fd1498Szrj {
645*38fd1498Szrj   int old_size = vec_safe_length (reg_equivs);
646*38fd1498Szrj   int max_regno = max_reg_num ();
647*38fd1498Szrj   int i;
648*38fd1498Szrj   reg_equivs_t ze;
649*38fd1498Szrj 
650*38fd1498Szrj   memset (&ze, 0, sizeof (reg_equivs_t));
651*38fd1498Szrj   vec_safe_reserve (reg_equivs, max_regno);
652*38fd1498Szrj   for (i = old_size; i < max_regno; i++)
653*38fd1498Szrj     reg_equivs->quick_insert (i, ze);
654*38fd1498Szrj }
655*38fd1498Szrj 
656*38fd1498Szrj 
657*38fd1498Szrj /* Global variables used by reload and its subroutines.  */
658*38fd1498Szrj 
659*38fd1498Szrj /* The current basic block while in calculate_elim_costs_all_insns.  */
660*38fd1498Szrj static basic_block elim_bb;
661*38fd1498Szrj 
662*38fd1498Szrj /* Set during calculate_needs if an insn needs register elimination.  */
663*38fd1498Szrj static int something_needs_elimination;
664*38fd1498Szrj /* Set during calculate_needs if an insn needs an operand changed.  */
665*38fd1498Szrj static int something_needs_operands_changed;
666*38fd1498Szrj /* Set by alter_regs if we spilled a register to the stack.  */
667*38fd1498Szrj static bool something_was_spilled;
668*38fd1498Szrj 
669*38fd1498Szrj /* Nonzero means we couldn't get enough spill regs.  */
670*38fd1498Szrj static int failure;
671*38fd1498Szrj 
672*38fd1498Szrj /* Temporary array of pseudo-register number.  */
673*38fd1498Szrj static int *temp_pseudo_reg_arr;
674*38fd1498Szrj 
675*38fd1498Szrj /* If a pseudo has no hard reg, delete the insns that made the equivalence.
676*38fd1498Szrj    If that insn didn't set the register (i.e., it copied the register to
677*38fd1498Szrj    memory), just delete that insn instead of the equivalencing insn plus
678*38fd1498Szrj    anything now dead.  If we call delete_dead_insn on that insn, we may
679*38fd1498Szrj    delete the insn that actually sets the register if the register dies
680*38fd1498Szrj    there and that is incorrect.  */
681*38fd1498Szrj static void
682*38fd1498Szrj remove_init_insns ()
683*38fd1498Szrj {
684*38fd1498Szrj   for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
685*38fd1498Szrj     {
686*38fd1498Szrj       if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
687*38fd1498Szrj 	{
688*38fd1498Szrj 	  rtx list;
689*38fd1498Szrj 	  for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
690*38fd1498Szrj 	    {
691*38fd1498Szrj 	      rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
692*38fd1498Szrj 
693*38fd1498Szrj 	      /* If we already deleted the insn or if it may trap, we can't
694*38fd1498Szrj 		 delete it.  The latter case shouldn't happen, but can
695*38fd1498Szrj 		 if an insn has a variable address, gets a REG_EH_REGION
696*38fd1498Szrj 		 note added to it, and then gets converted into a load
697*38fd1498Szrj 		 from a constant address.  */
698*38fd1498Szrj 	      if (NOTE_P (equiv_insn)
699*38fd1498Szrj 		  || can_throw_internal (equiv_insn))
700*38fd1498Szrj 		;
701*38fd1498Szrj 	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
702*38fd1498Szrj 		delete_dead_insn (equiv_insn);
703*38fd1498Szrj 	      else
704*38fd1498Szrj 		SET_INSN_DELETED (equiv_insn);
705*38fd1498Szrj 	    }
706*38fd1498Szrj 	}
707*38fd1498Szrj     }
708*38fd1498Szrj }
709*38fd1498Szrj 
710*38fd1498Szrj /* Return true if remove_init_insns will delete INSN.  */
711*38fd1498Szrj static bool
712*38fd1498Szrj will_delete_init_insn_p (rtx_insn *insn)
713*38fd1498Szrj {
714*38fd1498Szrj   rtx set = single_set (insn);
715*38fd1498Szrj   if (!set || !REG_P (SET_DEST (set)))
716*38fd1498Szrj     return false;
717*38fd1498Szrj   unsigned regno = REGNO (SET_DEST (set));
718*38fd1498Szrj 
719*38fd1498Szrj   if (can_throw_internal (insn))
720*38fd1498Szrj     return false;
721*38fd1498Szrj 
722*38fd1498Szrj   if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
723*38fd1498Szrj     return false;
724*38fd1498Szrj 
725*38fd1498Szrj   for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
726*38fd1498Szrj     {
727*38fd1498Szrj       rtx equiv_insn = XEXP (list, 0);
728*38fd1498Szrj       if (equiv_insn == insn)
729*38fd1498Szrj 	return true;
730*38fd1498Szrj     }
731*38fd1498Szrj   return false;
732*38fd1498Szrj }
733*38fd1498Szrj 
734*38fd1498Szrj /* Main entry point for the reload pass.
735*38fd1498Szrj 
736*38fd1498Szrj    FIRST is the first insn of the function being compiled.
737*38fd1498Szrj 
738*38fd1498Szrj    GLOBAL nonzero means we were called from global_alloc
739*38fd1498Szrj    and should attempt to reallocate any pseudoregs that we
740*38fd1498Szrj    displace from hard regs we will use for reloads.
741*38fd1498Szrj    If GLOBAL is zero, we do not have enough information to do that,
742*38fd1498Szrj    so any pseudo reg that is spilled must go to the stack.
743*38fd1498Szrj 
744*38fd1498Szrj    Return value is TRUE if reload likely left dead insns in the
745*38fd1498Szrj    stream and a DCE pass should be run to eliminate them.  Else the
746*38fd1498Szrj    return value is FALSE.  */
747*38fd1498Szrj 
748*38fd1498Szrj bool
749*38fd1498Szrj reload (rtx_insn *first, int global)
750*38fd1498Szrj {
751*38fd1498Szrj   int i, n;
752*38fd1498Szrj   rtx_insn *insn;
753*38fd1498Szrj   struct elim_table *ep;
754*38fd1498Szrj   basic_block bb;
755*38fd1498Szrj   bool inserted;
756*38fd1498Szrj 
757*38fd1498Szrj   /* Make sure even insns with volatile mem refs are recognizable.  */
758*38fd1498Szrj   init_recog ();
759*38fd1498Szrj 
760*38fd1498Szrj   failure = 0;
761*38fd1498Szrj 
762*38fd1498Szrj   reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
763*38fd1498Szrj 
764*38fd1498Szrj   /* Make sure that the last insn in the chain
765*38fd1498Szrj      is not something that needs reloading.  */
766*38fd1498Szrj   emit_note (NOTE_INSN_DELETED);
767*38fd1498Szrj 
768*38fd1498Szrj   /* Enable find_equiv_reg to distinguish insns made by reload.  */
769*38fd1498Szrj   reload_first_uid = get_max_uid ();
770*38fd1498Szrj 
771*38fd1498Szrj   /* Initialize the secondary memory table.  */
772*38fd1498Szrj   clear_secondary_mem ();
773*38fd1498Szrj 
774*38fd1498Szrj   /* We don't have a stack slot for any spill reg yet.  */
775*38fd1498Szrj   memset (spill_stack_slot, 0, sizeof spill_stack_slot);
776*38fd1498Szrj   memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
777*38fd1498Szrj 
778*38fd1498Szrj   /* Initialize the save area information for caller-save, in case some
779*38fd1498Szrj      are needed.  */
780*38fd1498Szrj   init_save_areas ();
781*38fd1498Szrj 
782*38fd1498Szrj   /* Compute which hard registers are now in use
783*38fd1498Szrj      as homes for pseudo registers.
784*38fd1498Szrj      This is done here rather than (eg) in global_alloc
785*38fd1498Szrj      because this point is reached even if not optimizing.  */
786*38fd1498Szrj   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
787*38fd1498Szrj     mark_home_live (i);
788*38fd1498Szrj 
789*38fd1498Szrj   /* A function that has a nonlocal label that can reach the exit
790*38fd1498Szrj      block via non-exceptional paths must save all call-saved
791*38fd1498Szrj      registers.  */
792*38fd1498Szrj   if (cfun->has_nonlocal_label
793*38fd1498Szrj       && has_nonexceptional_receiver ())
794*38fd1498Szrj     crtl->saves_all_registers = 1;
795*38fd1498Szrj 
796*38fd1498Szrj   if (crtl->saves_all_registers)
797*38fd1498Szrj     for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
798*38fd1498Szrj       if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
799*38fd1498Szrj 	df_set_regs_ever_live (i, true);
800*38fd1498Szrj 
801*38fd1498Szrj   /* Find all the pseudo registers that didn't get hard regs
802*38fd1498Szrj      but do have known equivalent constants or memory slots.
803*38fd1498Szrj      These include parameters (known equivalent to parameter slots)
804*38fd1498Szrj      and cse'd or loop-moved constant memory addresses.
805*38fd1498Szrj 
806*38fd1498Szrj      Record constant equivalents in reg_equiv_constant
807*38fd1498Szrj      so they will be substituted by find_reloads.
808*38fd1498Szrj      Record memory equivalents in reg_mem_equiv so they can
809*38fd1498Szrj      be substituted eventually by altering the REG-rtx's.  */
810*38fd1498Szrj 
811*38fd1498Szrj   grow_reg_equivs ();
812*38fd1498Szrj   reg_old_renumber = XCNEWVEC (short, max_regno);
813*38fd1498Szrj   memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
814*38fd1498Szrj   pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
815*38fd1498Szrj   pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
816*38fd1498Szrj 
817*38fd1498Szrj   CLEAR_HARD_REG_SET (bad_spill_regs_global);
818*38fd1498Szrj 
819*38fd1498Szrj   init_eliminable_invariants (first, true);
820*38fd1498Szrj   init_elim_table ();
821*38fd1498Szrj 
822*38fd1498Szrj   /* Alter each pseudo-reg rtx to contain its hard reg number.  Assign
823*38fd1498Szrj      stack slots to the pseudos that lack hard regs or equivalents.
824*38fd1498Szrj      Do not touch virtual registers.  */
825*38fd1498Szrj 
826*38fd1498Szrj   temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
827*38fd1498Szrj   for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
828*38fd1498Szrj     temp_pseudo_reg_arr[n++] = i;
829*38fd1498Szrj 
830*38fd1498Szrj   if (ira_conflicts_p)
831*38fd1498Szrj     /* Ask IRA to order pseudo-registers for better stack slot
832*38fd1498Szrj        sharing.  */
833*38fd1498Szrj     ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_mode);
834*38fd1498Szrj 
835*38fd1498Szrj   for (i = 0; i < n; i++)
836*38fd1498Szrj     alter_reg (temp_pseudo_reg_arr[i], -1, false);
837*38fd1498Szrj 
838*38fd1498Szrj   /* If we have some registers we think can be eliminated, scan all insns to
839*38fd1498Szrj      see if there is an insn that sets one of these registers to something
840*38fd1498Szrj      other than itself plus a constant.  If so, the register cannot be
841*38fd1498Szrj      eliminated.  Doing this scan here eliminates an extra pass through the
842*38fd1498Szrj      main reload loop in the most common case where register elimination
843*38fd1498Szrj      cannot be done.  */
844*38fd1498Szrj   for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
845*38fd1498Szrj     if (INSN_P (insn))
846*38fd1498Szrj       note_stores (PATTERN (insn), mark_not_eliminable, NULL);
847*38fd1498Szrj 
848*38fd1498Szrj   maybe_fix_stack_asms ();
849*38fd1498Szrj 
850*38fd1498Szrj   insns_need_reload = 0;
851*38fd1498Szrj   something_needs_elimination = 0;
852*38fd1498Szrj 
853*38fd1498Szrj   /* Initialize to -1, which means take the first spill register.  */
854*38fd1498Szrj   last_spill_reg = -1;
855*38fd1498Szrj 
856*38fd1498Szrj   /* Spill any hard regs that we know we can't eliminate.  */
857*38fd1498Szrj   CLEAR_HARD_REG_SET (used_spill_regs);
858*38fd1498Szrj   /* There can be multiple ways to eliminate a register;
859*38fd1498Szrj      they should be listed adjacently.
860*38fd1498Szrj      Elimination for any register fails only if all possible ways fail.  */
861*38fd1498Szrj   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
862*38fd1498Szrj     {
863*38fd1498Szrj       int from = ep->from;
864*38fd1498Szrj       int can_eliminate = 0;
865*38fd1498Szrj       do
866*38fd1498Szrj 	{
867*38fd1498Szrj           can_eliminate |= ep->can_eliminate;
868*38fd1498Szrj           ep++;
869*38fd1498Szrj 	}
870*38fd1498Szrj       while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
871*38fd1498Szrj       if (! can_eliminate)
872*38fd1498Szrj 	spill_hard_reg (from, 1);
873*38fd1498Szrj     }
874*38fd1498Szrj 
875*38fd1498Szrj   if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
876*38fd1498Szrj     spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
877*38fd1498Szrj 
878*38fd1498Szrj   finish_spills (global);
879*38fd1498Szrj 
880*38fd1498Szrj   /* From now on, we may need to generate moves differently.  We may also
881*38fd1498Szrj      allow modifications of insns which cause them to not be recognized.
882*38fd1498Szrj      Any such modifications will be cleaned up during reload itself.  */
883*38fd1498Szrj   reload_in_progress = 1;
884*38fd1498Szrj 
885*38fd1498Szrj   /* This loop scans the entire function each go-round
886*38fd1498Szrj      and repeats until one repetition spills no additional hard regs.  */
887*38fd1498Szrj   for (;;)
888*38fd1498Szrj     {
889*38fd1498Szrj       int something_changed;
890*38fd1498Szrj       poly_int64 starting_frame_size;
891*38fd1498Szrj 
892*38fd1498Szrj       starting_frame_size = get_frame_size ();
893*38fd1498Szrj       something_was_spilled = false;
894*38fd1498Szrj 
895*38fd1498Szrj       set_initial_elim_offsets ();
896*38fd1498Szrj       set_initial_label_offsets ();
897*38fd1498Szrj 
898*38fd1498Szrj       /* For each pseudo register that has an equivalent location defined,
899*38fd1498Szrj 	 try to eliminate any eliminable registers (such as the frame pointer)
900*38fd1498Szrj 	 assuming initial offsets for the replacement register, which
901*38fd1498Szrj 	 is the normal case.
902*38fd1498Szrj 
903*38fd1498Szrj 	 If the resulting location is directly addressable, substitute
904*38fd1498Szrj 	 the MEM we just got directly for the old REG.
905*38fd1498Szrj 
906*38fd1498Szrj 	 If it is not addressable but is a constant or the sum of a hard reg
907*38fd1498Szrj 	 and constant, it is probably not addressable because the constant is
908*38fd1498Szrj 	 out of range, in that case record the address; we will generate
909*38fd1498Szrj 	 hairy code to compute the address in a register each time it is
910*38fd1498Szrj 	 needed.  Similarly if it is a hard register, but one that is not
911*38fd1498Szrj 	 valid as an address register.
912*38fd1498Szrj 
913*38fd1498Szrj 	 If the location is not addressable, but does not have one of the
914*38fd1498Szrj 	 above forms, assign a stack slot.  We have to do this to avoid the
915*38fd1498Szrj 	 potential of producing lots of reloads if, e.g., a location involves
916*38fd1498Szrj 	 a pseudo that didn't get a hard register and has an equivalent memory
917*38fd1498Szrj 	 location that also involves a pseudo that didn't get a hard register.
918*38fd1498Szrj 
919*38fd1498Szrj 	 Perhaps at some point we will improve reload_when_needed handling
920*38fd1498Szrj 	 so this problem goes away.  But that's very hairy.  */
921*38fd1498Szrj 
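      /* A worked illustration of the cases above (the pseudo number and
	 offsets are invented): if pseudo 200's equivalent location
	 eliminates to (mem:SI (plus (reg sp) (const_int 16))) and that
	 address is valid, reg_equiv_mem is set and find_reloads can use
	 the MEM directly.  If the offset is out of range for the
	 addressing mode, say (plus (reg sp) (const_int 100000)), only
	 reg_equiv_address is recorded and the address is computed into a
	 register at each use.  Anything else falls through to alter_reg
	 and gets a stack slot.  */
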
922*38fd1498Szrj       for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
923*38fd1498Szrj 	if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
924*38fd1498Szrj 	  {
925*38fd1498Szrj 	    rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
926*38fd1498Szrj 				    NULL_RTX);
927*38fd1498Szrj 
928*38fd1498Szrj 	    if (strict_memory_address_addr_space_p
929*38fd1498Szrj 		  (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
930*38fd1498Szrj 		   MEM_ADDR_SPACE (x)))
931*38fd1498Szrj 	      reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
932*38fd1498Szrj 	    else if (CONSTANT_P (XEXP (x, 0))
933*38fd1498Szrj 		     || (REG_P (XEXP (x, 0))
934*38fd1498Szrj 			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
935*38fd1498Szrj 		     || (GET_CODE (XEXP (x, 0)) == PLUS
936*38fd1498Szrj 			 && REG_P (XEXP (XEXP (x, 0), 0))
937*38fd1498Szrj 			 && (REGNO (XEXP (XEXP (x, 0), 0))
938*38fd1498Szrj 			     < FIRST_PSEUDO_REGISTER)
939*38fd1498Szrj 			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
940*38fd1498Szrj 	      reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
941*38fd1498Szrj 	    else
942*38fd1498Szrj 	      {
943*38fd1498Szrj 		/* Make a new stack slot.  Then indicate that something
944*38fd1498Szrj 		   changed so we go back and recompute offsets for
945*38fd1498Szrj 		   eliminable registers because the allocation of memory
946*38fd1498Szrj 		   below might change some offset.  reg_equiv_{mem,address}
947*38fd1498Szrj 		   will be set up for this pseudo on the next pass around
948*38fd1498Szrj 		   the loop.  */
949*38fd1498Szrj 		reg_equiv_memory_loc (i) = 0;
950*38fd1498Szrj 		reg_equiv_init (i) = 0;
951*38fd1498Szrj 		alter_reg (i, -1, true);
952*38fd1498Szrj 	      }
953*38fd1498Szrj 	  }
954*38fd1498Szrj 
955*38fd1498Szrj       if (caller_save_needed)
956*38fd1498Szrj 	setup_save_areas ();
957*38fd1498Szrj 
958*38fd1498Szrj       if (maybe_ne (starting_frame_size, 0) && crtl->stack_alignment_needed)
959*38fd1498Szrj 	{
960*38fd1498Szrj 	  /* If we have a stack frame, we must align it now.  The
961*38fd1498Szrj 	     stack size may be a part of the offset computation for
962*38fd1498Szrj 	     register elimination.  So if this changes the stack size,
963*38fd1498Szrj 	     then repeat the elimination bookkeeping.  We don't
964*38fd1498Szrj 	     realign when there is no stack, as that will cause a
965*38fd1498Szrj 	     stack frame when none is needed should
966*38fd1498Szrj 	     TARGET_STARTING_FRAME_OFFSET not be already aligned to
967*38fd1498Szrj 	     STACK_BOUNDARY.  */
968*38fd1498Szrj 	  assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
969*38fd1498Szrj 	}
970*38fd1498Szrj       /* If we allocated another stack slot, redo elimination bookkeeping.  */
971*38fd1498Szrj       if (something_was_spilled
972*38fd1498Szrj 	  || maybe_ne (starting_frame_size, get_frame_size ()))
973*38fd1498Szrj 	{
974*38fd1498Szrj 	  if (update_eliminables_and_spill ())
975*38fd1498Szrj 	    finish_spills (0);
976*38fd1498Szrj 	  continue;
977*38fd1498Szrj 	}
978*38fd1498Szrj 
979*38fd1498Szrj       if (caller_save_needed)
980*38fd1498Szrj 	{
981*38fd1498Szrj 	  save_call_clobbered_regs ();
982*38fd1498Szrj 	  /* That might have allocated new insn_chain structures.  */
983*38fd1498Szrj 	  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
984*38fd1498Szrj 	}
985*38fd1498Szrj 
986*38fd1498Szrj       calculate_needs_all_insns (global);
987*38fd1498Szrj 
988*38fd1498Szrj       if (! ira_conflicts_p)
989*38fd1498Szrj 	/* Don't do it for IRA.  We need this info because we don't
990*38fd1498Szrj 	   change live_throughout and dead_or_set for chains when IRA
991*38fd1498Szrj 	   is used.  */
992*38fd1498Szrj 	CLEAR_REG_SET (&spilled_pseudos);
993*38fd1498Szrj 
994*38fd1498Szrj       something_changed = 0;
995*38fd1498Szrj 
996*38fd1498Szrj       /* If we allocated any new memory locations, make another pass
997*38fd1498Szrj 	 since it might have changed elimination offsets.  */
998*38fd1498Szrj       if (something_was_spilled
999*38fd1498Szrj 	  || maybe_ne (starting_frame_size, get_frame_size ()))
1000*38fd1498Szrj 	something_changed = 1;
1001*38fd1498Szrj 
1002*38fd1498Szrj       /* Even if the frame size remained the same, we might still have
1003*38fd1498Szrj 	 changed elimination offsets, e.g. if find_reloads called
1004*38fd1498Szrj 	 force_const_mem requiring the back end to allocate a constant
1005*38fd1498Szrj 	 pool base register that needs to be saved on the stack.  */
1006*38fd1498Szrj       else if (!verify_initial_elim_offsets ())
1007*38fd1498Szrj 	something_changed = 1;
1008*38fd1498Szrj 
1009*38fd1498Szrj       if (update_eliminables_and_spill ())
1010*38fd1498Szrj 	{
1011*38fd1498Szrj 	  finish_spills (0);
1012*38fd1498Szrj 	  something_changed = 1;
1013*38fd1498Szrj 	}
1014*38fd1498Szrj       else
1015*38fd1498Szrj 	{
1016*38fd1498Szrj 	  select_reload_regs ();
1017*38fd1498Szrj 	  if (failure)
1018*38fd1498Szrj 	    goto failed;
1019*38fd1498Szrj 	  if (insns_need_reload)
1020*38fd1498Szrj 	    something_changed |= finish_spills (global);
1021*38fd1498Szrj 	}
1022*38fd1498Szrj 
1023*38fd1498Szrj       if (! something_changed)
1024*38fd1498Szrj 	break;
1025*38fd1498Szrj 
1026*38fd1498Szrj       if (caller_save_needed)
1027*38fd1498Szrj 	delete_caller_save_insns ();
1028*38fd1498Szrj 
1029*38fd1498Szrj       obstack_free (&reload_obstack, reload_firstobj);
1030*38fd1498Szrj     }
1031*38fd1498Szrj 
1032*38fd1498Szrj   /* If global-alloc was run, notify it of any register eliminations we have
1033*38fd1498Szrj      done.  */
1034*38fd1498Szrj   if (global)
1035*38fd1498Szrj     for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1036*38fd1498Szrj       if (ep->can_eliminate)
1037*38fd1498Szrj 	mark_elimination (ep->from, ep->to);
1038*38fd1498Szrj 
1039*38fd1498Szrj   remove_init_insns ();
1040*38fd1498Szrj 
1041*38fd1498Szrj   /* Use the reload registers where necessary
1042*38fd1498Szrj      by generating move instructions to move the must-be-register
1043*38fd1498Szrj      values into or out of the reload registers.  */
1044*38fd1498Szrj 
1045*38fd1498Szrj   if (insns_need_reload != 0 || something_needs_elimination
1046*38fd1498Szrj       || something_needs_operands_changed)
1047*38fd1498Szrj     {
1048*38fd1498Szrj       poly_int64 old_frame_size = get_frame_size ();
1049*38fd1498Szrj 
1050*38fd1498Szrj       reload_as_needed (global);
1051*38fd1498Szrj 
1052*38fd1498Szrj       gcc_assert (known_eq (old_frame_size, get_frame_size ()));
1053*38fd1498Szrj 
1054*38fd1498Szrj       gcc_assert (verify_initial_elim_offsets ());
1055*38fd1498Szrj     }
1056*38fd1498Szrj 
1057*38fd1498Szrj   /* If we were able to eliminate the frame pointer, show that it is no
1058*38fd1498Szrj      longer live at the start of any basic block.  If it is live by
1059*38fd1498Szrj      virtue of being in a pseudo, that pseudo will be marked live
1060*38fd1498Szrj      and hence the frame pointer will be known to be live via that
1061*38fd1498Szrj      pseudo.  */
1062*38fd1498Szrj 
1063*38fd1498Szrj   if (! frame_pointer_needed)
1064*38fd1498Szrj     FOR_EACH_BB_FN (bb, cfun)
1065*38fd1498Szrj       bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1066*38fd1498Szrj 
1067*38fd1498Szrj   /* Come here (with failure set nonzero) if we can't get enough spill
1068*38fd1498Szrj      regs.  */
1069*38fd1498Szrj  failed:
1070*38fd1498Szrj 
1071*38fd1498Szrj   CLEAR_REG_SET (&changed_allocation_pseudos);
1072*38fd1498Szrj   CLEAR_REG_SET (&spilled_pseudos);
1073*38fd1498Szrj   reload_in_progress = 0;
1074*38fd1498Szrj 
1075*38fd1498Szrj   /* Now eliminate all pseudo regs by modifying them into
1076*38fd1498Szrj      their equivalent memory references.
1077*38fd1498Szrj      The REG-rtx's for the pseudos are modified in place,
1078*38fd1498Szrj      so all insns that used to refer to them now refer to memory.
1079*38fd1498Szrj 
1080*38fd1498Szrj      For a reg that has a reg_equiv_address, all those insns
1081*38fd1498Szrj      were changed by reloading so that no insns refer to it any longer;
1082*38fd1498Szrj      but the DECL_RTL of a variable decl may refer to it,
1083*38fd1498Szrj      and if so this causes the debugging info to mention the variable.  */
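  /* Concretely, the shared (reg:M i) rtx object is edited in place into
     (mem:M addr), so every insn that still references that rtx sees the
     memory equivalent without any further rewriting of the insn stream.  */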
1084*38fd1498Szrj 
1085*38fd1498Szrj   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1086*38fd1498Szrj     {
1087*38fd1498Szrj       rtx addr = 0;
1088*38fd1498Szrj 
1089*38fd1498Szrj       if (reg_equiv_mem (i))
1090*38fd1498Szrj 	addr = XEXP (reg_equiv_mem (i), 0);
1091*38fd1498Szrj 
1092*38fd1498Szrj       if (reg_equiv_address (i))
1093*38fd1498Szrj 	addr = reg_equiv_address (i);
1094*38fd1498Szrj 
1095*38fd1498Szrj       if (addr)
1096*38fd1498Szrj 	{
1097*38fd1498Szrj 	  if (reg_renumber[i] < 0)
1098*38fd1498Szrj 	    {
1099*38fd1498Szrj 	      rtx reg = regno_reg_rtx[i];
1100*38fd1498Szrj 
1101*38fd1498Szrj 	      REG_USERVAR_P (reg) = 0;
1102*38fd1498Szrj 	      PUT_CODE (reg, MEM);
1103*38fd1498Szrj 	      XEXP (reg, 0) = addr;
1104*38fd1498Szrj 	      if (reg_equiv_memory_loc (i))
1105*38fd1498Szrj 		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1106*38fd1498Szrj 	      else
1107*38fd1498Szrj 		MEM_ATTRS (reg) = 0;
1108*38fd1498Szrj 	      MEM_NOTRAP_P (reg) = 1;
1109*38fd1498Szrj 	    }
1110*38fd1498Szrj 	  else if (reg_equiv_mem (i))
1111*38fd1498Szrj 	    XEXP (reg_equiv_mem (i), 0) = addr;
1112*38fd1498Szrj 	}
1113*38fd1498Szrj 
1114*38fd1498Szrj       /* We don't want complex addressing modes in debug insns
1115*38fd1498Szrj 	 if simpler ones will do, so delegitimize equivalences
1116*38fd1498Szrj 	 in debug insns.  */
1117*38fd1498Szrj       if (MAY_HAVE_DEBUG_BIND_INSNS && reg_renumber[i] < 0)
1118*38fd1498Szrj 	{
1119*38fd1498Szrj 	  rtx reg = regno_reg_rtx[i];
1120*38fd1498Szrj 	  rtx equiv = 0;
1121*38fd1498Szrj 	  df_ref use, next;
1122*38fd1498Szrj 
1123*38fd1498Szrj 	  if (reg_equiv_constant (i))
1124*38fd1498Szrj 	    equiv = reg_equiv_constant (i);
1125*38fd1498Szrj 	  else if (reg_equiv_invariant (i))
1126*38fd1498Szrj 	    equiv = reg_equiv_invariant (i);
1127*38fd1498Szrj 	  else if (reg && MEM_P (reg))
1128*38fd1498Szrj 	    equiv = targetm.delegitimize_address (reg);
1129*38fd1498Szrj 	  else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1130*38fd1498Szrj 	    equiv = reg;
1131*38fd1498Szrj 
1132*38fd1498Szrj 	  if (equiv == reg)
1133*38fd1498Szrj 	    continue;
1134*38fd1498Szrj 
1135*38fd1498Szrj 	  for (use = DF_REG_USE_CHAIN (i); use; use = next)
1136*38fd1498Szrj 	    {
1137*38fd1498Szrj 	      insn = DF_REF_INSN (use);
1138*38fd1498Szrj 
1139*38fd1498Szrj 	      /* Make sure the next ref is for a different instruction,
1140*38fd1498Szrj 		 so that we're not affected by the rescan.  */
1141*38fd1498Szrj 	      next = DF_REF_NEXT_REG (use);
1142*38fd1498Szrj 	      while (next && DF_REF_INSN (next) == insn)
1143*38fd1498Szrj 		next = DF_REF_NEXT_REG (next);
1144*38fd1498Szrj 
1145*38fd1498Szrj 	      if (DEBUG_BIND_INSN_P (insn))
1146*38fd1498Szrj 		{
1147*38fd1498Szrj 		  if (!equiv)
1148*38fd1498Szrj 		    {
1149*38fd1498Szrj 		      INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1150*38fd1498Szrj 		      df_insn_rescan_debug_internal (insn);
1151*38fd1498Szrj 		    }
1152*38fd1498Szrj 		  else
1153*38fd1498Szrj 		    INSN_VAR_LOCATION_LOC (insn)
1154*38fd1498Szrj 		      = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1155*38fd1498Szrj 					      reg, equiv);
1156*38fd1498Szrj 		}
1157*38fd1498Szrj 	    }
1158*38fd1498Szrj 	}
1159*38fd1498Szrj     }
1160*38fd1498Szrj 
1161*38fd1498Szrj   /* We must set reload_completed now since the cleanup_subreg_operands call
1162*38fd1498Szrj      below will re-recognize each insn and reload may have generated insns
1163*38fd1498Szrj      which are only valid during and after reload.  */
1164*38fd1498Szrj   reload_completed = 1;
1165*38fd1498Szrj 
1166*38fd1498Szrj   /* Make a pass over all the insns and delete all USEs which we inserted
1167*38fd1498Szrj      only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1168*38fd1498Szrj      notes.  Delete all CLOBBER insns, except those that refer to the return
1169*38fd1498Szrj      value and the special mem:BLK CLOBBERs added to prevent the scheduler
1170*38fd1498Szrj      from misarranging variable-array code, and simplify (subreg (reg))
1171*38fd1498Szrj      operands.  Strip and regenerate REG_INC notes that may have been moved
1172*38fd1498Szrj      around.  */
1173*38fd1498Szrj 
1174*38fd1498Szrj   for (insn = first; insn; insn = NEXT_INSN (insn))
1175*38fd1498Szrj     if (INSN_P (insn))
1176*38fd1498Szrj       {
1177*38fd1498Szrj 	rtx *pnote;
1178*38fd1498Szrj 
1179*38fd1498Szrj 	if (CALL_P (insn))
1180*38fd1498Szrj 	  replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1181*38fd1498Szrj 			      VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1182*38fd1498Szrj 
1183*38fd1498Szrj 	if ((GET_CODE (PATTERN (insn)) == USE
1184*38fd1498Szrj 	     /* USEs introduced by reload itself are marked with QImode.  */
1185*38fd1498Szrj 	     && (GET_MODE (insn) == QImode
1186*38fd1498Szrj 		 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1187*38fd1498Szrj 	    || (GET_CODE (PATTERN (insn)) == CLOBBER
1188*38fd1498Szrj 		&& (!MEM_P (XEXP (PATTERN (insn), 0))
1189*38fd1498Szrj 		    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1190*38fd1498Szrj 		    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1191*38fd1498Szrj 			&& XEXP (XEXP (PATTERN (insn), 0), 0)
1192*38fd1498Szrj 				!= stack_pointer_rtx))
1193*38fd1498Szrj 		&& (!REG_P (XEXP (PATTERN (insn), 0))
1194*38fd1498Szrj 		    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1195*38fd1498Szrj 	  {
1196*38fd1498Szrj 	    delete_insn (insn);
1197*38fd1498Szrj 	    continue;
1198*38fd1498Szrj 	  }
1199*38fd1498Szrj 
1200*38fd1498Szrj 	/* Some CLOBBERs may survive until here and still reference unassigned
1201*38fd1498Szrj 	   pseudos with a constant equivalent, which may in turn cause an ICE in later
1202*38fd1498Szrj 	   passes if the reference remains in place.  */
1203*38fd1498Szrj 	if (GET_CODE (PATTERN (insn)) == CLOBBER)
1204*38fd1498Szrj 	  replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1205*38fd1498Szrj 			      VOIDmode, PATTERN (insn));
1206*38fd1498Szrj 
1207*38fd1498Szrj 	/* Discard obvious no-ops, even without -O.  This optimization
1208*38fd1498Szrj 	   is fast and doesn't interfere with debugging.  */
1209*38fd1498Szrj 	if (NONJUMP_INSN_P (insn)
1210*38fd1498Szrj 	    && GET_CODE (PATTERN (insn)) == SET
1211*38fd1498Szrj 	    && REG_P (SET_SRC (PATTERN (insn)))
1212*38fd1498Szrj 	    && REG_P (SET_DEST (PATTERN (insn)))
1213*38fd1498Szrj 	    && (REGNO (SET_SRC (PATTERN (insn)))
1214*38fd1498Szrj 		== REGNO (SET_DEST (PATTERN (insn)))))
1215*38fd1498Szrj 	  {
1216*38fd1498Szrj 	    delete_insn (insn);
1217*38fd1498Szrj 	    continue;
1218*38fd1498Szrj 	  }
1219*38fd1498Szrj 
1220*38fd1498Szrj 	pnote = &REG_NOTES (insn);
1221*38fd1498Szrj 	while (*pnote != 0)
1222*38fd1498Szrj 	  {
1223*38fd1498Szrj 	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
1224*38fd1498Szrj 		|| REG_NOTE_KIND (*pnote) == REG_UNUSED
1225*38fd1498Szrj 		|| REG_NOTE_KIND (*pnote) == REG_INC)
1226*38fd1498Szrj 	      *pnote = XEXP (*pnote, 1);
1227*38fd1498Szrj 	    else
1228*38fd1498Szrj 	      pnote = &XEXP (*pnote, 1);
1229*38fd1498Szrj 	  }
1230*38fd1498Szrj 
1231*38fd1498Szrj 	if (AUTO_INC_DEC)
1232*38fd1498Szrj 	  add_auto_inc_notes (insn, PATTERN (insn));
1233*38fd1498Szrj 
1234*38fd1498Szrj 	/* Simplify (subreg (reg)) if it appears as an operand.  */
1235*38fd1498Szrj 	cleanup_subreg_operands (insn);
1236*38fd1498Szrj 
1237*38fd1498Szrj 	/* Clean up invalid ASMs so that they don't confuse later passes.
1238*38fd1498Szrj 	   See PR 21299.  */
1239*38fd1498Szrj 	if (asm_noperands (PATTERN (insn)) >= 0)
1240*38fd1498Szrj 	  {
1241*38fd1498Szrj 	    extract_insn (insn);
1242*38fd1498Szrj 	    if (!constrain_operands (1, get_enabled_alternatives (insn)))
1243*38fd1498Szrj 	      {
1244*38fd1498Szrj 		error_for_asm (insn,
1245*38fd1498Szrj 			       "%<asm%> operand has impossible constraints");
1246*38fd1498Szrj 		delete_insn (insn);
1247*38fd1498Szrj 		continue;
1248*38fd1498Szrj 	      }
1249*38fd1498Szrj 	  }
1250*38fd1498Szrj       }
1251*38fd1498Szrj 
1252*38fd1498Szrj   free (temp_pseudo_reg_arr);
1253*38fd1498Szrj 
1254*38fd1498Szrj   /* Indicate that we no longer have known memory locations or constants.  */
1255*38fd1498Szrj   free_reg_equiv ();
1256*38fd1498Szrj 
1257*38fd1498Szrj   free (reg_max_ref_mode);
1258*38fd1498Szrj   free (reg_old_renumber);
1259*38fd1498Szrj   free (pseudo_previous_regs);
1260*38fd1498Szrj   free (pseudo_forbidden_regs);
1261*38fd1498Szrj 
1262*38fd1498Szrj   CLEAR_HARD_REG_SET (used_spill_regs);
1263*38fd1498Szrj   for (i = 0; i < n_spills; i++)
1264*38fd1498Szrj     SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1265*38fd1498Szrj 
1266*38fd1498Szrj   /* Free all the insn_chain structures at once.  */
1267*38fd1498Szrj   obstack_free (&reload_obstack, reload_startobj);
1268*38fd1498Szrj   unused_insn_chains = 0;
1269*38fd1498Szrj 
1270*38fd1498Szrj   inserted = fixup_abnormal_edges ();
1271*38fd1498Szrj 
1272*38fd1498Szrj   /* We've possibly turned a single trapping insn into multiple ones.  */
1273*38fd1498Szrj   if (cfun->can_throw_non_call_exceptions)
1274*38fd1498Szrj     {
1275*38fd1498Szrj       auto_sbitmap blocks (last_basic_block_for_fn (cfun));
1276*38fd1498Szrj       bitmap_ones (blocks);
1277*38fd1498Szrj       find_many_sub_basic_blocks (blocks);
1278*38fd1498Szrj     }
1279*38fd1498Szrj 
1280*38fd1498Szrj   if (inserted)
1281*38fd1498Szrj     commit_edge_insertions ();
1282*38fd1498Szrj 
1283*38fd1498Szrj   /* Replacing pseudos with their memory equivalents might have
1284*38fd1498Szrj      created shared rtx.  Subsequent passes would get confused
1285*38fd1498Szrj      by this, so unshare everything here.  */
1286*38fd1498Szrj   unshare_all_rtl_again (first);
1287*38fd1498Szrj 
1288*38fd1498Szrj #ifdef STACK_BOUNDARY
1289*38fd1498Szrj   /* init_emit has set the alignment of the hard frame pointer
1290*38fd1498Szrj      to STACK_BOUNDARY.  It is very likely no longer valid if
1291*38fd1498Szrj      the hard frame pointer was used for register allocation.  */
1292*38fd1498Szrj   if (!frame_pointer_needed)
1293*38fd1498Szrj     REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1294*38fd1498Szrj #endif
1295*38fd1498Szrj 
1296*38fd1498Szrj   substitute_stack.release ();
1297*38fd1498Szrj 
1298*38fd1498Szrj   gcc_assert (bitmap_empty_p (&spilled_pseudos));
1299*38fd1498Szrj 
1300*38fd1498Szrj   reload_completed = !failure;
1301*38fd1498Szrj 
1302*38fd1498Szrj   return need_dce;
1303*38fd1498Szrj }
1304*38fd1498Szrj 
1305*38fd1498Szrj /* Yet another special case.  Unfortunately, reg-stack forces people to
1306*38fd1498Szrj    write incorrect clobbers in asm statements.  These clobbers must not
1307*38fd1498Szrj    cause the register to appear in bad_spill_regs, otherwise we'll call
1308*38fd1498Szrj    fatal_insn later.  We clear the corresponding regnos in the live
1309*38fd1498Szrj    register sets to avoid this.
1310*38fd1498Szrj    The whole thing is rather sick, I'm afraid.  */
1311*38fd1498Szrj 
1312*38fd1498Szrj static void
1313*38fd1498Szrj maybe_fix_stack_asms (void)
1314*38fd1498Szrj {
1315*38fd1498Szrj #ifdef STACK_REGS
1316*38fd1498Szrj   const char *constraints[MAX_RECOG_OPERANDS];
1317*38fd1498Szrj   machine_mode operand_mode[MAX_RECOG_OPERANDS];
1318*38fd1498Szrj   struct insn_chain *chain;
1319*38fd1498Szrj 
1320*38fd1498Szrj   for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1321*38fd1498Szrj     {
1322*38fd1498Szrj       int i, noperands;
1323*38fd1498Szrj       HARD_REG_SET clobbered, allowed;
1324*38fd1498Szrj       rtx pat;
1325*38fd1498Szrj 
1326*38fd1498Szrj       if (! INSN_P (chain->insn)
1327*38fd1498Szrj 	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1328*38fd1498Szrj 	continue;
1329*38fd1498Szrj       pat = PATTERN (chain->insn);
1330*38fd1498Szrj       if (GET_CODE (pat) != PARALLEL)
1331*38fd1498Szrj 	continue;
1332*38fd1498Szrj 
1333*38fd1498Szrj       CLEAR_HARD_REG_SET (clobbered);
1334*38fd1498Szrj       CLEAR_HARD_REG_SET (allowed);
1335*38fd1498Szrj 
1336*38fd1498Szrj       /* First, make a mask of all stack regs that are clobbered.  */
1337*38fd1498Szrj       for (i = 0; i < XVECLEN (pat, 0); i++)
1338*38fd1498Szrj 	{
1339*38fd1498Szrj 	  rtx t = XVECEXP (pat, 0, i);
1340*38fd1498Szrj 	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1341*38fd1498Szrj 	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1342*38fd1498Szrj 	}
1343*38fd1498Szrj 
1344*38fd1498Szrj       /* Get the operand values and constraints out of the insn.  */
1345*38fd1498Szrj       decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1346*38fd1498Szrj 			   constraints, operand_mode, NULL);
1347*38fd1498Szrj 
1348*38fd1498Szrj       /* For every operand, see what registers are allowed.  */
1349*38fd1498Szrj       for (i = 0; i < noperands; i++)
1350*38fd1498Szrj 	{
1351*38fd1498Szrj 	  const char *p = constraints[i];
1352*38fd1498Szrj 	  /* For every alternative, we compute the class of registers allowed
1353*38fd1498Szrj 	     for reloading in CLS, and merge its contents into the reg set
1354*38fd1498Szrj 	     ALLOWED.  */
1355*38fd1498Szrj 	  int cls = (int) NO_REGS;
1356*38fd1498Szrj 
1357*38fd1498Szrj 	  for (;;)
1358*38fd1498Szrj 	    {
1359*38fd1498Szrj 	      char c = *p;
1360*38fd1498Szrj 
1361*38fd1498Szrj 	      if (c == '\0' || c == ',' || c == '#')
1362*38fd1498Szrj 		{
1363*38fd1498Szrj 		  /* End of one alternative - mark the regs in the current
1364*38fd1498Szrj 		     class, and reset the class.  */
1365*38fd1498Szrj 		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1366*38fd1498Szrj 		  cls = NO_REGS;
1367*38fd1498Szrj 		  p++;
1368*38fd1498Szrj 		  if (c == '#')
1369*38fd1498Szrj 		    do {
1370*38fd1498Szrj 		      c = *p++;
1371*38fd1498Szrj 		    } while (c != '\0' && c != ',');
1372*38fd1498Szrj 		  if (c == '\0')
1373*38fd1498Szrj 		    break;
1374*38fd1498Szrj 		  continue;
1375*38fd1498Szrj 		}
1376*38fd1498Szrj 
1377*38fd1498Szrj 	      switch (c)
1378*38fd1498Szrj 		{
1379*38fd1498Szrj 		case 'g':
1380*38fd1498Szrj 		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1381*38fd1498Szrj 		  break;
1382*38fd1498Szrj 
1383*38fd1498Szrj 		default:
1384*38fd1498Szrj 		  enum constraint_num cn = lookup_constraint (p);
1385*38fd1498Szrj 		  if (insn_extra_address_constraint (cn))
1386*38fd1498Szrj 		    cls = (int) reg_class_subunion[cls]
1387*38fd1498Szrj 		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
1388*38fd1498Szrj 					     ADDRESS, SCRATCH)];
1389*38fd1498Szrj 		  else
1390*38fd1498Szrj 		    cls = (int) reg_class_subunion[cls]
1391*38fd1498Szrj 		      [reg_class_for_constraint (cn)];
1392*38fd1498Szrj 		  break;
1393*38fd1498Szrj 		}
1394*38fd1498Szrj 	      p += CONSTRAINT_LEN (c, p);
1395*38fd1498Szrj 	    }
1396*38fd1498Szrj 	}
1397*38fd1498Szrj       /* Those of the registers which are clobbered, but allowed by the
1398*38fd1498Szrj 	 constraints, must be usable as reload registers.  So clear them
1399*38fd1498Szrj 	 out of the life information.  */
1400*38fd1498Szrj       AND_HARD_REG_SET (allowed, clobbered);
1401*38fd1498Szrj       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1402*38fd1498Szrj 	if (TEST_HARD_REG_BIT (allowed, i))
1403*38fd1498Szrj 	  {
1404*38fd1498Szrj 	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1405*38fd1498Szrj 	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1406*38fd1498Szrj 	  }
1407*38fd1498Szrj     }
1408*38fd1498Szrj 
1409*38fd1498Szrj #endif
1410*38fd1498Szrj }
1411*38fd1498Szrj 
1412*38fd1498Szrj /* Copy the global variables n_reloads and rld into the corresponding elts
1413*38fd1498Szrj    of CHAIN.  */
1414*38fd1498Szrj static void
1415*38fd1498Szrj copy_reloads (struct insn_chain *chain)
1416*38fd1498Szrj {
1417*38fd1498Szrj   chain->n_reloads = n_reloads;
1418*38fd1498Szrj   chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1419*38fd1498Szrj   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1420*38fd1498Szrj   reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1421*38fd1498Szrj }
1422*38fd1498Szrj 
1423*38fd1498Szrj /* Walk the chain of insns, and determine for each whether it needs reloads
1424*38fd1498Szrj    and/or eliminations.  Build the corresponding insns_need_reload list, and
1425*38fd1498Szrj    set something_needs_elimination as appropriate.  */
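/* When -fexpensive-optimizations is in effect, no-op moves that would need
   reloads (including moves between pseudos with identical memory
   equivalences) are also deleted here and unlinked from reload_insn_chain.  */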
1426*38fd1498Szrj static void
1427*38fd1498Szrj calculate_needs_all_insns (int global)
1428*38fd1498Szrj {
1429*38fd1498Szrj   struct insn_chain **pprev_reload = &insns_need_reload;
1430*38fd1498Szrj   struct insn_chain *chain, *next = 0;
1431*38fd1498Szrj 
1432*38fd1498Szrj   something_needs_elimination = 0;
1433*38fd1498Szrj 
1434*38fd1498Szrj   reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1435*38fd1498Szrj   for (chain = reload_insn_chain; chain != 0; chain = next)
1436*38fd1498Szrj     {
1437*38fd1498Szrj       rtx_insn *insn = chain->insn;
1438*38fd1498Szrj 
1439*38fd1498Szrj       next = chain->next;
1440*38fd1498Szrj 
1441*38fd1498Szrj       /* Clear out the shortcuts.  */
1442*38fd1498Szrj       chain->n_reloads = 0;
1443*38fd1498Szrj       chain->need_elim = 0;
1444*38fd1498Szrj       chain->need_reload = 0;
1445*38fd1498Szrj       chain->need_operand_change = 0;
1446*38fd1498Szrj 
1447*38fd1498Szrj       /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1448*38fd1498Szrj 	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1449*38fd1498Szrj 	 what effects this has on the known offsets at labels.  */
1450*38fd1498Szrj 
1451*38fd1498Szrj       if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
1452*38fd1498Szrj 	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
1453*38fd1498Szrj 	set_label_offsets (insn, insn, 0);
1454*38fd1498Szrj 
1455*38fd1498Szrj       if (INSN_P (insn))
1456*38fd1498Szrj 	{
1457*38fd1498Szrj 	  rtx old_body = PATTERN (insn);
1458*38fd1498Szrj 	  int old_code = INSN_CODE (insn);
1459*38fd1498Szrj 	  rtx old_notes = REG_NOTES (insn);
1460*38fd1498Szrj 	  int did_elimination = 0;
1461*38fd1498Szrj 	  int operands_changed = 0;
1462*38fd1498Szrj 
1463*38fd1498Szrj 	  /* Skip insns that only set an equivalence.  */
1464*38fd1498Szrj 	  if (will_delete_init_insn_p (insn))
1465*38fd1498Szrj 	    continue;
1466*38fd1498Szrj 
1467*38fd1498Szrj 	  /* If needed, eliminate any eliminable registers.  */
1468*38fd1498Szrj 	  if (num_eliminable || num_eliminable_invariants)
1469*38fd1498Szrj 	    did_elimination = eliminate_regs_in_insn (insn, 0);
1470*38fd1498Szrj 
1471*38fd1498Szrj 	  /* Analyze the instruction.  */
1472*38fd1498Szrj 	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1473*38fd1498Szrj 					   global, spill_reg_order);
1474*38fd1498Szrj 
1475*38fd1498Szrj 	  /* If a no-op set needs more than one reload, this is likely
1476*38fd1498Szrj 	     to be something that needs input address reloads.  We
1477*38fd1498Szrj 	     can't get rid of this cleanly later, and it is of no use
1478*38fd1498Szrj 	     anyway, so discard it now.
1479*38fd1498Szrj 	     We only do this when expensive_optimizations is enabled,
1480*38fd1498Szrj 	     since this complements reload inheritance / output
1481*38fd1498Szrj 	     reload deletion, and it can make debugging harder.  */
1482*38fd1498Szrj 	  if (flag_expensive_optimizations && n_reloads > 1)
1483*38fd1498Szrj 	    {
1484*38fd1498Szrj 	      rtx set = single_set (insn);
1485*38fd1498Szrj 	      if (set
1486*38fd1498Szrj 		  &&
1487*38fd1498Szrj 		  ((SET_SRC (set) == SET_DEST (set)
1488*38fd1498Szrj 		    && REG_P (SET_SRC (set))
1489*38fd1498Szrj 		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1490*38fd1498Szrj 		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1491*38fd1498Szrj 		       && reg_renumber[REGNO (SET_SRC (set))] < 0
1492*38fd1498Szrj 		       && reg_renumber[REGNO (SET_DEST (set))] < 0
1493*38fd1498Szrj 		       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
1494*38fd1498Szrj 		       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
1495*38fd1498Szrj 		       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
1496*38fd1498Szrj 				       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
1497*38fd1498Szrj 		{
1498*38fd1498Szrj 		  if (ira_conflicts_p)
1499*38fd1498Szrj 		    /* Inform IRA about the insn deletion.  */
1500*38fd1498Szrj 		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
1501*38fd1498Szrj 						   REGNO (SET_SRC (set)));
1502*38fd1498Szrj 		  delete_insn (insn);
1503*38fd1498Szrj 		  /* Delete it from the reload chain.  */
1504*38fd1498Szrj 		  if (chain->prev)
1505*38fd1498Szrj 		    chain->prev->next = next;
1506*38fd1498Szrj 		  else
1507*38fd1498Szrj 		    reload_insn_chain = next;
1508*38fd1498Szrj 		  if (next)
1509*38fd1498Szrj 		    next->prev = chain->prev;
1510*38fd1498Szrj 		  chain->next = unused_insn_chains;
1511*38fd1498Szrj 		  unused_insn_chains = chain;
1512*38fd1498Szrj 		  continue;
1513*38fd1498Szrj 		}
1514*38fd1498Szrj 	    }
1515*38fd1498Szrj 	  if (num_eliminable)
1516*38fd1498Szrj 	    update_eliminable_offsets ();
1517*38fd1498Szrj 
1518*38fd1498Szrj 	  /* Remember for later shortcuts which insns had any reloads or
1519*38fd1498Szrj 	     register eliminations.  */
1520*38fd1498Szrj 	  chain->need_elim = did_elimination;
1521*38fd1498Szrj 	  chain->need_reload = n_reloads > 0;
1522*38fd1498Szrj 	  chain->need_operand_change = operands_changed;
1523*38fd1498Szrj 
1524*38fd1498Szrj 	  /* Discard any register replacements done.  */
1525*38fd1498Szrj 	  if (did_elimination)
1526*38fd1498Szrj 	    {
1527*38fd1498Szrj 	      obstack_free (&reload_obstack, reload_insn_firstobj);
1528*38fd1498Szrj 	      PATTERN (insn) = old_body;
1529*38fd1498Szrj 	      INSN_CODE (insn) = old_code;
1530*38fd1498Szrj 	      REG_NOTES (insn) = old_notes;
1531*38fd1498Szrj 	      something_needs_elimination = 1;
1532*38fd1498Szrj 	    }
1533*38fd1498Szrj 
1534*38fd1498Szrj 	  something_needs_operands_changed |= operands_changed;
1535*38fd1498Szrj 
1536*38fd1498Szrj 	  if (n_reloads != 0)
1537*38fd1498Szrj 	    {
1538*38fd1498Szrj 	      copy_reloads (chain);
1539*38fd1498Szrj 	      *pprev_reload = chain;
1540*38fd1498Szrj 	      pprev_reload = &chain->next_need_reload;
1541*38fd1498Szrj 	    }
1542*38fd1498Szrj 	}
1543*38fd1498Szrj     }
1544*38fd1498Szrj   *pprev_reload = 0;
1545*38fd1498Szrj }
1546*38fd1498Szrj 
1547*38fd1498Szrj /* This function is called from the register allocator to set up estimates
1548*38fd1498Szrj    for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1549*38fd1498Szrj    an invariant.  The structure is similar to calculate_needs_all_insns.  */
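/* The costs gathered here are reported back to the register allocator via
   ira_adjust_equiv_reg_cost once all insns have been scanned.  */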
1550*38fd1498Szrj 
1551*38fd1498Szrj void
1552*38fd1498Szrj calculate_elim_costs_all_insns (void)
1553*38fd1498Szrj {
1554*38fd1498Szrj   int *reg_equiv_init_cost;
1555*38fd1498Szrj   basic_block bb;
1556*38fd1498Szrj   int i;
1557*38fd1498Szrj 
1558*38fd1498Szrj   reg_equiv_init_cost = XCNEWVEC (int, max_regno);
1559*38fd1498Szrj   init_elim_table ();
1560*38fd1498Szrj   init_eliminable_invariants (get_insns (), false);
1561*38fd1498Szrj 
1562*38fd1498Szrj   set_initial_elim_offsets ();
1563*38fd1498Szrj   set_initial_label_offsets ();
1564*38fd1498Szrj 
1565*38fd1498Szrj   FOR_EACH_BB_FN (bb, cfun)
1566*38fd1498Szrj     {
1567*38fd1498Szrj       rtx_insn *insn;
1568*38fd1498Szrj       elim_bb = bb;
1569*38fd1498Szrj 
1570*38fd1498Szrj       FOR_BB_INSNS (bb, insn)
1571*38fd1498Szrj 	{
1572*38fd1498Szrj 	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1573*38fd1498Szrj 	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1574*38fd1498Szrj 	     what effects this has on the known offsets at labels.  */
1575*38fd1498Szrj 
1576*38fd1498Szrj 	  if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
1577*38fd1498Szrj 	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
1578*38fd1498Szrj 	    set_label_offsets (insn, insn, 0);
1579*38fd1498Szrj 
1580*38fd1498Szrj 	  if (INSN_P (insn))
1581*38fd1498Szrj 	    {
1582*38fd1498Szrj 	      rtx set = single_set (insn);
1583*38fd1498Szrj 
1584*38fd1498Szrj 	      /* Skip insns that only set an equivalence.  */
1585*38fd1498Szrj 	      if (set && REG_P (SET_DEST (set))
1586*38fd1498Szrj 		  && reg_renumber[REGNO (SET_DEST (set))] < 0
1587*38fd1498Szrj 		  && (reg_equiv_constant (REGNO (SET_DEST (set)))
1588*38fd1498Szrj 		      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
1589*38fd1498Szrj 		{
1590*38fd1498Szrj 		  unsigned regno = REGNO (SET_DEST (set));
1591*38fd1498Szrj 		  rtx_insn_list *init = reg_equiv_init (regno);
1592*38fd1498Szrj 		  if (init)
1593*38fd1498Szrj 		    {
1594*38fd1498Szrj 		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
1595*38fd1498Szrj 						false, true);
1596*38fd1498Szrj 		      machine_mode mode = GET_MODE (SET_DEST (set));
1597*38fd1498Szrj 		      int cost = set_src_cost (t, mode,
1598*38fd1498Szrj 					       optimize_bb_for_speed_p (bb));
1599*38fd1498Szrj 		      int freq = REG_FREQ_FROM_BB (bb);
1600*38fd1498Szrj 
1601*38fd1498Szrj 		      reg_equiv_init_cost[regno] = cost * freq;
1602*38fd1498Szrj 		      continue;
1603*38fd1498Szrj 		    }
1604*38fd1498Szrj 		}
1605*38fd1498Szrj 	      /* If needed, eliminate any eliminable registers.  */
1606*38fd1498Szrj 	      if (num_eliminable || num_eliminable_invariants)
1607*38fd1498Szrj 		elimination_costs_in_insn (insn);
1608*38fd1498Szrj 
1609*38fd1498Szrj 	      if (num_eliminable)
1610*38fd1498Szrj 		update_eliminable_offsets ();
1611*38fd1498Szrj 	    }
1612*38fd1498Szrj 	}
1613*38fd1498Szrj     }
1614*38fd1498Szrj   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1615*38fd1498Szrj     {
1616*38fd1498Szrj       if (reg_equiv_invariant (i))
1617*38fd1498Szrj 	{
1618*38fd1498Szrj 	  if (reg_equiv_init (i))
1619*38fd1498Szrj 	    {
1620*38fd1498Szrj 	      int cost = reg_equiv_init_cost[i];
1621*38fd1498Szrj 	      if (dump_file)
1622*38fd1498Szrj 		fprintf (dump_file,
1623*38fd1498Szrj 			 "Reg %d has equivalence, initial gains %d\n", i, cost);
1624*38fd1498Szrj 	      if (cost != 0)
1625*38fd1498Szrj 		ira_adjust_equiv_reg_cost (i, cost);
1626*38fd1498Szrj 	    }
1627*38fd1498Szrj 	  else
1628*38fd1498Szrj 	    {
1629*38fd1498Szrj 	      if (dump_file)
1630*38fd1498Szrj 		fprintf (dump_file,
1631*38fd1498Szrj 			 "Reg %d had equivalence, but can't be eliminated\n",
1632*38fd1498Szrj 			 i);
1633*38fd1498Szrj 	      ira_adjust_equiv_reg_cost (i, 0);
1634*38fd1498Szrj 	    }
1635*38fd1498Szrj 	}
1636*38fd1498Szrj     }
1637*38fd1498Szrj 
1638*38fd1498Szrj   free (reg_equiv_init_cost);
1639*38fd1498Szrj   free (offsets_known_at);
1640*38fd1498Szrj   free (offsets_at);
1641*38fd1498Szrj   offsets_at = NULL;
1642*38fd1498Szrj   offsets_known_at = NULL;
1643*38fd1498Szrj }
1644*38fd1498Szrj 
1645*38fd1498Szrj /* Comparison function for qsort to decide which of two reloads
1646*38fd1498Szrj    should be handled first.  *P1 and *P2 are the reload numbers.  */
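/* The sort keys, in order: required reloads before optional ones, solitary
   register classes before larger ones, bigger multi-register groups before
   smaller ones, then ascending class number, with the reload number itself
   as the final tie-break so the ordering is deterministic.  */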
1647*38fd1498Szrj 
1648*38fd1498Szrj static int
1649*38fd1498Szrj reload_reg_class_lower (const void *r1p, const void *r2p)
1650*38fd1498Szrj {
1651*38fd1498Szrj   int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1652*38fd1498Szrj   int t;
1653*38fd1498Szrj 
1654*38fd1498Szrj   /* Consider required reloads before optional ones.  */
1655*38fd1498Szrj   t = rld[r1].optional - rld[r2].optional;
1656*38fd1498Szrj   if (t != 0)
1657*38fd1498Szrj     return t;
1658*38fd1498Szrj 
1659*38fd1498Szrj   /* Count all solitary classes before non-solitary ones.  */
1660*38fd1498Szrj   t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1661*38fd1498Szrj        - (reg_class_size[(int) rld[r1].rclass] == 1));
1662*38fd1498Szrj   if (t != 0)
1663*38fd1498Szrj     return t;
1664*38fd1498Szrj 
1665*38fd1498Szrj   /* Aside from solitaires, consider all multi-reg groups first.  */
1666*38fd1498Szrj   t = rld[r2].nregs - rld[r1].nregs;
1667*38fd1498Szrj   if (t != 0)
1668*38fd1498Szrj     return t;
1669*38fd1498Szrj 
1670*38fd1498Szrj   /* Consider reloads in order of increasing reg-class number.  */
1671*38fd1498Szrj   t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1672*38fd1498Szrj   if (t != 0)
1673*38fd1498Szrj     return t;
1674*38fd1498Szrj 
1675*38fd1498Szrj   /* If reloads are equally urgent, sort by reload number,
1676*38fd1498Szrj      so that the results of qsort leave nothing to chance.  */
1677*38fd1498Szrj   return r1 - r2;
1678*38fd1498Szrj }
1679*38fd1498Szrj 
1680*38fd1498Szrj /* The cost of spilling each hard reg.  */
1681*38fd1498Szrj static int spill_cost[FIRST_PSEUDO_REGISTER];
1682*38fd1498Szrj 
1683*38fd1498Szrj /* When spilling multiple hard registers, we use SPILL_COST for the first
1684*38fd1498Szrj    spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
1685*38fd1498Szrj    is accumulated only on the first hard reg of a multi-reg pseudo.  */
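/* For instance, if a pseudo allocated to hard regs 3 and 4 is live across
   the insn, count_pseudo adds its frequency to spill_cost[3] and
   spill_cost[4] but to spill_add_cost[3] only, so any candidate block of
   spill registers that overlaps the pseudo is charged its frequency once.  */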
1686*38fd1498Szrj static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1687*38fd1498Szrj 
1688*38fd1498Szrj /* Map of hard regno to pseudo regno currently occupying the hard
1689*38fd1498Szrj    reg.  */
1690*38fd1498Szrj static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1691*38fd1498Szrj 
1692*38fd1498Szrj /* Update the spill cost arrays, considering that pseudo REG is live.  */
1693*38fd1498Szrj 
1694*38fd1498Szrj static void
1695*38fd1498Szrj count_pseudo (int reg)
1696*38fd1498Szrj {
1697*38fd1498Szrj   int freq = REG_FREQ (reg);
1698*38fd1498Szrj   int r = reg_renumber[reg];
1699*38fd1498Szrj   int nregs;
1700*38fd1498Szrj 
1701*38fd1498Szrj   /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
1702*38fd1498Szrj   if (ira_conflicts_p && r < 0)
1703*38fd1498Szrj     return;
1704*38fd1498Szrj 
1705*38fd1498Szrj   if (REGNO_REG_SET_P (&pseudos_counted, reg)
1706*38fd1498Szrj       || REGNO_REG_SET_P (&spilled_pseudos, reg))
1707*38fd1498Szrj     return;
1708*38fd1498Szrj 
1709*38fd1498Szrj   SET_REGNO_REG_SET (&pseudos_counted, reg);
1710*38fd1498Szrj 
1711*38fd1498Szrj   gcc_assert (r >= 0);
1712*38fd1498Szrj 
1713*38fd1498Szrj   spill_add_cost[r] += freq;
1714*38fd1498Szrj   nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
1715*38fd1498Szrj   while (nregs-- > 0)
1716*38fd1498Szrj     {
1717*38fd1498Szrj       hard_regno_to_pseudo_regno[r + nregs] = reg;
1718*38fd1498Szrj       spill_cost[r + nregs] += freq;
1719*38fd1498Szrj     }
1720*38fd1498Szrj }
1721*38fd1498Szrj 
1722*38fd1498Szrj /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1723*38fd1498Szrj    contents of BAD_SPILL_REGS for the insn described by CHAIN.  */
1724*38fd1498Szrj 
1725*38fd1498Szrj static void
1726*38fd1498Szrj order_regs_for_reload (struct insn_chain *chain)
1727*38fd1498Szrj {
1728*38fd1498Szrj   unsigned i;
1729*38fd1498Szrj   HARD_REG_SET used_by_pseudos;
1730*38fd1498Szrj   HARD_REG_SET used_by_pseudos2;
1731*38fd1498Szrj   reg_set_iterator rsi;
1732*38fd1498Szrj 
1733*38fd1498Szrj   COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1734*38fd1498Szrj 
1735*38fd1498Szrj   memset (spill_cost, 0, sizeof spill_cost);
1736*38fd1498Szrj   memset (spill_add_cost, 0, sizeof spill_add_cost);
1737*38fd1498Szrj   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1738*38fd1498Szrj     hard_regno_to_pseudo_regno[i] = -1;
1739*38fd1498Szrj 
1740*38fd1498Szrj   /* Count number of uses of each hard reg by pseudo regs allocated to it
1741*38fd1498Szrj      and then order them by decreasing use.  First exclude hard registers
1742*38fd1498Szrj      that are live in or across this insn.  */
1743*38fd1498Szrj 
1744*38fd1498Szrj   REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1745*38fd1498Szrj   REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1746*38fd1498Szrj   IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1747*38fd1498Szrj   IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1748*38fd1498Szrj 
1749*38fd1498Szrj   /* Now find out which pseudos are allocated to it, and update
1750*38fd1498Szrj      hard_reg_n_uses.  */
1751*38fd1498Szrj   CLEAR_REG_SET (&pseudos_counted);
1752*38fd1498Szrj 
1753*38fd1498Szrj   EXECUTE_IF_SET_IN_REG_SET
1754*38fd1498Szrj     (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1755*38fd1498Szrj     {
1756*38fd1498Szrj       count_pseudo (i);
1757*38fd1498Szrj     }
1758*38fd1498Szrj   EXECUTE_IF_SET_IN_REG_SET
1759*38fd1498Szrj     (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1760*38fd1498Szrj     {
1761*38fd1498Szrj       count_pseudo (i);
1762*38fd1498Szrj     }
1763*38fd1498Szrj   CLEAR_REG_SET (&pseudos_counted);
1764*38fd1498Szrj }
1765*38fd1498Szrj 
1766*38fd1498Szrj /* Vector of reload-numbers showing the order in which the reloads should
1767*38fd1498Szrj    be processed.  */
1768*38fd1498Szrj static short reload_order[MAX_RELOADS];
1769*38fd1498Szrj 
1770*38fd1498Szrj /* This is used to keep track of the spill regs used in one insn.  */
1771*38fd1498Szrj static HARD_REG_SET used_spill_regs_local;
1772*38fd1498Szrj 
1773*38fd1498Szrj /* We decided to spill hard register SPILLED, which occupies SPILLED_NREGS
1774*38fd1498Szrj    registers.  Determine how pseudo REG, which is live during the insn,
1775*38fd1498Szrj    is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1776*38fd1498Szrj    update SPILL_COST/SPILL_ADD_COST.  */
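/* The test below treats [SPILLED, SPILLED + SPILLED_NREGS) and
   [R, R + NREGS) as half-open ranges; pseudos whose hard registers do not
   intersect the spilled block are left untouched.  */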
1777*38fd1498Szrj 
1778*38fd1498Szrj static void
1779*38fd1498Szrj count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1780*38fd1498Szrj {
1781*38fd1498Szrj   int freq = REG_FREQ (reg);
1782*38fd1498Szrj   int r = reg_renumber[reg];
1783*38fd1498Szrj   int nregs;
1784*38fd1498Szrj 
1785*38fd1498Szrj   /* Ignore spilled pseudo-registers which can be here only if IRA is used.  */
1786*38fd1498Szrj   if (ira_conflicts_p && r < 0)
1787*38fd1498Szrj     return;
1788*38fd1498Szrj 
1789*38fd1498Szrj   gcc_assert (r >= 0);
1790*38fd1498Szrj 
1791*38fd1498Szrj   nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
1792*38fd1498Szrj 
1793*38fd1498Szrj   if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1794*38fd1498Szrj       || spilled + spilled_nregs <= r || r + nregs <= spilled)
1795*38fd1498Szrj     return;
1796*38fd1498Szrj 
1797*38fd1498Szrj   SET_REGNO_REG_SET (&spilled_pseudos, reg);
1798*38fd1498Szrj 
1799*38fd1498Szrj   spill_add_cost[r] -= freq;
1800*38fd1498Szrj   while (nregs-- > 0)
1801*38fd1498Szrj     {
1802*38fd1498Szrj       hard_regno_to_pseudo_regno[r + nregs] = -1;
1803*38fd1498Szrj       spill_cost[r + nregs] -= freq;
1804*38fd1498Szrj     }
1805*38fd1498Szrj }
1806*38fd1498Szrj 
1807*38fd1498Szrj /* Find reload register to use for reload number ORDER.  */
1808*38fd1498Szrj 
1809*38fd1498Szrj static int
1810*38fd1498Szrj find_reg (struct insn_chain *chain, int order)
1811*38fd1498Szrj {
1812*38fd1498Szrj   int rnum = reload_order[order];
1813*38fd1498Szrj   struct reload *rl = rld + rnum;
1814*38fd1498Szrj   int best_cost = INT_MAX;
1815*38fd1498Szrj   int best_reg = -1;
1816*38fd1498Szrj   unsigned int i, j, n;
1817*38fd1498Szrj   int k;
1818*38fd1498Szrj   HARD_REG_SET not_usable;
1819*38fd1498Szrj   HARD_REG_SET used_by_other_reload;
1820*38fd1498Szrj   reg_set_iterator rsi;
1821*38fd1498Szrj   static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1822*38fd1498Szrj   static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1823*38fd1498Szrj 
1824*38fd1498Szrj   COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1825*38fd1498Szrj   IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1826*38fd1498Szrj   IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);
1827*38fd1498Szrj 
1828*38fd1498Szrj   CLEAR_HARD_REG_SET (used_by_other_reload);
1829*38fd1498Szrj   for (k = 0; k < order; k++)
1830*38fd1498Szrj     {
1831*38fd1498Szrj       int other = reload_order[k];
1832*38fd1498Szrj 
1833*38fd1498Szrj       if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1834*38fd1498Szrj 	for (j = 0; j < rld[other].nregs; j++)
1835*38fd1498Szrj 	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1836*38fd1498Szrj     }
1837*38fd1498Szrj 
1838*38fd1498Szrj   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1839*38fd1498Szrj     {
1840*38fd1498Szrj #ifdef REG_ALLOC_ORDER
1841*38fd1498Szrj       unsigned int regno = reg_alloc_order[i];
1842*38fd1498Szrj #else
1843*38fd1498Szrj       unsigned int regno = i;
1844*38fd1498Szrj #endif
1845*38fd1498Szrj 
1846*38fd1498Szrj       if (! TEST_HARD_REG_BIT (not_usable, regno)
1847*38fd1498Szrj 	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1848*38fd1498Szrj 	  && targetm.hard_regno_mode_ok (regno, rl->mode))
1849*38fd1498Szrj 	{
1850*38fd1498Szrj 	  int this_cost = spill_cost[regno];
1851*38fd1498Szrj 	  int ok = 1;
1852*38fd1498Szrj 	  unsigned int this_nregs = hard_regno_nregs (regno, rl->mode);
1853*38fd1498Szrj 
1854*38fd1498Szrj 	  for (j = 1; j < this_nregs; j++)
1855*38fd1498Szrj 	    {
1856*38fd1498Szrj 	      this_cost += spill_add_cost[regno + j];
1857*38fd1498Szrj 	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1858*38fd1498Szrj 		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1859*38fd1498Szrj 		ok = 0;
1860*38fd1498Szrj 	    }
1861*38fd1498Szrj 	  if (! ok)
1862*38fd1498Szrj 	    continue;
1863*38fd1498Szrj 
1864*38fd1498Szrj 	  if (ira_conflicts_p)
1865*38fd1498Szrj 	    {
1866*38fd1498Szrj 	      /* Ask IRA to find a better pseudo-register for
1867*38fd1498Szrj 		 spilling.  */
1868*38fd1498Szrj 	      for (n = j = 0; j < this_nregs; j++)
1869*38fd1498Szrj 		{
1870*38fd1498Szrj 		  int r = hard_regno_to_pseudo_regno[regno + j];
1871*38fd1498Szrj 
1872*38fd1498Szrj 		  if (r < 0)
1873*38fd1498Szrj 		    continue;
1874*38fd1498Szrj 		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
1875*38fd1498Szrj 		    regno_pseudo_regs[n++] = r;
1876*38fd1498Szrj 		}
1877*38fd1498Szrj 	      regno_pseudo_regs[n++] = -1;
1878*38fd1498Szrj 	      if (best_reg < 0
1879*38fd1498Szrj 		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
1880*38fd1498Szrj 						      best_regno_pseudo_regs,
1881*38fd1498Szrj 						      rl->in, rl->out,
1882*38fd1498Szrj 						      chain->insn))
1883*38fd1498Szrj 		{
1884*38fd1498Szrj 		  best_reg = regno;
1885*38fd1498Szrj 		  for (j = 0;; j++)
1886*38fd1498Szrj 		    {
1887*38fd1498Szrj 		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1888*38fd1498Szrj 		      if (regno_pseudo_regs[j] < 0)
1889*38fd1498Szrj 			break;
1890*38fd1498Szrj 		    }
1891*38fd1498Szrj 		}
1892*38fd1498Szrj 	      continue;
1893*38fd1498Szrj 	    }
1894*38fd1498Szrj 
1895*38fd1498Szrj 	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1896*38fd1498Szrj 	    this_cost--;
1897*38fd1498Szrj 	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1898*38fd1498Szrj 	    this_cost--;
1899*38fd1498Szrj 	  if (this_cost < best_cost
1900*38fd1498Szrj 	      /* Among registers with equal cost, prefer caller-saved ones, or
1901*38fd1498Szrj 		 use REG_ALLOC_ORDER if it is defined.  */
1902*38fd1498Szrj 	      || (this_cost == best_cost
1903*38fd1498Szrj #ifdef REG_ALLOC_ORDER
1904*38fd1498Szrj 		  && (inv_reg_alloc_order[regno]
1905*38fd1498Szrj 		      < inv_reg_alloc_order[best_reg])
1906*38fd1498Szrj #else
1907*38fd1498Szrj 		  && call_used_regs[regno]
1908*38fd1498Szrj 		  && ! call_used_regs[best_reg]
1909*38fd1498Szrj #endif
1910*38fd1498Szrj 		  ))
1911*38fd1498Szrj 	    {
1912*38fd1498Szrj 	      best_reg = regno;
1913*38fd1498Szrj 	      best_cost = this_cost;
1914*38fd1498Szrj 	    }
1915*38fd1498Szrj 	}
1916*38fd1498Szrj     }
1917*38fd1498Szrj   if (best_reg == -1)
1918*38fd1498Szrj     return 0;
1919*38fd1498Szrj 
1920*38fd1498Szrj   if (dump_file)
1921*38fd1498Szrj     fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1922*38fd1498Szrj 
1923*38fd1498Szrj   rl->nregs = hard_regno_nregs (best_reg, rl->mode);
1924*38fd1498Szrj   rl->regno = best_reg;
1925*38fd1498Szrj 
1926*38fd1498Szrj   EXECUTE_IF_SET_IN_REG_SET
1927*38fd1498Szrj     (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1928*38fd1498Szrj     {
1929*38fd1498Szrj       count_spilled_pseudo (best_reg, rl->nregs, j);
1930*38fd1498Szrj     }
1931*38fd1498Szrj 
1932*38fd1498Szrj   EXECUTE_IF_SET_IN_REG_SET
1933*38fd1498Szrj     (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1934*38fd1498Szrj     {
1935*38fd1498Szrj       count_spilled_pseudo (best_reg, rl->nregs, j);
1936*38fd1498Szrj     }
1937*38fd1498Szrj 
1938*38fd1498Szrj   for (i = 0; i < rl->nregs; i++)
1939*38fd1498Szrj     {
1940*38fd1498Szrj       gcc_assert (spill_cost[best_reg + i] == 0);
1941*38fd1498Szrj       gcc_assert (spill_add_cost[best_reg + i] == 0);
1942*38fd1498Szrj       gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
1943*38fd1498Szrj       SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1944*38fd1498Szrj     }
1945*38fd1498Szrj   return 1;
1946*38fd1498Szrj }
1947*38fd1498Szrj 
1948*38fd1498Szrj /* Find more reload regs to satisfy the remaining need of an insn, which
1949*38fd1498Szrj    is given by CHAIN.
1950*38fd1498Szrj    Do it by ascending class number, since otherwise a reg
1951*38fd1498Szrj    might be spilled for a big class and might fail to count
1952*38fd1498Szrj    for a smaller class even though it belongs to that class.  */
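/* For example, a reload restricted to a single-register class must claim
   that register before a GENERAL_REGS reload gets a chance to take it.  */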
1953*38fd1498Szrj 
1954*38fd1498Szrj static void
1955*38fd1498Szrj find_reload_regs (struct insn_chain *chain)
1956*38fd1498Szrj {
1957*38fd1498Szrj   int i;
1958*38fd1498Szrj 
1959*38fd1498Szrj   /* In order to be certain of getting the registers we need,
1960*38fd1498Szrj      we must sort the reloads into order of increasing register class.
1961*38fd1498Szrj      Then our grabbing of reload registers will parallel the process
1962*38fd1498Szrj      that provided the reload registers.  */
1963*38fd1498Szrj   for (i = 0; i < chain->n_reloads; i++)
1964*38fd1498Szrj     {
1965*38fd1498Szrj       /* Show whether this reload already has a hard reg.  */
1966*38fd1498Szrj       if (chain->rld[i].reg_rtx)
1967*38fd1498Szrj 	{
1968*38fd1498Szrj 	  chain->rld[i].regno = REGNO (chain->rld[i].reg_rtx);
1969*38fd1498Szrj 	  chain->rld[i].nregs = REG_NREGS (chain->rld[i].reg_rtx);
1970*38fd1498Szrj 	}
1971*38fd1498Szrj       else
1972*38fd1498Szrj 	chain->rld[i].regno = -1;
1973*38fd1498Szrj       reload_order[i] = i;
1974*38fd1498Szrj     }
1975*38fd1498Szrj 
1976*38fd1498Szrj   n_reloads = chain->n_reloads;
1977*38fd1498Szrj   memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1978*38fd1498Szrj 
1979*38fd1498Szrj   CLEAR_HARD_REG_SET (used_spill_regs_local);
1980*38fd1498Szrj 
1981*38fd1498Szrj   if (dump_file)
1982*38fd1498Szrj     fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1983*38fd1498Szrj 
1984*38fd1498Szrj   qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1985*38fd1498Szrj 
1986*38fd1498Szrj   /* Compute the order of preference for hard registers to spill.  */
1987*38fd1498Szrj 
1988*38fd1498Szrj   order_regs_for_reload (chain);
1989*38fd1498Szrj 
1990*38fd1498Szrj   for (i = 0; i < n_reloads; i++)
1991*38fd1498Szrj     {
1992*38fd1498Szrj       int r = reload_order[i];
1993*38fd1498Szrj 
1994*38fd1498Szrj       /* Ignore reloads that got marked inoperative.  */
1995*38fd1498Szrj       if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1996*38fd1498Szrj 	  && ! rld[r].optional
1997*38fd1498Szrj 	  && rld[r].regno == -1)
1998*38fd1498Szrj 	if (! find_reg (chain, i))
1999*38fd1498Szrj 	  {
2000*38fd1498Szrj 	    if (dump_file)
2001*38fd1498Szrj 	      fprintf (dump_file, "reload failure for reload %d\n", r);
2002*38fd1498Szrj 	    spill_failure (chain->insn, rld[r].rclass);
2003*38fd1498Szrj 	    failure = 1;
2004*38fd1498Szrj 	    return;
2005*38fd1498Szrj 	  }
2006*38fd1498Szrj     }
2007*38fd1498Szrj 
2008*38fd1498Szrj   COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2009*38fd1498Szrj   IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2010*38fd1498Szrj 
2011*38fd1498Szrj   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2012*38fd1498Szrj }
2013*38fd1498Szrj 
2014*38fd1498Szrj static void
2015*38fd1498Szrj select_reload_regs (void)
2016*38fd1498Szrj {
2017*38fd1498Szrj   struct insn_chain *chain;
2018*38fd1498Szrj 
2019*38fd1498Szrj   /* Try to satisfy the needs for each insn.  */
2020*38fd1498Szrj   for (chain = insns_need_reload; chain != 0;
2021*38fd1498Szrj        chain = chain->next_need_reload)
2022*38fd1498Szrj     find_reload_regs (chain);
2023*38fd1498Szrj }
2024*38fd1498Szrj 
2025*38fd1498Szrj /* Delete all insns that were inserted by emit_caller_save_insns during
2026*38fd1498Szrj    this iteration.  */
2027*38fd1498Szrj static void
2028*38fd1498Szrj delete_caller_save_insns (void)
2029*38fd1498Szrj {
2030*38fd1498Szrj   struct insn_chain *c = reload_insn_chain;
2031*38fd1498Szrj 
2032*38fd1498Szrj   while (c != 0)
2033*38fd1498Szrj     {
2034*38fd1498Szrj       while (c != 0 && c->is_caller_save_insn)
2035*38fd1498Szrj 	{
2036*38fd1498Szrj 	  struct insn_chain *next = c->next;
2037*38fd1498Szrj 	  rtx_insn *insn = c->insn;
2038*38fd1498Szrj 
2039*38fd1498Szrj 	  if (c == reload_insn_chain)
2040*38fd1498Szrj 	    reload_insn_chain = next;
2041*38fd1498Szrj 	  delete_insn (insn);
2042*38fd1498Szrj 
2043*38fd1498Szrj 	  if (next)
2044*38fd1498Szrj 	    next->prev = c->prev;
2045*38fd1498Szrj 	  if (c->prev)
2046*38fd1498Szrj 	    c->prev->next = next;
2047*38fd1498Szrj 	  c->next = unused_insn_chains;
2048*38fd1498Szrj 	  unused_insn_chains = c;
2049*38fd1498Szrj 	  c = next;
2050*38fd1498Szrj 	}
2051*38fd1498Szrj       if (c != 0)
2052*38fd1498Szrj 	c = c->next;
2053*38fd1498Szrj     }
2054*38fd1498Szrj }
2055*38fd1498Szrj 
2056*38fd1498Szrj /* Handle the failure to find a register to spill.
2057*38fd1498Szrj    INSN should be one of the insns which needed this particular spill reg.  */
2058*38fd1498Szrj 
2059*38fd1498Szrj static void
2060*38fd1498Szrj spill_failure (rtx_insn *insn, enum reg_class rclass)
2061*38fd1498Szrj {
2062*38fd1498Szrj   if (asm_noperands (PATTERN (insn)) >= 0)
2063*38fd1498Szrj     error_for_asm (insn, "can%'t find a register in class %qs while "
2064*38fd1498Szrj 		   "reloading %<asm%>",
2065*38fd1498Szrj 		   reg_class_names[rclass]);
2066*38fd1498Szrj   else
2067*38fd1498Szrj     {
2068*38fd1498Szrj       error ("unable to find a register to spill in class %qs",
2069*38fd1498Szrj 	     reg_class_names[rclass]);
2070*38fd1498Szrj 
2071*38fd1498Szrj       if (dump_file)
2072*38fd1498Szrj 	{
2073*38fd1498Szrj 	  fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2074*38fd1498Szrj 	  debug_reload_to_stream (dump_file);
2075*38fd1498Szrj 	}
2076*38fd1498Szrj       fatal_insn ("this is the insn:", insn);
2077*38fd1498Szrj     }
2078*38fd1498Szrj }
2079*38fd1498Szrj 
2080*38fd1498Szrj /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2081*38fd1498Szrj    data that is dead in INSN.  */
2082*38fd1498Szrj 
2083*38fd1498Szrj static void
2084*38fd1498Szrj delete_dead_insn (rtx_insn *insn)
2085*38fd1498Szrj {
2086*38fd1498Szrj   rtx_insn *prev = prev_active_insn (insn);
2087*38fd1498Szrj   rtx prev_dest;
2088*38fd1498Szrj 
2089*38fd1498Szrj   /* If the previous insn sets a register that dies in our insn make
2090*38fd1498Szrj      a note that we want to run DCE immediately after reload.
2091*38fd1498Szrj 
2092*38fd1498Szrj      We used to delete the previous insn & recurse, but that's wrong for
2093*38fd1498Szrj      block local equivalences.  Instead of trying to figure out the exact
2094*38fd1498Szrj      circumstances where we can delete the potentially dead insns, just
2095*38fd1498Szrj      let DCE do the job.  */
2096*38fd1498Szrj   if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
2097*38fd1498Szrj       && GET_CODE (PATTERN (prev)) == SET
2098*38fd1498Szrj       && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2099*38fd1498Szrj       && reg_mentioned_p (prev_dest, PATTERN (insn))
2100*38fd1498Szrj       && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2101*38fd1498Szrj       && ! side_effects_p (SET_SRC (PATTERN (prev))))
2102*38fd1498Szrj     need_dce = 1;
2103*38fd1498Szrj 
2104*38fd1498Szrj   SET_INSN_DELETED (insn);
2105*38fd1498Szrj }
2106*38fd1498Szrj 
2107*38fd1498Szrj /* Modify the home of pseudo-reg I.
2108*38fd1498Szrj    The new home is present in reg_renumber[I].
2109*38fd1498Szrj 
2110*38fd1498Szrj    FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2111*38fd1498Szrj    or it may be -1, meaning there is none or it is not relevant.
2112*38fd1498Szrj    This is used so that all pseudos spilled from a given hard reg
2113*38fd1498Szrj    can share one stack slot.  */
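/* The shared slots are recorded in spill_stack_slot[FROM_REG]; when sharing
   is allowed, the slot found there is reused or grown rather than
   allocating a fresh one.  */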
2114*38fd1498Szrj 
2115*38fd1498Szrj static void
2116*38fd1498Szrj alter_reg (int i, int from_reg, bool dont_share_p)
2117*38fd1498Szrj {
2118*38fd1498Szrj   /* When outputting an inline function, this can happen
2119*38fd1498Szrj      for a reg that isn't actually used.  */
2120*38fd1498Szrj   if (regno_reg_rtx[i] == 0)
2121*38fd1498Szrj     return;
2122*38fd1498Szrj 
2123*38fd1498Szrj   /* If the reg got changed to a MEM at rtl-generation time,
2124*38fd1498Szrj      ignore it.  */
2125*38fd1498Szrj   if (!REG_P (regno_reg_rtx[i]))
2126*38fd1498Szrj     return;
2127*38fd1498Szrj 
2128*38fd1498Szrj   /* Modify the reg-rtx to contain the new hard reg
2129*38fd1498Szrj      number or else to contain its pseudo reg number.  */
2130*38fd1498Szrj   SET_REGNO (regno_reg_rtx[i],
2131*38fd1498Szrj 	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2132*38fd1498Szrj 
2133*38fd1498Szrj   /* If we have a pseudo that is needed but has no hard reg or equivalent,
2134*38fd1498Szrj      allocate a stack slot for it.  */
2135*38fd1498Szrj 
2136*38fd1498Szrj   if (reg_renumber[i] < 0
2137*38fd1498Szrj       && REG_N_REFS (i) > 0
2138*38fd1498Szrj       && reg_equiv_constant (i) == 0
2139*38fd1498Szrj       && (reg_equiv_invariant (i) == 0
2140*38fd1498Szrj 	  || reg_equiv_init (i) == 0)
2141*38fd1498Szrj       && reg_equiv_memory_loc (i) == 0)
2142*38fd1498Szrj     {
2143*38fd1498Szrj       rtx x = NULL_RTX;
2144*38fd1498Szrj       machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2145*38fd1498Szrj       poly_uint64 inherent_size = GET_MODE_SIZE (mode);
2146*38fd1498Szrj       unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2147*38fd1498Szrj       machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]);
2148*38fd1498Szrj       poly_uint64 total_size = GET_MODE_SIZE (wider_mode);
2149*38fd1498Szrj       /* ??? Seems strange to derive the minimum alignment from the size,
2150*38fd1498Szrj 	 but that's the traditional behavior.  For polynomial-size modes,
2151*38fd1498Szrj 	 the natural extension is to use the minimum possible size.  */
2152*38fd1498Szrj       unsigned int min_align
2153*38fd1498Szrj 	= constant_lower_bound (GET_MODE_BITSIZE (reg_max_ref_mode[i]));
2154*38fd1498Szrj       poly_int64 adjust = 0;
2155*38fd1498Szrj 
2156*38fd1498Szrj       something_was_spilled = true;
2157*38fd1498Szrj 
2158*38fd1498Szrj       if (ira_conflicts_p)
2159*38fd1498Szrj 	{
2160*38fd1498Szrj 	  /* Mark the spill for IRA.  */
2161*38fd1498Szrj 	  SET_REGNO_REG_SET (&spilled_pseudos, i);
2162*38fd1498Szrj 	  if (!dont_share_p)
2163*38fd1498Szrj 	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
2164*38fd1498Szrj 	}
2165*38fd1498Szrj 
2166*38fd1498Szrj       if (x)
2167*38fd1498Szrj 	;
2168*38fd1498Szrj 
2169*38fd1498Szrj       /* Each pseudo reg has an inherent size which comes from its own mode,
2170*38fd1498Szrj 	 and a total size which provides room for paradoxical subregs
2171*38fd1498Szrj 	 which refer to the pseudo reg in wider modes.
2172*38fd1498Szrj 
2173*38fd1498Szrj 	 We can use a slot already allocated if it provides both
2174*38fd1498Szrj 	 enough inherent space and enough total space.
2175*38fd1498Szrj 	 Otherwise, we allocate a new slot, making sure that it has no less
2176*38fd1498Szrj 	 inherent space, and no less total space, than the previous slot.  */
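      /* For example, an SImode pseudo that is also accessed through a
	 paradoxical DImode subreg has an inherent size of 4 bytes but a
	 total size of 8, so its slot must provide at least 8 bytes.  */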
2177*38fd1498Szrj       else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
2178*38fd1498Szrj 	{
2179*38fd1498Szrj 	  rtx stack_slot;
2180*38fd1498Szrj 
2181*38fd1498Szrj 	  /* The sizes are taken from a subreg operation, which guarantees
2182*38fd1498Szrj 	     that they're ordered.  */
2183*38fd1498Szrj 	  gcc_checking_assert (ordered_p (total_size, inherent_size));
2184*38fd1498Szrj 
2185*38fd1498Szrj 	  /* No known place to spill from => no slot to reuse.  */
2186*38fd1498Szrj 	  x = assign_stack_local (mode, total_size,
2187*38fd1498Szrj 				  min_align > inherent_align
2188*38fd1498Szrj 				  || maybe_gt (total_size, inherent_size)
2189*38fd1498Szrj 				  ? -1 : 0);
2190*38fd1498Szrj 
2191*38fd1498Szrj 	  stack_slot = x;
2192*38fd1498Szrj 
2193*38fd1498Szrj 	  /* Cancel the big-endian correction done in assign_stack_local.
2194*38fd1498Szrj 	     Get the address of the beginning of the slot.  This is so we
2195*38fd1498Szrj 	     can do a big-endian correction unconditionally below.  */
2196*38fd1498Szrj 	  if (BYTES_BIG_ENDIAN)
2197*38fd1498Szrj 	    {
2198*38fd1498Szrj 	      adjust = inherent_size - total_size;
2199*38fd1498Szrj 	      if (maybe_ne (adjust, 0))
2200*38fd1498Szrj 		{
2201*38fd1498Szrj 		  poly_uint64 total_bits = total_size * BITS_PER_UNIT;
2202*38fd1498Szrj 		  machine_mode mem_mode
2203*38fd1498Szrj 		    = int_mode_for_size (total_bits, 1).else_blk ();
2204*38fd1498Szrj 		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
2205*38fd1498Szrj 		}
2206*38fd1498Szrj 	    }
2207*38fd1498Szrj 
2208*38fd1498Szrj 	  if (! dont_share_p && ira_conflicts_p)
2209*38fd1498Szrj 	    /* Inform IRA about the allocation of a new stack slot.  */
2210*38fd1498Szrj 	    ira_mark_new_stack_slot (stack_slot, i, total_size);
2211*38fd1498Szrj 	}
2212*38fd1498Szrj 
2213*38fd1498Szrj       /* Reuse a stack slot if possible.  */
2214*38fd1498Szrj       else if (spill_stack_slot[from_reg] != 0
2215*38fd1498Szrj 	       && known_ge (spill_stack_slot_width[from_reg], total_size)
2216*38fd1498Szrj 	       && known_ge (GET_MODE_SIZE
2217*38fd1498Szrj 			    (GET_MODE (spill_stack_slot[from_reg])),
2218*38fd1498Szrj 			    inherent_size)
2219*38fd1498Szrj 	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2220*38fd1498Szrj 	x = spill_stack_slot[from_reg];
2221*38fd1498Szrj 
2222*38fd1498Szrj       /* Allocate a bigger slot.  */
2223*38fd1498Szrj       else
2224*38fd1498Szrj 	{
2225*38fd1498Szrj 	  /* Compute maximum size needed, both for inherent size
2226*38fd1498Szrj 	     and for total size.  */
2227*38fd1498Szrj 	  rtx stack_slot;
2228*38fd1498Szrj 
2229*38fd1498Szrj 	  if (spill_stack_slot[from_reg])
2230*38fd1498Szrj 	    {
2231*38fd1498Szrj 	      if (partial_subreg_p (mode,
2232*38fd1498Szrj 				    GET_MODE (spill_stack_slot[from_reg])))
2233*38fd1498Szrj 		mode = GET_MODE (spill_stack_slot[from_reg]);
2234*38fd1498Szrj 	      total_size = ordered_max (total_size,
2235*38fd1498Szrj 					spill_stack_slot_width[from_reg]);
2236*38fd1498Szrj 	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2237*38fd1498Szrj 		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2238*38fd1498Szrj 	    }
2239*38fd1498Szrj 
2240*38fd1498Szrj 	  /* The sizes are taken from a subreg operation, which guarantees
2241*38fd1498Szrj 	     that they're ordered.  */
2242*38fd1498Szrj 	  gcc_checking_assert (ordered_p (total_size, inherent_size));
2243*38fd1498Szrj 
2244*38fd1498Szrj 	  /* Make a slot with that size.  */
2245*38fd1498Szrj 	  x = assign_stack_local (mode, total_size,
2246*38fd1498Szrj 				  min_align > inherent_align
2247*38fd1498Szrj 				  || maybe_gt (total_size, inherent_size)
2248*38fd1498Szrj 				  ? -1 : 0);
2249*38fd1498Szrj 	  stack_slot = x;
2250*38fd1498Szrj 
2251*38fd1498Szrj 	  /* Cancel the big-endian correction done in assign_stack_local.
2252*38fd1498Szrj 	     Get the address of the beginning of the slot.  This is so we
2253*38fd1498Szrj 	     can do a big-endian correction unconditionally below.  */
2254*38fd1498Szrj 	  if (BYTES_BIG_ENDIAN)
2255*38fd1498Szrj 	    {
2256*38fd1498Szrj 	      adjust = GET_MODE_SIZE (mode) - total_size;
2257*38fd1498Szrj 	      if (maybe_ne (adjust, 0))
2258*38fd1498Szrj 		{
2259*38fd1498Szrj 		  poly_uint64 total_bits = total_size * BITS_PER_UNIT;
2260*38fd1498Szrj 		  machine_mode mem_mode
2261*38fd1498Szrj 		    = int_mode_for_size (total_bits, 1).else_blk ();
2262*38fd1498Szrj 		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
2263*38fd1498Szrj 		}
2264*38fd1498Szrj 	    }
2265*38fd1498Szrj 
2266*38fd1498Szrj 	  spill_stack_slot[from_reg] = stack_slot;
2267*38fd1498Szrj 	  spill_stack_slot_width[from_reg] = total_size;
2268*38fd1498Szrj 	}
2269*38fd1498Szrj 
2270*38fd1498Szrj       /* On a big endian machine, the "address" of the slot
2271*38fd1498Szrj 	 is the address of the low part that fits its inherent mode.  */
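      /* Illustrative case: with, say, a 4-byte inherent size spilled into
	 an 8-byte slot, this adds 4 on a typical big-endian target and 0
	 on a little-endian one.  */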
2272*38fd1498Szrj       adjust += subreg_size_lowpart_offset (inherent_size, total_size);
2273*38fd1498Szrj 
2274*38fd1498Szrj       /* If we have any adjustment to make, or if the stack slot is the
2275*38fd1498Szrj 	 wrong mode, make a new stack slot.  */
2276*38fd1498Szrj       x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2277*38fd1498Szrj 
2278*38fd1498Szrj       /* Set all of the memory attributes as appropriate for a spill.  */
2279*38fd1498Szrj       set_mem_attrs_for_spill (x);
2280*38fd1498Szrj 
2281*38fd1498Szrj       /* Save the stack slot for later.  */
2282*38fd1498Szrj       reg_equiv_memory_loc (i) = x;
2283*38fd1498Szrj     }
2284*38fd1498Szrj }
2285*38fd1498Szrj 
2286*38fd1498Szrj /* Mark the slots in regs_ever_live for the hard regs used by
2287*38fd1498Szrj    pseudo-reg number REGNO, accessed in MODE.  */
2288*38fd1498Szrj 
2289*38fd1498Szrj static void
2290*38fd1498Szrj mark_home_live_1 (int regno, machine_mode mode)
2291*38fd1498Szrj {
2292*38fd1498Szrj   int i, lim;
2293*38fd1498Szrj 
2294*38fd1498Szrj   i = reg_renumber[regno];
2295*38fd1498Szrj   if (i < 0)
2296*38fd1498Szrj     return;
2297*38fd1498Szrj   lim = end_hard_regno (mode, i);
2298*38fd1498Szrj   while (i < lim)
2299*38fd1498Szrj     df_set_regs_ever_live (i++, true);
2300*38fd1498Szrj }
2301*38fd1498Szrj 
2302*38fd1498Szrj /* Mark the slots in regs_ever_live for the hard regs
2303*38fd1498Szrj    used by pseudo-reg number REGNO.  */
2304*38fd1498Szrj 
2305*38fd1498Szrj void
2306*38fd1498Szrj mark_home_live (int regno)
2307*38fd1498Szrj {
2308*38fd1498Szrj   if (reg_renumber[regno] >= 0)
2309*38fd1498Szrj     mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2310*38fd1498Szrj }
2311*38fd1498Szrj 
2312*38fd1498Szrj /* This function handles the tracking of elimination offsets around branches.
2313*38fd1498Szrj 
2314*38fd1498Szrj    X is a piece of RTL being scanned.
2315*38fd1498Szrj 
2316*38fd1498Szrj    INSN is the insn that it came from, if any.
2317*38fd1498Szrj 
2318*38fd1498Szrj    INITIAL_P is nonzero if we are to set the offset to be the initial
2319*38fd1498Szrj    offset and zero if we are setting the offset of the label to be the
2320*38fd1498Szrj    current offset.  */
2321*38fd1498Szrj 
2322*38fd1498Szrj static void
2323*38fd1498Szrj set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
2324*38fd1498Szrj {
2325*38fd1498Szrj   enum rtx_code code = GET_CODE (x);
2326*38fd1498Szrj   rtx tem;
2327*38fd1498Szrj   unsigned int i;
2328*38fd1498Szrj   struct elim_table *p;
2329*38fd1498Szrj 
2330*38fd1498Szrj   switch (code)
2331*38fd1498Szrj     {
2332*38fd1498Szrj     case LABEL_REF:
2333*38fd1498Szrj       if (LABEL_REF_NONLOCAL_P (x))
2334*38fd1498Szrj 	return;
2335*38fd1498Szrj 
2336*38fd1498Szrj       x = label_ref_label (x);
2337*38fd1498Szrj 
2338*38fd1498Szrj       /* fall through */
2339*38fd1498Szrj 
2340*38fd1498Szrj     case CODE_LABEL:
2341*38fd1498Szrj       /* If we know nothing about this label, set the desired offsets.  Note
2342*38fd1498Szrj 	 that this sets the offset at a label to be the offset before a label
2343*38fd1498Szrj 	 if we don't know anything about the label.  This is not correct for
2344*38fd1498Szrj 	 the label after a BARRIER, but is the best guess we can make.  If
2345*38fd1498Szrj 	 we guessed wrong, we will suppress an elimination that might have
2346*38fd1498Szrj 	 been possible had we been able to guess correctly.  */
2347*38fd1498Szrj 
2348*38fd1498Szrj       if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2349*38fd1498Szrj 	{
2350*38fd1498Szrj 	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2351*38fd1498Szrj 	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2352*38fd1498Szrj 	      = (initial_p ? reg_eliminate[i].initial_offset
2353*38fd1498Szrj 		 : reg_eliminate[i].offset);
2354*38fd1498Szrj 	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2355*38fd1498Szrj 	}
2356*38fd1498Szrj 
2357*38fd1498Szrj       /* Otherwise, if this is the definition of a label and it is
2358*38fd1498Szrj 	 preceded by a BARRIER, set our offsets to the known offset of
2359*38fd1498Szrj 	 that label.  */
2360*38fd1498Szrj 
2361*38fd1498Szrj       else if (x == insn
2362*38fd1498Szrj 	       && (tem = prev_nonnote_insn (insn)) != 0
2363*38fd1498Szrj 	       && BARRIER_P (tem))
2364*38fd1498Szrj 	set_offsets_for_label (insn);
2365*38fd1498Szrj       else
2366*38fd1498Szrj 	/* If neither of the above cases is true, compare each offset
2367*38fd1498Szrj 	   with those previously recorded and suppress any eliminations
2368*38fd1498Szrj 	   where the offsets disagree.  */
2369*38fd1498Szrj 
2370*38fd1498Szrj 	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2371*38fd1498Szrj 	  if (maybe_ne (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i],
2372*38fd1498Szrj 			(initial_p ? reg_eliminate[i].initial_offset
2373*38fd1498Szrj 			 : reg_eliminate[i].offset)))
2374*38fd1498Szrj 	    reg_eliminate[i].can_eliminate = 0;
2375*38fd1498Szrj 
2376*38fd1498Szrj       return;
2377*38fd1498Szrj 
2378*38fd1498Szrj     case JUMP_TABLE_DATA:
2379*38fd1498Szrj       set_label_offsets (PATTERN (insn), insn, initial_p);
2380*38fd1498Szrj       return;
2381*38fd1498Szrj 
2382*38fd1498Szrj     case JUMP_INSN:
2383*38fd1498Szrj       set_label_offsets (PATTERN (insn), insn, initial_p);
2384*38fd1498Szrj 
2385*38fd1498Szrj       /* fall through */
2386*38fd1498Szrj 
2387*38fd1498Szrj     case INSN:
2388*38fd1498Szrj     case CALL_INSN:
2389*38fd1498Szrj       /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2390*38fd1498Szrj 	 to indirectly and hence must have all eliminations at their
2391*38fd1498Szrj 	 initial offsets.  */
2392*38fd1498Szrj       for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2393*38fd1498Szrj 	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2394*38fd1498Szrj 	  set_label_offsets (XEXP (tem, 0), insn, 1);
2395*38fd1498Szrj       return;
2396*38fd1498Szrj 
2397*38fd1498Szrj     case PARALLEL:
2398*38fd1498Szrj     case ADDR_VEC:
2399*38fd1498Szrj     case ADDR_DIFF_VEC:
2400*38fd1498Szrj       /* Each of the labels in the parallel or address vector must be
2401*38fd1498Szrj 	 at their initial offsets.  We want the first field for PARALLEL
2402*38fd1498Szrj 	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */
2403*38fd1498Szrj 
2404*38fd1498Szrj       for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2405*38fd1498Szrj 	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2406*38fd1498Szrj 			   insn, initial_p);
2407*38fd1498Szrj       return;
2408*38fd1498Szrj 
2409*38fd1498Szrj     case SET:
2410*38fd1498Szrj       /* We only care about setting PC.  If the source is not RETURN,
2411*38fd1498Szrj 	 IF_THEN_ELSE, or a label, disable any eliminations not at
2412*38fd1498Szrj 	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
2413*38fd1498Szrj 	 isn't one of those possibilities.  For branches to a label,
2414*38fd1498Szrj 	 call ourselves recursively.
2415*38fd1498Szrj 
2416*38fd1498Szrj 	 Note that this can disable elimination unnecessarily when we have
2417*38fd1498Szrj 	 a non-local goto since it will look like a non-constant jump to
2418*38fd1498Szrj 	 someplace in the current function.  This isn't a significant
2419*38fd1498Szrj 	 problem since such jumps will normally occur when all elimination
2420*38fd1498Szrj 	 pairs are back to their initial offsets.  */
2421*38fd1498Szrj 
2422*38fd1498Szrj       if (SET_DEST (x) != pc_rtx)
2423*38fd1498Szrj 	return;
2424*38fd1498Szrj 
2425*38fd1498Szrj       switch (GET_CODE (SET_SRC (x)))
2426*38fd1498Szrj 	{
2427*38fd1498Szrj 	case PC:
2428*38fd1498Szrj 	case RETURN:
2429*38fd1498Szrj 	  return;
2430*38fd1498Szrj 
2431*38fd1498Szrj 	case LABEL_REF:
2432*38fd1498Szrj 	  set_label_offsets (SET_SRC (x), insn, initial_p);
2433*38fd1498Szrj 	  return;
2434*38fd1498Szrj 
2435*38fd1498Szrj 	case IF_THEN_ELSE:
2436*38fd1498Szrj 	  tem = XEXP (SET_SRC (x), 1);
2437*38fd1498Szrj 	  if (GET_CODE (tem) == LABEL_REF)
2438*38fd1498Szrj 	    set_label_offsets (label_ref_label (tem), insn, initial_p);
2439*38fd1498Szrj 	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2440*38fd1498Szrj 	    break;
2441*38fd1498Szrj 
2442*38fd1498Szrj 	  tem = XEXP (SET_SRC (x), 2);
2443*38fd1498Szrj 	  if (GET_CODE (tem) == LABEL_REF)
2444*38fd1498Szrj 	    set_label_offsets (label_ref_label (tem), insn, initial_p);
2445*38fd1498Szrj 	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2446*38fd1498Szrj 	    break;
2447*38fd1498Szrj 	  return;
2448*38fd1498Szrj 
2449*38fd1498Szrj 	default:
2450*38fd1498Szrj 	  break;
2451*38fd1498Szrj 	}
2452*38fd1498Szrj 
2453*38fd1498Szrj       /* If we reach here, all eliminations must be at their initial
2454*38fd1498Szrj 	 offset because we are doing a jump to a variable address.  */
2455*38fd1498Szrj       for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2456*38fd1498Szrj 	if (maybe_ne (p->offset, p->initial_offset))
2457*38fd1498Szrj 	  p->can_eliminate = 0;
2458*38fd1498Szrj       break;
2459*38fd1498Szrj 
2460*38fd1498Szrj     default:
2461*38fd1498Szrj       break;
2462*38fd1498Szrj     }
2463*38fd1498Szrj }
2464*38fd1498Szrj 
2465*38fd1498Szrj /* This function examines every reg that occurs in X and adjusts the
2466*38fd1498Szrj    costs for its elimination which are gathered by IRA.  INSN is the
2467*38fd1498Szrj    insn in which X occurs.  We do not recurse into MEM expressions.  */
2468*38fd1498Szrj 
2469*38fd1498Szrj static void
2470*38fd1498Szrj note_reg_elim_costly (const_rtx x, rtx insn)
2471*38fd1498Szrj {
2472*38fd1498Szrj   subrtx_iterator::array_type array;
2473*38fd1498Szrj   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
2474*38fd1498Szrj     {
2475*38fd1498Szrj       const_rtx x = *iter;
2476*38fd1498Szrj       if (MEM_P (x))
2477*38fd1498Szrj 	iter.skip_subrtxes ();
2478*38fd1498Szrj       else if (REG_P (x)
2479*38fd1498Szrj 	       && REGNO (x) >= FIRST_PSEUDO_REGISTER
2480*38fd1498Szrj 	       && reg_equiv_init (REGNO (x))
2481*38fd1498Szrj 	       && reg_equiv_invariant (REGNO (x)))
2482*38fd1498Szrj 	{
2483*38fd1498Szrj 	  rtx t = reg_equiv_invariant (REGNO (x));
2484*38fd1498Szrj 	  rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2485*38fd1498Szrj 	  int cost = set_src_cost (new_rtx, Pmode,
2486*38fd1498Szrj 				   optimize_bb_for_speed_p (elim_bb));
2487*38fd1498Szrj 	  int freq = REG_FREQ_FROM_BB (elim_bb);
2488*38fd1498Szrj 
2489*38fd1498Szrj 	  if (cost != 0)
2490*38fd1498Szrj 	    ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2491*38fd1498Szrj 	}
2492*38fd1498Szrj     }
2493*38fd1498Szrj }
2494*38fd1498Szrj 
2495*38fd1498Szrj /* Scan X and replace any eliminable registers (such as fp) with a
2496*38fd1498Szrj    replacement (such as sp), plus an offset.
2497*38fd1498Szrj 
2498*38fd1498Szrj    MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2499*38fd1498Szrj    much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2500*38fd1498Szrj    MEM, we are allowed to replace a sum of a register and the constant zero
2501*38fd1498Szrj    with the register, which we cannot do outside a MEM.  In addition, we need
2502*38fd1498Szrj    to record the fact that a register is referenced outside a MEM.
2503*38fd1498Szrj 
2504*38fd1498Szrj    If INSN is an insn, it is the insn containing X.  If we replace a REG
2505*38fd1498Szrj    in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2506*38fd1498Szrj    CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2507*38fd1498Szrj    the REG is being modified.
2508*38fd1498Szrj 
2509*38fd1498Szrj    Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2510*38fd1498Szrj    That's used when we eliminate in expressions stored in notes.
2511*38fd1498Szrj    This means, do not set ref_outside_mem even if the reference
2512*38fd1498Szrj    is outside of MEMs.
2513*38fd1498Szrj 
2514*38fd1498Szrj    If FOR_COSTS is true, we are being called before reload in order to
2515*38fd1498Szrj    estimate the costs of keeping registers with an equivalence unallocated.
2516*38fd1498Szrj 
2517*38fd1498Szrj    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2518*38fd1498Szrj    replacements done assuming all offsets are at their initial values.  If
2519*38fd1498Szrj    they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2520*38fd1498Szrj    encounter, return the actual location so that find_reloads will do
2521*38fd1498Szrj    the proper thing.  */
2522*38fd1498Szrj 
2523*38fd1498Szrj static rtx
2524*38fd1498Szrj eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
2525*38fd1498Szrj 		  bool may_use_invariant, bool for_costs)
2526*38fd1498Szrj {
2527*38fd1498Szrj   enum rtx_code code = GET_CODE (x);
2528*38fd1498Szrj   struct elim_table *ep;
2529*38fd1498Szrj   int regno;
2530*38fd1498Szrj   rtx new_rtx;
2531*38fd1498Szrj   int i, j;
2532*38fd1498Szrj   const char *fmt;
2533*38fd1498Szrj   int copied = 0;
2534*38fd1498Szrj 
2535*38fd1498Szrj   if (! current_function_decl)
2536*38fd1498Szrj     return x;
2537*38fd1498Szrj 
2538*38fd1498Szrj   switch (code)
2539*38fd1498Szrj     {
2540*38fd1498Szrj     CASE_CONST_ANY:
2541*38fd1498Szrj     case CONST:
2542*38fd1498Szrj     case SYMBOL_REF:
2543*38fd1498Szrj     case CODE_LABEL:
2544*38fd1498Szrj     case PC:
2545*38fd1498Szrj     case CC0:
2546*38fd1498Szrj     case ASM_INPUT:
2547*38fd1498Szrj     case ADDR_VEC:
2548*38fd1498Szrj     case ADDR_DIFF_VEC:
2549*38fd1498Szrj     case RETURN:
2550*38fd1498Szrj       return x;
2551*38fd1498Szrj 
2552*38fd1498Szrj     case REG:
2553*38fd1498Szrj       regno = REGNO (x);
2554*38fd1498Szrj 
2555*38fd1498Szrj       /* First handle the case where we encounter a bare register that
2556*38fd1498Szrj 	 is eliminable.  Replace it with a PLUS.  */
2557*38fd1498Szrj       if (regno < FIRST_PSEUDO_REGISTER)
2558*38fd1498Szrj 	{
2559*38fd1498Szrj 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2560*38fd1498Szrj 	       ep++)
2561*38fd1498Szrj 	    if (ep->from_rtx == x && ep->can_eliminate)
2562*38fd1498Szrj 	      return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);
2563*38fd1498Szrj 
2564*38fd1498Szrj 	}
2565*38fd1498Szrj       else if (reg_renumber && reg_renumber[regno] < 0
2566*38fd1498Szrj 	       && reg_equivs
2567*38fd1498Szrj 	       && reg_equiv_invariant (regno))
2568*38fd1498Szrj 	{
2569*38fd1498Szrj 	  if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2570*38fd1498Szrj 	    return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2571*38fd1498Szrj 			             mem_mode, insn, true, for_costs);
2572*38fd1498Szrj 	  /* There exists at least one use of REGNO that cannot be
2573*38fd1498Szrj 	     eliminated.  Prevent the defining insn from being deleted.  */
2574*38fd1498Szrj 	  reg_equiv_init (regno) = NULL;
2575*38fd1498Szrj 	  if (!for_costs)
2576*38fd1498Szrj 	    alter_reg (regno, -1, true);
2577*38fd1498Szrj 	}
2578*38fd1498Szrj       return x;
2579*38fd1498Szrj 
2580*38fd1498Szrj     /* You might think handling MINUS in a manner similar to PLUS is a
2581*38fd1498Szrj        good idea.  It is not.  It has been tried multiple times and every
2582*38fd1498Szrj        time the change has had to have been reverted.
2583*38fd1498Szrj 
2584*38fd1498Szrj        Other parts of reload know a PLUS is special (gen_reload for example)
2585*38fd1498Szrj        and require special code to handle code a reloaded PLUS operand.
2586*38fd1498Szrj        and require special code to handle a reloaded PLUS operand.
2587*38fd1498Szrj        Also consider backends where the flags register is clobbered by a
2588*38fd1498Szrj        MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2589*38fd1498Szrj        lea instruction comes to mind).  If we try to reload a MINUS, we
2590*38fd1498Szrj        may kill the flags register that was holding a useful value.
2591*38fd1498Szrj 
2592*38fd1498Szrj        So, please before trying to handle MINUS, consider reload as a
2593*38fd1498Szrj        whole instead of this little section as well as the backend issues.  */
2594*38fd1498Szrj     case PLUS:
2595*38fd1498Szrj       /* If this is the sum of an eliminable register and a constant, rework
2596*38fd1498Szrj 	 the sum.  */
2597*38fd1498Szrj       if (REG_P (XEXP (x, 0))
2598*38fd1498Szrj 	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2599*38fd1498Szrj 	  && CONSTANT_P (XEXP (x, 1)))
2600*38fd1498Szrj 	{
2601*38fd1498Szrj 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2602*38fd1498Szrj 	       ep++)
2603*38fd1498Szrj 	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2604*38fd1498Szrj 	      {
2605*38fd1498Szrj 		/* The only time we want to replace a PLUS with a REG (this
2606*38fd1498Szrj 		   occurs when the constant operand of the PLUS is the negative
2607*38fd1498Szrj 		   of the offset) is when we are inside a MEM.  We won't want
2608*38fd1498Szrj 		   to do so at other times because that would change the
2609*38fd1498Szrj 		   structure of the insn in a way that reload can't handle.
2610*38fd1498Szrj 		   We special-case the commonest situation in
2611*38fd1498Szrj 		   eliminate_regs_in_insn, so just replace a PLUS with a
2612*38fd1498Szrj 		   PLUS here, unless inside a MEM.  */
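		/* Illustrative case: when eliminating fp to sp with a
		   current offset of 8, (plus fp -8) inside a MEM collapses
		   to the bare sp, while (plus fp 4) becomes (plus sp 12).  */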
2613*38fd1498Szrj 		if (mem_mode != 0
2614*38fd1498Szrj 		    && CONST_INT_P (XEXP (x, 1))
2615*38fd1498Szrj 		    && known_eq (INTVAL (XEXP (x, 1)), -ep->previous_offset))
2616*38fd1498Szrj 		  return ep->to_rtx;
2617*38fd1498Szrj 		else
2618*38fd1498Szrj 		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
2619*38fd1498Szrj 				       plus_constant (Pmode, XEXP (x, 1),
2620*38fd1498Szrj 						      ep->previous_offset));
2621*38fd1498Szrj 	      }
2622*38fd1498Szrj 
2623*38fd1498Szrj 	  /* If the register is not eliminable, we are done since the other
2624*38fd1498Szrj 	     operand is a constant.  */
2625*38fd1498Szrj 	  return x;
2626*38fd1498Szrj 	}
2627*38fd1498Szrj 
2628*38fd1498Szrj       /* If this is part of an address, we want to bring any constant to the
2629*38fd1498Szrj 	 outermost PLUS.  We will do this by doing register replacement in
2630*38fd1498Szrj 	 our operands and seeing if a constant shows up in one of them.
2631*38fd1498Szrj 
2632*38fd1498Szrj 	 Note that there is no risk of modifying the structure of the insn,
2633*38fd1498Szrj 	 since we only get called for its operands, thus we are either
2634*38fd1498Szrj 	 modifying the address inside a MEM, or something like an address
2635*38fd1498Szrj 	 operand of a load-address insn.  */
2636*38fd1498Szrj 
2637*38fd1498Szrj       {
2638*38fd1498Szrj 	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2639*38fd1498Szrj 				     for_costs);
2640*38fd1498Szrj 	rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2641*38fd1498Szrj 				     for_costs);
2642*38fd1498Szrj 
2643*38fd1498Szrj 	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2644*38fd1498Szrj 	  {
2645*38fd1498Szrj 	    /* If one side is a PLUS and the other side is a pseudo that
2646*38fd1498Szrj 	       didn't get a hard register but has a reg_equiv_constant,
2647*38fd1498Szrj 	       we must replace the constant here since it may no longer
2648*38fd1498Szrj 	       be in the position of any operand.  */
2649*38fd1498Szrj 	    if (GET_CODE (new0) == PLUS && REG_P (new1)
2650*38fd1498Szrj 		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
2651*38fd1498Szrj 		&& reg_renumber[REGNO (new1)] < 0
2652*38fd1498Szrj 		&& reg_equivs
2653*38fd1498Szrj 		&& reg_equiv_constant (REGNO (new1)) != 0)
2654*38fd1498Szrj 	      new1 = reg_equiv_constant (REGNO (new1));
2655*38fd1498Szrj 	    else if (GET_CODE (new1) == PLUS && REG_P (new0)
2656*38fd1498Szrj 		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2657*38fd1498Szrj 		     && reg_renumber[REGNO (new0)] < 0
2658*38fd1498Szrj 		     && reg_equiv_constant (REGNO (new0)) != 0)
2659*38fd1498Szrj 	      new0 = reg_equiv_constant (REGNO (new0));
2660*38fd1498Szrj 
2661*38fd1498Szrj 	    new_rtx = form_sum (GET_MODE (x), new0, new1);
2662*38fd1498Szrj 
2663*38fd1498Szrj 	    /* As above, if we are not inside a MEM we do not want to
2664*38fd1498Szrj 	       turn a PLUS into something else.  We might try to do so here
2665*38fd1498Szrj 	       for an addition of 0 if we aren't optimizing.  */
2666*38fd1498Szrj 	    if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2667*38fd1498Szrj 	      return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2668*38fd1498Szrj 	    else
2669*38fd1498Szrj 	      return new_rtx;
2670*38fd1498Szrj 	  }
2671*38fd1498Szrj       }
2672*38fd1498Szrj       return x;
2673*38fd1498Szrj 
2674*38fd1498Szrj     case MULT:
2675*38fd1498Szrj       /* If this is the product of an eliminable register and a
2676*38fd1498Szrj 	 constant, apply the distribute law and move the constant out
2677*38fd1498Szrj 	 so that we have (plus (mult ..) ..).  This is needed in order
2678*38fd1498Szrj 	 to keep load-address insns valid.  This case is pathological.
2679*38fd1498Szrj 	 We ignore the possibility of overflow here.  */
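      /* Illustrative case: if fp currently eliminates to sp with an offset
	 of 16, (mult fp 4) below is rewritten as (plus (mult sp 4) 64).  */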
2680*38fd1498Szrj       if (REG_P (XEXP (x, 0))
2681*38fd1498Szrj 	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2682*38fd1498Szrj 	  && CONST_INT_P (XEXP (x, 1)))
2683*38fd1498Szrj 	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2684*38fd1498Szrj 	     ep++)
2685*38fd1498Szrj 	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2686*38fd1498Szrj 	    {
2687*38fd1498Szrj 	      if (! mem_mode
2688*38fd1498Szrj 		  /* Refs inside notes or in DEBUG_INSNs don't count for
2689*38fd1498Szrj 		     this purpose.  */
2690*38fd1498Szrj 		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2691*38fd1498Szrj 				      || GET_CODE (insn) == INSN_LIST
2692*38fd1498Szrj 				      || DEBUG_INSN_P (insn))))
2693*38fd1498Szrj 		ep->ref_outside_mem = 1;
2694*38fd1498Szrj 
2695*38fd1498Szrj 	      return
2696*38fd1498Szrj 		plus_constant (Pmode,
2697*38fd1498Szrj 			       gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2698*38fd1498Szrj 			       ep->previous_offset * INTVAL (XEXP (x, 1)));
2699*38fd1498Szrj 	    }
2700*38fd1498Szrj 
2701*38fd1498Szrj       /* fall through */
2702*38fd1498Szrj 
2703*38fd1498Szrj     case CALL:
2704*38fd1498Szrj     case COMPARE:
2705*38fd1498Szrj     /* See comments before PLUS about handling MINUS.  */
2706*38fd1498Szrj     case MINUS:
2707*38fd1498Szrj     case DIV:      case UDIV:
2708*38fd1498Szrj     case MOD:      case UMOD:
2709*38fd1498Szrj     case AND:      case IOR:      case XOR:
2710*38fd1498Szrj     case ROTATERT: case ROTATE:
2711*38fd1498Szrj     case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2712*38fd1498Szrj     case NE:       case EQ:
2713*38fd1498Szrj     case GE:       case GT:       case GEU:    case GTU:
2714*38fd1498Szrj     case LE:       case LT:       case LEU:    case LTU:
2715*38fd1498Szrj       {
2716*38fd1498Szrj 	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2717*38fd1498Szrj 				     for_costs);
2718*38fd1498Szrj 	rtx new1 = XEXP (x, 1)
2719*38fd1498Szrj 	  ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2720*38fd1498Szrj 			      for_costs) : 0;
2721*38fd1498Szrj 
2722*38fd1498Szrj 	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2723*38fd1498Szrj 	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2724*38fd1498Szrj       }
2725*38fd1498Szrj       return x;
2726*38fd1498Szrj 
2727*38fd1498Szrj     case EXPR_LIST:
2728*38fd1498Szrj       /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
2729*38fd1498Szrj       if (XEXP (x, 0))
2730*38fd1498Szrj 	{
2731*38fd1498Szrj 	  new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2732*38fd1498Szrj 				      for_costs);
2733*38fd1498Szrj 	  if (new_rtx != XEXP (x, 0))
2734*38fd1498Szrj 	    {
2735*38fd1498Szrj 	      /* If this is a REG_DEAD note, it is not valid anymore.
2736*38fd1498Szrj 		 Using the eliminated version could result in creating a
2737*38fd1498Szrj 		 REG_DEAD note for the stack or frame pointer.  */
2738*38fd1498Szrj 	      if (REG_NOTE_KIND (x) == REG_DEAD)
2739*38fd1498Szrj 		return (XEXP (x, 1)
2740*38fd1498Szrj 			? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2741*38fd1498Szrj 					    for_costs)
2742*38fd1498Szrj 			: NULL_RTX);
2743*38fd1498Szrj 
2744*38fd1498Szrj 	      x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2745*38fd1498Szrj 	    }
2746*38fd1498Szrj 	}
2747*38fd1498Szrj 
2748*38fd1498Szrj       /* fall through */
2749*38fd1498Szrj 
2750*38fd1498Szrj     case INSN_LIST:
2751*38fd1498Szrj     case INT_LIST:
2752*38fd1498Szrj       /* Now do eliminations in the rest of the chain.  If this was
2753*38fd1498Szrj 	 an EXPR_LIST, this might result in allocating more memory than is
2754*38fd1498Szrj 	 strictly needed, but it simplifies the code.  */
2755*38fd1498Szrj       if (XEXP (x, 1))
2756*38fd1498Szrj 	{
2757*38fd1498Szrj 	  new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2758*38fd1498Szrj 				      for_costs);
2759*38fd1498Szrj 	  if (new_rtx != XEXP (x, 1))
2760*38fd1498Szrj 	    return
2761*38fd1498Szrj 	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2762*38fd1498Szrj 	}
2763*38fd1498Szrj       return x;
2764*38fd1498Szrj 
2765*38fd1498Szrj     case PRE_INC:
2766*38fd1498Szrj     case POST_INC:
2767*38fd1498Szrj     case PRE_DEC:
2768*38fd1498Szrj     case POST_DEC:
2769*38fd1498Szrj       /* We do not support elimination of a register that is modified.
2770*38fd1498Szrj 	 elimination_effects has already made sure that this does not
2771*38fd1498Szrj 	 happen.  */
2772*38fd1498Szrj       return x;
2773*38fd1498Szrj 
2774*38fd1498Szrj     case PRE_MODIFY:
2775*38fd1498Szrj     case POST_MODIFY:
2776*38fd1498Szrj       /* We do not support elimination of a register that is modified.
2777*38fd1498Szrj 	 elimination_effects has already made sure that this does not
2778*38fd1498Szrj 	 happen.  The only remaining case we need to consider here is
2779*38fd1498Szrj 	 that the increment value may be an eliminable register.  */
2780*38fd1498Szrj       if (GET_CODE (XEXP (x, 1)) == PLUS
2781*38fd1498Szrj 	  && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2782*38fd1498Szrj 	{
2783*38fd1498Szrj 	  rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2784*38fd1498Szrj 					  insn, true, for_costs);
2785*38fd1498Szrj 
2786*38fd1498Szrj 	  if (new_rtx != XEXP (XEXP (x, 1), 1))
2787*38fd1498Szrj 	    return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2788*38fd1498Szrj 				   gen_rtx_PLUS (GET_MODE (x),
2789*38fd1498Szrj 						 XEXP (x, 0), new_rtx));
2790*38fd1498Szrj 	}
2791*38fd1498Szrj       return x;
2792*38fd1498Szrj 
2793*38fd1498Szrj     case STRICT_LOW_PART:
2794*38fd1498Szrj     case NEG:          case NOT:
2795*38fd1498Szrj     case SIGN_EXTEND:  case ZERO_EXTEND:
2796*38fd1498Szrj     case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2797*38fd1498Szrj     case FLOAT:        case FIX:
2798*38fd1498Szrj     case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2799*38fd1498Szrj     case ABS:
2800*38fd1498Szrj     case SQRT:
2801*38fd1498Szrj     case FFS:
2802*38fd1498Szrj     case CLZ:
2803*38fd1498Szrj     case CTZ:
2804*38fd1498Szrj     case POPCOUNT:
2805*38fd1498Szrj     case PARITY:
2806*38fd1498Szrj     case BSWAP:
2807*38fd1498Szrj       new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2808*38fd1498Szrj 				  for_costs);
2809*38fd1498Szrj       if (new_rtx != XEXP (x, 0))
2810*38fd1498Szrj 	return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2811*38fd1498Szrj       return x;
2812*38fd1498Szrj 
2813*38fd1498Szrj     case SUBREG:
2814*38fd1498Szrj       /* Similar to above processing, but preserve SUBREG_BYTE.
2815*38fd1498Szrj 	 Convert (subreg (mem)) to (mem) if not paradoxical.
2816*38fd1498Szrj 	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2817*38fd1498Szrj 	 pseudo didn't get a hard reg, we must replace this with the
2818*38fd1498Szrj 	 eliminated version of the memory location because push_reload
2819*38fd1498Szrj 	 may do the replacement in certain circumstances.  */
2820*38fd1498Szrj       if (REG_P (SUBREG_REG (x))
2821*38fd1498Szrj 	  && !paradoxical_subreg_p (x)
2822*38fd1498Szrj 	  && reg_equivs
2823*38fd1498Szrj 	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2824*38fd1498Szrj 	{
2825*38fd1498Szrj 	  new_rtx = SUBREG_REG (x);
2826*38fd1498Szrj 	}
2827*38fd1498Szrj       else
2828*38fd1498Szrj 	new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2829*38fd1498Szrj 
2830*38fd1498Szrj       if (new_rtx != SUBREG_REG (x))
2831*38fd1498Szrj 	{
2832*38fd1498Szrj 	  poly_int64 x_size = GET_MODE_SIZE (GET_MODE (x));
2833*38fd1498Szrj 	  poly_int64 new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2834*38fd1498Szrj 
2835*38fd1498Szrj 	  if (MEM_P (new_rtx)
2836*38fd1498Szrj 	      && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx))
2837*38fd1498Szrj 		   /* On RISC machines, combine can create rtl of the form
2838*38fd1498Szrj 		      (set (subreg:m1 (reg:m2 R) 0) ...)
2839*38fd1498Szrj 		      where m1 < m2, and expects something interesting to
2840*38fd1498Szrj 		      happen to the entire word.  Moreover, it will use the
2841*38fd1498Szrj 		      (reg:m2 R) later, expecting all bits to be preserved.
2842*38fd1498Szrj 		      So if the number of words is the same, preserve the
2843*38fd1498Szrj 		      subreg so that push_reload can see it.  */
2844*38fd1498Szrj 		   && !(WORD_REGISTER_OPERATIONS
2845*38fd1498Szrj 			&& known_equal_after_align_down (x_size - 1,
2846*38fd1498Szrj 							 new_size - 1,
2847*38fd1498Szrj 							 UNITS_PER_WORD)))
2848*38fd1498Szrj 		  || known_eq (x_size, new_size))
2849*38fd1498Szrj 	      )
2850*38fd1498Szrj 	    return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2851*38fd1498Szrj 	  else if (insn && GET_CODE (insn) == DEBUG_INSN)
2852*38fd1498Szrj 	    return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2853*38fd1498Szrj 	  else
2854*38fd1498Szrj 	    return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2855*38fd1498Szrj 	}
2856*38fd1498Szrj 
2857*38fd1498Szrj       return x;
2858*38fd1498Szrj 
2859*38fd1498Szrj     case MEM:
2860*38fd1498Szrj       /* Our only special processing is to pass the mode of the MEM to our
2861*38fd1498Szrj 	 recursive call and copy the flags.  While we are here, handle this
2862*38fd1498Szrj 	 case more efficiently.  */
2863*38fd1498Szrj 
2864*38fd1498Szrj       new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2865*38fd1498Szrj 				  for_costs);
2866*38fd1498Szrj       if (for_costs
2867*38fd1498Szrj 	  && memory_address_p (GET_MODE (x), XEXP (x, 0))
2868*38fd1498Szrj 	  && !memory_address_p (GET_MODE (x), new_rtx))
2869*38fd1498Szrj 	note_reg_elim_costly (XEXP (x, 0), insn);
2870*38fd1498Szrj 
2871*38fd1498Szrj       return replace_equiv_address_nv (x, new_rtx);
2872*38fd1498Szrj 
2873*38fd1498Szrj     case USE:
2874*38fd1498Szrj       /* Handle an insn_list USE that a call to a pure function may generate.  */
2875*38fd1498Szrj       new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2876*38fd1498Szrj 				  for_costs);
2877*38fd1498Szrj       if (new_rtx != XEXP (x, 0))
2878*38fd1498Szrj 	return gen_rtx_USE (GET_MODE (x), new_rtx);
2879*38fd1498Szrj       return x;
2880*38fd1498Szrj 
2881*38fd1498Szrj     case CLOBBER:
2882*38fd1498Szrj     case ASM_OPERANDS:
2883*38fd1498Szrj       gcc_assert (insn && DEBUG_INSN_P (insn));
2884*38fd1498Szrj       break;
2885*38fd1498Szrj 
2886*38fd1498Szrj     case SET:
2887*38fd1498Szrj       gcc_unreachable ();
2888*38fd1498Szrj 
2889*38fd1498Szrj     default:
2890*38fd1498Szrj       break;
2891*38fd1498Szrj     }
2892*38fd1498Szrj 
2893*38fd1498Szrj   /* Process each of our operands recursively.  If any have changed, make a
2894*38fd1498Szrj      copy of the rtx.  */
2895*38fd1498Szrj   fmt = GET_RTX_FORMAT (code);
2896*38fd1498Szrj   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2897*38fd1498Szrj     {
2898*38fd1498Szrj       if (*fmt == 'e')
2899*38fd1498Szrj 	{
2900*38fd1498Szrj 	  new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2901*38fd1498Szrj 				      for_costs);
2902*38fd1498Szrj 	  if (new_rtx != XEXP (x, i) && ! copied)
2903*38fd1498Szrj 	    {
2904*38fd1498Szrj 	      x = shallow_copy_rtx (x);
2905*38fd1498Szrj 	      copied = 1;
2906*38fd1498Szrj 	    }
2907*38fd1498Szrj 	  XEXP (x, i) = new_rtx;
2908*38fd1498Szrj 	}
2909*38fd1498Szrj       else if (*fmt == 'E')
2910*38fd1498Szrj 	{
2911*38fd1498Szrj 	  int copied_vec = 0;
2912*38fd1498Szrj 	  for (j = 0; j < XVECLEN (x, i); j++)
2913*38fd1498Szrj 	    {
2914*38fd1498Szrj 	      new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2915*38fd1498Szrj 					  for_costs);
2916*38fd1498Szrj 	      if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2917*38fd1498Szrj 		{
2918*38fd1498Szrj 		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2919*38fd1498Szrj 					     XVEC (x, i)->elem);
2920*38fd1498Szrj 		  if (! copied)
2921*38fd1498Szrj 		    {
2922*38fd1498Szrj 		      x = shallow_copy_rtx (x);
2923*38fd1498Szrj 		      copied = 1;
2924*38fd1498Szrj 		    }
2925*38fd1498Szrj 		  XVEC (x, i) = new_v;
2926*38fd1498Szrj 		  copied_vec = 1;
2927*38fd1498Szrj 		}
2928*38fd1498Szrj 	      XVECEXP (x, i, j) = new_rtx;
2929*38fd1498Szrj 	    }
2930*38fd1498Szrj 	}
2931*38fd1498Szrj     }
2932*38fd1498Szrj 
2933*38fd1498Szrj   return x;
2934*38fd1498Szrj }
2935*38fd1498Szrj 
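/* Entry point for the above: apply register eliminations to X as described
   for eliminate_regs_1.  If the elimination table has not been set up
   (which only happens when the target does no register allocation), there
   is nothing to eliminate and X is returned unchanged.  */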
2936*38fd1498Szrj rtx
2937*38fd1498Szrj eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2938*38fd1498Szrj {
2939*38fd1498Szrj   if (reg_eliminate == NULL)
2940*38fd1498Szrj     {
2941*38fd1498Szrj       gcc_assert (targetm.no_register_allocation);
2942*38fd1498Szrj       return x;
2943*38fd1498Szrj     }
2944*38fd1498Szrj   return eliminate_regs_1 (x, mem_mode, insn, false, false);
2945*38fd1498Szrj }
2946*38fd1498Szrj 
2947*38fd1498Szrj /* Scan rtx X for modifications of elimination target registers.  Update
2948*38fd1498Szrj    the table of eliminables to reflect the changed state.  MEM_MODE is
2949*38fd1498Szrj    the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
2950*38fd1498Szrj 
2951*38fd1498Szrj static void
2952*38fd1498Szrj elimination_effects (rtx x, machine_mode mem_mode)
2953*38fd1498Szrj {
2954*38fd1498Szrj   enum rtx_code code = GET_CODE (x);
2955*38fd1498Szrj   struct elim_table *ep;
2956*38fd1498Szrj   int regno;
2957*38fd1498Szrj   int i, j;
2958*38fd1498Szrj   const char *fmt;
2959*38fd1498Szrj 
2960*38fd1498Szrj   switch (code)
2961*38fd1498Szrj     {
2962*38fd1498Szrj     CASE_CONST_ANY:
2963*38fd1498Szrj     case CONST:
2964*38fd1498Szrj     case SYMBOL_REF:
2965*38fd1498Szrj     case CODE_LABEL:
2966*38fd1498Szrj     case PC:
2967*38fd1498Szrj     case CC0:
2968*38fd1498Szrj     case ASM_INPUT:
2969*38fd1498Szrj     case ADDR_VEC:
2970*38fd1498Szrj     case ADDR_DIFF_VEC:
2971*38fd1498Szrj     case RETURN:
2972*38fd1498Szrj       return;
2973*38fd1498Szrj 
2974*38fd1498Szrj     case REG:
2975*38fd1498Szrj       regno = REGNO (x);
2976*38fd1498Szrj 
2977*38fd1498Szrj       /* First handle the case where we encounter a bare register that
2978*38fd1498Szrj 	 is eliminable: note whether it is referenced outside a MEM.  */
2979*38fd1498Szrj       if (regno < FIRST_PSEUDO_REGISTER)
2980*38fd1498Szrj 	{
2981*38fd1498Szrj 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2982*38fd1498Szrj 	       ep++)
2983*38fd1498Szrj 	    if (ep->from_rtx == x && ep->can_eliminate)
2984*38fd1498Szrj 	      {
2985*38fd1498Szrj 		if (! mem_mode)
2986*38fd1498Szrj 		  ep->ref_outside_mem = 1;
2987*38fd1498Szrj 		return;
2988*38fd1498Szrj 	      }
2989*38fd1498Szrj 
2990*38fd1498Szrj 	}
2991*38fd1498Szrj       else if (reg_renumber[regno] < 0
2992*38fd1498Szrj 	       && reg_equivs
2993*38fd1498Szrj 	       && reg_equiv_constant (regno)
2994*38fd1498Szrj 	       && ! function_invariant_p (reg_equiv_constant (regno)))
2995*38fd1498Szrj 	elimination_effects (reg_equiv_constant (regno), mem_mode);
2996*38fd1498Szrj       return;
2997*38fd1498Szrj 
2998*38fd1498Szrj     case PRE_INC:
2999*38fd1498Szrj     case POST_INC:
3000*38fd1498Szrj     case PRE_DEC:
3001*38fd1498Szrj     case POST_DEC:
3002*38fd1498Szrj     case POST_MODIFY:
3003*38fd1498Szrj     case PRE_MODIFY:
3004*38fd1498Szrj       /* If we modify the source of an elimination rule, disable it.  */
3005*38fd1498Szrj       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3006*38fd1498Szrj 	if (ep->from_rtx == XEXP (x, 0))
3007*38fd1498Szrj 	  ep->can_eliminate = 0;
3008*38fd1498Szrj 
3009*38fd1498Szrj       /* If we modify the target of an elimination rule by adding a constant,
3010*38fd1498Szrj 	 update its offset.  If we modify the target in any other way, we'll
3011*38fd1498Szrj 	 have to disable the rule as well.  */
3012*38fd1498Szrj       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3013*38fd1498Szrj 	if (ep->to_rtx == XEXP (x, 0))
3014*38fd1498Szrj 	  {
3015*38fd1498Szrj 	    poly_int64 size = GET_MODE_SIZE (mem_mode);
3016*38fd1498Szrj 
3017*38fd1498Szrj 	    /* If more bytes than MEM_MODE are pushed, account for them.  */
3018*38fd1498Szrj #ifdef PUSH_ROUNDING
3019*38fd1498Szrj 	    if (ep->to_rtx == stack_pointer_rtx)
3020*38fd1498Szrj 	      size = PUSH_ROUNDING (size);
3021*38fd1498Szrj #endif
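	    /* The eliminated register is reached as TO plus the offset, so
	       a decrement of the replacement register must be compensated
	       for by a larger offset and an increment by a smaller one.  */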
3022*38fd1498Szrj 	    if (code == PRE_DEC || code == POST_DEC)
3023*38fd1498Szrj 	      ep->offset += size;
3024*38fd1498Szrj 	    else if (code == PRE_INC || code == POST_INC)
3025*38fd1498Szrj 	      ep->offset -= size;
3026*38fd1498Szrj 	    else if (code == PRE_MODIFY || code == POST_MODIFY)
3027*38fd1498Szrj 	      {
3028*38fd1498Szrj 		if (GET_CODE (XEXP (x, 1)) == PLUS
3029*38fd1498Szrj 		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3030*38fd1498Szrj 		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3031*38fd1498Szrj 		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3032*38fd1498Szrj 		else
3033*38fd1498Szrj 		  ep->can_eliminate = 0;
3034*38fd1498Szrj 	      }
3035*38fd1498Szrj 	  }
3036*38fd1498Szrj 
3037*38fd1498Szrj       /* These two aren't unary operators.  */
3038*38fd1498Szrj       if (code == POST_MODIFY || code == PRE_MODIFY)
3039*38fd1498Szrj 	break;
3040*38fd1498Szrj 
3041*38fd1498Szrj       /* Fall through to generic unary operation case.  */
3042*38fd1498Szrj       gcc_fallthrough ();
3043*38fd1498Szrj     case STRICT_LOW_PART:
3044*38fd1498Szrj     case NEG:          case NOT:
3045*38fd1498Szrj     case SIGN_EXTEND:  case ZERO_EXTEND:
3046*38fd1498Szrj     case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3047*38fd1498Szrj     case FLOAT:        case FIX:
3048*38fd1498Szrj     case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3049*38fd1498Szrj     case ABS:
3050*38fd1498Szrj     case SQRT:
3051*38fd1498Szrj     case FFS:
3052*38fd1498Szrj     case CLZ:
3053*38fd1498Szrj     case CTZ:
3054*38fd1498Szrj     case POPCOUNT:
3055*38fd1498Szrj     case PARITY:
3056*38fd1498Szrj     case BSWAP:
3057*38fd1498Szrj       elimination_effects (XEXP (x, 0), mem_mode);
3058*38fd1498Szrj       return;
3059*38fd1498Szrj 
3060*38fd1498Szrj     case SUBREG:
3061*38fd1498Szrj       if (REG_P (SUBREG_REG (x))
3062*38fd1498Szrj 	  && !paradoxical_subreg_p (x)
3063*38fd1498Szrj 	  && reg_equivs
3064*38fd1498Szrj 	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
3065*38fd1498Szrj 	return;
3066*38fd1498Szrj 
3067*38fd1498Szrj       elimination_effects (SUBREG_REG (x), mem_mode);
3068*38fd1498Szrj       return;
3069*38fd1498Szrj 
3070*38fd1498Szrj     case USE:
3071*38fd1498Szrj       /* If using a register that is the source of an elimination we still
3072*38fd1498Szrj 	 think can be performed, note it cannot be performed since we don't
3073*38fd1498Szrj 	 know how this register is used.  */
3074*38fd1498Szrj       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3075*38fd1498Szrj 	if (ep->from_rtx == XEXP (x, 0))
3076*38fd1498Szrj 	  ep->can_eliminate = 0;
3077*38fd1498Szrj 
3078*38fd1498Szrj       elimination_effects (XEXP (x, 0), mem_mode);
3079*38fd1498Szrj       return;
3080*38fd1498Szrj 
3081*38fd1498Szrj     case CLOBBER:
3082*38fd1498Szrj       /* If clobbering a register that is the replacement register for an
3083*38fd1498Szrj 	 elimination we still think can be performed, note that it cannot
3084*38fd1498Szrj 	 be performed.  Otherwise, we need not be concerned about it.  */
3085*38fd1498Szrj       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3086*38fd1498Szrj 	if (ep->to_rtx == XEXP (x, 0))
3087*38fd1498Szrj 	  ep->can_eliminate = 0;
3088*38fd1498Szrj 
3089*38fd1498Szrj       elimination_effects (XEXP (x, 0), mem_mode);
3090*38fd1498Szrj       return;
3091*38fd1498Szrj 
3092*38fd1498Szrj     case SET:
3093*38fd1498Szrj       /* Check for setting a register that we know about.  */
3094*38fd1498Szrj       if (REG_P (SET_DEST (x)))
3095*38fd1498Szrj 	{
3096*38fd1498Szrj 	  /* See if this is setting the replacement register for an
3097*38fd1498Szrj 	     elimination.
3098*38fd1498Szrj 
3099*38fd1498Szrj 	     If DEST is the hard frame pointer, we do nothing because we
3100*38fd1498Szrj 	     assume that all assignments to the frame pointer are for
3101*38fd1498Szrj 	     non-local gotos and are being done at a time when they are valid
3102*38fd1498Szrj 	     and do not disturb anything else.  Some machines want to
3103*38fd1498Szrj 	     eliminate a fake argument pointer (or even a fake frame pointer)
3104*38fd1498Szrj 	     with either the real frame pointer or the stack pointer.  Assignments to
3105*38fd1498Szrj 	     the hard frame pointer must not prevent this elimination.  */
3106*38fd1498Szrj 
3107*38fd1498Szrj 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3108*38fd1498Szrj 	       ep++)
3109*38fd1498Szrj 	    if (ep->to_rtx == SET_DEST (x)
3110*38fd1498Szrj 		&& SET_DEST (x) != hard_frame_pointer_rtx)
3111*38fd1498Szrj 	      {
3112*38fd1498Szrj 		/* If it is being incremented, adjust the offset.  Otherwise,
3113*38fd1498Szrj 		   this elimination can't be done.  */
3114*38fd1498Szrj 		rtx src = SET_SRC (x);
3115*38fd1498Szrj 
3116*38fd1498Szrj 		if (GET_CODE (src) == PLUS
3117*38fd1498Szrj 		    && XEXP (src, 0) == SET_DEST (x)
3118*38fd1498Szrj 		    && CONST_INT_P (XEXP (src, 1)))
3119*38fd1498Szrj 		  ep->offset -= INTVAL (XEXP (src, 1));
3120*38fd1498Szrj 		else
3121*38fd1498Szrj 		  ep->can_eliminate = 0;
3122*38fd1498Szrj 	      }
3123*38fd1498Szrj 	}
3124*38fd1498Szrj 
3125*38fd1498Szrj       elimination_effects (SET_DEST (x), VOIDmode);
3126*38fd1498Szrj       elimination_effects (SET_SRC (x), VOIDmode);
3127*38fd1498Szrj       return;
3128*38fd1498Szrj 
3129*38fd1498Szrj     case MEM:
3130*38fd1498Szrj       /* Our only special processing is to pass the mode of the MEM to our
3131*38fd1498Szrj 	 recursive call.  */
3132*38fd1498Szrj       elimination_effects (XEXP (x, 0), GET_MODE (x));
3133*38fd1498Szrj       return;
3134*38fd1498Szrj 
3135*38fd1498Szrj     default:
3136*38fd1498Szrj       break;
3137*38fd1498Szrj     }
3138*38fd1498Szrj 
3139*38fd1498Szrj   fmt = GET_RTX_FORMAT (code);
3140*38fd1498Szrj   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3141*38fd1498Szrj     {
3142*38fd1498Szrj       if (*fmt == 'e')
3143*38fd1498Szrj 	elimination_effects (XEXP (x, i), mem_mode);
3144*38fd1498Szrj       else if (*fmt == 'E')
3145*38fd1498Szrj 	for (j = 0; j < XVECLEN (x, i); j++)
3146*38fd1498Szrj 	  elimination_effects (XVECEXP (x, i, j), mem_mode);
3147*38fd1498Szrj     }
3148*38fd1498Szrj }
3149*38fd1498Szrj 
3150*38fd1498Szrj /* Descend through rtx X and verify that no references to eliminable registers
3151*38fd1498Szrj    remain.  If any do remain, mark the involved register as not
3152*38fd1498Szrj    eliminable.  */
3153*38fd1498Szrj 
3154*38fd1498Szrj static void
3155*38fd1498Szrj check_eliminable_occurrences (rtx x)
3156*38fd1498Szrj {
3157*38fd1498Szrj   const char *fmt;
3158*38fd1498Szrj   int i;
3159*38fd1498Szrj   enum rtx_code code;
3160*38fd1498Szrj 
3161*38fd1498Szrj   if (x == 0)
3162*38fd1498Szrj     return;
3163*38fd1498Szrj 
3164*38fd1498Szrj   code = GET_CODE (x);
3165*38fd1498Szrj 
3166*38fd1498Szrj   if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3167*38fd1498Szrj     {
3168*38fd1498Szrj       struct elim_table *ep;
3169*38fd1498Szrj 
3170*38fd1498Szrj       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3171*38fd1498Szrj 	if (ep->from_rtx == x)
3172*38fd1498Szrj 	  ep->can_eliminate = 0;
3173*38fd1498Szrj       return;
3174*38fd1498Szrj     }
3175*38fd1498Szrj 
3176*38fd1498Szrj   fmt = GET_RTX_FORMAT (code);
3177*38fd1498Szrj   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3178*38fd1498Szrj     {
3179*38fd1498Szrj       if (*fmt == 'e')
3180*38fd1498Szrj 	check_eliminable_occurrences (XEXP (x, i));
3181*38fd1498Szrj       else if (*fmt == 'E')
3182*38fd1498Szrj 	{
3183*38fd1498Szrj 	  int j;
3184*38fd1498Szrj 	  for (j = 0; j < XVECLEN (x, i); j++)
3185*38fd1498Szrj 	    check_eliminable_occurrences (XVECEXP (x, i, j));
3186*38fd1498Szrj 	}
3187*38fd1498Szrj     }
3188*38fd1498Szrj }
3189*38fd1498Szrj 
3190*38fd1498Szrj /* Scan INSN and eliminate all eliminable registers in it.
3191*38fd1498Szrj 
3192*38fd1498Szrj    If REPLACE is nonzero, do the replacement destructively.  Also
3193*38fd1498Szrj    delete the insn as dead if it is setting an eliminable register.
3194*38fd1498Szrj 
3195*38fd1498Szrj    If REPLACE is zero, do all our allocations in reload_obstack.
3196*38fd1498Szrj 
3197*38fd1498Szrj    If no eliminations were done and this insn doesn't require any elimination
3198*38fd1498Szrj    processing (these are not identical conditions: it might be updating sp,
3199*38fd1498Szrj    but not referencing fp; this needs to be seen during reload_as_needed so
3200*38fd1498Szrj    that the offset between fp and sp can be taken into consideration), zero
3201*38fd1498Szrj    is returned.  Otherwise, 1 is returned.  */
3202*38fd1498Szrj 
3203*38fd1498Szrj static int
3204*38fd1498Szrj eliminate_regs_in_insn (rtx_insn *insn, int replace)
3205*38fd1498Szrj {
3206*38fd1498Szrj   int icode = recog_memoized (insn);
3207*38fd1498Szrj   rtx old_body = PATTERN (insn);
3208*38fd1498Szrj   int insn_is_asm = asm_noperands (old_body) >= 0;
3209*38fd1498Szrj   rtx old_set = single_set (insn);
3210*38fd1498Szrj   rtx new_body;
3211*38fd1498Szrj   int val = 0;
3212*38fd1498Szrj   int i;
3213*38fd1498Szrj   rtx substed_operand[MAX_RECOG_OPERANDS];
3214*38fd1498Szrj   rtx orig_operand[MAX_RECOG_OPERANDS];
3215*38fd1498Szrj   struct elim_table *ep;
3216*38fd1498Szrj   rtx plus_src, plus_cst_src;
3217*38fd1498Szrj 
3218*38fd1498Szrj   if (! insn_is_asm && icode < 0)
3219*38fd1498Szrj     {
3220*38fd1498Szrj       gcc_assert (DEBUG_INSN_P (insn)
3221*38fd1498Szrj 		  || GET_CODE (PATTERN (insn)) == USE
3222*38fd1498Szrj 		  || GET_CODE (PATTERN (insn)) == CLOBBER
3223*38fd1498Szrj 		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3224*38fd1498Szrj       if (DEBUG_BIND_INSN_P (insn))
3225*38fd1498Szrj 	INSN_VAR_LOCATION_LOC (insn)
3226*38fd1498Szrj 	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3227*38fd1498Szrj       return 0;
3228*38fd1498Szrj     }
3229*38fd1498Szrj 
3230*38fd1498Szrj   if (old_set != 0 && REG_P (SET_DEST (old_set))
3231*38fd1498Szrj       && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3232*38fd1498Szrj     {
3233*38fd1498Szrj       /* Check for setting an eliminable register.  */
3234*38fd1498Szrj       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3235*38fd1498Szrj 	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3236*38fd1498Szrj 	  {
3237*38fd1498Szrj 	    /* If this is setting the frame pointer register to the
3238*38fd1498Szrj 	       hardware frame pointer register and this is an elimination
3239*38fd1498Szrj 	       that will be done (tested above), this insn is really
3240*38fd1498Szrj 	       adjusting the frame pointer downward to compensate for
3241*38fd1498Szrj 	       the adjustment done before a nonlocal goto.  */
3242*38fd1498Szrj 	    if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
3243*38fd1498Szrj 		&& ep->from == FRAME_POINTER_REGNUM
3244*38fd1498Szrj 		&& ep->to == HARD_FRAME_POINTER_REGNUM)
3245*38fd1498Szrj 	      {
3246*38fd1498Szrj 		rtx base = SET_SRC (old_set);
3247*38fd1498Szrj 		rtx_insn *base_insn = insn;
3248*38fd1498Szrj 		HOST_WIDE_INT offset = 0;
3249*38fd1498Szrj 
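		/* Walk backwards through constant additions and register
		   copies to see whether the value being assigned is
		   ultimately the replacement register plus a constant.  */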
3250*38fd1498Szrj 		while (base != ep->to_rtx)
3251*38fd1498Szrj 		  {
3252*38fd1498Szrj 		    rtx_insn *prev_insn;
3253*38fd1498Szrj 		    rtx prev_set;
3254*38fd1498Szrj 
3255*38fd1498Szrj 		    if (GET_CODE (base) == PLUS
3256*38fd1498Szrj 		        && CONST_INT_P (XEXP (base, 1)))
3257*38fd1498Szrj 		      {
3258*38fd1498Szrj 		        offset += INTVAL (XEXP (base, 1));
3259*38fd1498Szrj 		        base = XEXP (base, 0);
3260*38fd1498Szrj 		      }
3261*38fd1498Szrj 		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3262*38fd1498Szrj 			     && (prev_set = single_set (prev_insn)) != 0
3263*38fd1498Szrj 			     && rtx_equal_p (SET_DEST (prev_set), base))
3264*38fd1498Szrj 		      {
3265*38fd1498Szrj 		        base = SET_SRC (prev_set);
3266*38fd1498Szrj 		        base_insn = prev_insn;
3267*38fd1498Szrj 		      }
3268*38fd1498Szrj 		    else
3269*38fd1498Szrj 		      break;
3270*38fd1498Szrj 		  }
3271*38fd1498Szrj 
3272*38fd1498Szrj 		if (base == ep->to_rtx)
3273*38fd1498Szrj 		  {
3274*38fd1498Szrj 		    rtx src = plus_constant (Pmode, ep->to_rtx,
3275*38fd1498Szrj 					     offset - ep->offset);
3276*38fd1498Szrj 
3277*38fd1498Szrj 		    new_body = old_body;
3278*38fd1498Szrj 		    if (! replace)
3279*38fd1498Szrj 		      {
3280*38fd1498Szrj 			new_body = copy_insn (old_body);
3281*38fd1498Szrj 			if (REG_NOTES (insn))
3282*38fd1498Szrj 			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3283*38fd1498Szrj 		      }
3284*38fd1498Szrj 		    PATTERN (insn) = new_body;
3285*38fd1498Szrj 		    old_set = single_set (insn);
3286*38fd1498Szrj 
3287*38fd1498Szrj 		    /* First see if this insn remains valid when we
3288*38fd1498Szrj 		       make the change.  If not, keep the INSN_CODE
3289*38fd1498Szrj 		       the same and let reload fit it up.  */
3290*38fd1498Szrj 		       the same and let reload fix it up.  */
3291*38fd1498Szrj 		    validate_change (insn, &SET_DEST (old_set),
3292*38fd1498Szrj 				     ep->to_rtx, 1);
3293*38fd1498Szrj 		    if (! apply_change_group ())
3294*38fd1498Szrj 		      {
3295*38fd1498Szrj 			SET_SRC (old_set) = src;
3296*38fd1498Szrj 			SET_DEST (old_set) = ep->to_rtx;
3297*38fd1498Szrj 		      }
3298*38fd1498Szrj 
3299*38fd1498Szrj 		    val = 1;
3300*38fd1498Szrj 		    goto done;
3301*38fd1498Szrj 		  }
3302*38fd1498Szrj 	      }
3303*38fd1498Szrj 
3304*38fd1498Szrj 	    /* In this case this insn isn't serving a useful purpose.  We
3305*38fd1498Szrj 	       will delete it in reload_as_needed once we know that this
3306*38fd1498Szrj 	       elimination is, in fact, being done.
3307*38fd1498Szrj 
3308*38fd1498Szrj 	       If REPLACE isn't set, we can't delete this insn, but needn't
3309*38fd1498Szrj 	       process it since it won't be used unless something changes.  */
3310*38fd1498Szrj 	    if (replace)
3311*38fd1498Szrj 	      {
3312*38fd1498Szrj 		delete_dead_insn (insn);
3313*38fd1498Szrj 		return 1;
3314*38fd1498Szrj 	      }
3315*38fd1498Szrj 	    val = 1;
3316*38fd1498Szrj 	    goto done;
3317*38fd1498Szrj 	  }
3318*38fd1498Szrj     }
3319*38fd1498Szrj 
3320*38fd1498Szrj   /* We allow one special case which happens to work on all machines we
3321*38fd1498Szrj      currently support: a single set with the source or a REG_EQUAL
3322*38fd1498Szrj      note being a PLUS of an eliminable register and a constant.  */
3323*38fd1498Szrj   plus_src = plus_cst_src = 0;
3324*38fd1498Szrj   if (old_set && REG_P (SET_DEST (old_set)))
3325*38fd1498Szrj     {
3326*38fd1498Szrj       if (GET_CODE (SET_SRC (old_set)) == PLUS)
3327*38fd1498Szrj 	plus_src = SET_SRC (old_set);
3328*38fd1498Szrj       /* First see if the source is of the form (plus (...) CST).  */
3329*38fd1498Szrj       if (plus_src
3330*38fd1498Szrj 	  && CONST_INT_P (XEXP (plus_src, 1)))
3331*38fd1498Szrj 	plus_cst_src = plus_src;
3332*38fd1498Szrj       else if (REG_P (SET_SRC (old_set))
3333*38fd1498Szrj 	       || plus_src)
3334*38fd1498Szrj 	{
3335*38fd1498Szrj 	  /* Otherwise, see if we have a REG_EQUAL note of the form
3336*38fd1498Szrj 	     (plus (...) CST).  */
3337*38fd1498Szrj 	  rtx links;
3338*38fd1498Szrj 	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3339*38fd1498Szrj 	    {
3340*38fd1498Szrj 	      if ((REG_NOTE_KIND (links) == REG_EQUAL
3341*38fd1498Szrj 		   || REG_NOTE_KIND (links) == REG_EQUIV)
3342*38fd1498Szrj 		  && GET_CODE (XEXP (links, 0)) == PLUS
3343*38fd1498Szrj 		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3344*38fd1498Szrj 		{
3345*38fd1498Szrj 		  plus_cst_src = XEXP (links, 0);
3346*38fd1498Szrj 		  break;
3347*38fd1498Szrj 		}
3348*38fd1498Szrj 	    }
3349*38fd1498Szrj 	}
3350*38fd1498Szrj 
3351*38fd1498Szrj       /* Check that the first operand of the PLUS is a hard reg or
3352*38fd1498Szrj 	 the lowpart subreg of one.  */
3353*38fd1498Szrj       if (plus_cst_src)
3354*38fd1498Szrj 	{
3355*38fd1498Szrj 	  rtx reg = XEXP (plus_cst_src, 0);
3356*38fd1498Szrj 	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3357*38fd1498Szrj 	    reg = SUBREG_REG (reg);
3358*38fd1498Szrj 
3359*38fd1498Szrj 	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3360*38fd1498Szrj 	    plus_cst_src = 0;
3361*38fd1498Szrj 	}
3362*38fd1498Szrj     }
3363*38fd1498Szrj   if (plus_cst_src)
3364*38fd1498Szrj     {
3365*38fd1498Szrj       rtx reg = XEXP (plus_cst_src, 0);
3366*38fd1498Szrj       poly_int64 offset = INTVAL (XEXP (plus_cst_src, 1));
3367*38fd1498Szrj 
3368*38fd1498Szrj       if (GET_CODE (reg) == SUBREG)
3369*38fd1498Szrj 	reg = SUBREG_REG (reg);
3370*38fd1498Szrj 
3371*38fd1498Szrj       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3372*38fd1498Szrj 	if (ep->from_rtx == reg && ep->can_eliminate)
3373*38fd1498Szrj 	  {
3374*38fd1498Szrj 	    rtx to_rtx = ep->to_rtx;
3375*38fd1498Szrj 	    offset += ep->offset;
3376*38fd1498Szrj 	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3377*38fd1498Szrj 
3378*38fd1498Szrj 	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3379*38fd1498Szrj 	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3380*38fd1498Szrj 				    to_rtx);
3381*38fd1498Szrj 	    /* If we have a nonzero offset, and the source is already
3382*38fd1498Szrj 	       a simple REG, the following transformation would
3383*38fd1498Szrj 	       increase the cost of the insn by replacing a simple REG
3384*38fd1498Szrj 	       with (plus (reg sp) CST).  So try only when we already
3385*38fd1498Szrj 	       had a PLUS before.  */
3386*38fd1498Szrj 	    if (known_eq (offset, 0) || plus_src)
3387*38fd1498Szrj 	      {
3388*38fd1498Szrj 		rtx new_src = plus_constant (GET_MODE (to_rtx),
3389*38fd1498Szrj 					     to_rtx, offset);
3390*38fd1498Szrj 
3391*38fd1498Szrj 		new_body = old_body;
3392*38fd1498Szrj 		if (! replace)
3393*38fd1498Szrj 		  {
3394*38fd1498Szrj 		    new_body = copy_insn (old_body);
3395*38fd1498Szrj 		    if (REG_NOTES (insn))
3396*38fd1498Szrj 		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3397*38fd1498Szrj 		  }
3398*38fd1498Szrj 		PATTERN (insn) = new_body;
3399*38fd1498Szrj 		old_set = single_set (insn);
3400*38fd1498Szrj 
3401*38fd1498Szrj 		/* First see if this insn remains valid when we make the
3402*38fd1498Szrj 		   change.  If not, try to replace the whole pattern with
3403*38fd1498Szrj 		   a simple set (this may help if the original insn was a
3404*38fd1498Szrj 		   PARALLEL that was only recognized as single_set due to
3405*38fd1498Szrj 		   REG_UNUSED notes).  If this isn't valid either, keep
3406*38fd1498Szrj 		   the INSN_CODE the same and let reload fix it up.  */
3407*38fd1498Szrj 		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3408*38fd1498Szrj 		  {
3409*38fd1498Szrj 		    rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);
3410*38fd1498Szrj 
3411*38fd1498Szrj 		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3412*38fd1498Szrj 		      SET_SRC (old_set) = new_src;
3413*38fd1498Szrj 		  }
3414*38fd1498Szrj 	      }
3415*38fd1498Szrj 	    else
3416*38fd1498Szrj 	      break;
3417*38fd1498Szrj 
3418*38fd1498Szrj 	    val = 1;
3419*38fd1498Szrj 	    /* This can't have an effect on elimination offsets, so skip right
3420*38fd1498Szrj 	       to the end.  */
3421*38fd1498Szrj 	    goto done;
3422*38fd1498Szrj 	  }
3423*38fd1498Szrj     }
3424*38fd1498Szrj 
3425*38fd1498Szrj   /* Determine the effects of this insn on elimination offsets.  */
3426*38fd1498Szrj   elimination_effects (old_body, VOIDmode);
3427*38fd1498Szrj 
3428*38fd1498Szrj   /* Eliminate all eliminable registers occurring in operands that
3429*38fd1498Szrj      can be handled by reload.  */
3430*38fd1498Szrj   extract_insn (insn);
3431*38fd1498Szrj   for (i = 0; i < recog_data.n_operands; i++)
3432*38fd1498Szrj     {
3433*38fd1498Szrj       orig_operand[i] = recog_data.operand[i];
3434*38fd1498Szrj       substed_operand[i] = recog_data.operand[i];
3435*38fd1498Szrj 
3436*38fd1498Szrj       /* For an asm statement, every operand is eliminable.  */
3437*38fd1498Szrj       if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3438*38fd1498Szrj 	{
3439*38fd1498Szrj 	  bool is_set_src, in_plus;
3440*38fd1498Szrj 
3441*38fd1498Szrj 	  /* Check for setting a register that we know about.  */
3442*38fd1498Szrj 	  if (recog_data.operand_type[i] != OP_IN
3443*38fd1498Szrj 	      && REG_P (orig_operand[i]))
3444*38fd1498Szrj 	    {
3445*38fd1498Szrj 	      /* If we are assigning to a register that can be eliminated, it
3446*38fd1498Szrj 		 must be as part of a PARALLEL, since the code above handles
3447*38fd1498Szrj 		 single SETs.  We must indicate that we can no longer
3448*38fd1498Szrj 		 eliminate this reg.  */
3449*38fd1498Szrj 	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3450*38fd1498Szrj 		   ep++)
3451*38fd1498Szrj 		if (ep->from_rtx == orig_operand[i])
3452*38fd1498Szrj 		  ep->can_eliminate = 0;
3453*38fd1498Szrj 	    }
3454*38fd1498Szrj 
3455*38fd1498Szrj 	  /* Companion to the above plus substitution, we can allow
3456*38fd1498Szrj 	     invariants as the source of a plain move.  */
3457*38fd1498Szrj 	  is_set_src = false;
3458*38fd1498Szrj 	  if (old_set
3459*38fd1498Szrj 	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
3460*38fd1498Szrj 	    is_set_src = true;
3461*38fd1498Szrj 	  in_plus = false;
3462*38fd1498Szrj 	  if (plus_src
3463*38fd1498Szrj 	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3464*38fd1498Szrj 		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3465*38fd1498Szrj 	    in_plus = true;
3466*38fd1498Szrj 
3467*38fd1498Szrj 	  substed_operand[i]
3468*38fd1498Szrj 	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3469*38fd1498Szrj 			        replace ? insn : NULL_RTX,
3470*38fd1498Szrj 				is_set_src || in_plus, false);
3471*38fd1498Szrj 	  if (substed_operand[i] != orig_operand[i])
3472*38fd1498Szrj 	    val = 1;
3473*38fd1498Szrj 	  /* Terminate the search in check_eliminable_occurrences at
3474*38fd1498Szrj 	     this point.  */
3475*38fd1498Szrj 	  *recog_data.operand_loc[i] = 0;
3476*38fd1498Szrj 
3477*38fd1498Szrj 	  /* If an output operand changed from a REG to a MEM and we are replacing
3478*38fd1498Szrj 	     things permanently, write a CLOBBER of the original REG after INSN.  */
3479*38fd1498Szrj 	  if (recog_data.operand_type[i] != OP_IN
3480*38fd1498Szrj 	      && REG_P (orig_operand[i])
3481*38fd1498Szrj 	      && MEM_P (substed_operand[i])
3482*38fd1498Szrj 	      && replace)
3483*38fd1498Szrj 	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
3484*38fd1498Szrj 	}
3485*38fd1498Szrj     }
3486*38fd1498Szrj 
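  /* Copy the (now zeroed) operand locations into the corresponding
     match_dup locations as well, so that check_eliminable_occurrences
     below does not look inside operands that were already handled.  */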
3487*38fd1498Szrj   for (i = 0; i < recog_data.n_dups; i++)
3488*38fd1498Szrj     *recog_data.dup_loc[i]
3489*38fd1498Szrj       = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3490*38fd1498Szrj 
3491*38fd1498Szrj   /* If any eliminable registers remain, they aren't eliminable anymore.  */
3492*38fd1498Szrj   check_eliminable_occurrences (old_body);
3493*38fd1498Szrj 
3494*38fd1498Szrj   /* Substitute the operands; the new values are in the substed_operand
3495*38fd1498Szrj      array.  */
3496*38fd1498Szrj   for (i = 0; i < recog_data.n_operands; i++)
3497*38fd1498Szrj     *recog_data.operand_loc[i] = substed_operand[i];
3498*38fd1498Szrj   for (i = 0; i < recog_data.n_dups; i++)
3499*38fd1498Szrj     *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3500*38fd1498Szrj 
3501*38fd1498Szrj   /* If we are replacing a body that was a (set X (plus Y Z)), try to
3502*38fd1498Szrj      re-recognize the insn.  We do this in case we had a simple addition
3503*38fd1498Szrj      but now can do this as a load-address.  This saves an insn in this
3504*38fd1498Szrj      common case.
3505*38fd1498Szrj      If re-recognition fails, the old insn code number will still be used,
3506*38fd1498Szrj      and some register operands may have changed into PLUS expressions.
3507*38fd1498Szrj      These will be handled by find_reloads by loading them into a register
3508*38fd1498Szrj      again.  */
3509*38fd1498Szrj 
3510*38fd1498Szrj   if (val)
3511*38fd1498Szrj     {
3512*38fd1498Szrj       /* If we aren't replacing things permanently and we changed something,
3513*38fd1498Szrj 	 make another copy to ensure that all the RTL is new.  Otherwise
3514*38fd1498Szrj 	 things can go wrong if find_reload swaps commutative operands
3515*38fd1498Szrj 	 and one is inside RTL that has been copied while the other is not.  */
3516*38fd1498Szrj       new_body = old_body;
3517*38fd1498Szrj       if (! replace)
3518*38fd1498Szrj 	{
3519*38fd1498Szrj 	  new_body = copy_insn (old_body);
3520*38fd1498Szrj 	  if (REG_NOTES (insn))
3521*38fd1498Szrj 	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3522*38fd1498Szrj 	}
3523*38fd1498Szrj       PATTERN (insn) = new_body;
3524*38fd1498Szrj 
3525*38fd1498Szrj       /* If we had a move insn but now we don't, rerecognize it.  This will
3526*38fd1498Szrj 	 cause spurious re-recognition if the old move had a PARALLEL since
3527*38fd1498Szrj 	 the new one still will; but we can't call single_set without
3528*38fd1498Szrj 	 having put NEW_BODY into the insn, and the re-recognition won't
3529*38fd1498Szrj 	 hurt in this rare case.  */
3530*38fd1498Szrj       /* ??? Why this huge if statement - why don't we just rerecognize the
3531*38fd1498Szrj 	 thing always?  */
3532*38fd1498Szrj       if (! insn_is_asm
3533*38fd1498Szrj 	  && old_set != 0
3534*38fd1498Szrj 	  && ((REG_P (SET_SRC (old_set))
3535*38fd1498Szrj 	       && (GET_CODE (new_body) != SET
3536*38fd1498Szrj 		   || !REG_P (SET_SRC (new_body))))
3537*38fd1498Szrj 	      /* If this was a load from or store to memory, compare
3538*38fd1498Szrj 		 the MEM in recog_data.operand to the one in the insn.
3539*38fd1498Szrj 		 If they are not equal, then rerecognize the insn.  */
3540*38fd1498Szrj 	      || (old_set != 0
3541*38fd1498Szrj 		  && ((MEM_P (SET_SRC (old_set))
3542*38fd1498Szrj 		       && SET_SRC (old_set) != recog_data.operand[1])
3543*38fd1498Szrj 		      || (MEM_P (SET_DEST (old_set))
3544*38fd1498Szrj 			  && SET_DEST (old_set) != recog_data.operand[0])))
3545*38fd1498Szrj 	      /* If this was an add insn before, rerecognize.  */
3546*38fd1498Szrj 	      || GET_CODE (SET_SRC (old_set)) == PLUS))
3547*38fd1498Szrj 	{
3548*38fd1498Szrj 	  int new_icode = recog (PATTERN (insn), insn, 0);
3549*38fd1498Szrj 	  if (new_icode >= 0)
3550*38fd1498Szrj 	    INSN_CODE (insn) = new_icode;
3551*38fd1498Szrj 	}
3552*38fd1498Szrj     }
3553*38fd1498Szrj 
3554*38fd1498Szrj   /* Restore the old body.  If there were any changes to it, we made a copy
3555*38fd1498Szrj      of it while the changes were still in place, so we'll correctly return
3556*38fd1498Szrj      a modified insn below.  */
3557*38fd1498Szrj   if (! replace)
3558*38fd1498Szrj     {
3559*38fd1498Szrj       /* Restore the old body.  */
3560*38fd1498Szrj       for (i = 0; i < recog_data.n_operands; i++)
3561*38fd1498Szrj 	/* Restoring a top-level match_parallel would clobber the new_body
3562*38fd1498Szrj 	   we installed in the insn.  */
3563*38fd1498Szrj 	if (recog_data.operand_loc[i] != &PATTERN (insn))
3564*38fd1498Szrj 	  *recog_data.operand_loc[i] = orig_operand[i];
3565*38fd1498Szrj       for (i = 0; i < recog_data.n_dups; i++)
3566*38fd1498Szrj 	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3567*38fd1498Szrj     }
3568*38fd1498Szrj 
3569*38fd1498Szrj   /* Update all elimination pairs to reflect the status after the current
3570*38fd1498Szrj      insn.  The changes we make were determined by the earlier call to
3571*38fd1498Szrj      elimination_effects.
3572*38fd1498Szrj 
3573*38fd1498Szrj      We also detect cases where register elimination cannot be done,
3574*38fd1498Szrj      namely, if a register would be both changed and referenced outside a MEM
3575*38fd1498Szrj      in the resulting insn since such an insn is often undefined and, even if
3576*38fd1498Szrj      not, we cannot know what meaning will be given to it.  Note that it is
3577*38fd1498Szrj      valid to have a register used in an address in an insn that changes it
3578*38fd1498Szrj      (presumably with a pre- or post-increment or decrement).
3579*38fd1498Szrj 
3580*38fd1498Szrj      If anything changes, return nonzero.  */
3581*38fd1498Szrj 
3582*38fd1498Szrj   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3583*38fd1498Szrj     {
3584*38fd1498Szrj       if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem)
3585*38fd1498Szrj 	ep->can_eliminate = 0;
3586*38fd1498Szrj 
3587*38fd1498Szrj       ep->ref_outside_mem = 0;
3588*38fd1498Szrj 
3589*38fd1498Szrj       if (maybe_ne (ep->previous_offset, ep->offset))
3590*38fd1498Szrj 	val = 1;
3591*38fd1498Szrj     }
3592*38fd1498Szrj 
3593*38fd1498Szrj  done:
3594*38fd1498Szrj   /* If we changed something, perform elimination in REG_NOTES.  This is
3595*38fd1498Szrj      needed even when REPLACE is zero because a REG_DEAD note might refer
3596*38fd1498Szrj      to a register that we eliminate and could cause a different number
3597*38fd1498Szrj      of spill registers to be needed in the final reload pass than in
3598*38fd1498Szrj      the pre-passes.  */
3599*38fd1498Szrj   if (val && REG_NOTES (insn) != 0)
3600*38fd1498Szrj     REG_NOTES (insn)
3601*38fd1498Szrj       = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3602*38fd1498Szrj 			  false);
3603*38fd1498Szrj 
3604*38fd1498Szrj   return val;
3605*38fd1498Szrj }
3606*38fd1498Szrj 
3607*38fd1498Szrj /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3608*38fd1498Szrj    register allocator.  INSN is the instruction we need to examine; we perform
3609*38fd1498Szrj    eliminations in its operands and record cases where eliminating a reg with
3610*38fd1498Szrj    an invariant equivalence would add extra cost.  */
3611*38fd1498Szrj 
3612*38fd1498Szrj #pragma GCC diagnostic push
3613*38fd1498Szrj #pragma GCC diagnostic warning "-Wmaybe-uninitialized"
3614*38fd1498Szrj static void
3615*38fd1498Szrj elimination_costs_in_insn (rtx_insn *insn)
3616*38fd1498Szrj {
3617*38fd1498Szrj   int icode = recog_memoized (insn);
3618*38fd1498Szrj   rtx old_body = PATTERN (insn);
3619*38fd1498Szrj   int insn_is_asm = asm_noperands (old_body) >= 0;
3620*38fd1498Szrj   rtx old_set = single_set (insn);
3621*38fd1498Szrj   int i;
3622*38fd1498Szrj   rtx orig_operand[MAX_RECOG_OPERANDS];
3623*38fd1498Szrj   rtx orig_dup[MAX_RECOG_OPERANDS];
3624*38fd1498Szrj   struct elim_table *ep;
3625*38fd1498Szrj   rtx plus_src, plus_cst_src;
3626*38fd1498Szrj   bool sets_reg_p;
3627*38fd1498Szrj 
3628*38fd1498Szrj   if (! insn_is_asm && icode < 0)
3629*38fd1498Szrj     {
3630*38fd1498Szrj       gcc_assert (DEBUG_INSN_P (insn)
3631*38fd1498Szrj 		  || GET_CODE (PATTERN (insn)) == USE
3632*38fd1498Szrj 		  || GET_CODE (PATTERN (insn)) == CLOBBER
3633*38fd1498Szrj 		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3634*38fd1498Szrj       return;
3635*38fd1498Szrj     }
3636*38fd1498Szrj 
3637*38fd1498Szrj   if (old_set != 0 && REG_P (SET_DEST (old_set))
3638*38fd1498Szrj       && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3639*38fd1498Szrj     {
3640*38fd1498Szrj       /* Check for setting an eliminable register.  */
3641*38fd1498Szrj       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3642*38fd1498Szrj 	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3643*38fd1498Szrj 	  return;
3644*38fd1498Szrj     }
3645*38fd1498Szrj 
3646*38fd1498Szrj   /* We allow one special case which happens to work on all machines we
3647*38fd1498Szrj      currently support: a single set with the source or a REG_EQUAL
3648*38fd1498Szrj      note being a PLUS of an eliminable register and a constant.  */
3649*38fd1498Szrj   plus_src = plus_cst_src = 0;
3650*38fd1498Szrj   sets_reg_p = false;
3651*38fd1498Szrj   if (old_set && REG_P (SET_DEST (old_set)))
3652*38fd1498Szrj     {
3653*38fd1498Szrj       sets_reg_p = true;
3654*38fd1498Szrj       if (GET_CODE (SET_SRC (old_set)) == PLUS)
3655*38fd1498Szrj 	plus_src = SET_SRC (old_set);
3656*38fd1498Szrj       /* First see if the source is of the form (plus (...) CST).  */
3657*38fd1498Szrj       if (plus_src
3658*38fd1498Szrj 	  && CONST_INT_P (XEXP (plus_src, 1)))
3659*38fd1498Szrj 	plus_cst_src = plus_src;
3660*38fd1498Szrj       else if (REG_P (SET_SRC (old_set))
3661*38fd1498Szrj 	       || plus_src)
3662*38fd1498Szrj 	{
3663*38fd1498Szrj 	  /* Otherwise, see if we have a REG_EQUAL note of the form
3664*38fd1498Szrj 	     (plus (...) CST).  */
3665*38fd1498Szrj 	  rtx links;
3666*38fd1498Szrj 	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3667*38fd1498Szrj 	    {
3668*38fd1498Szrj 	      if ((REG_NOTE_KIND (links) == REG_EQUAL
3669*38fd1498Szrj 		   || REG_NOTE_KIND (links) == REG_EQUIV)
3670*38fd1498Szrj 		  && GET_CODE (XEXP (links, 0)) == PLUS
3671*38fd1498Szrj 		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3672*38fd1498Szrj 		{
3673*38fd1498Szrj 		  plus_cst_src = XEXP (links, 0);
3674*38fd1498Szrj 		  break;
3675*38fd1498Szrj 		}
3676*38fd1498Szrj 	    }
3677*38fd1498Szrj 	}
3678*38fd1498Szrj     }
3679*38fd1498Szrj 
3680*38fd1498Szrj   /* Determine the effects of this insn on elimination offsets.  */
3681*38fd1498Szrj   elimination_effects (old_body, VOIDmode);
3682*38fd1498Szrj 
3683*38fd1498Szrj   /* Eliminate all eliminable registers occurring in operands that
3684*38fd1498Szrj      can be handled by reload.  */
3685*38fd1498Szrj   extract_insn (insn);
3686*38fd1498Szrj   int n_dups = recog_data.n_dups;
3687*38fd1498Szrj   for (i = 0; i < n_dups; i++)
3688*38fd1498Szrj     orig_dup[i] = *recog_data.dup_loc[i];
3689*38fd1498Szrj 
3690*38fd1498Szrj   int n_operands = recog_data.n_operands;
3691*38fd1498Szrj   for (i = 0; i < n_operands; i++)
3692*38fd1498Szrj     {
3693*38fd1498Szrj       orig_operand[i] = recog_data.operand[i];
3694*38fd1498Szrj 
3695*38fd1498Szrj       /* For an asm statement, every operand is eliminable.  */
3696*38fd1498Szrj       if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3697*38fd1498Szrj 	{
3698*38fd1498Szrj 	  bool is_set_src, in_plus;
3699*38fd1498Szrj 
3700*38fd1498Szrj 	  /* Check for setting a register that we know about.  */
3701*38fd1498Szrj 	  if (recog_data.operand_type[i] != OP_IN
3702*38fd1498Szrj 	      && REG_P (orig_operand[i]))
3703*38fd1498Szrj 	    {
3704*38fd1498Szrj 	      /* If we are assigning to a register that can be eliminated, it
3705*38fd1498Szrj 		 must be as part of a PARALLEL, since the code above handles
3706*38fd1498Szrj 		 single SETs.  We must indicate that we can no longer
3707*38fd1498Szrj 		 eliminate this reg.  */
3708*38fd1498Szrj 	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3709*38fd1498Szrj 		   ep++)
3710*38fd1498Szrj 		if (ep->from_rtx == orig_operand[i])
3711*38fd1498Szrj 		  ep->can_eliminate = 0;
3712*38fd1498Szrj 	    }
3713*38fd1498Szrj 
3714*38fd1498Szrj 	  /* Companion to the above plus substitution, we can allow
3715*38fd1498Szrj 	     invariants as the source of a plain move.  */
3716*38fd1498Szrj 	  is_set_src = false;
3717*38fd1498Szrj 	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3718*38fd1498Szrj 	    is_set_src = true;
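	  /* When the SET destination is not a plain register, replacing a
	     pseudo by its invariant equivalence in the source cannot fold
	     into a simple move; record that extra cost for the register
	     allocator.  */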
3719*38fd1498Szrj 	  if (is_set_src && !sets_reg_p)
3720*38fd1498Szrj 	    note_reg_elim_costly (SET_SRC (old_set), insn);
3721*38fd1498Szrj 	  in_plus = false;
3722*38fd1498Szrj 	  if (plus_src && sets_reg_p
3723*38fd1498Szrj 	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3724*38fd1498Szrj 		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3725*38fd1498Szrj 	    in_plus = true;
3726*38fd1498Szrj 
3727*38fd1498Szrj 	  eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3728*38fd1498Szrj 			    NULL_RTX,
3729*38fd1498Szrj 			    is_set_src || in_plus, true);
3730*38fd1498Szrj 	  /* Terminate the search in check_eliminable_occurrences at
3731*38fd1498Szrj 	     this point.  */
3732*38fd1498Szrj 	  *recog_data.operand_loc[i] = 0;
3733*38fd1498Szrj 	}
3734*38fd1498Szrj     }
3735*38fd1498Szrj 
3736*38fd1498Szrj   for (i = 0; i < n_dups; i++)
3737*38fd1498Szrj     *recog_data.dup_loc[i]
3738*38fd1498Szrj       = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3739*38fd1498Szrj 
3740*38fd1498Szrj   /* If any eliminable registers remain, they aren't eliminable anymore.  */
3741*38fd1498Szrj   check_eliminable_occurrences (old_body);
3742*38fd1498Szrj 
3743*38fd1498Szrj   /* Restore the old body.  */
3744*38fd1498Szrj   for (i = 0; i < n_operands; i++)
3745*38fd1498Szrj     *recog_data.operand_loc[i] = orig_operand[i];
3746*38fd1498Szrj   for (i = 0; i < n_dups; i++)
3747*38fd1498Szrj     *recog_data.dup_loc[i] = orig_dup[i];
3748*38fd1498Szrj 
3749*38fd1498Szrj   /* Update all elimination pairs to reflect the status after the current
3750*38fd1498Szrj      insn.  The changes we make were determined by the earlier call to
3751*38fd1498Szrj      elimination_effects.  */
3752*38fd1498Szrj 
3753*38fd1498Szrj   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3754*38fd1498Szrj     {
3755*38fd1498Szrj       if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem)
3756*38fd1498Szrj 	ep->can_eliminate = 0;
3757*38fd1498Szrj 
3758*38fd1498Szrj       ep->ref_outside_mem = 0;
3759*38fd1498Szrj     }
3760*38fd1498Szrj 
3761*38fd1498Szrj   return;
3762*38fd1498Szrj }
3763*38fd1498Szrj #pragma GCC diagnostic pop
3764*38fd1498Szrj 
3765*38fd1498Szrj /* Loop through all elimination pairs.
3766*38fd1498Szrj    Recalculate the number not at initial offset.
3767*38fd1498Szrj 
3768*38fd1498Szrj    Also record each pair's current offset as its previous offset.  */
3770*38fd1498Szrj 
3771*38fd1498Szrj static void
3772*38fd1498Szrj update_eliminable_offsets (void)
3773*38fd1498Szrj {
3774*38fd1498Szrj   struct elim_table *ep;
3775*38fd1498Szrj 
3776*38fd1498Szrj   num_not_at_initial_offset = 0;
3777*38fd1498Szrj   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3778*38fd1498Szrj     {
3779*38fd1498Szrj       ep->previous_offset = ep->offset;
3780*38fd1498Szrj       if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset))
3781*38fd1498Szrj 	num_not_at_initial_offset++;
3782*38fd1498Szrj     }
3783*38fd1498Szrj }
3784*38fd1498Szrj 
3785*38fd1498Szrj /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3786*38fd1498Szrj    replacement we currently believe is valid, mark it as not eliminable if X
3787*38fd1498Szrj    modifies DEST in any way other than by adding a constant integer to it.
3788*38fd1498Szrj 
3789*38fd1498Szrj    If DEST is the hard frame pointer, we do nothing because we assume that
3790*38fd1498Szrj    all assignments to the hard frame pointer are nonlocal gotos and are being
3791*38fd1498Szrj    done at a time when they are valid and do not disturb anything else.
3792*38fd1498Szrj    Some machines want to eliminate a fake argument pointer with either the
3793*38fd1498Szrj    frame or stack pointer.  Assignments to the hard frame pointer must not
3794*38fd1498Szrj    prevent this elimination.
3795*38fd1498Szrj 
3796*38fd1498Szrj    Called via note_stores from reload before starting its passes to scan
3797*38fd1498Szrj    the insns of the function.  */
3798*38fd1498Szrj 
3799*38fd1498Szrj static void
3800*38fd1498Szrj mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3801*38fd1498Szrj {
3802*38fd1498Szrj   unsigned int i;
3803*38fd1498Szrj 
3804*38fd1498Szrj   /* A SUBREG of a hard register here is just changing its mode.  We should
3805*38fd1498Szrj      not see a SUBREG of an eliminable hard register, but check just in
3806*38fd1498Szrj      case.  */
3807*38fd1498Szrj   if (GET_CODE (dest) == SUBREG)
3808*38fd1498Szrj     dest = SUBREG_REG (dest);
3809*38fd1498Szrj 
3810*38fd1498Szrj   if (dest == hard_frame_pointer_rtx)
3811*38fd1498Szrj     return;
3812*38fd1498Szrj 
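  /* Any store to an elimination target other than adding a constant to it
     invalidates the elimination, because the offset can no longer be
     tracked.  For example (purely illustrative), an insn such as
	 (set (reg sp) (reg r0))
     would disable every elimination whose target is the stack pointer.  */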
3813*38fd1498Szrj   for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3814*38fd1498Szrj     if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3815*38fd1498Szrj 	&& (GET_CODE (x) != SET
3816*38fd1498Szrj 	    || GET_CODE (SET_SRC (x)) != PLUS
3817*38fd1498Szrj 	    || XEXP (SET_SRC (x), 0) != dest
3818*38fd1498Szrj 	    || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3819*38fd1498Szrj       {
3820*38fd1498Szrj 	reg_eliminate[i].can_eliminate_previous
3821*38fd1498Szrj 	  = reg_eliminate[i].can_eliminate = 0;
3822*38fd1498Szrj 	num_eliminable--;
3823*38fd1498Szrj       }
3824*38fd1498Szrj }
3825*38fd1498Szrj 
3826*38fd1498Szrj /* Verify that the initial elimination offsets did not change since the
3827*38fd1498Szrj    last call to set_initial_elim_offsets.  This is used to catch cases
3828*38fd1498Szrj    where something illegal happened during reload_as_needed that could
3829*38fd1498Szrj    cause incorrect code to be generated if we did not check for it.  */
3830*38fd1498Szrj 
3831*38fd1498Szrj static bool
3832*38fd1498Szrj verify_initial_elim_offsets (void)
3833*38fd1498Szrj {
3834*38fd1498Szrj   poly_int64 t;
3835*38fd1498Szrj   struct elim_table *ep;
3836*38fd1498Szrj 
3837*38fd1498Szrj   if (!num_eliminable)
3838*38fd1498Szrj     return true;
3839*38fd1498Szrj 
3840*38fd1498Szrj   targetm.compute_frame_layout ();
3841*38fd1498Szrj   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3842*38fd1498Szrj     {
3843*38fd1498Szrj       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3844*38fd1498Szrj       if (maybe_ne (t, ep->initial_offset))
3845*38fd1498Szrj 	return false;
3846*38fd1498Szrj     }
3847*38fd1498Szrj 
3848*38fd1498Szrj   return true;
3849*38fd1498Szrj }
3850*38fd1498Szrj 
3851*38fd1498Szrj /* Reset all offsets on eliminable registers to their initial values.  */
3852*38fd1498Szrj 
3853*38fd1498Szrj static void
3854*38fd1498Szrj set_initial_elim_offsets (void)
3855*38fd1498Szrj {
3856*38fd1498Szrj   struct elim_table *ep = reg_eliminate;
3857*38fd1498Szrj 
3858*38fd1498Szrj   targetm.compute_frame_layout ();
3859*38fd1498Szrj   for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3860*38fd1498Szrj     {
3861*38fd1498Szrj       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3862*38fd1498Szrj       ep->previous_offset = ep->offset = ep->initial_offset;
3863*38fd1498Szrj     }
3864*38fd1498Szrj 
3865*38fd1498Szrj   num_not_at_initial_offset = 0;
3866*38fd1498Szrj }
3867*38fd1498Szrj 
3868*38fd1498Szrj /* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3869*38fd1498Szrj 
3870*38fd1498Szrj static void
3871*38fd1498Szrj set_initial_eh_label_offset (rtx label)
3872*38fd1498Szrj {
3873*38fd1498Szrj   set_label_offsets (label, NULL, 1);
3874*38fd1498Szrj }
3875*38fd1498Szrj 
3876*38fd1498Szrj /* Initialize the known label offsets.
3877*38fd1498Szrj    Set a known offset for each forced label to be at the initial offset
3878*38fd1498Szrj    of each elimination.  We do this because we assume that all
3879*38fd1498Szrj    computed jumps occur from a location where each elimination is
3880*38fd1498Szrj    at its initial offset.
3881*38fd1498Szrj    For all other labels, show that we don't know the offsets.  */
3882*38fd1498Szrj 
3883*38fd1498Szrj static void
3884*38fd1498Szrj set_initial_label_offsets (void)
3885*38fd1498Szrj {
3886*38fd1498Szrj   memset (offsets_known_at, 0, num_labels);
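  /* Forget everything first; the code below then records initial offsets
     only for forced labels, nonlocal-goto handler labels and EH labels,
     as described in the comment above.  */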
3887*38fd1498Szrj 
3888*38fd1498Szrj   unsigned int i;
3889*38fd1498Szrj   rtx_insn *insn;
3890*38fd1498Szrj   FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
3891*38fd1498Szrj     set_label_offsets (insn, NULL, 1);
3892*38fd1498Szrj 
3893*38fd1498Szrj   for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
3894*38fd1498Szrj     if (x->insn ())
3895*38fd1498Szrj       set_label_offsets (x->insn (), NULL, 1);
3896*38fd1498Szrj 
3897*38fd1498Szrj   for_each_eh_label (set_initial_eh_label_offset);
3898*38fd1498Szrj }
3899*38fd1498Szrj 
3900*38fd1498Szrj /* Set all elimination offsets to the known values for the code label given
3901*38fd1498Szrj    by INSN.  */
3902*38fd1498Szrj 
3903*38fd1498Szrj static void
3904*38fd1498Szrj set_offsets_for_label (rtx_insn *insn)
3905*38fd1498Szrj {
3906*38fd1498Szrj   unsigned int i;
3907*38fd1498Szrj   int label_nr = CODE_LABEL_NUMBER (insn);
3908*38fd1498Szrj   struct elim_table *ep;
3909*38fd1498Szrj 
3910*38fd1498Szrj   num_not_at_initial_offset = 0;
3911*38fd1498Szrj   for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3912*38fd1498Szrj     {
3913*38fd1498Szrj       ep->offset = ep->previous_offset
3914*38fd1498Szrj 		 = offsets_at[label_nr - first_label_num][i];
3915*38fd1498Szrj       if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset))
3916*38fd1498Szrj 	num_not_at_initial_offset++;
3917*38fd1498Szrj     }
3918*38fd1498Szrj }
3919*38fd1498Szrj 
3920*38fd1498Szrj /* See if anything that happened changes which eliminations are valid.
3921*38fd1498Szrj    For example, on the SPARC, whether or not the frame pointer can
3922*38fd1498Szrj    be eliminated can depend on what registers have been used.  We need
3923*38fd1498Szrj    not check some conditions again (such as flag_omit_frame_pointer)
3924*38fd1498Szrj    since they can't have changed.  */
3925*38fd1498Szrj 
3926*38fd1498Szrj static void
3927*38fd1498Szrj update_eliminables (HARD_REG_SET *pset)
3928*38fd1498Szrj {
3929*38fd1498Szrj   int previous_frame_pointer_needed = frame_pointer_needed;
3930*38fd1498Szrj   struct elim_table *ep;
3931*38fd1498Szrj 
3932*38fd1498Szrj   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3933*38fd1498Szrj     if ((ep->from == HARD_FRAME_POINTER_REGNUM
3934*38fd1498Szrj          && targetm.frame_pointer_required ())
3935*38fd1498Szrj 	|| ! targetm.can_eliminate (ep->from, ep->to)
3936*38fd1498Szrj 	)
3937*38fd1498Szrj       ep->can_eliminate = 0;
3938*38fd1498Szrj 
3939*38fd1498Szrj   /* Look for the case where we have discovered that we can't replace
3940*38fd1498Szrj      register A with register B and that means that we will now be
3941*38fd1498Szrj      trying to replace register A with register C.  This means we can
3942*38fd1498Szrj      no longer replace register C with register B and we need to disable
3943*38fd1498Szrj      such an elimination, if it exists.  This occurs often with A == ap,
3944*38fd1498Szrj      B == sp, and C == fp.  */
3945*38fd1498Szrj 
3946*38fd1498Szrj   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3947*38fd1498Szrj     {
3948*38fd1498Szrj       struct elim_table *op;
3949*38fd1498Szrj       int new_to = -1;
3950*38fd1498Szrj 
3951*38fd1498Szrj       if (! ep->can_eliminate && ep->can_eliminate_previous)
3952*38fd1498Szrj 	{
3953*38fd1498Szrj 	  /* Find the current elimination for ep->from, if there is a
3954*38fd1498Szrj 	     new one.  */
3955*38fd1498Szrj 	  for (op = reg_eliminate;
3956*38fd1498Szrj 	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3957*38fd1498Szrj 	    if (op->from == ep->from && op->can_eliminate)
3958*38fd1498Szrj 	      {
3959*38fd1498Szrj 		new_to = op->to;
3960*38fd1498Szrj 		break;
3961*38fd1498Szrj 	      }
3962*38fd1498Szrj 
3963*38fd1498Szrj 	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
3964*38fd1498Szrj 	     disable it.  */
3965*38fd1498Szrj 	  for (op = reg_eliminate;
3966*38fd1498Szrj 	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3967*38fd1498Szrj 	    if (op->from == new_to && op->to == ep->to)
3968*38fd1498Szrj 	      op->can_eliminate = 0;
3969*38fd1498Szrj 	}
3970*38fd1498Szrj     }
3971*38fd1498Szrj 
3972*38fd1498Szrj   /* See if any registers that we thought we could eliminate the previous
3973*38fd1498Szrj      time are no longer eliminable.  If so, something has changed and we
3974*38fd1498Szrj      must spill the register.  Also, recompute the number of eliminable
3975*38fd1498Szrj      registers and see if the frame pointer is needed; it is if there is
3976*38fd1498Szrj      no elimination of the frame pointer that we can perform.  */
3977*38fd1498Szrj 
3978*38fd1498Szrj   frame_pointer_needed = 1;
3979*38fd1498Szrj   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3980*38fd1498Szrj     {
3981*38fd1498Szrj       if (ep->can_eliminate
3982*38fd1498Szrj 	  && ep->from == FRAME_POINTER_REGNUM
3983*38fd1498Szrj 	  && ep->to != HARD_FRAME_POINTER_REGNUM
3984*38fd1498Szrj 	  && (! SUPPORTS_STACK_ALIGNMENT
3985*38fd1498Szrj 	      || ! crtl->stack_realign_needed))
3986*38fd1498Szrj 	frame_pointer_needed = 0;
3987*38fd1498Szrj 
3988*38fd1498Szrj       if (! ep->can_eliminate && ep->can_eliminate_previous)
3989*38fd1498Szrj 	{
3990*38fd1498Szrj 	  ep->can_eliminate_previous = 0;
3991*38fd1498Szrj 	  SET_HARD_REG_BIT (*pset, ep->from);
3992*38fd1498Szrj 	  num_eliminable--;
3993*38fd1498Szrj 	}
3994*38fd1498Szrj     }
3995*38fd1498Szrj 
3996*38fd1498Szrj   /* If we didn't need a frame pointer last time, but we do now, spill
3997*38fd1498Szrj      the hard frame pointer.  */
3998*38fd1498Szrj   if (frame_pointer_needed && ! previous_frame_pointer_needed)
3999*38fd1498Szrj     SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
4000*38fd1498Szrj }
4001*38fd1498Szrj 
4002*38fd1498Szrj /* Call update_eliminables and spill any registers we can't eliminate anymore.
4003*38fd1498Szrj    Return true iff a register was spilled.  */
4004*38fd1498Szrj 
4005*38fd1498Szrj static bool
4006*38fd1498Szrj update_eliminables_and_spill (void)
4007*38fd1498Szrj {
4008*38fd1498Szrj   int i;
4009*38fd1498Szrj   bool did_spill = false;
4010*38fd1498Szrj   HARD_REG_SET to_spill;
4011*38fd1498Szrj   CLEAR_HARD_REG_SET (to_spill);
4012*38fd1498Szrj   update_eliminables (&to_spill);
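  /* A register whose elimination has just failed is needed for its own
     purposes again, so it can no longer serve as a spill register; remove
     it from USED_SPILL_REGS and kick any pseudos out of it below.  */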
4013*38fd1498Szrj   AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
4014*38fd1498Szrj 
4015*38fd1498Szrj   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4016*38fd1498Szrj     if (TEST_HARD_REG_BIT (to_spill, i))
4017*38fd1498Szrj       {
4018*38fd1498Szrj 	spill_hard_reg (i, 1);
4019*38fd1498Szrj 	did_spill = true;
4020*38fd1498Szrj 
4021*38fd1498Szrj 	/* Regardless of the state of spills, if we previously had
4022*38fd1498Szrj 	   a register that we thought we could eliminate, but now can
4023*38fd1498Szrj 	   not eliminate, we must run another pass.
4024*38fd1498Szrj 
4025*38fd1498Szrj 	   Consider pseudos which have an entry in reg_equiv_* which
4026*38fd1498Szrj 	   reference an eliminable register.  We must make another pass
4027*38fd1498Szrj 	   to update reg_equiv_* so that we do not substitute in the
4028*38fd1498Szrj 	   old value from when we thought the elimination could be
4029*38fd1498Szrj 	   performed.  */
4030*38fd1498Szrj       }
4031*38fd1498Szrj   return did_spill;
4032*38fd1498Szrj }
4033*38fd1498Szrj 
4034*38fd1498Szrj /* Return true if X is used as the target register of an elimination.  */
4035*38fd1498Szrj 
4036*38fd1498Szrj bool
4037*38fd1498Szrj elimination_target_reg_p (rtx x)
4038*38fd1498Szrj {
4039*38fd1498Szrj   struct elim_table *ep;
4040*38fd1498Szrj 
4041*38fd1498Szrj   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4042*38fd1498Szrj     if (ep->to_rtx == x && ep->can_eliminate)
4043*38fd1498Szrj       return true;
4044*38fd1498Szrj 
4045*38fd1498Szrj   return false;
4046*38fd1498Szrj }
4047*38fd1498Szrj 
4048*38fd1498Szrj /* Initialize the table of registers to eliminate.
4049*38fd1498Szrj    Pre-condition: global flag frame_pointer_needed has been set before
4050*38fd1498Szrj    calling this function.  */
4051*38fd1498Szrj 
4052*38fd1498Szrj static void
4053*38fd1498Szrj init_elim_table (void)
4054*38fd1498Szrj {
4055*38fd1498Szrj   struct elim_table *ep;
4056*38fd1498Szrj   const struct elim_table_1 *ep1;
4057*38fd1498Szrj 
4058*38fd1498Szrj   if (!reg_eliminate)
4059*38fd1498Szrj     reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
4060*38fd1498Szrj 
4061*38fd1498Szrj   num_eliminable = 0;
4062*38fd1498Szrj 
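  /* Copy the target's eliminable-register table and compute the initial
     can_eliminate flags.  An elimination whose target is the stack pointer
     is disabled when the frame pointer is needed, unless the target
     supports stack realignment and stack_realign_fp is set.  */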
4063*38fd1498Szrj   for (ep = reg_eliminate, ep1 = reg_eliminate_1;
4064*38fd1498Szrj        ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
4065*38fd1498Szrj     {
4066*38fd1498Szrj       ep->from = ep1->from;
4067*38fd1498Szrj       ep->to = ep1->to;
4068*38fd1498Szrj       ep->can_eliminate = ep->can_eliminate_previous
4069*38fd1498Szrj 	= (targetm.can_eliminate (ep->from, ep->to)
4070*38fd1498Szrj 	   && ! (ep->to == STACK_POINTER_REGNUM
4071*38fd1498Szrj 		 && frame_pointer_needed
4072*38fd1498Szrj 		 && (! SUPPORTS_STACK_ALIGNMENT
4073*38fd1498Szrj 		     || ! stack_realign_fp)));
4074*38fd1498Szrj     }
4075*38fd1498Szrj 
4076*38fd1498Szrj   /* Count the number of eliminable registers and build the FROM and TO
4077*38fd1498Szrj      REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
4078*38fd1498Szrj      gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4079*38fd1498Szrj      We depend on this.  */
4080*38fd1498Szrj   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4081*38fd1498Szrj     {
4082*38fd1498Szrj       num_eliminable += ep->can_eliminate;
4083*38fd1498Szrj       ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
4084*38fd1498Szrj       ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
4085*38fd1498Szrj     }
4086*38fd1498Szrj }
4087*38fd1498Szrj 
4088*38fd1498Szrj /* Find all the pseudo registers that didn't get hard regs
4089*38fd1498Szrj    but do have known equivalent constants or memory slots.
4090*38fd1498Szrj    These include parameters (known equivalent to parameter slots)
4091*38fd1498Szrj    and cse'd or loop-moved constant memory addresses.
4092*38fd1498Szrj 
4093*38fd1498Szrj    Record constant equivalents in reg_equiv_constant
4094*38fd1498Szrj    so they will be substituted by find_reloads.
4095*38fd1498Szrj    Record memory equivalents in reg_mem_equiv so they can
4096*38fd1498Szrj    be substituted eventually by altering the REG-rtx's.  */
4097*38fd1498Szrj 
4098*38fd1498Szrj static void
4099*38fd1498Szrj init_eliminable_invariants (rtx_insn *first, bool do_subregs)
4100*38fd1498Szrj {
4101*38fd1498Szrj   int i;
4102*38fd1498Szrj   rtx_insn *insn;
4103*38fd1498Szrj 
4104*38fd1498Szrj   grow_reg_equivs ();
4105*38fd1498Szrj   if (do_subregs)
4106*38fd1498Szrj     reg_max_ref_mode = XCNEWVEC (machine_mode, max_regno);
4107*38fd1498Szrj   else
4108*38fd1498Szrj     reg_max_ref_mode = NULL;
4109*38fd1498Szrj 
4110*38fd1498Szrj   num_eliminable_invariants = 0;
4111*38fd1498Szrj 
4112*38fd1498Szrj   first_label_num = get_first_label_num ();
4113*38fd1498Szrj   num_labels = max_label_num () - first_label_num;
4114*38fd1498Szrj 
4115*38fd1498Szrj   /* Allocate the tables used to store offset information at labels.  */
4116*38fd1498Szrj   offsets_known_at = XNEWVEC (char, num_labels);
4117*38fd1498Szrj   offsets_at = (poly_int64_pod (*)[NUM_ELIMINABLE_REGS])
4118*38fd1498Szrj     xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (poly_int64));
4119*38fd1498Szrj 
4120*38fd1498Szrj   /* Look for REG_EQUIV notes; record what each pseudo is equivalent
4121*38fd1498Szrj      to.  If DO_SUBREGS is true, also find all paradoxical subregs and
4122*38fd1498Szrj      find the largest such mode for each pseudo.  FIRST is the head of the
4123*38fd1498Szrj      insn list.  */
4124*38fd1498Szrj 
4125*38fd1498Szrj   for (insn = first; insn; insn = NEXT_INSN (insn))
4126*38fd1498Szrj     {
4127*38fd1498Szrj       rtx set = single_set (insn);
4128*38fd1498Szrj 
4129*38fd1498Szrj       /* We may introduce USEs that we want to remove at the end, so
4130*38fd1498Szrj 	 we'll mark them with QImode.  Make sure there are no
4131*38fd1498Szrj 	 previously-marked insns left behind by, say, regmove.  */
4132*38fd1498Szrj       if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
4133*38fd1498Szrj 	  && GET_MODE (insn) != VOIDmode)
4134*38fd1498Szrj 	PUT_MODE (insn, VOIDmode);
4135*38fd1498Szrj 
4136*38fd1498Szrj       if (do_subregs && NONDEBUG_INSN_P (insn))
4137*38fd1498Szrj 	scan_paradoxical_subregs (PATTERN (insn));
4138*38fd1498Szrj 
4139*38fd1498Szrj       if (set != 0 && REG_P (SET_DEST (set)))
4140*38fd1498Szrj 	{
4141*38fd1498Szrj 	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
4142*38fd1498Szrj 	  rtx x;
4143*38fd1498Szrj 
4144*38fd1498Szrj 	  if (! note)
4145*38fd1498Szrj 	    continue;
4146*38fd1498Szrj 
4147*38fd1498Szrj 	  i = REGNO (SET_DEST (set));
4148*38fd1498Szrj 	  x = XEXP (note, 0);
4149*38fd1498Szrj 
4150*38fd1498Szrj 	  if (i <= LAST_VIRTUAL_REGISTER)
4151*38fd1498Szrj 	    continue;
4152*38fd1498Szrj 
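	  /* Classify the equivalence: a legitimate MEM is recorded in
	     reg_equiv_memory_loc, a PLUS of the frame or arg pointer and a
	     constant (or those pointers themselves) in reg_equiv_invariant,
	     a directly legitimate constant in reg_equiv_constant, and any
	     other constant is forced into the constant pool.  */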
4153*38fd1498Szrj 	  /* If flag_pic and we have constant, verify it's legitimate.  */
4154*38fd1498Szrj 	  if (!CONSTANT_P (x)
4155*38fd1498Szrj 	      || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
4156*38fd1498Szrj 	    {
4157*38fd1498Szrj 	      /* It can happen that a REG_EQUIV note contains a MEM
4158*38fd1498Szrj 		 that is not a legitimate memory operand.  As later
4159*38fd1498Szrj 		 stages of reload assume that all addresses found
4160*38fd1498Szrj 		 in the reg_equiv_* arrays were originally legitimate,
4161*38fd1498Szrj 		 we ignore such REG_EQUIV notes.  */
4162*38fd1498Szrj 	      if (memory_operand (x, VOIDmode))
4163*38fd1498Szrj 		{
4164*38fd1498Szrj 		  /* Always unshare the equivalence, so we can
4165*38fd1498Szrj 		     substitute into this insn without touching the
4166*38fd1498Szrj 		       equivalence.  */
4167*38fd1498Szrj 		  reg_equiv_memory_loc (i) = copy_rtx (x);
4168*38fd1498Szrj 		}
4169*38fd1498Szrj 	      else if (function_invariant_p (x))
4170*38fd1498Szrj 		{
4171*38fd1498Szrj 		  machine_mode mode;
4172*38fd1498Szrj 
4173*38fd1498Szrj 		  mode = GET_MODE (SET_DEST (set));
4174*38fd1498Szrj 		  if (GET_CODE (x) == PLUS)
4175*38fd1498Szrj 		    {
4176*38fd1498Szrj 		      /* This is PLUS of frame pointer and a constant,
4177*38fd1498Szrj 			 and might be shared.  Unshare it.  */
4178*38fd1498Szrj 		      reg_equiv_invariant (i) = copy_rtx (x);
4179*38fd1498Szrj 		      num_eliminable_invariants++;
4180*38fd1498Szrj 		    }
4181*38fd1498Szrj 		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
4182*38fd1498Szrj 		    {
4183*38fd1498Szrj 		      reg_equiv_invariant (i) = x;
4184*38fd1498Szrj 		      num_eliminable_invariants++;
4185*38fd1498Szrj 		    }
4186*38fd1498Szrj 		  else if (targetm.legitimate_constant_p (mode, x))
4187*38fd1498Szrj 		    reg_equiv_constant (i) = x;
4188*38fd1498Szrj 		  else
4189*38fd1498Szrj 		    {
4190*38fd1498Szrj 		      reg_equiv_memory_loc (i) = force_const_mem (mode, x);
4191*38fd1498Szrj 		      if (! reg_equiv_memory_loc (i))
4192*38fd1498Szrj 			reg_equiv_init (i) = NULL;
4193*38fd1498Szrj 		    }
4194*38fd1498Szrj 		}
4195*38fd1498Szrj 	      else
4196*38fd1498Szrj 		{
4197*38fd1498Szrj 		  reg_equiv_init (i) = NULL;
4198*38fd1498Szrj 		  continue;
4199*38fd1498Szrj 		}
4200*38fd1498Szrj 	    }
4201*38fd1498Szrj 	  else
4202*38fd1498Szrj 	    reg_equiv_init (i) = NULL;
4203*38fd1498Szrj 	}
4204*38fd1498Szrj     }
4205*38fd1498Szrj 
4206*38fd1498Szrj   if (dump_file)
4207*38fd1498Szrj     for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4208*38fd1498Szrj       if (reg_equiv_init (i))
4209*38fd1498Szrj 	{
4210*38fd1498Szrj 	  fprintf (dump_file, "init_insns for %u: ", i);
4211*38fd1498Szrj 	  print_inline_rtx (dump_file, reg_equiv_init (i), 20);
4212*38fd1498Szrj 	  fprintf (dump_file, "\n");
4213*38fd1498Szrj 	}
4214*38fd1498Szrj }
4215*38fd1498Szrj 
4216*38fd1498Szrj /* Indicate that we no longer have known memory locations or constants.
4217*38fd1498Szrj    Free all data involved in tracking these.  */
4218*38fd1498Szrj 
4219*38fd1498Szrj static void
4220*38fd1498Szrj free_reg_equiv (void)
4221*38fd1498Szrj {
4222*38fd1498Szrj   int i;
4223*38fd1498Szrj 
4224*38fd1498Szrj   free (offsets_known_at);
4225*38fd1498Szrj   free (offsets_at);
4226*38fd1498Szrj   offsets_at = 0;
4227*38fd1498Szrj   offsets_known_at = 0;
4228*38fd1498Szrj 
4229*38fd1498Szrj   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4230*38fd1498Szrj     if (reg_equiv_alt_mem_list (i))
4231*38fd1498Szrj       free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4232*38fd1498Szrj   vec_free (reg_equivs);
4233*38fd1498Szrj }
4234*38fd1498Szrj 
4235*38fd1498Szrj /* Kick all pseudos out of hard register REGNO.
4236*38fd1498Szrj 
4237*38fd1498Szrj    If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4238*38fd1498Szrj    because we found we can't eliminate some register.  In that case, no pseudos
4239*38fd1498Szrj    are allowed to be in the register, even if they are only in a block that
4240*38fd1498Szrj    doesn't require spill registers, unlike the case when we are spilling this
4241*38fd1498Szrj    hard reg to produce another spill register.
4242*38fd1498Szrj 
4243*38fd1498Szrj    Pseudos that need to be kicked out are recorded in spilled_pseudos.  */
4244*38fd1498Szrj 
4245*38fd1498Szrj static void
4246*38fd1498Szrj spill_hard_reg (unsigned int regno, int cant_eliminate)
4247*38fd1498Szrj {
4248*38fd1498Szrj   int i;
4249*38fd1498Szrj 
4250*38fd1498Szrj   if (cant_eliminate)
4251*38fd1498Szrj     {
4252*38fd1498Szrj       SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4253*38fd1498Szrj       df_set_regs_ever_live (regno, true);
4254*38fd1498Szrj     }
4255*38fd1498Szrj 
4256*38fd1498Szrj   /* Spill every pseudo reg that was allocated to this reg
4257*38fd1498Szrj      or to something that overlaps this reg.  */
4258*38fd1498Szrj 
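  /* A multi-word pseudo can overlap REGNO without starting at it: for
     example (purely illustrative), a DImode pseudo allocated to hard
     register REGNO - 1 on a 32-bit target also occupies REGNO, which is
     what the end_hard_regno test below catches.  */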
4259*38fd1498Szrj   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4260*38fd1498Szrj     if (reg_renumber[i] >= 0
4261*38fd1498Szrj 	&& (unsigned int) reg_renumber[i] <= regno
4262*38fd1498Szrj 	&& end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4263*38fd1498Szrj       SET_REGNO_REG_SET (&spilled_pseudos, i);
4264*38fd1498Szrj }
4265*38fd1498Szrj 
4266*38fd1498Szrj /* After spill_hard_reg was called and/or find_reload_regs was run for all
4267*38fd1498Szrj    insns that need reloads, this function is used to actually spill pseudo
4268*38fd1498Szrj    registers and try to reallocate them.  It also sets up the spill_regs
4269*38fd1498Szrj    array for use by choose_reload_regs.
4270*38fd1498Szrj 
4271*38fd1498Szrj    GLOBAL nonzero means we should attempt to reallocate any pseudo registers
4272*38fd1498Szrj    that we displace from hard registers.  */
4273*38fd1498Szrj 
4274*38fd1498Szrj static int
4275*38fd1498Szrj finish_spills (int global)
4276*38fd1498Szrj {
4277*38fd1498Szrj   struct insn_chain *chain;
4278*38fd1498Szrj   int something_changed = 0;
4279*38fd1498Szrj   unsigned i;
4280*38fd1498Szrj   reg_set_iterator rsi;
4281*38fd1498Szrj 
4282*38fd1498Szrj   /* Build the spill_regs array for the function.  */
4283*38fd1498Szrj   /* If there are some registers still to eliminate and one of the spill regs
4284*38fd1498Szrj      wasn't ever used before, additional stack space may have to be
4285*38fd1498Szrj      allocated to store this register.  Thus, we may have changed the offset
4286*38fd1498Szrj      between the stack and frame pointers, so mark that something has changed.
4287*38fd1498Szrj 
4288*38fd1498Szrj      One might think that we need only note a change here if this is a call-used
4289*38fd1498Szrj      register.  However, the set of registers that must be saved by the
4290*38fd1498Szrj      prologue is not identical to the call-used set.  For example, the
4291*38fd1498Szrj      register used by the call insn for the return PC is a call-used register,
4292*38fd1498Szrj      but must be saved by the prologue.  */
4293*38fd1498Szrj 
4294*38fd1498Szrj   n_spills = 0;
4295*38fd1498Szrj   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4296*38fd1498Szrj     if (TEST_HARD_REG_BIT (used_spill_regs, i))
4297*38fd1498Szrj       {
4298*38fd1498Szrj 	spill_reg_order[i] = n_spills;
4299*38fd1498Szrj 	spill_regs[n_spills++] = i;
4300*38fd1498Szrj 	if (num_eliminable && ! df_regs_ever_live_p (i))
4301*38fd1498Szrj 	  something_changed = 1;
4302*38fd1498Szrj 	df_set_regs_ever_live (i, true);
4303*38fd1498Szrj       }
4304*38fd1498Szrj     else
4305*38fd1498Szrj       spill_reg_order[i] = -1;
4306*38fd1498Szrj 
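  /* With IRA conflict information, a spilled pseudo may already have lost
     its hard register; only process the ones that still have a home (or
     every spilled pseudo when IRA conflicts are not in use).  */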
4307*38fd1498Szrj   EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
4308*38fd1498Szrj     if (! ira_conflicts_p || reg_renumber[i] >= 0)
4309*38fd1498Szrj       {
4310*38fd1498Szrj 	/* Record the current hard register the pseudo is allocated to
4311*38fd1498Szrj 	   in pseudo_previous_regs so we avoid reallocating it to the
4312*38fd1498Szrj 	   same hard reg in a later pass.  */
4313*38fd1498Szrj 	gcc_assert (reg_renumber[i] >= 0);
4314*38fd1498Szrj 
4315*38fd1498Szrj 	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
4316*38fd1498Szrj 	/* Mark it as no longer having a hard register home.  */
4317*38fd1498Szrj 	reg_renumber[i] = -1;
4318*38fd1498Szrj 	if (ira_conflicts_p)
4319*38fd1498Szrj 	  /* Inform IRA about the change.  */
4320*38fd1498Szrj 	  ira_mark_allocation_change (i);
4321*38fd1498Szrj 	/* We will need to scan everything again.  */
4322*38fd1498Szrj 	something_changed = 1;
4323*38fd1498Szrj       }
4324*38fd1498Szrj 
4325*38fd1498Szrj   /* Retry global register allocation if possible.  */
4326*38fd1498Szrj   if (global && ira_conflicts_p)
4327*38fd1498Szrj     {
4328*38fd1498Szrj       unsigned int n;
4329*38fd1498Szrj 
4330*38fd1498Szrj       memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4331*38fd1498Szrj       /* For every insn that needs reloads, set the registers used as spill
4332*38fd1498Szrj 	 regs in pseudo_forbidden_regs for every pseudo live across the
4333*38fd1498Szrj 	 insn.  */
4334*38fd1498Szrj       for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4335*38fd1498Szrj 	{
4336*38fd1498Szrj 	  EXECUTE_IF_SET_IN_REG_SET
4337*38fd1498Szrj 	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4338*38fd1498Szrj 	    {
4339*38fd1498Szrj 	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4340*38fd1498Szrj 				chain->used_spill_regs);
4341*38fd1498Szrj 	    }
4342*38fd1498Szrj 	  EXECUTE_IF_SET_IN_REG_SET
4343*38fd1498Szrj 	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4344*38fd1498Szrj 	    {
4345*38fd1498Szrj 	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4346*38fd1498Szrj 				chain->used_spill_regs);
4347*38fd1498Szrj 	    }
4348*38fd1498Szrj 	}
4349*38fd1498Szrj 
4350*38fd1498Szrj       /* Retry allocating the pseudos spilled in IRA and the
4351*38fd1498Szrj 	 reload.  For each reg, merge the various reg sets that
4352*38fd1498Szrj 	 indicate which hard regs can't be used, and call
4353*38fd1498Szrj 	 ira_reassign_pseudos.  */
4354*38fd1498Szrj       for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4355*38fd1498Szrj 	if (reg_old_renumber[i] != reg_renumber[i])
4356*38fd1498Szrj 	  {
4357*38fd1498Szrj 	    if (reg_renumber[i] < 0)
4358*38fd1498Szrj 	      temp_pseudo_reg_arr[n++] = i;
4359*38fd1498Szrj 	    else
4360*38fd1498Szrj 	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4361*38fd1498Szrj 	  }
4362*38fd1498Szrj       if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4363*38fd1498Szrj 				bad_spill_regs_global,
4364*38fd1498Szrj 				pseudo_forbidden_regs, pseudo_previous_regs,
4365*38fd1498Szrj 				&spilled_pseudos))
4366*38fd1498Szrj 	something_changed = 1;
4367*38fd1498Szrj     }
4368*38fd1498Szrj   /* Fix up the register information in the insn chain.
4369*38fd1498Szrj      This involves deleting those of the spilled pseudos which did not get
4370*38fd1498Szrj      a new hard register home from the live_{before,after} sets.  */
4371*38fd1498Szrj   for (chain = reload_insn_chain; chain; chain = chain->next)
4372*38fd1498Szrj     {
4373*38fd1498Szrj       HARD_REG_SET used_by_pseudos;
4374*38fd1498Szrj       HARD_REG_SET used_by_pseudos2;
4375*38fd1498Szrj 
4376*38fd1498Szrj       if (! ira_conflicts_p)
4377*38fd1498Szrj 	{
4378*38fd1498Szrj 	  /* Don't do it for IRA because IRA and the reload still can
4379*38fd1498Szrj 	     assign hard registers to the spilled pseudos on next
4380*38fd1498Szrj 	     reload iterations.  */
4381*38fd1498Szrj 	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4382*38fd1498Szrj 	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4383*38fd1498Szrj 	}
4384*38fd1498Szrj       /* Mark any unallocated hard regs as available for spills.  That
4385*38fd1498Szrj 	 makes inheritance work somewhat better.  */
4386*38fd1498Szrj       if (chain->need_reload)
4387*38fd1498Szrj 	{
4388*38fd1498Szrj 	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4389*38fd1498Szrj 	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4390*38fd1498Szrj 	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4391*38fd1498Szrj 
4392*38fd1498Szrj 	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4393*38fd1498Szrj 	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4394*38fd1498Szrj 	  /* The value of chain->used_spill_regs from the previous iteration
4395*38fd1498Szrj 	     may not be included in the value calculated here because
4396*38fd1498Szrj 	     caller-save insns may have been removed (see function
4397*38fd1498Szrj 	     delete_caller_save_insns).  */
4398*38fd1498Szrj 	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4399*38fd1498Szrj 	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4400*38fd1498Szrj 	}
4401*38fd1498Szrj     }
4402*38fd1498Szrj 
4403*38fd1498Szrj   CLEAR_REG_SET (&changed_allocation_pseudos);
4404*38fd1498Szrj   /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
4405*38fd1498Szrj   for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4406*38fd1498Szrj     {
4407*38fd1498Szrj       int regno = reg_renumber[i];
4408*38fd1498Szrj       if (reg_old_renumber[i] == regno)
4409*38fd1498Szrj 	continue;
4410*38fd1498Szrj 
4411*38fd1498Szrj       SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4412*38fd1498Szrj 
4413*38fd1498Szrj       alter_reg (i, reg_old_renumber[i], false);
4414*38fd1498Szrj       reg_old_renumber[i] = regno;
4415*38fd1498Szrj       if (dump_file)
4416*38fd1498Szrj 	{
4417*38fd1498Szrj 	  if (regno == -1)
4418*38fd1498Szrj 	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
4419*38fd1498Szrj 	  else
4420*38fd1498Szrj 	    fprintf (dump_file, " Register %d now in %d.\n\n",
4421*38fd1498Szrj 		     i, reg_renumber[i]);
4422*38fd1498Szrj 	}
4423*38fd1498Szrj     }
4424*38fd1498Szrj 
4425*38fd1498Szrj   return something_changed;
4426*38fd1498Szrj }
4427*38fd1498Szrj 
4428*38fd1498Szrj /* Find all paradoxical subregs within X and update reg_max_ref_mode.  */
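/* A paradoxical subreg is one whose outer mode is wider than the mode of
   the inner register, e.g. (subreg:DI (reg:SI N) 0).  Recording the widest
   referencing mode lets alter_reg size the pseudo's stack slot accordingly,
   and mark_home_live_1 below keeps the full width of a hard register home
   marked live.  */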
4429*38fd1498Szrj 
4430*38fd1498Szrj static void
4431*38fd1498Szrj scan_paradoxical_subregs (rtx x)
4432*38fd1498Szrj {
4433*38fd1498Szrj   int i;
4434*38fd1498Szrj   const char *fmt;
4435*38fd1498Szrj   enum rtx_code code = GET_CODE (x);
4436*38fd1498Szrj 
4437*38fd1498Szrj   switch (code)
4438*38fd1498Szrj     {
4439*38fd1498Szrj     case REG:
4440*38fd1498Szrj     case CONST:
4441*38fd1498Szrj     case SYMBOL_REF:
4442*38fd1498Szrj     case LABEL_REF:
4443*38fd1498Szrj     CASE_CONST_ANY:
4444*38fd1498Szrj     case CC0:
4445*38fd1498Szrj     case PC:
4446*38fd1498Szrj     case USE:
4447*38fd1498Szrj     case CLOBBER:
4448*38fd1498Szrj       return;
4449*38fd1498Szrj 
4450*38fd1498Szrj     case SUBREG:
4451*38fd1498Szrj       if (REG_P (SUBREG_REG (x)))
4452*38fd1498Szrj 	{
4453*38fd1498Szrj 	  unsigned int regno = REGNO (SUBREG_REG (x));
4454*38fd1498Szrj 	  if (partial_subreg_p (reg_max_ref_mode[regno], GET_MODE (x)))
4455*38fd1498Szrj 	    {
4456*38fd1498Szrj 	      reg_max_ref_mode[regno] = GET_MODE (x);
4457*38fd1498Szrj 	      mark_home_live_1 (regno, GET_MODE (x));
4458*38fd1498Szrj 	    }
4459*38fd1498Szrj 	}
4460*38fd1498Szrj       return;
4461*38fd1498Szrj 
4462*38fd1498Szrj     default:
4463*38fd1498Szrj       break;
4464*38fd1498Szrj     }
4465*38fd1498Szrj 
4466*38fd1498Szrj   fmt = GET_RTX_FORMAT (code);
4467*38fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4468*38fd1498Szrj     {
4469*38fd1498Szrj       if (fmt[i] == 'e')
4470*38fd1498Szrj 	scan_paradoxical_subregs (XEXP (x, i));
4471*38fd1498Szrj       else if (fmt[i] == 'E')
4472*38fd1498Szrj 	{
4473*38fd1498Szrj 	  int j;
4474*38fd1498Szrj 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4475*38fd1498Szrj 	    scan_paradoxical_subregs (XVECEXP (x, i, j));
4476*38fd1498Szrj 	}
4477*38fd1498Szrj     }
4478*38fd1498Szrj }
4479*38fd1498Szrj 
4480*38fd1498Szrj /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4481*38fd1498Szrj    If *OP_PTR is a paradoxical subreg, try to remove that subreg
4482*38fd1498Szrj    and apply the corresponding narrowing subreg to *OTHER_PTR.
4483*38fd1498Szrj    Return true if the operands were changed, false otherwise.  */
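/* For example (illustrative only): given *OP_PTR == (subreg:DI (reg:SI X) 0)
   and *OTHER_PTR == (const_int 5), the pair becomes (reg:SI X) and
   (const_int 5), so the conceptual reload can be done in the narrower
   SImode rather than the paradoxical DImode.  */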
4484*38fd1498Szrj 
4485*38fd1498Szrj static bool
4486*38fd1498Szrj strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4487*38fd1498Szrj {
4488*38fd1498Szrj   rtx op, inner, other, tem;
4489*38fd1498Szrj 
4490*38fd1498Szrj   op = *op_ptr;
4491*38fd1498Szrj   if (!paradoxical_subreg_p (op))
4492*38fd1498Szrj     return false;
4493*38fd1498Szrj   inner = SUBREG_REG (op);
4494*38fd1498Szrj 
4495*38fd1498Szrj   other = *other_ptr;
4496*38fd1498Szrj   tem = gen_lowpart_common (GET_MODE (inner), other);
4497*38fd1498Szrj   if (!tem)
4498*38fd1498Szrj     return false;
4499*38fd1498Szrj 
4500*38fd1498Szrj   /* If the lowpart operation turned a hard register into a subreg,
4501*38fd1498Szrj      rather than simplifying it to another hard register, then the
4502*38fd1498Szrj      mode change cannot be properly represented.  For example, OTHER
4503*38fd1498Szrj      might be valid in its current mode, but not in the new one.  */
4504*38fd1498Szrj   if (GET_CODE (tem) == SUBREG
4505*38fd1498Szrj       && REG_P (other)
4506*38fd1498Szrj       && HARD_REGISTER_P (other))
4507*38fd1498Szrj     return false;
4508*38fd1498Szrj 
4509*38fd1498Szrj   *op_ptr = inner;
4510*38fd1498Szrj   *other_ptr = tem;
4511*38fd1498Szrj   return true;
4512*38fd1498Szrj }
4513*38fd1498Szrj 
4514*38fd1498Szrj /* A subroutine of reload_as_needed.  If INSN has a REG_EH_REGION note,
4515*38fd1498Szrj    examine all of the reload insns between PREV and NEXT exclusive, and
4516*38fd1498Szrj    annotate all that may trap.  */
4517*38fd1498Szrj 
4518*38fd1498Szrj static void
4519*38fd1498Szrj fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4520*38fd1498Szrj {
4521*38fd1498Szrj   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4522*38fd1498Szrj   if (note == NULL)
4523*38fd1498Szrj     return;
4524*38fd1498Szrj   if (!insn_could_throw_p (insn))
4525*38fd1498Szrj     remove_note (insn, note);
4526*38fd1498Szrj   copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4527*38fd1498Szrj }
4528*38fd1498Szrj 
4529*38fd1498Szrj /* Reload pseudo-registers into hard regs around each insn as needed.
4530*38fd1498Szrj    Additional register load insns are output before the insns that need them,
4531*38fd1498Szrj    and perhaps store insns after insns that modify the reloaded pseudo regs.
4532*38fd1498Szrj 
4533*38fd1498Szrj    reg_last_reload_reg and reg_reloaded_contents keep track of
4534*38fd1498Szrj    which registers are already available in reload registers.
4535*38fd1498Szrj    We update these for the reloads that we perform,
4536*38fd1498Szrj    as the insns are scanned.  */
4537*38fd1498Szrj 
4538*38fd1498Szrj static void
4539*38fd1498Szrj reload_as_needed (int live_known)
4540*38fd1498Szrj {
4541*38fd1498Szrj   struct insn_chain *chain;
4542*38fd1498Szrj #if AUTO_INC_DEC
4543*38fd1498Szrj   int i;
4544*38fd1498Szrj #endif
4545*38fd1498Szrj   rtx_note *marker;
4546*38fd1498Szrj 
4547*38fd1498Szrj   memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4548*38fd1498Szrj   memset (spill_reg_store, 0, sizeof spill_reg_store);
4549*38fd1498Szrj   reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4550*38fd1498Szrj   INIT_REG_SET (&reg_has_output_reload);
4551*38fd1498Szrj   CLEAR_HARD_REG_SET (reg_reloaded_valid);
4552*38fd1498Szrj   CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4553*38fd1498Szrj 
4554*38fd1498Szrj   set_initial_elim_offsets ();
4555*38fd1498Szrj 
4556*38fd1498Szrj   /* Generate a marker insn that we will move around.  */
4557*38fd1498Szrj   marker = emit_note (NOTE_INSN_DELETED);
4558*38fd1498Szrj   unlink_insn_chain (marker, marker);
4559*38fd1498Szrj 
4560*38fd1498Szrj   for (chain = reload_insn_chain; chain; chain = chain->next)
4561*38fd1498Szrj     {
4562*38fd1498Szrj       rtx_insn *prev = 0;
4563*38fd1498Szrj       rtx_insn *insn = chain->insn;
4564*38fd1498Szrj       rtx_insn *old_next = NEXT_INSN (insn);
4565*38fd1498Szrj #if AUTO_INC_DEC
4566*38fd1498Szrj       rtx_insn *old_prev = PREV_INSN (insn);
4567*38fd1498Szrj #endif
4568*38fd1498Szrj 
4569*38fd1498Szrj       if (will_delete_init_insn_p (insn))
4570*38fd1498Szrj 	continue;
4571*38fd1498Szrj 
4572*38fd1498Szrj       /* If we pass a label, copy the offsets from the label information
4573*38fd1498Szrj 	 into the current offsets of each elimination.  */
4574*38fd1498Szrj       if (LABEL_P (insn))
4575*38fd1498Szrj 	set_offsets_for_label (insn);
4576*38fd1498Szrj 
4577*38fd1498Szrj       else if (INSN_P (insn))
4578*38fd1498Szrj 	{
4579*38fd1498Szrj 	  regset_head regs_to_forget;
4580*38fd1498Szrj 	  INIT_REG_SET (&regs_to_forget);
4581*38fd1498Szrj 	  note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4582*38fd1498Szrj 
4583*38fd1498Szrj 	  /* If this is a USE or CLOBBER of a MEM, ensure that any
4584*38fd1498Szrj 	     references to eliminable registers have been removed.  */
4585*38fd1498Szrj 
4586*38fd1498Szrj 	  if ((GET_CODE (PATTERN (insn)) == USE
4587*38fd1498Szrj 	       || GET_CODE (PATTERN (insn)) == CLOBBER)
4588*38fd1498Szrj 	      && MEM_P (XEXP (PATTERN (insn), 0)))
4589*38fd1498Szrj 	    XEXP (XEXP (PATTERN (insn), 0), 0)
4590*38fd1498Szrj 	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4591*38fd1498Szrj 				GET_MODE (XEXP (PATTERN (insn), 0)),
4592*38fd1498Szrj 				NULL_RTX);
4593*38fd1498Szrj 
4594*38fd1498Szrj 	  /* If we need to do register elimination processing, do so.
4595*38fd1498Szrj 	     This might delete the insn, in which case we are done.  */
4596*38fd1498Szrj 	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4597*38fd1498Szrj 	    {
4598*38fd1498Szrj 	      eliminate_regs_in_insn (insn, 1);
4599*38fd1498Szrj 	      if (NOTE_P (insn))
4600*38fd1498Szrj 		{
4601*38fd1498Szrj 		  update_eliminable_offsets ();
4602*38fd1498Szrj 		  CLEAR_REG_SET (&regs_to_forget);
4603*38fd1498Szrj 		  continue;
4604*38fd1498Szrj 		}
4605*38fd1498Szrj 	    }
4606*38fd1498Szrj 
4607*38fd1498Szrj 	  /* If need_elim is nonzero but need_reload is zero, one might think
4608*38fd1498Szrj 	     that we could simply set n_reloads to 0.  However, find_reloads
4609*38fd1498Szrj 	     could have done some manipulation of the insn (such as swapping
4610*38fd1498Szrj 	     commutative operands), and these manipulations are lost during
4611*38fd1498Szrj 	     the first pass for every insn that needs register elimination.
4612*38fd1498Szrj 	     So the actions of find_reloads must be redone here.  */
4613*38fd1498Szrj 
4614*38fd1498Szrj 	  if (! chain->need_elim && ! chain->need_reload
4615*38fd1498Szrj 	      && ! chain->need_operand_change)
4616*38fd1498Szrj 	    n_reloads = 0;
4617*38fd1498Szrj 	  /* First find the pseudo regs that must be reloaded for this insn.
4618*38fd1498Szrj 	     This info is returned in the tables reload_... (see reload.h).
4619*38fd1498Szrj 	     Also modify the body of INSN by substituting RELOAD
4620*38fd1498Szrj 	     rtx's for those pseudo regs.  */
4621*38fd1498Szrj 	  else
4622*38fd1498Szrj 	    {
4623*38fd1498Szrj 	      CLEAR_REG_SET (&reg_has_output_reload);
4624*38fd1498Szrj 	      CLEAR_HARD_REG_SET (reg_is_output_reload);
4625*38fd1498Szrj 
4626*38fd1498Szrj 	      find_reloads (insn, 1, spill_indirect_levels, live_known,
4627*38fd1498Szrj 			    spill_reg_order);
4628*38fd1498Szrj 	    }
4629*38fd1498Szrj 
4630*38fd1498Szrj 	  if (n_reloads > 0)
4631*38fd1498Szrj 	    {
4632*38fd1498Szrj 	      rtx_insn *next = NEXT_INSN (insn);
4633*38fd1498Szrj 
4634*38fd1498Szrj 	      /* ??? PREV can get deleted by reload inheritance.
4635*38fd1498Szrj 		 Work around this by emitting a marker note.  */
4636*38fd1498Szrj 	      prev = PREV_INSN (insn);
4637*38fd1498Szrj 	      reorder_insns_nobb (marker, marker, prev);
4638*38fd1498Szrj 
4639*38fd1498Szrj 	      /* Now compute which reload regs to reload them into, perhaps
4640*38fd1498Szrj 		 reusing reload regs from previous insns, or else output
4641*38fd1498Szrj 		 load insns to reload them.  Maybe output store insns too.
4642*38fd1498Szrj 		 Record the choices of reload reg in reload_reg_rtx.  */
4643*38fd1498Szrj 	      choose_reload_regs (chain);
4644*38fd1498Szrj 
4645*38fd1498Szrj 	      /* Generate the insns to reload operands into or out of
4646*38fd1498Szrj 		 their reload regs.  */
4647*38fd1498Szrj 	      emit_reload_insns (chain);
4648*38fd1498Szrj 
4649*38fd1498Szrj 	      /* Substitute the chosen reload regs from reload_reg_rtx
4650*38fd1498Szrj 		 into the insn's body (or perhaps into the bodies of other
4651*38fd1498Szrj 		 load and store insns that we just made for reloading
4652*38fd1498Szrj 		 and that we moved the structure into).  */
4653*38fd1498Szrj 	      subst_reloads (insn);
4654*38fd1498Szrj 
4655*38fd1498Szrj 	      prev = PREV_INSN (marker);
4656*38fd1498Szrj 	      unlink_insn_chain (marker, marker);
4657*38fd1498Szrj 
4658*38fd1498Szrj 	      /* Adjust the exception region notes for loads and stores.  */
4659*38fd1498Szrj 	      if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
4660*38fd1498Szrj 		fixup_eh_region_note (insn, prev, next);
4661*38fd1498Szrj 
4662*38fd1498Szrj 	      /* Adjust the location of REG_ARGS_SIZE.  */
4663*38fd1498Szrj 	      rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4664*38fd1498Szrj 	      if (p)
4665*38fd1498Szrj 		{
4666*38fd1498Szrj 		  remove_note (insn, p);
4667*38fd1498Szrj 		  fixup_args_size_notes (prev, PREV_INSN (next),
4668*38fd1498Szrj 					 get_args_size (p));
4669*38fd1498Szrj 		}
4670*38fd1498Szrj 
4671*38fd1498Szrj 	      /* If this was an ASM, make sure that all the reload insns
4672*38fd1498Szrj 		 we have generated are valid.  If not, give an error
4673*38fd1498Szrj 		 and delete them.  */
4674*38fd1498Szrj 	      if (asm_noperands (PATTERN (insn)) >= 0)
4675*38fd1498Szrj 		for (rtx_insn *p = NEXT_INSN (prev);
4676*38fd1498Szrj 		     p != next;
4677*38fd1498Szrj 		     p = NEXT_INSN (p))
4678*38fd1498Szrj 		  if (p != insn && INSN_P (p)
4679*38fd1498Szrj 		      && GET_CODE (PATTERN (p)) != USE
4680*38fd1498Szrj 		      && (recog_memoized (p) < 0
4681*38fd1498Szrj 			  || (extract_insn (p),
4682*38fd1498Szrj 			      !(constrain_operands (1,
4683*38fd1498Szrj 				  get_enabled_alternatives (p))))))
4684*38fd1498Szrj 		    {
4685*38fd1498Szrj 		      error_for_asm (insn,
4686*38fd1498Szrj 				     "%<asm%> operand requires "
4687*38fd1498Szrj 				     "impossible reload");
4688*38fd1498Szrj 		      delete_insn (p);
4689*38fd1498Szrj 		    }
4690*38fd1498Szrj 	    }
4691*38fd1498Szrj 
4692*38fd1498Szrj 	  if (num_eliminable && chain->need_elim)
4693*38fd1498Szrj 	    update_eliminable_offsets ();
4694*38fd1498Szrj 
4695*38fd1498Szrj 	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
4696*38fd1498Szrj 	     is no longer validly lying around to save a future reload.
4697*38fd1498Szrj 	     Note that this does not detect pseudos that were reloaded
4698*38fd1498Szrj 	     for this insn in order to be stored into
4699*38fd1498Szrj 	     (obeying register constraints).  That is correct; such reload
4700*38fd1498Szrj 	     registers ARE still valid.  */
4701*38fd1498Szrj 	  forget_marked_reloads (&regs_to_forget);
4702*38fd1498Szrj 	  CLEAR_REG_SET (&regs_to_forget);
4703*38fd1498Szrj 
4704*38fd1498Szrj 	  /* There may have been CLOBBER insns placed after INSN.  So scan
4705*38fd1498Szrj 	     between INSN and NEXT and use them to forget old reloads.  */
4706*38fd1498Szrj 	  for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4707*38fd1498Szrj 	    if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4708*38fd1498Szrj 	      note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4709*38fd1498Szrj 
4710*38fd1498Szrj #if AUTO_INC_DEC
4711*38fd1498Szrj 	  /* Likewise for regs altered by auto-increment in this insn.
4712*38fd1498Szrj 	     REG_INC notes have been changed by reloading:
4713*38fd1498Szrj 	     find_reloads_address_1 records substitutions for them,
4714*38fd1498Szrj 	     which have been performed by subst_reloads above.  */
4715*38fd1498Szrj 	  for (i = n_reloads - 1; i >= 0; i--)
4716*38fd1498Szrj 	    {
4717*38fd1498Szrj 	      rtx in_reg = rld[i].in_reg;
4718*38fd1498Szrj 	      if (in_reg)
4719*38fd1498Szrj 		{
4720*38fd1498Szrj 		  enum rtx_code code = GET_CODE (in_reg);
4721*38fd1498Szrj 		  /* PRE_INC / PRE_DEC will have the reload register ending up
4722*38fd1498Szrj 		     with the same value as the stack slot, but that doesn't
4723*38fd1498Szrj 		     hold true for POST_INC / POST_DEC.  Either we have to
4724*38fd1498Szrj 		     convert the memory access to a true POST_INC / POST_DEC,
4725*38fd1498Szrj 		     or we can't use the reload register for inheritance.  */
4726*38fd1498Szrj 		  if ((code == POST_INC || code == POST_DEC)
4727*38fd1498Szrj 		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
4728*38fd1498Szrj 					    REGNO (rld[i].reg_rtx))
4729*38fd1498Szrj 		      /* Make sure it is the inc/dec pseudo, and not
4730*38fd1498Szrj 			 some other (e.g. output operand) pseudo.  */
4731*38fd1498Szrj 		      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4732*38fd1498Szrj 			  == REGNO (XEXP (in_reg, 0))))
4733*38fd1498Szrj 
4734*38fd1498Szrj 		    {
4735*38fd1498Szrj 		      rtx reload_reg = rld[i].reg_rtx;
4736*38fd1498Szrj 		      machine_mode mode = GET_MODE (reload_reg);
4737*38fd1498Szrj 		      int n = 0;
4738*38fd1498Szrj 		      rtx_insn *p;
4739*38fd1498Szrj 
4740*38fd1498Szrj 		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4741*38fd1498Szrj 			{
4742*38fd1498Szrj 			  /* We really want to ignore REG_INC notes here, so
4743*38fd1498Szrj 			     use PATTERN (p) as argument to reg_set_p.  */
4744*38fd1498Szrj 			  if (reg_set_p (reload_reg, PATTERN (p)))
4745*38fd1498Szrj 			    break;
4746*38fd1498Szrj 			  n = count_occurrences (PATTERN (p), reload_reg, 0);
4747*38fd1498Szrj 			  if (! n)
4748*38fd1498Szrj 			    continue;
4749*38fd1498Szrj 			  if (n == 1)
4750*38fd1498Szrj 			    {
4751*38fd1498Szrj 			      rtx replace_reg
4752*38fd1498Szrj 				= gen_rtx_fmt_e (code, mode, reload_reg);
4753*38fd1498Szrj 
4754*38fd1498Szrj 			      validate_replace_rtx_group (reload_reg,
4755*38fd1498Szrj 							  replace_reg, p);
4756*38fd1498Szrj 			      n = verify_changes (0);
4757*38fd1498Szrj 
4758*38fd1498Szrj 			      /* We must also verify that the constraints
4759*38fd1498Szrj 				 are met after the replacement.  Make sure
4760*38fd1498Szrj 				 extract_insn is only called for an insn
4761*38fd1498Szrj 				 where the replacements were found to be
4762*38fd1498Szrj 				 valid so far. */
4763*38fd1498Szrj 			      if (n)
4764*38fd1498Szrj 				{
4765*38fd1498Szrj 				  extract_insn (p);
4766*38fd1498Szrj 				  n = constrain_operands (1,
4767*38fd1498Szrj 				    get_enabled_alternatives (p));
4768*38fd1498Szrj 				}
4769*38fd1498Szrj 
4770*38fd1498Szrj 			      /* If the constraints were not met, then
4771*38fd1498Szrj 				 undo the replacement, else confirm it.  */
4772*38fd1498Szrj 			      if (!n)
4773*38fd1498Szrj 				cancel_changes (0);
4774*38fd1498Szrj 			      else
4775*38fd1498Szrj 				confirm_change_group ();
4776*38fd1498Szrj 			    }
4777*38fd1498Szrj 			  break;
4778*38fd1498Szrj 			}
4779*38fd1498Szrj 		      if (n == 1)
4780*38fd1498Szrj 			{
4781*38fd1498Szrj 			  add_reg_note (p, REG_INC, reload_reg);
4782*38fd1498Szrj 			  /* Mark this as having an output reload so that the
4783*38fd1498Szrj 			     REG_INC processing code below won't invalidate
4784*38fd1498Szrj 			     the reload for inheritance.  */
4785*38fd1498Szrj 			  SET_HARD_REG_BIT (reg_is_output_reload,
4786*38fd1498Szrj 					    REGNO (reload_reg));
4787*38fd1498Szrj 			  SET_REGNO_REG_SET (&reg_has_output_reload,
4788*38fd1498Szrj 					     REGNO (XEXP (in_reg, 0)));
4789*38fd1498Szrj 			}
4790*38fd1498Szrj 		      else
4791*38fd1498Szrj 			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4792*38fd1498Szrj 					      NULL);
4793*38fd1498Szrj 		    }
4794*38fd1498Szrj 		  else if ((code == PRE_INC || code == PRE_DEC)
4795*38fd1498Szrj 			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
4796*38fd1498Szrj 						 REGNO (rld[i].reg_rtx))
4797*38fd1498Szrj 			   /* Make sure it is the inc/dec pseudo, and not
4798*38fd1498Szrj 			      some other (e.g. output operand) pseudo.  */
4799*38fd1498Szrj 			   && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4800*38fd1498Szrj 			       == REGNO (XEXP (in_reg, 0))))
4801*38fd1498Szrj 		    {
4802*38fd1498Szrj 		      SET_HARD_REG_BIT (reg_is_output_reload,
4803*38fd1498Szrj 					REGNO (rld[i].reg_rtx));
4804*38fd1498Szrj 		      SET_REGNO_REG_SET (&reg_has_output_reload,
4805*38fd1498Szrj 					 REGNO (XEXP (in_reg, 0)));
4806*38fd1498Szrj 		    }
4807*38fd1498Szrj 		  else if (code == PRE_INC || code == PRE_DEC
4808*38fd1498Szrj 			   || code == POST_INC || code == POST_DEC)
4809*38fd1498Szrj 		    {
4810*38fd1498Szrj 		      int in_regno = REGNO (XEXP (in_reg, 0));
4811*38fd1498Szrj 
4812*38fd1498Szrj 		      if (reg_last_reload_reg[in_regno] != NULL_RTX)
4813*38fd1498Szrj 			{
4814*38fd1498Szrj 			  int in_hard_regno;
4815*38fd1498Szrj 			  bool forget_p = true;
4816*38fd1498Szrj 
4817*38fd1498Szrj 			  in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4818*38fd1498Szrj 			  if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4819*38fd1498Szrj 						 in_hard_regno))
4820*38fd1498Szrj 			    {
4821*38fd1498Szrj 			      for (rtx_insn *x = (old_prev ?
4822*38fd1498Szrj 						  NEXT_INSN (old_prev) : insn);
4823*38fd1498Szrj 				   x != old_next;
4824*38fd1498Szrj 				   x = NEXT_INSN (x))
4825*38fd1498Szrj 				if (x == reg_reloaded_insn[in_hard_regno])
4826*38fd1498Szrj 				  {
4827*38fd1498Szrj 				    forget_p = false;
4828*38fd1498Szrj 				    break;
4829*38fd1498Szrj 				  }
4830*38fd1498Szrj 			    }
4831*38fd1498Szrj 			  /* If for some reason we didn't set up
4832*38fd1498Szrj 			     reg_last_reload_reg in this insn,
4833*38fd1498Szrj 			     invalidate inheritance from previous
4834*38fd1498Szrj 			     insns for the incremented/decremented
4835*38fd1498Szrj 			     register.  Such registers will not be in
4836*38fd1498Szrj 			     reg_has_output_reload.  Invalidate it
4837*38fd1498Szrj 			     also if the corresponding element in
4838*38fd1498Szrj 			     reg_reloaded_insn is also
4839*38fd1498Szrj 			     invalidated.  */
4840*38fd1498Szrj 			  if (forget_p)
4841*38fd1498Szrj 			    forget_old_reloads_1 (XEXP (in_reg, 0),
4842*38fd1498Szrj 						  NULL_RTX, NULL);
4843*38fd1498Szrj 			}
4844*38fd1498Szrj 		    }
4845*38fd1498Szrj 		}
4846*38fd1498Szrj 	    }
4847*38fd1498Szrj 	  /* If a pseudo that got a hard register is auto-incremented,
4848*38fd1498Szrj 	     we must purge records of copying it into pseudos without
4849*38fd1498Szrj 	     hard registers.  */
4850*38fd1498Szrj 	  for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
4851*38fd1498Szrj 	    if (REG_NOTE_KIND (x) == REG_INC)
4852*38fd1498Szrj 	      {
4853*38fd1498Szrj 		/* See if this pseudo reg was reloaded in this insn.
4854*38fd1498Szrj 		   If so, its last-reload info is still valid
4855*38fd1498Szrj 		   because it is based on this insn's reload.  */
4856*38fd1498Szrj 		for (i = 0; i < n_reloads; i++)
4857*38fd1498Szrj 		  if (rld[i].out == XEXP (x, 0))
4858*38fd1498Szrj 		    break;
4859*38fd1498Szrj 
4860*38fd1498Szrj 		if (i == n_reloads)
4861*38fd1498Szrj 		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4862*38fd1498Szrj 	      }
4863*38fd1498Szrj #endif
4864*38fd1498Szrj 	}
4865*38fd1498Szrj       /* A reload reg's contents are unknown after a label.  */
4866*38fd1498Szrj       if (LABEL_P (insn))
4867*38fd1498Szrj 	CLEAR_HARD_REG_SET (reg_reloaded_valid);
4868*38fd1498Szrj 
4869*38fd1498Szrj       /* Don't assume a reload reg is still good after a call insn
4870*38fd1498Szrj 	 if it is a call-used reg, or if it contains a value that will
4871*38fd1498Szrj          be partially clobbered by the call.  */
4872*38fd1498Szrj       else if (CALL_P (insn))
4873*38fd1498Szrj 	{
4874*38fd1498Szrj 	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4875*38fd1498Szrj 	  AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4876*38fd1498Szrj 
4877*38fd1498Szrj 	  /* If this is a call to a setjmp-type function, we must not
4878*38fd1498Szrj 	     reuse any reload reg contents across the call; that will
4879*38fd1498Szrj 	     just be clobbered by other uses of the register in later
4880*38fd1498Szrj 	     code, before the longjmp.  */
4881*38fd1498Szrj 	  if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
4882*38fd1498Szrj 	    CLEAR_HARD_REG_SET (reg_reloaded_valid);
4883*38fd1498Szrj 	}
4884*38fd1498Szrj     }
4885*38fd1498Szrj 
4886*38fd1498Szrj   /* Clean up.  */
4887*38fd1498Szrj   free (reg_last_reload_reg);
4888*38fd1498Szrj   CLEAR_REG_SET (&reg_has_output_reload);
4889*38fd1498Szrj }
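
/* A rough sketch of the path above for one insn that needs reloads,
   using the calls that appear in the loop:

     find_reloads (insn, 1, spill_indirect_levels, live_known,
		   spill_reg_order);	-- decide what must be reloaded
     choose_reload_regs (chain);	-- pick a reload reg per reload
     emit_reload_insns (chain);		-- emit loads/stores around INSN
     subst_reloads (insn);		-- rewrite INSN to use those regs

   after which the code forgets any reload-reg contents invalidated by
   stores, labels, or calls before moving on to the next insn.  */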
4890*38fd1498Szrj 
4891*38fd1498Szrj /* Discard all record of any value reloaded from X,
4892*38fd1498Szrj    or reloaded in X from someplace else;
4893*38fd1498Szrj    unless X is an output reload reg of the current insn.
4894*38fd1498Szrj 
4895*38fd1498Szrj    X may be a hard reg (the reload reg)
4896*38fd1498Szrj    or it may be a pseudo reg that was reloaded from.
4897*38fd1498Szrj 
4898*38fd1498Szrj    When DATA is non-NULL just mark the registers in regset
4899*38fd1498Szrj    to be forgotten later.  */
4900*38fd1498Szrj 
4901*38fd1498Szrj static void
4902*38fd1498Szrj forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4903*38fd1498Szrj 		      void *data)
4904*38fd1498Szrj {
4905*38fd1498Szrj   unsigned int regno;
4906*38fd1498Szrj   unsigned int nr;
4907*38fd1498Szrj   regset regs = (regset) data;
4908*38fd1498Szrj 
4909*38fd1498Szrj   /* note_stores does give us subregs of hard regs, but
4910*38fd1498Szrj      subreg_regno_offset requires a hard reg.  */
4911*38fd1498Szrj   while (GET_CODE (x) == SUBREG)
4912*38fd1498Szrj     {
4913*38fd1498Szrj       /* We ignore the subreg offset when calculating the regno,
4914*38fd1498Szrj 	 because we are using the entire underlying hard register
4915*38fd1498Szrj 	 below.  */
4916*38fd1498Szrj       x = SUBREG_REG (x);
4917*38fd1498Szrj     }
4918*38fd1498Szrj 
4919*38fd1498Szrj   if (!REG_P (x))
4920*38fd1498Szrj     return;
4921*38fd1498Szrj 
4922*38fd1498Szrj   regno = REGNO (x);
4923*38fd1498Szrj 
4924*38fd1498Szrj   if (regno >= FIRST_PSEUDO_REGISTER)
4925*38fd1498Szrj     nr = 1;
4926*38fd1498Szrj   else
4927*38fd1498Szrj     {
4928*38fd1498Szrj       unsigned int i;
4929*38fd1498Szrj 
4930*38fd1498Szrj       nr = REG_NREGS (x);
4931*38fd1498Szrj       /* Storing into a spilled-reg invalidates its contents.
4932*38fd1498Szrj 	 This can happen if a block-local pseudo is allocated to that reg
4933*38fd1498Szrj 	 and it wasn't spilled because this block's total need is 0.
4934*38fd1498Szrj 	 Then some insn might have an optional reload and use this reg.  */
4935*38fd1498Szrj       if (!regs)
4936*38fd1498Szrj 	for (i = 0; i < nr; i++)
4937*38fd1498Szrj 	  /* But don't do this if the reg actually serves as an output
4938*38fd1498Szrj 	     reload reg in the current instruction.  */
4939*38fd1498Szrj 	  if (n_reloads == 0
4940*38fd1498Szrj 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4941*38fd1498Szrj 	    {
4942*38fd1498Szrj 	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4943*38fd1498Szrj 	      spill_reg_store[regno + i] = 0;
4944*38fd1498Szrj 	    }
4945*38fd1498Szrj     }
4946*38fd1498Szrj 
4947*38fd1498Szrj   if (regs)
4948*38fd1498Szrj     while (nr-- > 0)
4949*38fd1498Szrj       SET_REGNO_REG_SET (regs, regno + nr);
4950*38fd1498Szrj   else
4951*38fd1498Szrj     {
4952*38fd1498Szrj       /* Since value of X has changed,
4953*38fd1498Szrj 	 forget any value previously copied from it.  */
4954*38fd1498Szrj 
4955*38fd1498Szrj       while (nr-- > 0)
4956*38fd1498Szrj 	/* But don't forget a copy if this is the output reload
4957*38fd1498Szrj 	   that establishes the copy's validity.  */
4958*38fd1498Szrj 	if (n_reloads == 0
4959*38fd1498Szrj 	    || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4960*38fd1498Szrj 	  reg_last_reload_reg[regno + nr] = 0;
4961*38fd1498Szrj      }
4962*38fd1498Szrj }
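
/* A sketch of the effect (hypothetical hard register numbers): if X is
   hard reg 8 holding a two-register value and DATA is null, the code
   above clears reg_reloaded_valid and spill_reg_store for regs 8 and 9
   and zeroes reg_last_reload_reg for both, unless they serve as output
   reloads of the current insn.  With non-null DATA the registers are
   merely collected into the regset, to be handled later by
   forget_marked_reloads.  */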
4963*38fd1498Szrj 
4964*38fd1498Szrj /* Forget the reloads marked in REGS by the previous function.  */
4965*38fd1498Szrj static void
4966*38fd1498Szrj forget_marked_reloads (regset regs)
4967*38fd1498Szrj {
4968*38fd1498Szrj   unsigned int reg;
4969*38fd1498Szrj   reg_set_iterator rsi;
4970*38fd1498Szrj   EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4971*38fd1498Szrj     {
4972*38fd1498Szrj       if (reg < FIRST_PSEUDO_REGISTER
4973*38fd1498Szrj 	  /* But don't do this if the reg actually serves as an output
4974*38fd1498Szrj 	     reload reg in the current instruction.  */
4975*38fd1498Szrj 	  && (n_reloads == 0
4976*38fd1498Szrj 	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4977*38fd1498Szrj 	  {
4978*38fd1498Szrj 	    CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4979*38fd1498Szrj 	    spill_reg_store[reg] = 0;
4980*38fd1498Szrj 	  }
4981*38fd1498Szrj       if (n_reloads == 0
4982*38fd1498Szrj 	  || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4983*38fd1498Szrj 	reg_last_reload_reg[reg] = 0;
4984*38fd1498Szrj     }
4985*38fd1498Szrj }
4986*38fd1498Szrj 
4987*38fd1498Szrj /* The following HARD_REG_SETs indicate when each hard register is
4988*38fd1498Szrj    used for a reload of various parts of the current insn.  */
4989*38fd1498Szrj 
4990*38fd1498Szrj /* If reg is unavailable for all reloads.  */
4991*38fd1498Szrj static HARD_REG_SET reload_reg_unavailable;
4992*38fd1498Szrj /* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
4993*38fd1498Szrj static HARD_REG_SET reload_reg_used;
4994*38fd1498Szrj /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
4995*38fd1498Szrj static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4996*38fd1498Szrj /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
4997*38fd1498Szrj static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4998*38fd1498Szrj /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
4999*38fd1498Szrj static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5000*38fd1498Szrj /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
5001*38fd1498Szrj static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5002*38fd1498Szrj /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
5003*38fd1498Szrj static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5004*38fd1498Szrj /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
5005*38fd1498Szrj static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5006*38fd1498Szrj /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
5007*38fd1498Szrj static HARD_REG_SET reload_reg_used_in_op_addr;
5008*38fd1498Szrj /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
5009*38fd1498Szrj static HARD_REG_SET reload_reg_used_in_op_addr_reload;
5010*38fd1498Szrj /* If reg is in use for a RELOAD_FOR_INSN reload.  */
5011*38fd1498Szrj static HARD_REG_SET reload_reg_used_in_insn;
5012*38fd1498Szrj /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
5013*38fd1498Szrj static HARD_REG_SET reload_reg_used_in_other_addr;
5014*38fd1498Szrj 
5015*38fd1498Szrj /* If reg is in use as a reload reg for any sort of reload.  */
5016*38fd1498Szrj static HARD_REG_SET reload_reg_used_at_all;
5017*38fd1498Szrj 
5018*38fd1498Szrj /* If reg is used as an inherited reload.  We just mark the first register
5019*38fd1498Szrj    in the group.  */
5020*38fd1498Szrj static HARD_REG_SET reload_reg_used_for_inherit;
5021*38fd1498Szrj 
5022*38fd1498Szrj /* Records which hard regs are used in any way, either as an explicit use
5023*38fd1498Szrj    or by being allocated to a pseudo at any point of the current insn.  */
5024*38fd1498Szrj static HARD_REG_SET reg_used_in_insn;
5025*38fd1498Szrj 
5026*38fd1498Szrj /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5027*38fd1498Szrj    TYPE. MODE is used to indicate how many consecutive regs are
5028*38fd1498Szrj    actually used.  */
5029*38fd1498Szrj 
5030*38fd1498Szrj static void
5031*38fd1498Szrj mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
5032*38fd1498Szrj 			machine_mode mode)
5033*38fd1498Szrj {
5034*38fd1498Szrj   switch (type)
5035*38fd1498Szrj     {
5036*38fd1498Szrj     case RELOAD_OTHER:
5037*38fd1498Szrj       add_to_hard_reg_set (&reload_reg_used, mode, regno);
5038*38fd1498Szrj       break;
5039*38fd1498Szrj 
5040*38fd1498Szrj     case RELOAD_FOR_INPUT_ADDRESS:
5041*38fd1498Szrj       add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
5042*38fd1498Szrj       break;
5043*38fd1498Szrj 
5044*38fd1498Szrj     case RELOAD_FOR_INPADDR_ADDRESS:
5045*38fd1498Szrj       add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
5046*38fd1498Szrj       break;
5047*38fd1498Szrj 
5048*38fd1498Szrj     case RELOAD_FOR_OUTPUT_ADDRESS:
5049*38fd1498Szrj       add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
5050*38fd1498Szrj       break;
5051*38fd1498Szrj 
5052*38fd1498Szrj     case RELOAD_FOR_OUTADDR_ADDRESS:
5053*38fd1498Szrj       add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
5054*38fd1498Szrj       break;
5055*38fd1498Szrj 
5056*38fd1498Szrj     case RELOAD_FOR_OPERAND_ADDRESS:
5057*38fd1498Szrj       add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
5058*38fd1498Szrj       break;
5059*38fd1498Szrj 
5060*38fd1498Szrj     case RELOAD_FOR_OPADDR_ADDR:
5061*38fd1498Szrj       add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
5062*38fd1498Szrj       break;
5063*38fd1498Szrj 
5064*38fd1498Szrj     case RELOAD_FOR_OTHER_ADDRESS:
5065*38fd1498Szrj       add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
5066*38fd1498Szrj       break;
5067*38fd1498Szrj 
5068*38fd1498Szrj     case RELOAD_FOR_INPUT:
5069*38fd1498Szrj       add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
5070*38fd1498Szrj       break;
5071*38fd1498Szrj 
5072*38fd1498Szrj     case RELOAD_FOR_OUTPUT:
5073*38fd1498Szrj       add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
5074*38fd1498Szrj       break;
5075*38fd1498Szrj 
5076*38fd1498Szrj     case RELOAD_FOR_INSN:
5077*38fd1498Szrj       add_to_hard_reg_set (&reload_reg_used_in_insn,  mode, regno);
5078*38fd1498Szrj       break;
5079*38fd1498Szrj     }
5080*38fd1498Szrj 
5081*38fd1498Szrj   add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
5082*38fd1498Szrj }
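
/* Usage sketch (hypothetical values): for a RELOAD_FOR_INPUT reload of
   operand 1 whose chosen reload reg is hard reg 4 in a mode occupying
   two hard registers,

     mark_reload_reg_in_use (4, 1, RELOAD_FOR_INPUT, mode);

   sets bits 4 and 5 in reload_reg_used_in_input[1] and in
   reload_reg_used_at_all, which the reload_reg_free_p logic below then
   treats as a conflict for overlapping reload types.  */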
5083*38fd1498Szrj 
5084*38fd1498Szrj /* Similarly, but show REGNO is no longer in use for a reload.  */
5085*38fd1498Szrj 
5086*38fd1498Szrj static void
5087*38fd1498Szrj clear_reload_reg_in_use (unsigned int regno, int opnum,
5088*38fd1498Szrj 			 enum reload_type type, machine_mode mode)
5089*38fd1498Szrj {
5090*38fd1498Szrj   unsigned int nregs = hard_regno_nregs (regno, mode);
5091*38fd1498Szrj   unsigned int start_regno, end_regno, r;
5092*38fd1498Szrj   int i;
5093*38fd1498Szrj   /* A complication is that for some reload types, inheritance might
5094*38fd1498Szrj      allow multiple reloads of the same types to share a reload register.
5095*38fd1498Szrj      We set check_opnum if we have to check only reloads with the same
5096*38fd1498Szrj      operand number, and check_any if we have to check all reloads.  */
5097*38fd1498Szrj   int check_opnum = 0;
5098*38fd1498Szrj   int check_any = 0;
5099*38fd1498Szrj   HARD_REG_SET *used_in_set;
5100*38fd1498Szrj 
5101*38fd1498Szrj   switch (type)
5102*38fd1498Szrj     {
5103*38fd1498Szrj     case RELOAD_OTHER:
5104*38fd1498Szrj       used_in_set = &reload_reg_used;
5105*38fd1498Szrj       break;
5106*38fd1498Szrj 
5107*38fd1498Szrj     case RELOAD_FOR_INPUT_ADDRESS:
5108*38fd1498Szrj       used_in_set = &reload_reg_used_in_input_addr[opnum];
5109*38fd1498Szrj       break;
5110*38fd1498Szrj 
5111*38fd1498Szrj     case RELOAD_FOR_INPADDR_ADDRESS:
5112*38fd1498Szrj       check_opnum = 1;
5113*38fd1498Szrj       used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
5114*38fd1498Szrj       break;
5115*38fd1498Szrj 
5116*38fd1498Szrj     case RELOAD_FOR_OUTPUT_ADDRESS:
5117*38fd1498Szrj       used_in_set = &reload_reg_used_in_output_addr[opnum];
5118*38fd1498Szrj       break;
5119*38fd1498Szrj 
5120*38fd1498Szrj     case RELOAD_FOR_OUTADDR_ADDRESS:
5121*38fd1498Szrj       check_opnum = 1;
5122*38fd1498Szrj       used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
5123*38fd1498Szrj       break;
5124*38fd1498Szrj 
5125*38fd1498Szrj     case RELOAD_FOR_OPERAND_ADDRESS:
5126*38fd1498Szrj       used_in_set = &reload_reg_used_in_op_addr;
5127*38fd1498Szrj       break;
5128*38fd1498Szrj 
5129*38fd1498Szrj     case RELOAD_FOR_OPADDR_ADDR:
5130*38fd1498Szrj       check_any = 1;
5131*38fd1498Szrj       used_in_set = &reload_reg_used_in_op_addr_reload;
5132*38fd1498Szrj       break;
5133*38fd1498Szrj 
5134*38fd1498Szrj     case RELOAD_FOR_OTHER_ADDRESS:
5135*38fd1498Szrj       used_in_set = &reload_reg_used_in_other_addr;
5136*38fd1498Szrj       check_any = 1;
5137*38fd1498Szrj       break;
5138*38fd1498Szrj 
5139*38fd1498Szrj     case RELOAD_FOR_INPUT:
5140*38fd1498Szrj       used_in_set = &reload_reg_used_in_input[opnum];
5141*38fd1498Szrj       break;
5142*38fd1498Szrj 
5143*38fd1498Szrj     case RELOAD_FOR_OUTPUT:
5144*38fd1498Szrj       used_in_set = &reload_reg_used_in_output[opnum];
5145*38fd1498Szrj       break;
5146*38fd1498Szrj 
5147*38fd1498Szrj     case RELOAD_FOR_INSN:
5148*38fd1498Szrj       used_in_set = &reload_reg_used_in_insn;
5149*38fd1498Szrj       break;
5150*38fd1498Szrj     default:
5151*38fd1498Szrj       gcc_unreachable ();
5152*38fd1498Szrj     }
5153*38fd1498Szrj   /* We resolve conflicts with remaining reloads of the same type by
5154*38fd1498Szrj      excluding the intervals of reload registers used by them from the
5155*38fd1498Szrj      interval of freed reload registers.  Since we only keep track of
5156*38fd1498Szrj      one set of interval bounds, we might have to exclude somewhat
5157*38fd1498Szrj      more than what would be necessary if we used a HARD_REG_SET here.
5158*38fd1498Szrj      But this should only happen very infrequently, so there should
5159*38fd1498Szrj      be no reason to worry about it.  */
5160*38fd1498Szrj 
5161*38fd1498Szrj   start_regno = regno;
5162*38fd1498Szrj   end_regno = regno + nregs;
5163*38fd1498Szrj   if (check_opnum || check_any)
5164*38fd1498Szrj     {
5165*38fd1498Szrj       for (i = n_reloads - 1; i >= 0; i--)
5166*38fd1498Szrj 	{
5167*38fd1498Szrj 	  if (rld[i].when_needed == type
5168*38fd1498Szrj 	      && (check_any || rld[i].opnum == opnum)
5169*38fd1498Szrj 	      && rld[i].reg_rtx)
5170*38fd1498Szrj 	    {
5171*38fd1498Szrj 	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
5172*38fd1498Szrj 	      unsigned int conflict_end
5173*38fd1498Szrj 		= end_hard_regno (rld[i].mode, conflict_start);
5174*38fd1498Szrj 
5175*38fd1498Szrj 	      /* If there is an overlap with the first to-be-freed register,
5176*38fd1498Szrj 		 adjust the interval start.  */
5177*38fd1498Szrj 	      if (conflict_start <= start_regno && conflict_end > start_regno)
5178*38fd1498Szrj 		start_regno = conflict_end;
5179*38fd1498Szrj 	      /* Otherwise, if there is a conflict with one of the other
5180*38fd1498Szrj 		 to-be-freed registers, adjust the interval end.  */
5181*38fd1498Szrj 	      if (conflict_start > start_regno && conflict_start < end_regno)
5182*38fd1498Szrj 		end_regno = conflict_start;
5183*38fd1498Szrj 	    }
5184*38fd1498Szrj 	}
5185*38fd1498Szrj     }
5186*38fd1498Szrj 
5187*38fd1498Szrj   for (r = start_regno; r < end_regno; r++)
5188*38fd1498Szrj     CLEAR_HARD_REG_BIT (*used_in_set, r);
5189*38fd1498Szrj }
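
/* Example of the interval trimming above (hypothetical numbers): when
   freeing hard regs 4..6 for some RELOAD_FOR_OUTADDR_ADDRESS reload
   while another reload of the same type and operand still occupies
   regs 5..6, the freed interval is narrowed to reg 4 only, so the bits
   for 5 and 6 stay set and the surviving reload keeps its claim.  */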
5190*38fd1498Szrj 
5191*38fd1498Szrj /* 1 if reg REGNO is free as a reload reg for a reload of the sort
5192*38fd1498Szrj    specified by OPNUM and TYPE.  */
5193*38fd1498Szrj 
5194*38fd1498Szrj static int
5195*38fd1498Szrj reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
5196*38fd1498Szrj {
5197*38fd1498Szrj   int i;
5198*38fd1498Szrj 
5199*38fd1498Szrj   /* In use for a RELOAD_OTHER means it's not available for anything.  */
5200*38fd1498Szrj   if (TEST_HARD_REG_BIT (reload_reg_used, regno)
5201*38fd1498Szrj       || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5202*38fd1498Szrj     return 0;
5203*38fd1498Szrj 
5204*38fd1498Szrj   switch (type)
5205*38fd1498Szrj     {
5206*38fd1498Szrj     case RELOAD_OTHER:
5207*38fd1498Szrj       /* In use for anything means we can't use it for RELOAD_OTHER.  */
5208*38fd1498Szrj       if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
5209*38fd1498Szrj 	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5210*38fd1498Szrj 	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5211*38fd1498Szrj 	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5212*38fd1498Szrj 	return 0;
5213*38fd1498Szrj 
5214*38fd1498Szrj       for (i = 0; i < reload_n_operands; i++)
5215*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5216*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5217*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5218*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5219*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5220*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5221*38fd1498Szrj 	  return 0;
5222*38fd1498Szrj 
5223*38fd1498Szrj       return 1;
5224*38fd1498Szrj 
5225*38fd1498Szrj     case RELOAD_FOR_INPUT:
5226*38fd1498Szrj       if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5227*38fd1498Szrj 	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
5228*38fd1498Szrj 	return 0;
5229*38fd1498Szrj 
5230*38fd1498Szrj       if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5231*38fd1498Szrj 	return 0;
5232*38fd1498Szrj 
5233*38fd1498Szrj       /* If it is used for some other input, can't use it.  */
5234*38fd1498Szrj       for (i = 0; i < reload_n_operands; i++)
5235*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5236*38fd1498Szrj 	  return 0;
5237*38fd1498Szrj 
5238*38fd1498Szrj       /* If it is used in a later operand's address, can't use it.  */
5239*38fd1498Szrj       for (i = opnum + 1; i < reload_n_operands; i++)
5240*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5241*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5242*38fd1498Szrj 	  return 0;
5243*38fd1498Szrj 
5244*38fd1498Szrj       return 1;
5245*38fd1498Szrj 
5246*38fd1498Szrj     case RELOAD_FOR_INPUT_ADDRESS:
5247*38fd1498Szrj       /* Can't use a register if it is used for an input address for this
5248*38fd1498Szrj 	 operand or used as an input in an earlier one.  */
5249*38fd1498Szrj       if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
5250*38fd1498Szrj 	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5251*38fd1498Szrj 	return 0;
5252*38fd1498Szrj 
5253*38fd1498Szrj       for (i = 0; i < opnum; i++)
5254*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5255*38fd1498Szrj 	  return 0;
5256*38fd1498Szrj 
5257*38fd1498Szrj       return 1;
5258*38fd1498Szrj 
5259*38fd1498Szrj     case RELOAD_FOR_INPADDR_ADDRESS:
5260*38fd1498Szrj       /* Can't use a register if it is used for an input address
5261*38fd1498Szrj 	 for this operand or used as an input in an earlier
5262*38fd1498Szrj 	 one.  */
5263*38fd1498Szrj       if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5264*38fd1498Szrj 	return 0;
5265*38fd1498Szrj 
5266*38fd1498Szrj       for (i = 0; i < opnum; i++)
5267*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5268*38fd1498Szrj 	  return 0;
5269*38fd1498Szrj 
5270*38fd1498Szrj       return 1;
5271*38fd1498Szrj 
5272*38fd1498Szrj     case RELOAD_FOR_OUTPUT_ADDRESS:
5273*38fd1498Szrj       /* Can't use a register if it is used for an output address for this
5274*38fd1498Szrj 	 operand or used as an output in this or a later operand.  Note
5275*38fd1498Szrj 	 that multiple output operands are emitted in reverse order, so
5276*38fd1498Szrj 	 the conflicting ones are those with lower indices.  */
5277*38fd1498Szrj       if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
5278*38fd1498Szrj 	return 0;
5279*38fd1498Szrj 
5280*38fd1498Szrj       for (i = 0; i <= opnum; i++)
5281*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5282*38fd1498Szrj 	  return 0;
5283*38fd1498Szrj 
5284*38fd1498Szrj       return 1;
5285*38fd1498Szrj 
5286*38fd1498Szrj     case RELOAD_FOR_OUTADDR_ADDRESS:
5287*38fd1498Szrj       /* Can't use a register if it is used for an output address
5288*38fd1498Szrj 	 for this operand or used as an output in this or a
5289*38fd1498Szrj 	 later operand.  Note that multiple output operands are
5290*38fd1498Szrj 	 emitted in reverse order, so the conflicting ones are
5291*38fd1498Szrj 	 those with lower indices.  */
5292*38fd1498Szrj       if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
5293*38fd1498Szrj 	return 0;
5294*38fd1498Szrj 
5295*38fd1498Szrj       for (i = 0; i <= opnum; i++)
5296*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5297*38fd1498Szrj 	  return 0;
5298*38fd1498Szrj 
5299*38fd1498Szrj       return 1;
5300*38fd1498Szrj 
5301*38fd1498Szrj     case RELOAD_FOR_OPERAND_ADDRESS:
5302*38fd1498Szrj       for (i = 0; i < reload_n_operands; i++)
5303*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5304*38fd1498Szrj 	  return 0;
5305*38fd1498Szrj 
5306*38fd1498Szrj       return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5307*38fd1498Szrj 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5308*38fd1498Szrj 
5309*38fd1498Szrj     case RELOAD_FOR_OPADDR_ADDR:
5310*38fd1498Szrj       for (i = 0; i < reload_n_operands; i++)
5311*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5312*38fd1498Szrj 	  return 0;
5313*38fd1498Szrj 
5314*38fd1498Szrj       return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
5315*38fd1498Szrj 
5316*38fd1498Szrj     case RELOAD_FOR_OUTPUT:
5317*38fd1498Szrj       /* This cannot share a register with RELOAD_FOR_INSN reloads, other
5318*38fd1498Szrj 	 outputs, or an operand address for this or an earlier output.
5319*38fd1498Szrj 	 Note that multiple output operands are emitted in reverse order,
5320*38fd1498Szrj 	 so the conflicting ones are those with higher indices.  */
5321*38fd1498Szrj       if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5322*38fd1498Szrj 	return 0;
5323*38fd1498Szrj 
5324*38fd1498Szrj       for (i = 0; i < reload_n_operands; i++)
5325*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5326*38fd1498Szrj 	  return 0;
5327*38fd1498Szrj 
5328*38fd1498Szrj       for (i = opnum; i < reload_n_operands; i++)
5329*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5330*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5331*38fd1498Szrj 	  return 0;
5332*38fd1498Szrj 
5333*38fd1498Szrj       return 1;
5334*38fd1498Szrj 
5335*38fd1498Szrj     case RELOAD_FOR_INSN:
5336*38fd1498Szrj       for (i = 0; i < reload_n_operands; i++)
5337*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5338*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5339*38fd1498Szrj 	  return 0;
5340*38fd1498Szrj 
5341*38fd1498Szrj       return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5342*38fd1498Szrj 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5343*38fd1498Szrj 
5344*38fd1498Szrj     case RELOAD_FOR_OTHER_ADDRESS:
5345*38fd1498Szrj       return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
5346*38fd1498Szrj 
5347*38fd1498Szrj     default:
5348*38fd1498Szrj       gcc_unreachable ();
5349*38fd1498Szrj     }
5350*38fd1498Szrj }
5351*38fd1498Szrj 
5352*38fd1498Szrj /* Return 1 if the value in reload reg REGNO, as used by the reload with
5353*38fd1498Szrj    the number RELOADNUM, is still available in REGNO at the end of the insn.
5354*38fd1498Szrj 
5355*38fd1498Szrj    We can assume that the reload reg was already tested for availability
5356*38fd1498Szrj    at the time it is needed, and we should not check this again,
5357*38fd1498Szrj    in case the reg has already been marked in use.  */
5358*38fd1498Szrj 
5359*38fd1498Szrj static int
5360*38fd1498Szrj reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
5361*38fd1498Szrj {
5362*38fd1498Szrj   int opnum = rld[reloadnum].opnum;
5363*38fd1498Szrj   enum reload_type type = rld[reloadnum].when_needed;
5364*38fd1498Szrj   int i;
5365*38fd1498Szrj 
5366*38fd1498Szrj   /* See if there is a reload with the same type for this operand, using
5367*38fd1498Szrj      the same register. This case is not handled by the code below.  */
5368*38fd1498Szrj   for (i = reloadnum + 1; i < n_reloads; i++)
5369*38fd1498Szrj     {
5370*38fd1498Szrj       rtx reg;
5371*38fd1498Szrj 
5372*38fd1498Szrj       if (rld[i].opnum != opnum || rld[i].when_needed != type)
5373*38fd1498Szrj 	continue;
5374*38fd1498Szrj       reg = rld[i].reg_rtx;
5375*38fd1498Szrj       if (reg == NULL_RTX)
5376*38fd1498Szrj 	continue;
5377*38fd1498Szrj       if (regno >= REGNO (reg) && regno < END_REGNO (reg))
5378*38fd1498Szrj 	return 0;
5379*38fd1498Szrj     }
5380*38fd1498Szrj 
5381*38fd1498Szrj   switch (type)
5382*38fd1498Szrj     {
5383*38fd1498Szrj     case RELOAD_OTHER:
5384*38fd1498Szrj       /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5385*38fd1498Szrj 	 its value must reach the end.  */
5386*38fd1498Szrj       return 1;
5387*38fd1498Szrj 
5388*38fd1498Szrj       /* If this use is for part of the insn,
5389*38fd1498Szrj 	 its value reaches if no subsequent part uses the same register.
5390*38fd1498Szrj 	 Just like the above function, don't try to do this with lots
5391*38fd1498Szrj 	 of fallthroughs.  */
5392*38fd1498Szrj 
5393*38fd1498Szrj     case RELOAD_FOR_OTHER_ADDRESS:
5394*38fd1498Szrj       /* Here we check for everything else, since these don't conflict
5395*38fd1498Szrj 	 with anything else and everything comes later.  */
5396*38fd1498Szrj 
5397*38fd1498Szrj       for (i = 0; i < reload_n_operands; i++)
5398*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5399*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5400*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5401*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5402*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5403*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5404*38fd1498Szrj 	  return 0;
5405*38fd1498Szrj 
5406*38fd1498Szrj       return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5407*38fd1498Szrj 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5408*38fd1498Szrj 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5409*38fd1498Szrj 	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5410*38fd1498Szrj 
5411*38fd1498Szrj     case RELOAD_FOR_INPUT_ADDRESS:
5412*38fd1498Szrj     case RELOAD_FOR_INPADDR_ADDRESS:
5413*38fd1498Szrj       /* Similar, except that we check only for this and subsequent inputs
5414*38fd1498Szrj 	 and the address of only subsequent inputs and we do not need
5415*38fd1498Szrj 	 to check for RELOAD_OTHER objects since they are known not to
5416*38fd1498Szrj 	 conflict.  */
5417*38fd1498Szrj 
5418*38fd1498Szrj       for (i = opnum; i < reload_n_operands; i++)
5419*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5420*38fd1498Szrj 	  return 0;
5421*38fd1498Szrj 
5422*38fd1498Szrj       /* The reload register of a RELOAD_FOR_INPADDR_ADDRESS reload
5423*38fd1498Szrj 	 could be killed if the register is also used by a reload with
5424*38fd1498Szrj 	 type RELOAD_FOR_INPUT_ADDRESS, so check it.  */
5425*38fd1498Szrj       if (type == RELOAD_FOR_INPADDR_ADDRESS
5426*38fd1498Szrj 	  && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
5427*38fd1498Szrj 	return 0;
5428*38fd1498Szrj 
5429*38fd1498Szrj       for (i = opnum + 1; i < reload_n_operands; i++)
5430*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5431*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5432*38fd1498Szrj 	  return 0;
5433*38fd1498Szrj 
5434*38fd1498Szrj       for (i = 0; i < reload_n_operands; i++)
5435*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5436*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5437*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5438*38fd1498Szrj 	  return 0;
5439*38fd1498Szrj 
5440*38fd1498Szrj       if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5441*38fd1498Szrj 	return 0;
5442*38fd1498Szrj 
5443*38fd1498Szrj       return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5444*38fd1498Szrj 	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5445*38fd1498Szrj 	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5446*38fd1498Szrj 
5447*38fd1498Szrj     case RELOAD_FOR_INPUT:
5448*38fd1498Szrj       /* Similar to input address, except we start at the next operand for
5449*38fd1498Szrj 	 both input and input address and we do not check for
5450*38fd1498Szrj 	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5451*38fd1498Szrj 	 would conflict.  */
5452*38fd1498Szrj 
5453*38fd1498Szrj       for (i = opnum + 1; i < reload_n_operands; i++)
5454*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5455*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5456*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5457*38fd1498Szrj 	  return 0;
5458*38fd1498Szrj 
5459*38fd1498Szrj       /* ... fall through ...  */
5460*38fd1498Szrj 
5461*38fd1498Szrj     case RELOAD_FOR_OPERAND_ADDRESS:
5462*38fd1498Szrj       /* Check outputs and their addresses.  */
5463*38fd1498Szrj 
5464*38fd1498Szrj       for (i = 0; i < reload_n_operands; i++)
5465*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5466*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5467*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5468*38fd1498Szrj 	  return 0;
5469*38fd1498Szrj 
5470*38fd1498Szrj       return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
5471*38fd1498Szrj 
5472*38fd1498Szrj     case RELOAD_FOR_OPADDR_ADDR:
5473*38fd1498Szrj       for (i = 0; i < reload_n_operands; i++)
5474*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5475*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5476*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5477*38fd1498Szrj 	  return 0;
5478*38fd1498Szrj 
5479*38fd1498Szrj       return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5480*38fd1498Szrj 	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5481*38fd1498Szrj 	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5482*38fd1498Szrj 
5483*38fd1498Szrj     case RELOAD_FOR_INSN:
5484*38fd1498Szrj       /* These conflict with other outputs with RELOAD_OTHER.  So
5485*38fd1498Szrj 	 we need only check for output addresses.  */
5486*38fd1498Szrj 
5487*38fd1498Szrj       opnum = reload_n_operands;
5488*38fd1498Szrj 
5489*38fd1498Szrj       /* fall through */
5490*38fd1498Szrj 
5491*38fd1498Szrj     case RELOAD_FOR_OUTPUT:
5492*38fd1498Szrj     case RELOAD_FOR_OUTPUT_ADDRESS:
5493*38fd1498Szrj     case RELOAD_FOR_OUTADDR_ADDRESS:
5494*38fd1498Szrj       /* We already know these can't conflict with a later output.  So the
5495*38fd1498Szrj 	 only thing to check are later output addresses.
5496*38fd1498Szrj 	 Note that multiple output operands are emitted in reverse order,
5497*38fd1498Szrj 	 so the conflicting ones are those with lower indices.  */
5498*38fd1498Szrj       for (i = 0; i < opnum; i++)
5499*38fd1498Szrj 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5500*38fd1498Szrj 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5501*38fd1498Szrj 	  return 0;
5502*38fd1498Szrj 
5503*38fd1498Szrj       /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
5504*38fd1498Szrj 	 could be killed if the register is also used by reload with type
5505*38fd1498Szrj 	 RELOAD_FOR_OUTPUT_ADDRESS, so check it.  */
5506*38fd1498Szrj       if (type == RELOAD_FOR_OUTADDR_ADDRESS
5507*38fd1498Szrj 	  && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
5508*38fd1498Szrj 	return 0;
5509*38fd1498Szrj 
5510*38fd1498Szrj       return 1;
5511*38fd1498Szrj 
5512*38fd1498Szrj     default:
5513*38fd1498Szrj       gcc_unreachable ();
5514*38fd1498Szrj     }
5515*38fd1498Szrj }
5516*38fd1498Szrj 
5517*38fd1498Szrj /* Like reload_reg_reaches_end_p, but check that the condition holds for
5518*38fd1498Szrj    every register in REG.  */
5519*38fd1498Szrj 
5520*38fd1498Szrj static bool
5521*38fd1498Szrj reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5522*38fd1498Szrj {
5523*38fd1498Szrj   unsigned int i;
5524*38fd1498Szrj 
5525*38fd1498Szrj   for (i = REGNO (reg); i < END_REGNO (reg); i++)
5526*38fd1498Szrj     if (!reload_reg_reaches_end_p (i, reloadnum))
5527*38fd1498Szrj       return false;
5528*38fd1498Szrj   return true;
5529*38fd1498Szrj }
5530*38fd1498Szrj 
5531*38fd1498Szrj 
5532*38fd1498Szrj /*  Returns whether R1 and R2 are uniquely chained: the value of one
5533*38fd1498Szrj     is used by the other, and that value is not used by any other
5534*38fd1498Szrj     reload for this insn.  This is used to partially undo the decision
5535*38fd1498Szrj     made in find_reloads, which, when there are multiple
5536*38fd1498Szrj     RELOAD_FOR_OPERAND_ADDRESS reloads, converts all
5537*38fd1498Szrj     RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5538*38fd1498Szrj     reloads.  This code tries to avoid the conflict created by that
5539*38fd1498Szrj     change.  It might be cleaner to explicitly keep track of which
5540*38fd1498Szrj     RELOAD_FOR_OPADDR_ADDR reload is associated with which
5541*38fd1498Szrj     RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5542*38fd1498Szrj     this after the fact. */
5543*38fd1498Szrj static bool
5544*38fd1498Szrj reloads_unique_chain_p (int r1, int r2)
5545*38fd1498Szrj {
5546*38fd1498Szrj   int i;
5547*38fd1498Szrj 
5548*38fd1498Szrj   /* We only check input reloads.  */
5549*38fd1498Szrj   if (! rld[r1].in || ! rld[r2].in)
5550*38fd1498Szrj     return false;
5551*38fd1498Szrj 
5552*38fd1498Szrj   /* Avoid anything with output reloads.  */
5553*38fd1498Szrj   if (rld[r1].out || rld[r2].out)
5554*38fd1498Szrj     return false;
5555*38fd1498Szrj 
5556*38fd1498Szrj   /* "chained" means one reload is a component of the other reload,
5557*38fd1498Szrj      not the same as the other reload.  */
5558*38fd1498Szrj   if (rld[r1].opnum != rld[r2].opnum
5559*38fd1498Szrj       || rtx_equal_p (rld[r1].in, rld[r2].in)
5560*38fd1498Szrj       || rld[r1].optional || rld[r2].optional
5561*38fd1498Szrj       || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5562*38fd1498Szrj 	    || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5563*38fd1498Szrj     return false;
5564*38fd1498Szrj 
5565*38fd1498Szrj   /* The following loop assumes that r1 is the reload that feeds r2.  */
5566*38fd1498Szrj   if (r1 > r2)
5567*38fd1498Szrj     std::swap (r1, r2);
5568*38fd1498Szrj 
5569*38fd1498Szrj   for (i = 0; i < n_reloads; i ++)
5570*38fd1498Szrj     /* Look for input reloads that aren't our two.  */
5571*38fd1498Szrj     if (i != r1 && i != r2 && rld[i].in)
5572*38fd1498Szrj       {
5573*38fd1498Szrj 	/* If our reload is mentioned at all, it isn't a simple chain.  */
5574*38fd1498Szrj 	if (reg_mentioned_p (rld[r1].in, rld[i].in))
5575*38fd1498Szrj 	  return false;
5576*38fd1498Szrj       }
5577*38fd1498Szrj   return true;
5578*38fd1498Szrj }
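
/* For illustration (hypothetical reloads): r1 with in == (reg:SI 100)
   and r2 with in == (plus:SI (reg:SI 100) (const_int 8)), both for the
   same operand, neither optional and neither having an output, are
   uniquely chained as long as no other input reload of the insn also
   mentions the chained value.  */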
5579*38fd1498Szrj 
5580*38fd1498Szrj /* This recursive function changes all occurrences of WHAT in *WHERE
5581*38fd1498Szrj    to REPL.  */
5582*38fd1498Szrj static void
5583*38fd1498Szrj substitute (rtx *where, const_rtx what, rtx repl)
5584*38fd1498Szrj {
5585*38fd1498Szrj   const char *fmt;
5586*38fd1498Szrj   int i;
5587*38fd1498Szrj   enum rtx_code code;
5588*38fd1498Szrj 
5589*38fd1498Szrj   if (*where == 0)
5590*38fd1498Szrj     return;
5591*38fd1498Szrj 
5592*38fd1498Szrj   if (*where == what || rtx_equal_p (*where, what))
5593*38fd1498Szrj     {
5594*38fd1498Szrj       /* Record the location of the changed rtx.  */
5595*38fd1498Szrj       substitute_stack.safe_push (where);
5596*38fd1498Szrj       *where = repl;
5597*38fd1498Szrj       return;
5598*38fd1498Szrj     }
5599*38fd1498Szrj 
5600*38fd1498Szrj   code = GET_CODE (*where);
5601*38fd1498Szrj   fmt = GET_RTX_FORMAT (code);
5602*38fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5603*38fd1498Szrj     {
5604*38fd1498Szrj       if (fmt[i] == 'E')
5605*38fd1498Szrj 	{
5606*38fd1498Szrj 	  int j;
5607*38fd1498Szrj 
5608*38fd1498Szrj 	  for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5609*38fd1498Szrj 	    substitute (&XVECEXP (*where, i, j), what, repl);
5610*38fd1498Szrj 	}
5611*38fd1498Szrj       else if (fmt[i] == 'e')
5612*38fd1498Szrj 	substitute (&XEXP (*where, i), what, repl);
5613*38fd1498Szrj     }
5614*38fd1498Szrj }
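
/* A minimal usage sketch (hypothetical rtxes): substituting WHAT = (reg A)
   by REPL = (reg HR) inside *WHERE = (plus (reg A) (const_int 8)) rewrites
   the PLUS in place to (plus (reg HR) (const_int 8)) and pushes the address
   of the modified XEXP slot onto substitute_stack, so that a caller such as
   gen_reload_chain_without_interm_reg_p below can restore the original
   operand afterwards.  */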
5615*38fd1498Szrj 
5616*38fd1498Szrj /* The function returns TRUE if the chain of reloads R1 and R2 (in any
5617*38fd1498Szrj    order) can be evaluated without using an intermediate register for
5618*38fd1498Szrj    the reload whose input contains the other reload's input.  See
5619*38fd1498Szrj    gen_reload to understand what the function is trying to do.  As an
5620*38fd1498Szrj    example, let us have reload chain
5621*38fd1498Szrj 
5622*38fd1498Szrj       r2: const
5623*38fd1498Szrj       r1: <something> + const
5624*38fd1498Szrj 
5625*38fd1498Szrj    and reload R2 got reload reg HR.  The function returns true if
5626*38fd1498Szrj    there is a valid insn HR = HR + <something>.  Otherwise,
5627*38fd1498Szrj    gen_reload will use an intermediate register (which is the reload
5628*38fd1498Szrj    reg for R1) to reload <something>.
5629*38fd1498Szrj 
5630*38fd1498Szrj    We need this function to find a conflict for chain reloads.  In our
5631*38fd1498Szrj    example, if HR = HR + <something> is not a valid insn, then we cannot
5632*38fd1498Szrj    use HR as a reload register for R2.  If we do use it, then we get
5633*38fd1498Szrj    wrong code:
5634*38fd1498Szrj 
5635*38fd1498Szrj       HR = const
5636*38fd1498Szrj       HR = <something>
5637*38fd1498Szrj       HR = HR + HR
5638*38fd1498Szrj 
5639*38fd1498Szrj */
5640*38fd1498Szrj static bool
5641*38fd1498Szrj gen_reload_chain_without_interm_reg_p (int r1, int r2)
5642*38fd1498Szrj {
5643*38fd1498Szrj   /* Assume other cases in gen_reload are not possible for
5644*38fd1498Szrj      chain reloads or do need an intermediate hard register.  */
5645*38fd1498Szrj   bool result = true;
5646*38fd1498Szrj   int regno, code;
5647*38fd1498Szrj   rtx out, in;
5648*38fd1498Szrj   rtx_insn *insn;
5649*38fd1498Szrj   rtx_insn *last = get_last_insn ();
5650*38fd1498Szrj 
5651*38fd1498Szrj   /* Make r2 a component of r1.  */
5652*38fd1498Szrj   if (reg_mentioned_p (rld[r1].in, rld[r2].in))
5653*38fd1498Szrj     std::swap (r1, r2);
5654*38fd1498Szrj 
5655*38fd1498Szrj   gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
5656*38fd1498Szrj   regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
5657*38fd1498Szrj   gcc_assert (regno >= 0);
5658*38fd1498Szrj   out = gen_rtx_REG (rld[r1].mode, regno);
5659*38fd1498Szrj   in = rld[r1].in;
5660*38fd1498Szrj   substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));
5661*38fd1498Szrj 
5662*38fd1498Szrj   /* If IN is a paradoxical SUBREG, remove it and try to put the
5663*38fd1498Szrj      opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
5664*38fd1498Szrj   strip_paradoxical_subreg (&in, &out);
5665*38fd1498Szrj 
5666*38fd1498Szrj   if (GET_CODE (in) == PLUS
5667*38fd1498Szrj       && (REG_P (XEXP (in, 0))
5668*38fd1498Szrj 	  || GET_CODE (XEXP (in, 0)) == SUBREG
5669*38fd1498Szrj 	  || MEM_P (XEXP (in, 0)))
5670*38fd1498Szrj       && (REG_P (XEXP (in, 1))
5671*38fd1498Szrj 	  || GET_CODE (XEXP (in, 1)) == SUBREG
5672*38fd1498Szrj 	  || CONSTANT_P (XEXP (in, 1))
5673*38fd1498Szrj 	  || MEM_P (XEXP (in, 1))))
5674*38fd1498Szrj     {
5675*38fd1498Szrj       insn = emit_insn (gen_rtx_SET (out, in));
5676*38fd1498Szrj       code = recog_memoized (insn);
5677*38fd1498Szrj       result = false;
5678*38fd1498Szrj 
5679*38fd1498Szrj       if (code >= 0)
5680*38fd1498Szrj 	{
5681*38fd1498Szrj 	  extract_insn (insn);
5682*38fd1498Szrj 	  /* We want constrain operands to treat this insn strictly in
5683*38fd1498Szrj 	     its validity determination, i.e., the way it would after
5684*38fd1498Szrj 	     reload has completed.  */
5685*38fd1498Szrj 	  result = constrain_operands (1, get_enabled_alternatives (insn));
5686*38fd1498Szrj 	}
5687*38fd1498Szrj 
5688*38fd1498Szrj       delete_insns_since (last);
5689*38fd1498Szrj     }
5690*38fd1498Szrj 
5691*38fd1498Szrj   /* Restore the original value at each changed address within R1.  */
5692*38fd1498Szrj   while (!substitute_stack.is_empty ())
5693*38fd1498Szrj     {
5694*38fd1498Szrj       rtx *where = substitute_stack.pop ();
5695*38fd1498Szrj       *where = rld[r2].in;
5696*38fd1498Szrj     }
5697*38fd1498Szrj 
5698*38fd1498Szrj   return result;
5699*38fd1498Szrj }
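
/* Worked reading of the check above, reusing the example from the function
   comment: with r2 reloading "const" and r1 reloading "<something> + const",
   substitute rewrites r1's input so that the tentative insn emitted is
   (set (reg HR) (plus <something> (reg HR))).  recog_memoized and a strict
   constrain_operands then decide whether that insn is valid; either way the
   tentative insn is deleted and the substitution is undone through
   substitute_stack, so nothing observable is left behind.  */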
5700*38fd1498Szrj 
5701*38fd1498Szrj /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5702*38fd1498Szrj    Return 0 otherwise.
5703*38fd1498Szrj 
5704*38fd1498Szrj    This function uses the same algorithm as reload_reg_free_p above.  */
5705*38fd1498Szrj 
5706*38fd1498Szrj static int
5707*38fd1498Szrj reloads_conflict (int r1, int r2)
5708*38fd1498Szrj {
5709*38fd1498Szrj   enum reload_type r1_type = rld[r1].when_needed;
5710*38fd1498Szrj   enum reload_type r2_type = rld[r2].when_needed;
5711*38fd1498Szrj   int r1_opnum = rld[r1].opnum;
5712*38fd1498Szrj   int r2_opnum = rld[r2].opnum;
5713*38fd1498Szrj 
5714*38fd1498Szrj   /* RELOAD_OTHER conflicts with everything.  */
5715*38fd1498Szrj   if (r2_type == RELOAD_OTHER)
5716*38fd1498Szrj     return 1;
5717*38fd1498Szrj 
5718*38fd1498Szrj   /* Otherwise, check conflicts differently for each type.  */
5719*38fd1498Szrj 
5720*38fd1498Szrj   switch (r1_type)
5721*38fd1498Szrj     {
5722*38fd1498Szrj     case RELOAD_FOR_INPUT:
5723*38fd1498Szrj       return (r2_type == RELOAD_FOR_INSN
5724*38fd1498Szrj 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5725*38fd1498Szrj 	      || r2_type == RELOAD_FOR_OPADDR_ADDR
5726*38fd1498Szrj 	      || r2_type == RELOAD_FOR_INPUT
5727*38fd1498Szrj 	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5728*38fd1498Szrj 		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5729*38fd1498Szrj 		  && r2_opnum > r1_opnum));
5730*38fd1498Szrj 
5731*38fd1498Szrj     case RELOAD_FOR_INPUT_ADDRESS:
5732*38fd1498Szrj       return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5733*38fd1498Szrj 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5734*38fd1498Szrj 
5735*38fd1498Szrj     case RELOAD_FOR_INPADDR_ADDRESS:
5736*38fd1498Szrj       return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5737*38fd1498Szrj 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5738*38fd1498Szrj 
5739*38fd1498Szrj     case RELOAD_FOR_OUTPUT_ADDRESS:
5740*38fd1498Szrj       return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5741*38fd1498Szrj 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5742*38fd1498Szrj 
5743*38fd1498Szrj     case RELOAD_FOR_OUTADDR_ADDRESS:
5744*38fd1498Szrj       return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5745*38fd1498Szrj 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5746*38fd1498Szrj 
5747*38fd1498Szrj     case RELOAD_FOR_OPERAND_ADDRESS:
5748*38fd1498Szrj       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5749*38fd1498Szrj 	      || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5750*38fd1498Szrj 		  && (!reloads_unique_chain_p (r1, r2)
5751*38fd1498Szrj 		      || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5752*38fd1498Szrj 
5753*38fd1498Szrj     case RELOAD_FOR_OPADDR_ADDR:
5754*38fd1498Szrj       return (r2_type == RELOAD_FOR_INPUT
5755*38fd1498Szrj 	      || r2_type == RELOAD_FOR_OPADDR_ADDR);
5756*38fd1498Szrj 
5757*38fd1498Szrj     case RELOAD_FOR_OUTPUT:
5758*38fd1498Szrj       return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5759*38fd1498Szrj 	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5760*38fd1498Szrj 		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5761*38fd1498Szrj 		  && r2_opnum >= r1_opnum));
5762*38fd1498Szrj 
5763*38fd1498Szrj     case RELOAD_FOR_INSN:
5764*38fd1498Szrj       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5765*38fd1498Szrj 	      || r2_type == RELOAD_FOR_INSN
5766*38fd1498Szrj 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5767*38fd1498Szrj 
5768*38fd1498Szrj     case RELOAD_FOR_OTHER_ADDRESS:
5769*38fd1498Szrj       return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5770*38fd1498Szrj 
5771*38fd1498Szrj     case RELOAD_OTHER:
5772*38fd1498Szrj       return 1;
5773*38fd1498Szrj 
5774*38fd1498Szrj     default:
5775*38fd1498Szrj       gcc_unreachable ();
5776*38fd1498Szrj     }
5777*38fd1498Szrj }
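
/* Example readings of the table above (hypothetical reloads): calling
   reloads_conflict with R1 a RELOAD_FOR_INPUT for operand 1 and R2 a
   RELOAD_FOR_INPADDR_ADDRESS for operand 2 returns 1, since r2_opnum >
   r1_opnum there: the address for a later operand is computed while the
   earlier operand's input reload register is still in use.  The same R1
   against a RELOAD_FOR_INPADDR_ADDRESS for operand 0 returns 0.  */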
5778*38fd1498Szrj 
5779*38fd1498Szrj /* Indexed by reload number, 1 if incoming value
5780*38fd1498Szrj    inherited from previous insns.  */
5781*38fd1498Szrj static char reload_inherited[MAX_RELOADS];
5782*38fd1498Szrj 
5783*38fd1498Szrj /* For an inherited reload, this is the insn the reload was inherited from,
5784*38fd1498Szrj    if we know it.  Otherwise, this is 0.  */
5785*38fd1498Szrj static rtx_insn *reload_inheritance_insn[MAX_RELOADS];
5786*38fd1498Szrj 
5787*38fd1498Szrj /* If nonzero, this is a place to get the value of the reload,
5788*38fd1498Szrj    rather than using reload_in.  */
5789*38fd1498Szrj static rtx reload_override_in[MAX_RELOADS];
5790*38fd1498Szrj 
5791*38fd1498Szrj /* For each reload, the hard register number of the register used,
5792*38fd1498Szrj    or -1 if we did not need a register for this reload.  */
5793*38fd1498Szrj static int reload_spill_index[MAX_RELOADS];
5794*38fd1498Szrj 
5795*38fd1498Szrj /* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
5796*38fd1498Szrj static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5797*38fd1498Szrj 
5798*38fd1498Szrj /* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
5799*38fd1498Szrj static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5800*38fd1498Szrj 
5801*38fd1498Szrj /* Subroutine of free_for_value_p, used to check a single register.
5802*38fd1498Szrj    START_REGNO is the starting regno of the full reload register
5803*38fd1498Szrj    (possibly comprising multiple hard registers) that we are considering.  */
5804*38fd1498Szrj 
5805*38fd1498Szrj static int
5806*38fd1498Szrj reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5807*38fd1498Szrj 			     enum reload_type type, rtx value, rtx out,
5808*38fd1498Szrj 			     int reloadnum, int ignore_address_reloads)
5809*38fd1498Szrj {
5810*38fd1498Szrj   int time1;
5811*38fd1498Szrj   /* Set if we see an input reload that must not share its reload register
5812*38fd1498Szrj      with any new earlyclobber, but might otherwise share the reload
5813*38fd1498Szrj      register with an output or input-output reload.  */
5814*38fd1498Szrj   int check_earlyclobber = 0;
5815*38fd1498Szrj   int i;
5816*38fd1498Szrj   int copy = 0;
5817*38fd1498Szrj 
5818*38fd1498Szrj   if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5819*38fd1498Szrj     return 0;
5820*38fd1498Szrj 
5821*38fd1498Szrj   if (out == const0_rtx)
5822*38fd1498Szrj     {
5823*38fd1498Szrj       copy = 1;
5824*38fd1498Szrj       out = NULL_RTX;
5825*38fd1498Szrj     }
5826*38fd1498Szrj 
5827*38fd1498Szrj   /* We use some pseudo 'time' value to check if the lifetimes of the
5828*38fd1498Szrj      new register use would overlap with the one of a previous reload
5829*38fd1498Szrj      that is not read-only or uses a different value.
5830*38fd1498Szrj      The 'time' used doesn't have to be linear in any shape or form, just
5831*38fd1498Szrj      monotonic.
5832*38fd1498Szrj      Some reload types use different 'buckets' for each operand.
5833*38fd1498Szrj      So there are MAX_RECOG_OPERANDS different time values for each
5834*38fd1498Szrj      such reload type.
5835*38fd1498Szrj      We compute TIME1 as the time when the register for the prospective
5836*38fd1498Szrj      new reload ceases to be live, and TIME2 for each existing
5837*38fd1498Szrj      reload as the time when the reload register of that reload
5838*38fd1498Szrj      becomes live.
5839*38fd1498Szrj      Where there is little to be gained by exact lifetime calculations,
5840*38fd1498Szrj      we just make conservative assumptions, i.e. a longer lifetime;
5841*38fd1498Szrj      this is done in the 'default:' cases.  */
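  /* A worked picture of the buckets below, kept symbolic so it holds for
     any MAX_RECOG_OPERANDS: the input-side reloads for operand N get times
     4N+2 (INPADDR_ADDRESS), 4N+3 (INPUT_ADDRESS) and 4N+4 (INPUT, in the
     copy case); all of these stay at or below MAX_RECOG_OPERANDS * 4, so
     they come before the OPADDR_ADDR time MAX_RECOG_OPERANDS * 4 + 1, the
     OPERAND_ADDRESS / INSN times MAX_RECOG_OPERANDS * 4 + 2 and + 3, and
     the output-side times, which start at MAX_RECOG_OPERANDS * 4 + 4.  */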
5842*38fd1498Szrj   switch (type)
5843*38fd1498Szrj     {
5844*38fd1498Szrj     case RELOAD_FOR_OTHER_ADDRESS:
5845*38fd1498Szrj       /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
5846*38fd1498Szrj       time1 = copy ? 0 : 1;
5847*38fd1498Szrj       break;
5848*38fd1498Szrj     case RELOAD_OTHER:
5849*38fd1498Szrj       time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5850*38fd1498Szrj       break;
5851*38fd1498Szrj       /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5852*38fd1498Szrj 	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2,
5853*38fd1498Szrj 	 respectively, to the time values for these, we get distinct time
5854*38fd1498Szrj 	 values.  To get distinct time values for each operand, we have to
5855*38fd1498Szrj 	 multiply opnum by at least three.  We round that up to four because
5856*38fd1498Szrj 	 multiply by four is often cheaper.  */
5857*38fd1498Szrj     case RELOAD_FOR_INPADDR_ADDRESS:
5858*38fd1498Szrj       time1 = opnum * 4 + 2;
5859*38fd1498Szrj       break;
5860*38fd1498Szrj     case RELOAD_FOR_INPUT_ADDRESS:
5861*38fd1498Szrj       time1 = opnum * 4 + 3;
5862*38fd1498Szrj       break;
5863*38fd1498Szrj     case RELOAD_FOR_INPUT:
5864*38fd1498Szrj       /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5865*38fd1498Szrj 	 executes (inclusive).  */
5866*38fd1498Szrj       time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5867*38fd1498Szrj       break;
5868*38fd1498Szrj     case RELOAD_FOR_OPADDR_ADDR:
5869*38fd1498Szrj       /* opnum * 4 + 4
5870*38fd1498Szrj 	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5871*38fd1498Szrj       time1 = MAX_RECOG_OPERANDS * 4 + 1;
5872*38fd1498Szrj       break;
5873*38fd1498Szrj     case RELOAD_FOR_OPERAND_ADDRESS:
5874*38fd1498Szrj       /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5875*38fd1498Szrj 	 is executed.  */
5876*38fd1498Szrj       time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5877*38fd1498Szrj       break;
5878*38fd1498Szrj     case RELOAD_FOR_OUTADDR_ADDRESS:
5879*38fd1498Szrj       time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5880*38fd1498Szrj       break;
5881*38fd1498Szrj     case RELOAD_FOR_OUTPUT_ADDRESS:
5882*38fd1498Szrj       time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5883*38fd1498Szrj       break;
5884*38fd1498Szrj     default:
5885*38fd1498Szrj       time1 = MAX_RECOG_OPERANDS * 5 + 5;
5886*38fd1498Szrj     }
5887*38fd1498Szrj 
5888*38fd1498Szrj   for (i = 0; i < n_reloads; i++)
5889*38fd1498Szrj     {
5890*38fd1498Szrj       rtx reg = rld[i].reg_rtx;
5891*38fd1498Szrj       if (reg && REG_P (reg)
5892*38fd1498Szrj 	  && (unsigned) regno - true_regnum (reg) < REG_NREGS (reg)
5893*38fd1498Szrj 	  && i != reloadnum)
5894*38fd1498Szrj 	{
5895*38fd1498Szrj 	  rtx other_input = rld[i].in;
5896*38fd1498Szrj 
5897*38fd1498Szrj 	  /* If the other reload loads the same input value, that
5898*38fd1498Szrj 	     will not cause a conflict only if it's loading it into
5899*38fd1498Szrj 	     the same register.  */
5900*38fd1498Szrj 	  if (true_regnum (reg) != start_regno)
5901*38fd1498Szrj 	    other_input = NULL_RTX;
5902*38fd1498Szrj 	  if (! other_input || ! rtx_equal_p (other_input, value)
5903*38fd1498Szrj 	      || rld[i].out || out)
5904*38fd1498Szrj 	    {
5905*38fd1498Szrj 	      int time2;
5906*38fd1498Szrj 	      switch (rld[i].when_needed)
5907*38fd1498Szrj 		{
5908*38fd1498Szrj 		case RELOAD_FOR_OTHER_ADDRESS:
5909*38fd1498Szrj 		  time2 = 0;
5910*38fd1498Szrj 		  break;
5911*38fd1498Szrj 		case RELOAD_FOR_INPADDR_ADDRESS:
5912*38fd1498Szrj 		  /* find_reloads makes sure that a
5913*38fd1498Szrj 		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5914*38fd1498Szrj 		     by at most one - the first -
5915*38fd1498Szrj 		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS.  If the
5916*38fd1498Szrj 		     address reload is inherited, the address address reload
5917*38fd1498Szrj 		     goes away, so we can ignore this conflict.  */
5918*38fd1498Szrj 		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5919*38fd1498Szrj 		      && ignore_address_reloads
5920*38fd1498Szrj 		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5921*38fd1498Szrj 			 Then the address address is still needed to store
5922*38fd1498Szrj 			 back the new address.  */
5923*38fd1498Szrj 		      && ! rld[reloadnum].out)
5924*38fd1498Szrj 		    continue;
5925*38fd1498Szrj 		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5926*38fd1498Szrj 		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5927*38fd1498Szrj 		     reloads go away.  */
5928*38fd1498Szrj 		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5929*38fd1498Szrj 		      && ignore_address_reloads
5930*38fd1498Szrj 		      /* Unless we are reloading an auto_inc expression.  */
5931*38fd1498Szrj 		      && ! rld[reloadnum].out)
5932*38fd1498Szrj 		    continue;
5933*38fd1498Szrj 		  time2 = rld[i].opnum * 4 + 2;
5934*38fd1498Szrj 		  break;
5935*38fd1498Szrj 		case RELOAD_FOR_INPUT_ADDRESS:
5936*38fd1498Szrj 		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5937*38fd1498Szrj 		      && ignore_address_reloads
5938*38fd1498Szrj 		      && ! rld[reloadnum].out)
5939*38fd1498Szrj 		    continue;
5940*38fd1498Szrj 		  time2 = rld[i].opnum * 4 + 3;
5941*38fd1498Szrj 		  break;
5942*38fd1498Szrj 		case RELOAD_FOR_INPUT:
5943*38fd1498Szrj 		  time2 = rld[i].opnum * 4 + 4;
5944*38fd1498Szrj 		  check_earlyclobber = 1;
5945*38fd1498Szrj 		  break;
5946*38fd1498Szrj 		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
5947*38fd1498Szrj 		     == MAX_RECOG_OPERANDS * 4  */
5948*38fd1498Szrj 		case RELOAD_FOR_OPADDR_ADDR:
5949*38fd1498Szrj 		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5950*38fd1498Szrj 		      && ignore_address_reloads
5951*38fd1498Szrj 		      && ! rld[reloadnum].out)
5952*38fd1498Szrj 		    continue;
5953*38fd1498Szrj 		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
5954*38fd1498Szrj 		  break;
5955*38fd1498Szrj 		case RELOAD_FOR_OPERAND_ADDRESS:
5956*38fd1498Szrj 		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
5957*38fd1498Szrj 		  check_earlyclobber = 1;
5958*38fd1498Szrj 		  break;
5959*38fd1498Szrj 		case RELOAD_FOR_INSN:
5960*38fd1498Szrj 		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
5961*38fd1498Szrj 		  break;
5962*38fd1498Szrj 		case RELOAD_FOR_OUTPUT:
5963*38fd1498Szrj 		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
5964*38fd1498Szrj 		     instruction is executed.  */
5965*38fd1498Szrj 		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
5966*38fd1498Szrj 		  break;
5967*38fd1498Szrj 		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5968*38fd1498Szrj 		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5969*38fd1498Szrj 		     value.  */
5970*38fd1498Szrj 		case RELOAD_FOR_OUTADDR_ADDRESS:
5971*38fd1498Szrj 		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5972*38fd1498Szrj 		      && ignore_address_reloads
5973*38fd1498Szrj 		      && ! rld[reloadnum].out)
5974*38fd1498Szrj 		    continue;
5975*38fd1498Szrj 		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5976*38fd1498Szrj 		  break;
5977*38fd1498Szrj 		case RELOAD_FOR_OUTPUT_ADDRESS:
5978*38fd1498Szrj 		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5979*38fd1498Szrj 		  break;
5980*38fd1498Szrj 		case RELOAD_OTHER:
5981*38fd1498Szrj 		  /* If there is no conflict in the input part, handle this
5982*38fd1498Szrj 		     like an output reload.  */
5983*38fd1498Szrj 		  if (! rld[i].in || rtx_equal_p (other_input, value))
5984*38fd1498Szrj 		    {
5985*38fd1498Szrj 		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
5986*38fd1498Szrj 		      /* Earlyclobbered outputs must conflict with inputs.  */
5987*38fd1498Szrj 		      if (earlyclobber_operand_p (rld[i].out))
5988*38fd1498Szrj 			time2 = MAX_RECOG_OPERANDS * 4 + 3;
5989*38fd1498Szrj 
5990*38fd1498Szrj 		      break;
5991*38fd1498Szrj 		    }
5992*38fd1498Szrj 		  time2 = 1;
5993*38fd1498Szrj 		  /* RELOAD_OTHER might be live beyond instruction execution,
5994*38fd1498Szrj 		     but this is not obvious when we set time2 = 1.  So check
5995*38fd1498Szrj 		     here if there might be a problem with the new reload
5996*38fd1498Szrj 		     clobbering the register used by the RELOAD_OTHER.  */
5997*38fd1498Szrj 		  if (out)
5998*38fd1498Szrj 		    return 0;
5999*38fd1498Szrj 		  break;
6000*38fd1498Szrj 		default:
6001*38fd1498Szrj 		  return 0;
6002*38fd1498Szrj 		}
6003*38fd1498Szrj 	      if ((time1 >= time2
6004*38fd1498Szrj 		   && (! rld[i].in || rld[i].out
6005*38fd1498Szrj 		       || ! rtx_equal_p (other_input, value)))
6006*38fd1498Szrj 		  || (out && rld[reloadnum].out_reg
6007*38fd1498Szrj 		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
6008*38fd1498Szrj 		return 0;
6009*38fd1498Szrj 	    }
6010*38fd1498Szrj 	}
6011*38fd1498Szrj     }
6012*38fd1498Szrj 
6013*38fd1498Szrj   /* Earlyclobbered outputs must conflict with inputs.  */
6014*38fd1498Szrj   if (check_earlyclobber && out && earlyclobber_operand_p (out))
6015*38fd1498Szrj     return 0;
6016*38fd1498Szrj 
6017*38fd1498Szrj   return 1;
6018*38fd1498Szrj }
6019*38fd1498Szrj 
6020*38fd1498Szrj /* Return 1 if the value in reload reg REGNO, as used by a reload
6021*38fd1498Szrj    needed for the part of the insn specified by OPNUM and TYPE,
6022*38fd1498Szrj    may be used to load VALUE into it.
6023*38fd1498Szrj 
6024*38fd1498Szrj    MODE is the mode in which the register is used, this is needed to
6025*38fd1498Szrj    determine how many hard regs to test.
6026*38fd1498Szrj 
6027*38fd1498Szrj    Other read-only reloads with the same value do not conflict
6028*38fd1498Szrj    unless OUT is nonzero and these other reloads have to live while
6029*38fd1498Szrj    output reloads live.
6030*38fd1498Szrj    If OUT is CONST0_RTX, this is a special case: it means that the
6031*38fd1498Szrj    test should not be for using register REGNO as reload register, but
6032*38fd1498Szrj    for copying from register REGNO into the reload register.
6033*38fd1498Szrj 
6034*38fd1498Szrj    RELOADNUM is the number of the reload we want to load this value for;
6035*38fd1498Szrj    a reload does not conflict with itself.
6036*38fd1498Szrj 
6037*38fd1498Szrj    When IGNORE_ADDRESS_RELOADS is set, we ignore conflicts with
6038*38fd1498Szrj    reloads that load an address for the very reload we are considering.
6039*38fd1498Szrj 
6040*38fd1498Szrj    The caller has to make sure that there is no conflict with the return
6041*38fd1498Szrj    register.  */
6042*38fd1498Szrj 
6043*38fd1498Szrj static int
6044*38fd1498Szrj free_for_value_p (int regno, machine_mode mode, int opnum,
6045*38fd1498Szrj 		  enum reload_type type, rtx value, rtx out, int reloadnum,
6046*38fd1498Szrj 		  int ignore_address_reloads)
6047*38fd1498Szrj {
6048*38fd1498Szrj   int nregs = hard_regno_nregs (regno, mode);
6049*38fd1498Szrj   while (nregs-- > 0)
6050*38fd1498Szrj     if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
6051*38fd1498Szrj 				       value, out, reloadnum,
6052*38fd1498Szrj 				       ignore_address_reloads))
6053*38fd1498Szrj       return 0;
6054*38fd1498Szrj   return 1;
6055*38fd1498Szrj }
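
/* Minimal sketch of the loop above (hypothetical numbers): for a mode that
   needs two hard registers starting at REGNO 10, hard_regno_nregs returns 2
   and the loop checks reload_reg_free_for_value_p for regno 11 and then for
   regno 10, passing 10 as START_REGNO both times.  */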
6056*38fd1498Szrj 
6057*38fd1498Szrj /* Return nonzero if the rtx X is invariant over the current function.  */
6058*38fd1498Szrj /* ??? Actually, the places where we use this expect exactly what is
6059*38fd1498Szrj    tested here, and not everything that is function invariant.  In
6060*38fd1498Szrj    particular, the frame pointer and arg pointer are special cased;
6061*38fd1498Szrj    pic_offset_table_rtx is not, and we must not spill these things to
6062*38fd1498Szrj    memory.  */
6063*38fd1498Szrj 
6064*38fd1498Szrj int
6065*38fd1498Szrj function_invariant_p (const_rtx x)
6066*38fd1498Szrj {
6067*38fd1498Szrj   if (CONSTANT_P (x))
6068*38fd1498Szrj     return 1;
6069*38fd1498Szrj   if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6070*38fd1498Szrj     return 1;
6071*38fd1498Szrj   if (GET_CODE (x) == PLUS
6072*38fd1498Szrj       && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6073*38fd1498Szrj       && GET_CODE (XEXP (x, 1)) == CONST_INT)
6074*38fd1498Szrj     return 1;
6075*38fd1498Szrj   return 0;
6076*38fd1498Szrj }
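
/* For example, frame_pointer_rtx, arg_pointer_rtx, any CONSTANT_P rtx and
   (plus frame_pointer_rtx (const_int 16)) are all treated as invariant
   here, while an arbitrary register is not, even if its value happens to
   never change within the current function.  */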
6077*38fd1498Szrj 
6078*38fd1498Szrj /* Determine whether the reload reg X overlaps any rtx'es used for
6079*38fd1498Szrj    overriding inheritance.  Return nonzero if so.  */
6080*38fd1498Szrj 
6081*38fd1498Szrj static int
6082*38fd1498Szrj conflicts_with_override (rtx x)
6083*38fd1498Szrj {
6084*38fd1498Szrj   int i;
6085*38fd1498Szrj   for (i = 0; i < n_reloads; i++)
6086*38fd1498Szrj     if (reload_override_in[i]
6087*38fd1498Szrj 	&& reg_overlap_mentioned_p (x, reload_override_in[i]))
6088*38fd1498Szrj       return 1;
6089*38fd1498Szrj   return 0;
6090*38fd1498Szrj }
6091*38fd1498Szrj 
6092*38fd1498Szrj /* Give an error message saying we failed to find a reload for INSN,
6093*38fd1498Szrj    and clear out reload R.  */
6094*38fd1498Szrj static void
6095*38fd1498Szrj failed_reload (rtx_insn *insn, int r)
6096*38fd1498Szrj {
6097*38fd1498Szrj   if (asm_noperands (PATTERN (insn)) < 0)
6098*38fd1498Szrj     /* It's the compiler's fault.  */
6099*38fd1498Szrj     fatal_insn ("could not find a spill register", insn);
6100*38fd1498Szrj 
6101*38fd1498Szrj   /* It's the user's fault; the operand's mode and constraint
6102*38fd1498Szrj      don't match.  Disable this reload so we don't crash in final.  */
6103*38fd1498Szrj   error_for_asm (insn,
6104*38fd1498Szrj 		 "%<asm%> operand constraint incompatible with operand size");
6105*38fd1498Szrj   rld[r].in = 0;
6106*38fd1498Szrj   rld[r].out = 0;
6107*38fd1498Szrj   rld[r].reg_rtx = 0;
6108*38fd1498Szrj   rld[r].optional = 1;
6109*38fd1498Szrj   rld[r].secondary_p = 1;
6110*38fd1498Szrj }
6111*38fd1498Szrj 
6112*38fd1498Szrj /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6113*38fd1498Szrj    for reload R.  If it's valid, get an rtx for it.  Return nonzero if
6114*38fd1498Szrj    successful.  */
6115*38fd1498Szrj static int
6116*38fd1498Szrj set_reload_reg (int i, int r)
6117*38fd1498Szrj {
6118*38fd1498Szrj   int regno;
6119*38fd1498Szrj   rtx reg = spill_reg_rtx[i];
6120*38fd1498Szrj 
6121*38fd1498Szrj   if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6122*38fd1498Szrj     spill_reg_rtx[i] = reg
6123*38fd1498Szrj       = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6124*38fd1498Szrj 
6125*38fd1498Szrj   regno = true_regnum (reg);
6126*38fd1498Szrj 
6127*38fd1498Szrj   /* Detect when the reload reg can't hold the reload mode.
6128*38fd1498Szrj      This used to be one `if', but the Sequent compiler couldn't handle that.  */
6129*38fd1498Szrj   if (targetm.hard_regno_mode_ok (regno, rld[r].mode))
6130*38fd1498Szrj     {
6131*38fd1498Szrj       machine_mode test_mode = VOIDmode;
6132*38fd1498Szrj       if (rld[r].in)
6133*38fd1498Szrj 	test_mode = GET_MODE (rld[r].in);
6134*38fd1498Szrj       /* If rld[r].in has VOIDmode, it means we will load it
6135*38fd1498Szrj 	 in whatever mode the reload reg has: to wit, rld[r].mode.
6136*38fd1498Szrj 	 We have already tested that for validity.  */
6137*38fd1498Szrj       /* Aside from that, we need to test that the expressions
6138*38fd1498Szrj 	 to reload from or into have modes which are valid for this
6139*38fd1498Szrj 	 reload register.  Otherwise the reload insns would be invalid.  */
6140*38fd1498Szrj       if (! (rld[r].in != 0 && test_mode != VOIDmode
6141*38fd1498Szrj 	     && !targetm.hard_regno_mode_ok (regno, test_mode)))
6142*38fd1498Szrj 	if (! (rld[r].out != 0
6143*38fd1498Szrj 	       && !targetm.hard_regno_mode_ok (regno, GET_MODE (rld[r].out))))
6144*38fd1498Szrj 	  {
6145*38fd1498Szrj 	    /* The reg is OK.  */
6146*38fd1498Szrj 	    last_spill_reg = i;
6147*38fd1498Szrj 
6148*38fd1498Szrj 	    /* Mark as in use for this insn the reload regs we use
6149*38fd1498Szrj 	       for this.  */
6150*38fd1498Szrj 	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6151*38fd1498Szrj 				    rld[r].when_needed, rld[r].mode);
6152*38fd1498Szrj 
6153*38fd1498Szrj 	    rld[r].reg_rtx = reg;
6154*38fd1498Szrj 	    reload_spill_index[r] = spill_regs[i];
6155*38fd1498Szrj 	    return 1;
6156*38fd1498Szrj 	  }
6157*38fd1498Szrj     }
6158*38fd1498Szrj   return 0;
6159*38fd1498Szrj }
6160*38fd1498Szrj 
6161*38fd1498Szrj /* Find a spill register to use as a reload register for reload R.
6162*38fd1498Szrj    LAST_RELOAD is nonzero if this is the last reload for the insn being
6163*38fd1498Szrj    processed.
6164*38fd1498Szrj 
6165*38fd1498Szrj    Set rld[R].reg_rtx to the register allocated.
6166*38fd1498Szrj 
6167*38fd1498Szrj    We return 1 if successful, or 0 if we couldn't find a spill reg and
6168*38fd1498Szrj    we didn't change anything.  */
6169*38fd1498Szrj 
6170*38fd1498Szrj static int
6171*38fd1498Szrj allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
6172*38fd1498Szrj 		     int last_reload)
6173*38fd1498Szrj {
6174*38fd1498Szrj   int i, pass, count;
6175*38fd1498Szrj 
6176*38fd1498Szrj   /* If we put this reload ahead, thinking it is a group,
6177*38fd1498Szrj      then insist on finding a group.  Otherwise we can grab a
6178*38fd1498Szrj      reg that some other reload needs.
6179*38fd1498Szrj      (That can happen when we have a 68000 DATA_OR_FP_REG
6180*38fd1498Szrj      which is a group of data regs or one fp reg.)
6181*38fd1498Szrj      We need not be so restrictive if there are no more reloads
6182*38fd1498Szrj      for this insn.
6183*38fd1498Szrj 
6184*38fd1498Szrj      ??? Really it would be nicer to have smarter handling
6185*38fd1498Szrj      for that kind of reg class, where a problem like this is normal.
6186*38fd1498Szrj      Perhaps those classes should be avoided for reloading
6187*38fd1498Szrj      by use of more alternatives.  */
6188*38fd1498Szrj 
6189*38fd1498Szrj   int force_group = rld[r].nregs > 1 && ! last_reload;
6190*38fd1498Szrj 
6191*38fd1498Szrj   /* If we want a single register and haven't yet found one,
6192*38fd1498Szrj      take any reg in the right class and not in use.
6193*38fd1498Szrj      If we want a consecutive group, here is where we look for it.
6194*38fd1498Szrj 
6195*38fd1498Szrj      We use three passes so we can first look for reload regs to
6196*38fd1498Szrj      reuse, which are already in use for other reloads in this insn,
6197*38fd1498Szrj      and only then use additional registers which are not "bad", then
6198*38fd1498Szrj      finally any register.
6199*38fd1498Szrj 
6200*38fd1498Szrj      I think that maximizing reuse is needed to make sure we don't
6201*38fd1498Szrj      run out of reload regs.  Suppose we have three reloads, and
6202*38fd1498Szrj      reloads A and B can share regs.  These need two regs.
6203*38fd1498Szrj      Suppose A and B are given different regs.
6204*38fd1498Szrj      That leaves none for C.  */
6205*38fd1498Szrj   for (pass = 0; pass < 3; pass++)
6206*38fd1498Szrj     {
6207*38fd1498Szrj       /* I is the index in spill_regs.
6208*38fd1498Szrj 	 We advance it round-robin between insns to use all spill regs
6209*38fd1498Szrj 	 equally, so that inherited reloads have a chance
6210*38fd1498Szrj 	 of leapfrogging each other.  */
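      /* For instance (hypothetical numbers), with n_spills == 4 and
	 last_spill_reg == 2 the scan below visits spill_regs[3],
	 spill_regs[0], spill_regs[1] and finally spill_regs[2].  */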
6211*38fd1498Szrj 
6212*38fd1498Szrj       i = last_spill_reg;
6213*38fd1498Szrj 
6214*38fd1498Szrj       for (count = 0; count < n_spills; count++)
6215*38fd1498Szrj 	{
6216*38fd1498Szrj 	  int rclass = (int) rld[r].rclass;
6217*38fd1498Szrj 	  int regnum;
6218*38fd1498Szrj 
6219*38fd1498Szrj 	  i++;
6220*38fd1498Szrj 	  if (i >= n_spills)
6221*38fd1498Szrj 	    i -= n_spills;
6222*38fd1498Szrj 	  regnum = spill_regs[i];
6223*38fd1498Szrj 
6224*38fd1498Szrj 	  if ((reload_reg_free_p (regnum, rld[r].opnum,
6225*38fd1498Szrj 				  rld[r].when_needed)
6226*38fd1498Szrj 	       || (rld[r].in
6227*38fd1498Szrj 		   /* We check reload_reg_used to make sure we
6228*38fd1498Szrj 		      don't clobber the return register.  */
6229*38fd1498Szrj 		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
6230*38fd1498Szrj 		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
6231*38fd1498Szrj 					rld[r].when_needed, rld[r].in,
6232*38fd1498Szrj 					rld[r].out, r, 1)))
6233*38fd1498Szrj 	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
6234*38fd1498Szrj 	      && targetm.hard_regno_mode_ok (regnum, rld[r].mode)
6235*38fd1498Szrj 	      /* Look first for regs to share, then for unshared.  But
6236*38fd1498Szrj 		 don't share regs used for inherited reloads; they are
6237*38fd1498Szrj 		 the ones we want to preserve.  */
6238*38fd1498Szrj 	      && (pass
6239*38fd1498Szrj 		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
6240*38fd1498Szrj 					 regnum)
6241*38fd1498Szrj 		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
6242*38fd1498Szrj 					      regnum))))
6243*38fd1498Szrj 	    {
6244*38fd1498Szrj 	      int nr = hard_regno_nregs (regnum, rld[r].mode);
6245*38fd1498Szrj 
6246*38fd1498Szrj 	      /* During the second pass we want to avoid reload registers
6247*38fd1498Szrj 		 which are "bad" for this reload.  */
6248*38fd1498Szrj 	      if (pass == 1
6249*38fd1498Szrj 		  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
6250*38fd1498Szrj 		continue;
6251*38fd1498Szrj 
6252*38fd1498Szrj 	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
6253*38fd1498Szrj 		 (on 68000) got us two FP regs.  If NR is 1,
6254*38fd1498Szrj 		 we would reject both of them.  */
6255*38fd1498Szrj 	      if (force_group)
6256*38fd1498Szrj 		nr = rld[r].nregs;
6257*38fd1498Szrj 	      /* If we need only one reg, we have already won.  */
6258*38fd1498Szrj 	      if (nr == 1)
6259*38fd1498Szrj 		{
6260*38fd1498Szrj 		  /* But reject a single reg if we demand a group.  */
6261*38fd1498Szrj 		  if (force_group)
6262*38fd1498Szrj 		    continue;
6263*38fd1498Szrj 		  break;
6264*38fd1498Szrj 		}
6265*38fd1498Szrj 	      /* Otherwise check that as many consecutive regs as we need
6266*38fd1498Szrj 		 are available here.  */
6267*38fd1498Szrj 	      while (nr > 1)
6268*38fd1498Szrj 		{
6269*38fd1498Szrj 		  int regno = regnum + nr - 1;
6270*38fd1498Szrj 		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
6271*38fd1498Szrj 			&& spill_reg_order[regno] >= 0
6272*38fd1498Szrj 			&& reload_reg_free_p (regno, rld[r].opnum,
6273*38fd1498Szrj 					      rld[r].when_needed)))
6274*38fd1498Szrj 		    break;
6275*38fd1498Szrj 		  nr--;
6276*38fd1498Szrj 		}
6277*38fd1498Szrj 	      if (nr == 1)
6278*38fd1498Szrj 		break;
6279*38fd1498Szrj 	    }
6280*38fd1498Szrj 	}
6281*38fd1498Szrj 
6282*38fd1498Szrj       /* If we found something on the current pass, omit later passes.  */
6283*38fd1498Szrj       if (count < n_spills)
6284*38fd1498Szrj 	break;
6285*38fd1498Szrj     }
6286*38fd1498Szrj 
6287*38fd1498Szrj   /* We should have found a spill register by now.  */
6288*38fd1498Szrj   if (count >= n_spills)
6289*38fd1498Szrj     return 0;
6290*38fd1498Szrj 
6291*38fd1498Szrj   /* I is the index in SPILL_REG_RTX of the reload register we are to
6292*38fd1498Szrj      allocate.  Get an rtx for it and find its register number.  */
6293*38fd1498Szrj 
6294*38fd1498Szrj   return set_reload_reg (i, r);
6295*38fd1498Szrj }
6296*38fd1498Szrj 
6297*38fd1498Szrj /* Initialize all the tables needed to allocate reload registers.
6298*38fd1498Szrj    CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6299*38fd1498Szrj    is the array we use to restore the reg_rtx field for every reload.  */
6300*38fd1498Szrj 
6301*38fd1498Szrj static void
6302*38fd1498Szrj choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
6303*38fd1498Szrj {
6304*38fd1498Szrj   int i;
6305*38fd1498Szrj 
6306*38fd1498Szrj   for (i = 0; i < n_reloads; i++)
6307*38fd1498Szrj     rld[i].reg_rtx = save_reload_reg_rtx[i];
6308*38fd1498Szrj 
6309*38fd1498Szrj   memset (reload_inherited, 0, MAX_RELOADS);
6310*38fd1498Szrj   memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
6311*38fd1498Szrj   memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
6312*38fd1498Szrj 
6313*38fd1498Szrj   CLEAR_HARD_REG_SET (reload_reg_used);
6314*38fd1498Szrj   CLEAR_HARD_REG_SET (reload_reg_used_at_all);
6315*38fd1498Szrj   CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
6316*38fd1498Szrj   CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
6317*38fd1498Szrj   CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
6318*38fd1498Szrj   CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
6319*38fd1498Szrj 
6320*38fd1498Szrj   CLEAR_HARD_REG_SET (reg_used_in_insn);
6321*38fd1498Szrj   {
6322*38fd1498Szrj     HARD_REG_SET tmp;
6323*38fd1498Szrj     REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
6324*38fd1498Szrj     IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6325*38fd1498Szrj     REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
6326*38fd1498Szrj     IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6327*38fd1498Szrj     compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
6328*38fd1498Szrj     compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
6329*38fd1498Szrj   }
6330*38fd1498Szrj 
6331*38fd1498Szrj   for (i = 0; i < reload_n_operands; i++)
6332*38fd1498Szrj     {
6333*38fd1498Szrj       CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
6334*38fd1498Szrj       CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
6335*38fd1498Szrj       CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
6336*38fd1498Szrj       CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
6337*38fd1498Szrj       CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
6338*38fd1498Szrj       CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
6339*38fd1498Szrj     }
6340*38fd1498Szrj 
6341*38fd1498Szrj   COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
6342*38fd1498Szrj 
6343*38fd1498Szrj   CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
6344*38fd1498Szrj 
6345*38fd1498Szrj   for (i = 0; i < n_reloads; i++)
6346*38fd1498Szrj     /* If we have already decided to use a certain register,
6347*38fd1498Szrj        don't use it in another way.  */
6348*38fd1498Szrj     if (rld[i].reg_rtx)
6349*38fd1498Szrj       mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
6350*38fd1498Szrj 			      rld[i].when_needed, rld[i].mode);
6351*38fd1498Szrj }
6352*38fd1498Szrj 
6353*38fd1498Szrj /* If X is not a subreg, return it unmodified.  If it is a subreg,
6354*38fd1498Szrj    look up whether we made a replacement for the SUBREG_REG.  Return
6355*38fd1498Szrj    either the replacement or the SUBREG_REG.  */
6356*38fd1498Szrj 
6357*38fd1498Szrj static rtx
6358*38fd1498Szrj replaced_subreg (rtx x)
6359*38fd1498Szrj {
6360*38fd1498Szrj   if (GET_CODE (x) == SUBREG)
6361*38fd1498Szrj     return find_replacement (&SUBREG_REG (x));
6362*38fd1498Szrj   return x;
6363*38fd1498Szrj }
6364*38fd1498Szrj 
6365*38fd1498Szrj /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6366*38fd1498Szrj    mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6367*38fd1498Szrj    SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6368*38fd1498Szrj    otherwise it is NULL.  */
6369*38fd1498Szrj 
6370*38fd1498Szrj static poly_int64
6371*38fd1498Szrj compute_reload_subreg_offset (machine_mode outermode,
6372*38fd1498Szrj 			      rtx subreg,
6373*38fd1498Szrj 			      machine_mode innermode)
6374*38fd1498Szrj {
6375*38fd1498Szrj   poly_int64 outer_offset;
6376*38fd1498Szrj   machine_mode middlemode;
6377*38fd1498Szrj 
6378*38fd1498Szrj   if (!subreg)
6379*38fd1498Szrj     return subreg_lowpart_offset (outermode, innermode);
6380*38fd1498Szrj 
6381*38fd1498Szrj   outer_offset = SUBREG_BYTE (subreg);
6382*38fd1498Szrj   middlemode = GET_MODE (SUBREG_REG (subreg));
6383*38fd1498Szrj 
6384*38fd1498Szrj   /* If SUBREG is paradoxical then return the normal lowpart offset
6385*38fd1498Szrj      for OUTERMODE and INNERMODE.  Our caller has already checked
6386*38fd1498Szrj      that OUTERMODE fits in INNERMODE.  */
6387*38fd1498Szrj   if (paradoxical_subreg_p (outermode, middlemode))
6388*38fd1498Szrj     return subreg_lowpart_offset (outermode, innermode);
6389*38fd1498Szrj 
6390*38fd1498Szrj   /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
6391*38fd1498Szrj      plus the normal lowpart offset for MIDDLEMODE and INNERMODE.  */
6392*38fd1498Szrj   return outer_offset + subreg_lowpart_offset (middlemode, innermode);
6393*38fd1498Szrj }
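
/* A small worked sketch (hypothetical modes and registers): if the reload
   input is (subreg:SI (reg:DI P) 4) and the last reload register holding P
   also has DImode, then OUTERMODE is SImode, the middle mode is DImode and
   INNERMODE is DImode; the SUBREG is not paradoxical, so the result is
   SUBREG_BYTE (4) plus subreg_lowpart_offset (DImode, DImode), which is 0,
   giving 4.  With no SUBREG at all the result is simply
   subreg_lowpart_offset (OUTERMODE, INNERMODE).  */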
6394*38fd1498Szrj 
6395*38fd1498Szrj /* Assign hard reg targets for the pseudo-registers we must reload
6396*38fd1498Szrj    into hard regs for this insn.
6397*38fd1498Szrj    Also output the instructions to copy them in and out of the hard regs.
6398*38fd1498Szrj 
6399*38fd1498Szrj    For machines with register classes, we are responsible for
6400*38fd1498Szrj    finding a reload reg in the proper class.  */
6401*38fd1498Szrj 
6402*38fd1498Szrj static void
6403*38fd1498Szrj choose_reload_regs (struct insn_chain *chain)
6404*38fd1498Szrj {
6405*38fd1498Szrj   rtx_insn *insn = chain->insn;
6406*38fd1498Szrj   int i, j;
6407*38fd1498Szrj   unsigned int max_group_size = 1;
6408*38fd1498Szrj   enum reg_class group_class = NO_REGS;
6409*38fd1498Szrj   int pass, win, inheritance;
6410*38fd1498Szrj 
6411*38fd1498Szrj   rtx save_reload_reg_rtx[MAX_RELOADS];
6412*38fd1498Szrj 
6413*38fd1498Szrj   /* In order to be certain of getting the registers we need,
6414*38fd1498Szrj      we must sort the reloads into order of increasing register class.
6415*38fd1498Szrj      Then our grabbing of reload registers will parallel the process
6416*38fd1498Szrj      that provided the reload registers.
6417*38fd1498Szrj 
6418*38fd1498Szrj      Also note whether any of the reloads wants a consecutive group of regs.
6419*38fd1498Szrj      If so, record the maximum size of the group desired and what
6420*38fd1498Szrj      register class contains all the groups needed by this insn.  */
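
  /* Hypothetical illustration of why the ordering matters: if one reload
     may use any register of a large class while another is restricted to a
     single-register subclass of it, handling the tighter reload first
     guarantees that the looser one cannot be handed its only usable
     register.  */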
6421*38fd1498Szrj 
6422*38fd1498Szrj   for (j = 0; j < n_reloads; j++)
6423*38fd1498Szrj     {
6424*38fd1498Szrj       reload_order[j] = j;
6425*38fd1498Szrj       if (rld[j].reg_rtx != NULL_RTX)
6426*38fd1498Szrj 	{
6427*38fd1498Szrj 	  gcc_assert (REG_P (rld[j].reg_rtx)
6428*38fd1498Szrj 		      && HARD_REGISTER_P (rld[j].reg_rtx));
6429*38fd1498Szrj 	  reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6430*38fd1498Szrj 	}
6431*38fd1498Szrj       else
6432*38fd1498Szrj 	reload_spill_index[j] = -1;
6433*38fd1498Szrj 
6434*38fd1498Szrj       if (rld[j].nregs > 1)
6435*38fd1498Szrj 	{
6436*38fd1498Szrj 	  max_group_size = MAX (rld[j].nregs, max_group_size);
6437*38fd1498Szrj 	  group_class
6438*38fd1498Szrj 	    = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6439*38fd1498Szrj 	}
6440*38fd1498Szrj 
6441*38fd1498Szrj       save_reload_reg_rtx[j] = rld[j].reg_rtx;
6442*38fd1498Szrj     }
6443*38fd1498Szrj 
6444*38fd1498Szrj   if (n_reloads > 1)
6445*38fd1498Szrj     qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6446*38fd1498Szrj 
6447*38fd1498Szrj   /* If -O, try first with inheritance, then turning it off.
6448*38fd1498Szrj      If not -O, don't do inheritance.
6449*38fd1498Szrj      Using inheritance when not optimizing leads to paradoxes
6450*38fd1498Szrj      with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6451*38fd1498Szrj      because one side of the comparison might be inherited.  */
6452*38fd1498Szrj   win = 0;
6453*38fd1498Szrj   for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6454*38fd1498Szrj     {
6455*38fd1498Szrj       choose_reload_regs_init (chain, save_reload_reg_rtx);
6456*38fd1498Szrj 
6457*38fd1498Szrj       /* Process the reloads in order of preference just found.
6458*38fd1498Szrj 	 Beyond this point, subregs can be found in reload_reg_rtx.
6459*38fd1498Szrj 
6460*38fd1498Szrj 	 This used to look for an existing reloaded home for all of the
6461*38fd1498Szrj 	 reloads, and only then perform any new reloads.  But that could lose
6462*38fd1498Szrj 	 if the reloads were done out of reg-class order because a later
6463*38fd1498Szrj 	 reload with a looser constraint might have an old home in a register
6464*38fd1498Szrj 	 needed by an earlier reload with a tighter constraint.
6465*38fd1498Szrj 
6466*38fd1498Szrj 	 To solve this, we make two passes over the reloads, in the order
6467*38fd1498Szrj 	 described above.  In the first pass we try to inherit a reload
6468*38fd1498Szrj 	 from a previous insn.  If there is a later reload that needs a
6469*38fd1498Szrj 	 class that is a proper subset of the class being processed, we must
6470*38fd1498Szrj 	 also allocate a spill register during the first pass.
6471*38fd1498Szrj 
6472*38fd1498Szrj 	 Then make a second pass over the reloads to allocate any reloads
6473*38fd1498Szrj 	 that haven't been given registers yet.  */
6474*38fd1498Szrj 
6475*38fd1498Szrj       for (j = 0; j < n_reloads; j++)
6476*38fd1498Szrj 	{
6477*38fd1498Szrj 	  int r = reload_order[j];
6478*38fd1498Szrj 	  rtx search_equiv = NULL_RTX;
6479*38fd1498Szrj 
6480*38fd1498Szrj 	  /* Ignore reloads that got marked inoperative.  */
6481*38fd1498Szrj 	  if (rld[r].out == 0 && rld[r].in == 0
6482*38fd1498Szrj 	      && ! rld[r].secondary_p)
6483*38fd1498Szrj 	    continue;
6484*38fd1498Szrj 
6485*38fd1498Szrj 	  /* If find_reloads chose to use reload_in or reload_out as a reload
6486*38fd1498Szrj 	     register, we don't need to choose one.  Otherwise, try even if it
6487*38fd1498Szrj 	     found one since we might save an insn if we find the value lying
6488*38fd1498Szrj 	     around.
6489*38fd1498Szrj 	     Try also when reload_in is a pseudo without a hard reg.  */
6490*38fd1498Szrj 	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
6491*38fd1498Szrj 	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6492*38fd1498Szrj 		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6493*38fd1498Szrj 		      && !MEM_P (rld[r].in)
6494*38fd1498Szrj 		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6495*38fd1498Szrj 	    continue;
6496*38fd1498Szrj 
6497*38fd1498Szrj #if 0 /* No longer needed for correct operation.
6498*38fd1498Szrj 	 It might give better code, or might not; worth an experiment?  */
6499*38fd1498Szrj 	  /* If this is an optional reload, we can't inherit from earlier insns
6500*38fd1498Szrj 	     until we are sure that any non-optional reloads have been allocated.
6501*38fd1498Szrj 	     The following code takes advantage of the fact that optional reloads
6502*38fd1498Szrj 	     are at the end of reload_order.  */
6503*38fd1498Szrj 	  if (rld[r].optional != 0)
6504*38fd1498Szrj 	    for (i = 0; i < j; i++)
6505*38fd1498Szrj 	      if ((rld[reload_order[i]].out != 0
6506*38fd1498Szrj 		   || rld[reload_order[i]].in != 0
6507*38fd1498Szrj 		   || rld[reload_order[i]].secondary_p)
6508*38fd1498Szrj 		  && ! rld[reload_order[i]].optional
6509*38fd1498Szrj 		  && rld[reload_order[i]].reg_rtx == 0)
6510*38fd1498Szrj 		allocate_reload_reg (chain, reload_order[i], 0);
6511*38fd1498Szrj #endif
6512*38fd1498Szrj 
6513*38fd1498Szrj 	  /* First see if this pseudo is already available as reloaded
6514*38fd1498Szrj 	     for a previous insn.  We cannot try to inherit for reloads
6515*38fd1498Szrj 	     that are smaller than the maximum number of registers needed
6516*38fd1498Szrj 	     for groups unless the register we would allocate cannot be used
6517*38fd1498Szrj 	     for the groups.
6518*38fd1498Szrj 
6519*38fd1498Szrj 	     We could check here to see if this is a secondary reload for
6520*38fd1498Szrj 	     an object that is already in a register of the desired class.
6521*38fd1498Szrj 	     This would avoid the need for the secondary reload register.
6522*38fd1498Szrj 	     But this is complex because we can't easily determine what
6523*38fd1498Szrj 	     objects might want to be loaded via this reload.  So let a
6524*38fd1498Szrj 	     register be allocated here.  In `emit_reload_insns' we suppress
6525*38fd1498Szrj 	     one of the loads in the case described above.  */
6526*38fd1498Szrj 
6527*38fd1498Szrj 	  if (inheritance)
6528*38fd1498Szrj 	    {
6529*38fd1498Szrj 	      poly_int64 byte = 0;
6530*38fd1498Szrj 	      int regno = -1;
6531*38fd1498Szrj 	      machine_mode mode = VOIDmode;
6532*38fd1498Szrj 	      rtx subreg = NULL_RTX;
6533*38fd1498Szrj 
6534*38fd1498Szrj 	      if (rld[r].in == 0)
6535*38fd1498Szrj 		;
6536*38fd1498Szrj 	      else if (REG_P (rld[r].in))
6537*38fd1498Szrj 		{
6538*38fd1498Szrj 		  regno = REGNO (rld[r].in);
6539*38fd1498Szrj 		  mode = GET_MODE (rld[r].in);
6540*38fd1498Szrj 		}
6541*38fd1498Szrj 	      else if (REG_P (rld[r].in_reg))
6542*38fd1498Szrj 		{
6543*38fd1498Szrj 		  regno = REGNO (rld[r].in_reg);
6544*38fd1498Szrj 		  mode = GET_MODE (rld[r].in_reg);
6545*38fd1498Szrj 		}
6546*38fd1498Szrj 	      else if (GET_CODE (rld[r].in_reg) == SUBREG
6547*38fd1498Szrj 		       && REG_P (SUBREG_REG (rld[r].in_reg)))
6548*38fd1498Szrj 		{
6549*38fd1498Szrj 		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
6550*38fd1498Szrj 		  if (regno < FIRST_PSEUDO_REGISTER)
6551*38fd1498Szrj 		    regno = subreg_regno (rld[r].in_reg);
6552*38fd1498Szrj 		  else
6553*38fd1498Szrj 		    {
6554*38fd1498Szrj 		      subreg = rld[r].in_reg;
6555*38fd1498Szrj 		      byte = SUBREG_BYTE (subreg);
6556*38fd1498Szrj 		    }
6557*38fd1498Szrj 		  mode = GET_MODE (rld[r].in_reg);
6558*38fd1498Szrj 		}
6559*38fd1498Szrj #if AUTO_INC_DEC
6560*38fd1498Szrj 	      else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6561*38fd1498Szrj 		       && REG_P (XEXP (rld[r].in_reg, 0)))
6562*38fd1498Szrj 		{
6563*38fd1498Szrj 		  regno = REGNO (XEXP (rld[r].in_reg, 0));
6564*38fd1498Szrj 		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6565*38fd1498Szrj 		  rld[r].out = rld[r].in;
6566*38fd1498Szrj 		}
6567*38fd1498Szrj #endif
6568*38fd1498Szrj #if 0
6569*38fd1498Szrj 	      /* This won't work, since REGNO can be a pseudo reg number.
6570*38fd1498Szrj 		 Also, it takes much more hair to keep track of all the things
6571*38fd1498Szrj 		 that can invalidate an inherited reload of part of a pseudoreg.  */
6572*38fd1498Szrj 	      else if (GET_CODE (rld[r].in) == SUBREG
6573*38fd1498Szrj 		       && REG_P (SUBREG_REG (rld[r].in)))
6574*38fd1498Szrj 		regno = subreg_regno (rld[r].in);
6575*38fd1498Szrj #endif
6576*38fd1498Szrj 
6577*38fd1498Szrj 	      if (regno >= 0
6578*38fd1498Szrj 		  && reg_last_reload_reg[regno] != 0
6579*38fd1498Szrj 		  && (known_ge
6580*38fd1498Szrj 		      (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno])),
6581*38fd1498Szrj 		       GET_MODE_SIZE (mode) + byte))
6582*38fd1498Szrj 		  /* Verify that the register it's in can be used in
6583*38fd1498Szrj 		     mode MODE.  */
6584*38fd1498Szrj 		  && (REG_CAN_CHANGE_MODE_P
6585*38fd1498Szrj 		      (REGNO (reg_last_reload_reg[regno]),
6586*38fd1498Szrj 		       GET_MODE (reg_last_reload_reg[regno]),
6587*38fd1498Szrj 		       mode)))
6588*38fd1498Szrj 		{
6589*38fd1498Szrj 		  enum reg_class rclass = rld[r].rclass, last_class;
6590*38fd1498Szrj 		  rtx last_reg = reg_last_reload_reg[regno];
6591*38fd1498Szrj 
6592*38fd1498Szrj 		  i = REGNO (last_reg);
6593*38fd1498Szrj 		  byte = compute_reload_subreg_offset (mode,
6594*38fd1498Szrj 						       subreg,
6595*38fd1498Szrj 						       GET_MODE (last_reg));
6596*38fd1498Szrj 		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6597*38fd1498Szrj 		  last_class = REGNO_REG_CLASS (i);
6598*38fd1498Szrj 
6599*38fd1498Szrj 		  if (reg_reloaded_contents[i] == regno
6600*38fd1498Szrj 		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6601*38fd1498Szrj 		      && targetm.hard_regno_mode_ok (i, rld[r].mode)
6602*38fd1498Szrj 		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6603*38fd1498Szrj 			  /* Even if we can't use this register as a reload
6604*38fd1498Szrj 			     register, we might use it for reload_override_in,
6605*38fd1498Szrj 			     if copying it to the desired class is cheap
6606*38fd1498Szrj 			     enough.  */
6607*38fd1498Szrj 			  || ((register_move_cost (mode, last_class, rclass)
6608*38fd1498Szrj 			       < memory_move_cost (mode, rclass, true))
6609*38fd1498Szrj 			      && (secondary_reload_class (1, rclass, mode,
6610*38fd1498Szrj 							  last_reg)
6611*38fd1498Szrj 				  == NO_REGS)
6612*38fd1498Szrj 			      && !(targetm.secondary_memory_needed
6613*38fd1498Szrj 				   (mode, last_class, rclass))))
6614*38fd1498Szrj 		      && (rld[r].nregs == max_group_size
6615*38fd1498Szrj 			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6616*38fd1498Szrj 						  i))
6617*38fd1498Szrj 		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6618*38fd1498Szrj 					   rld[r].when_needed, rld[r].in,
6619*38fd1498Szrj 					   const0_rtx, r, 1))
6620*38fd1498Szrj 		    {
6621*38fd1498Szrj 		      /* If a group is needed, verify that all the subsequent
6622*38fd1498Szrj 			 registers still have their values intact.  */
6623*38fd1498Szrj 		      int nr = hard_regno_nregs (i, rld[r].mode);
6624*38fd1498Szrj 		      int k;
6625*38fd1498Szrj 
6626*38fd1498Szrj 		      for (k = 1; k < nr; k++)
6627*38fd1498Szrj 			if (reg_reloaded_contents[i + k] != regno
6628*38fd1498Szrj 			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6629*38fd1498Szrj 			  break;
6630*38fd1498Szrj 
6631*38fd1498Szrj 		      if (k == nr)
6632*38fd1498Szrj 			{
6633*38fd1498Szrj 			  int i1;
6634*38fd1498Szrj 			  int bad_for_class;
6635*38fd1498Szrj 
6636*38fd1498Szrj 			  last_reg = (GET_MODE (last_reg) == mode
6637*38fd1498Szrj 				      ? last_reg : gen_rtx_REG (mode, i));
6638*38fd1498Szrj 
6639*38fd1498Szrj 			  bad_for_class = 0;
6640*38fd1498Szrj 			  for (k = 0; k < nr; k++)
6641*38fd1498Szrj 			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6642*38fd1498Szrj 								  i+k);
6643*38fd1498Szrj 
6644*38fd1498Szrj 			  /* We found a register that contains the
6645*38fd1498Szrj 			     value we need.  If this register is the
6646*38fd1498Szrj 			     same as an `earlyclobber' operand of the
6647*38fd1498Szrj 			     current insn, just mark it as a place to
6648*38fd1498Szrj 			     reload from since we can't use it as the
6649*38fd1498Szrj 			     reload register itself.  */
6650*38fd1498Szrj 
6651*38fd1498Szrj 			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
6652*38fd1498Szrj 			    if (reg_overlap_mentioned_for_reload_p
6653*38fd1498Szrj 				(reg_last_reload_reg[regno],
6654*38fd1498Szrj 				 reload_earlyclobbers[i1]))
6655*38fd1498Szrj 			      break;
6656*38fd1498Szrj 
6657*38fd1498Szrj 			  if (i1 != n_earlyclobbers
6658*38fd1498Szrj 			      || ! (free_for_value_p (i, rld[r].mode,
6659*38fd1498Szrj 						      rld[r].opnum,
6660*38fd1498Szrj 						      rld[r].when_needed, rld[r].in,
6661*38fd1498Szrj 						      rld[r].out, r, 1))
6662*38fd1498Szrj 			      /* Don't use it if we'd clobber a pseudo reg.  */
6663*38fd1498Szrj 			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6664*38fd1498Szrj 				  && rld[r].out
6665*38fd1498Szrj 				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6666*38fd1498Szrj 			      /* Don't clobber the frame pointer.  */
6667*38fd1498Szrj 			      || (i == HARD_FRAME_POINTER_REGNUM
6668*38fd1498Szrj 				  && frame_pointer_needed
6669*38fd1498Szrj 				  && rld[r].out)
6670*38fd1498Szrj 			      /* Don't really use the inherited spill reg
6671*38fd1498Szrj 				 if we need it wider than we've got it.  */
6672*38fd1498Szrj 			      || paradoxical_subreg_p (rld[r].mode, mode)
6673*38fd1498Szrj 			      || bad_for_class
6674*38fd1498Szrj 
6675*38fd1498Szrj 			      /* If find_reloads chose reload_out as reload
6676*38fd1498Szrj 				 register, stay with it - that leaves the
6677*38fd1498Szrj 				 inherited register for subsequent reloads.  */
6678*38fd1498Szrj 			      || (rld[r].out && rld[r].reg_rtx
6679*38fd1498Szrj 				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6680*38fd1498Szrj 			    {
6681*38fd1498Szrj 			      if (! rld[r].optional)
6682*38fd1498Szrj 				{
6683*38fd1498Szrj 				  reload_override_in[r] = last_reg;
6684*38fd1498Szrj 				  reload_inheritance_insn[r]
6685*38fd1498Szrj 				    = reg_reloaded_insn[i];
6686*38fd1498Szrj 				}
6687*38fd1498Szrj 			    }
6688*38fd1498Szrj 			  else
6689*38fd1498Szrj 			    {
6690*38fd1498Szrj 			      int k;
6691*38fd1498Szrj 			      /* We can use this as a reload reg.  */
6692*38fd1498Szrj 			      /* Mark the register as in use for this part of
6693*38fd1498Szrj 				 the insn.  */
6694*38fd1498Szrj 			      mark_reload_reg_in_use (i,
6695*38fd1498Szrj 						      rld[r].opnum,
6696*38fd1498Szrj 						      rld[r].when_needed,
6697*38fd1498Szrj 						      rld[r].mode);
6698*38fd1498Szrj 			      rld[r].reg_rtx = last_reg;
6699*38fd1498Szrj 			      reload_inherited[r] = 1;
6700*38fd1498Szrj 			      reload_inheritance_insn[r]
6701*38fd1498Szrj 				= reg_reloaded_insn[i];
6702*38fd1498Szrj 			      reload_spill_index[r] = i;
6703*38fd1498Szrj 			      for (k = 0; k < nr; k++)
6704*38fd1498Szrj 				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6705*38fd1498Szrj 						  i + k);
6706*38fd1498Szrj 			    }
6707*38fd1498Szrj 			}
6708*38fd1498Szrj 		    }
6709*38fd1498Szrj 		}
6710*38fd1498Szrj 	    }
6711*38fd1498Szrj 
6712*38fd1498Szrj 	  /* Here's another way to see if the value is already lying around.  */
6713*38fd1498Szrj 	  if (inheritance
6714*38fd1498Szrj 	      && rld[r].in != 0
6715*38fd1498Szrj 	      && ! reload_inherited[r]
6716*38fd1498Szrj 	      && rld[r].out == 0
6717*38fd1498Szrj 	      && (CONSTANT_P (rld[r].in)
6718*38fd1498Szrj 		  || GET_CODE (rld[r].in) == PLUS
6719*38fd1498Szrj 		  || REG_P (rld[r].in)
6720*38fd1498Szrj 		  || MEM_P (rld[r].in))
6721*38fd1498Szrj 	      && (rld[r].nregs == max_group_size
6722*38fd1498Szrj 		  || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6723*38fd1498Szrj 	    search_equiv = rld[r].in;
6724*38fd1498Szrj 
6725*38fd1498Szrj 	  if (search_equiv)
6726*38fd1498Szrj 	    {
6727*38fd1498Szrj 	      rtx equiv
6728*38fd1498Szrj 		= find_equiv_reg (search_equiv, insn, rld[r].rclass,
6729*38fd1498Szrj 				  -1, NULL, 0, rld[r].mode);
6730*38fd1498Szrj 	      int regno = 0;
6731*38fd1498Szrj 
6732*38fd1498Szrj 	      if (equiv != 0)
6733*38fd1498Szrj 		{
6734*38fd1498Szrj 		  if (REG_P (equiv))
6735*38fd1498Szrj 		    regno = REGNO (equiv);
6736*38fd1498Szrj 		  else
6737*38fd1498Szrj 		    {
6738*38fd1498Szrj 		      /* This must be a SUBREG of a hard register.
6739*38fd1498Szrj 			 Make a new REG since this might be used in an
6740*38fd1498Szrj 			 address and not all machines support SUBREGs
6741*38fd1498Szrj 			 there.  */
6742*38fd1498Szrj 		      gcc_assert (GET_CODE (equiv) == SUBREG);
6743*38fd1498Szrj 		      regno = subreg_regno (equiv);
6744*38fd1498Szrj 		      equiv = gen_rtx_REG (rld[r].mode, regno);
6745*38fd1498Szrj 		      /* If we choose EQUIV as the reload register, but the
6746*38fd1498Szrj 			 loop below decides to cancel the inheritance, we'll
6747*38fd1498Szrj 			 end up reloading EQUIV in rld[r].mode, not the mode
6748*38fd1498Szrj 			 it had originally.  That isn't safe when EQUIV isn't
6749*38fd1498Szrj 			 available as a spill register since its value might
6750*38fd1498Szrj 			 still be live at this point.  */
6751*38fd1498Szrj 		      for (i = regno; i < regno + (int) rld[r].nregs; i++)
6752*38fd1498Szrj 			if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6753*38fd1498Szrj 			  equiv = 0;
6754*38fd1498Szrj 		    }
6755*38fd1498Szrj 		}
6756*38fd1498Szrj 
6757*38fd1498Szrj 	      /* If we found a spill reg, reject it unless it is free
6758*38fd1498Szrj 		 and of the desired class.  */
6759*38fd1498Szrj 	      if (equiv != 0)
6760*38fd1498Szrj 		{
6761*38fd1498Szrj 		  int regs_used = 0;
6762*38fd1498Szrj 		  int bad_for_class = 0;
6763*38fd1498Szrj 		  int max_regno = regno + rld[r].nregs;
6764*38fd1498Szrj 
6765*38fd1498Szrj 		  for (i = regno; i < max_regno; i++)
6766*38fd1498Szrj 		    {
6767*38fd1498Szrj 		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6768*38fd1498Szrj 						      i);
6769*38fd1498Szrj 		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6770*38fd1498Szrj 							   i);
6771*38fd1498Szrj 		    }
6772*38fd1498Szrj 
6773*38fd1498Szrj 		  if ((regs_used
6774*38fd1498Szrj 		       && ! free_for_value_p (regno, rld[r].mode,
6775*38fd1498Szrj 					      rld[r].opnum, rld[r].when_needed,
6776*38fd1498Szrj 					      rld[r].in, rld[r].out, r, 1))
6777*38fd1498Szrj 		      || bad_for_class)
6778*38fd1498Szrj 		    equiv = 0;
6779*38fd1498Szrj 		}
6780*38fd1498Szrj 
6781*38fd1498Szrj 	      if (equiv != 0
6782*38fd1498Szrj 		  && !targetm.hard_regno_mode_ok (regno, rld[r].mode))
6783*38fd1498Szrj 		equiv = 0;
6784*38fd1498Szrj 
6785*38fd1498Szrj 	      /* We found a register that contains the value we need.
6786*38fd1498Szrj 		 If this register is the same as an `earlyclobber' operand
6787*38fd1498Szrj 		 of the current insn, just mark it as a place to reload from
6788*38fd1498Szrj 		 since we can't use it as the reload register itself.  */
6789*38fd1498Szrj 
6790*38fd1498Szrj 	      if (equiv != 0)
6791*38fd1498Szrj 		for (i = 0; i < n_earlyclobbers; i++)
6792*38fd1498Szrj 		  if (reg_overlap_mentioned_for_reload_p (equiv,
6793*38fd1498Szrj 							  reload_earlyclobbers[i]))
6794*38fd1498Szrj 		    {
6795*38fd1498Szrj 		      if (! rld[r].optional)
6796*38fd1498Szrj 			reload_override_in[r] = equiv;
6797*38fd1498Szrj 		      equiv = 0;
6798*38fd1498Szrj 		      break;
6799*38fd1498Szrj 		    }
6800*38fd1498Szrj 
6801*38fd1498Szrj 	      /* If the equiv register we have found is explicitly clobbered
6802*38fd1498Szrj 		 in the current insn, the reload type determines whether we can
6803*38fd1498Szrj 		 use it, use it only for reload_override_in, or not use it at all.
6804*38fd1498Szrj 		 In particular, we then can't use EQUIV for a
6805*38fd1498Szrj 		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
6806*38fd1498Szrj 
6807*38fd1498Szrj 	      if (equiv != 0)
6808*38fd1498Szrj 		{
6809*38fd1498Szrj 		  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6810*38fd1498Szrj 		    switch (rld[r].when_needed)
6811*38fd1498Szrj 		      {
6812*38fd1498Szrj 		      case RELOAD_FOR_OTHER_ADDRESS:
6813*38fd1498Szrj 		      case RELOAD_FOR_INPADDR_ADDRESS:
6814*38fd1498Szrj 		      case RELOAD_FOR_INPUT_ADDRESS:
6815*38fd1498Szrj 		      case RELOAD_FOR_OPADDR_ADDR:
6816*38fd1498Szrj 			break;
6817*38fd1498Szrj 		      case RELOAD_OTHER:
6818*38fd1498Szrj 		      case RELOAD_FOR_INPUT:
6819*38fd1498Szrj 		      case RELOAD_FOR_OPERAND_ADDRESS:
6820*38fd1498Szrj 			if (! rld[r].optional)
6821*38fd1498Szrj 			  reload_override_in[r] = equiv;
6822*38fd1498Szrj 			/* Fall through.  */
6823*38fd1498Szrj 		      default:
6824*38fd1498Szrj 			equiv = 0;
6825*38fd1498Szrj 			break;
6826*38fd1498Szrj 		      }
6827*38fd1498Szrj 		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6828*38fd1498Szrj 		    switch (rld[r].when_needed)
6829*38fd1498Szrj 		      {
6830*38fd1498Szrj 		      case RELOAD_FOR_OTHER_ADDRESS:
6831*38fd1498Szrj 		      case RELOAD_FOR_INPADDR_ADDRESS:
6832*38fd1498Szrj 		      case RELOAD_FOR_INPUT_ADDRESS:
6833*38fd1498Szrj 		      case RELOAD_FOR_OPADDR_ADDR:
6834*38fd1498Szrj 		      case RELOAD_FOR_OPERAND_ADDRESS:
6835*38fd1498Szrj 		      case RELOAD_FOR_INPUT:
6836*38fd1498Szrj 			break;
6837*38fd1498Szrj 		      case RELOAD_OTHER:
6838*38fd1498Szrj 			if (! rld[r].optional)
6839*38fd1498Szrj 			  reload_override_in[r] = equiv;
6840*38fd1498Szrj 			/* Fall through.  */
6841*38fd1498Szrj 		      default:
6842*38fd1498Szrj 			equiv = 0;
6843*38fd1498Szrj 			break;
6844*38fd1498Szrj 		      }
6845*38fd1498Szrj 		}
6846*38fd1498Szrj 
6847*38fd1498Szrj 	      /* If we found an equivalent reg, say no code need be generated
6848*38fd1498Szrj 		 to load it, and use it as our reload reg.  */
6849*38fd1498Szrj 	      if (equiv != 0
6850*38fd1498Szrj 		  && (regno != HARD_FRAME_POINTER_REGNUM
6851*38fd1498Szrj 		      || !frame_pointer_needed))
6852*38fd1498Szrj 		{
6853*38fd1498Szrj 		  int nr = hard_regno_nregs (regno, rld[r].mode);
6854*38fd1498Szrj 		  int k;
6855*38fd1498Szrj 		  rld[r].reg_rtx = equiv;
6856*38fd1498Szrj 		  reload_spill_index[r] = regno;
6857*38fd1498Szrj 		  reload_inherited[r] = 1;
6858*38fd1498Szrj 
6859*38fd1498Szrj 		  /* If reg_reloaded_valid is not set for this register,
6860*38fd1498Szrj 		     there might be a stale spill_reg_store lying around.
6861*38fd1498Szrj 		     We must clear it, since otherwise emit_reload_insns
6862*38fd1498Szrj 		     might delete the store.  */
6863*38fd1498Szrj 		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6864*38fd1498Szrj 		    spill_reg_store[regno] = NULL;
6865*38fd1498Szrj 		  /* If any of the hard registers in EQUIV are spill
6866*38fd1498Szrj 		     registers, mark them as in use for this insn.  */
6867*38fd1498Szrj 		  for (k = 0; k < nr; k++)
6868*38fd1498Szrj 		    {
6869*38fd1498Szrj 		      i = spill_reg_order[regno + k];
6870*38fd1498Szrj 		      if (i >= 0)
6871*38fd1498Szrj 			{
6872*38fd1498Szrj 			  mark_reload_reg_in_use (regno, rld[r].opnum,
6873*38fd1498Szrj 						  rld[r].when_needed,
6874*38fd1498Szrj 						  rld[r].mode);
6875*38fd1498Szrj 			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6876*38fd1498Szrj 					    regno + k);
6877*38fd1498Szrj 			}
6878*38fd1498Szrj 		    }
6879*38fd1498Szrj 		}
6880*38fd1498Szrj 	    }
6881*38fd1498Szrj 
6882*38fd1498Szrj 	  /* If we found a register to use already, or if this is an optional
6883*38fd1498Szrj 	     reload, we are done.  */
6884*38fd1498Szrj 	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6885*38fd1498Szrj 	    continue;
6886*38fd1498Szrj 
6887*38fd1498Szrj #if 0
6888*38fd1498Szrj 	  /* No longer needed for correct operation.  Might or might
6889*38fd1498Szrj 	     not give better code on the average.  Want to experiment?  */
6890*38fd1498Szrj 
6891*38fd1498Szrj 	  /* See if there is a later reload that has a class different from our
6892*38fd1498Szrj 	     class that intersects our class or that requires fewer registers
6893*38fd1498Szrj 	     than our reload.  If so, we must allocate a register to this
6894*38fd1498Szrj 	     reload now, since that reload might inherit a previous reload
6895*38fd1498Szrj 	     and take the only available register in our class.  Don't do this
6896*38fd1498Szrj 	     for optional reloads since they will force all previous reloads
6897*38fd1498Szrj 	     to be allocated.  Also don't do this for reloads that have been
6898*38fd1498Szrj 	     turned off.  */
6899*38fd1498Szrj 
6900*38fd1498Szrj 	  for (i = j + 1; i < n_reloads; i++)
6901*38fd1498Szrj 	    {
6902*38fd1498Szrj 	      int s = reload_order[i];
6903*38fd1498Szrj 
6904*38fd1498Szrj 	      if ((rld[s].in == 0 && rld[s].out == 0
6905*38fd1498Szrj 		   && ! rld[s].secondary_p)
6906*38fd1498Szrj 		  || rld[s].optional)
6907*38fd1498Szrj 		continue;
6908*38fd1498Szrj 
6909*38fd1498Szrj 	      if ((rld[s].rclass != rld[r].rclass
6910*38fd1498Szrj 		   && reg_classes_intersect_p (rld[r].rclass,
6911*38fd1498Szrj 					       rld[s].rclass))
6912*38fd1498Szrj 		  || rld[s].nregs < rld[r].nregs)
6913*38fd1498Szrj 		break;
6914*38fd1498Szrj 	    }
6915*38fd1498Szrj 
6916*38fd1498Szrj 	  if (i == n_reloads)
6917*38fd1498Szrj 	    continue;
6918*38fd1498Szrj 
6919*38fd1498Szrj 	  allocate_reload_reg (chain, r, j == n_reloads - 1);
6920*38fd1498Szrj #endif
6921*38fd1498Szrj 	}
6922*38fd1498Szrj 
6923*38fd1498Szrj       /* Now allocate reload registers for anything non-optional that
6924*38fd1498Szrj 	 didn't get one yet.  */
6925*38fd1498Szrj       for (j = 0; j < n_reloads; j++)
6926*38fd1498Szrj 	{
6927*38fd1498Szrj 	  int r = reload_order[j];
6928*38fd1498Szrj 
6929*38fd1498Szrj 	  /* Ignore reloads that got marked inoperative.  */
6930*38fd1498Szrj 	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6931*38fd1498Szrj 	    continue;
6932*38fd1498Szrj 
6933*38fd1498Szrj 	  /* Skip reloads that already have a register allocated or are
6934*38fd1498Szrj 	     optional.  */
6935*38fd1498Szrj 	  if (rld[r].reg_rtx != 0 || rld[r].optional)
6936*38fd1498Szrj 	    continue;
6937*38fd1498Szrj 
6938*38fd1498Szrj 	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6939*38fd1498Szrj 	    break;
6940*38fd1498Szrj 	}
6941*38fd1498Szrj 
6942*38fd1498Szrj       /* If that loop got all the way, we have won.  */
6943*38fd1498Szrj       if (j == n_reloads)
6944*38fd1498Szrj 	{
6945*38fd1498Szrj 	  win = 1;
6946*38fd1498Szrj 	  break;
6947*38fd1498Szrj 	}
6948*38fd1498Szrj 
6949*38fd1498Szrj       /* Loop around and try without any inheritance.  */
6950*38fd1498Szrj     }
6951*38fd1498Szrj 
6952*38fd1498Szrj   if (! win)
6953*38fd1498Szrj     {
6954*38fd1498Szrj       /* First undo everything done by the failed attempt
6955*38fd1498Szrj 	 to allocate with inheritance.  */
6956*38fd1498Szrj       choose_reload_regs_init (chain, save_reload_reg_rtx);
6957*38fd1498Szrj 
6958*38fd1498Szrj       /* Some sanity tests to verify that the reloads found in the first
6959*38fd1498Szrj 	 pass are identical to the ones we have now.  */
6960*38fd1498Szrj       gcc_assert (chain->n_reloads == n_reloads);
6961*38fd1498Szrj 
6962*38fd1498Szrj       for (i = 0; i < n_reloads; i++)
6963*38fd1498Szrj 	{
6964*38fd1498Szrj 	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6965*38fd1498Szrj 	    continue;
6966*38fd1498Szrj 	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6967*38fd1498Szrj 	  for (j = 0; j < n_spills; j++)
6968*38fd1498Szrj 	    if (spill_regs[j] == chain->rld[i].regno)
6969*38fd1498Szrj 	      if (! set_reload_reg (j, i))
6970*38fd1498Szrj 		failed_reload (chain->insn, i);
6971*38fd1498Szrj 	}
6972*38fd1498Szrj     }
6973*38fd1498Szrj 
6974*38fd1498Szrj   /* If we thought we could inherit a reload, because it seemed that
6975*38fd1498Szrj      nothing else wanted the same reload register earlier in the insn,
6976*38fd1498Szrj      verify that assumption, now that all reloads have been assigned.
6977*38fd1498Szrj      Likewise for reloads where reload_override_in has been set.  */
6978*38fd1498Szrj 
6979*38fd1498Szrj   /* If doing expensive optimizations, do one preliminary pass that doesn't
6980*38fd1498Szrj      cancel any inheritance, but removes reloads that have been needed only
6981*38fd1498Szrj      for reloads that we know can be inherited.  */
6982*38fd1498Szrj   for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6983*38fd1498Szrj     {
6984*38fd1498Szrj       for (j = 0; j < n_reloads; j++)
6985*38fd1498Szrj 	{
6986*38fd1498Szrj 	  int r = reload_order[j];
6987*38fd1498Szrj 	  rtx check_reg;
6988*38fd1498Szrj 	  rtx tem;
6989*38fd1498Szrj 	  if (reload_inherited[r] && rld[r].reg_rtx)
6990*38fd1498Szrj 	    check_reg = rld[r].reg_rtx;
6991*38fd1498Szrj 	  else if (reload_override_in[r]
6992*38fd1498Szrj 		   && (REG_P (reload_override_in[r])
6993*38fd1498Szrj 		       || GET_CODE (reload_override_in[r]) == SUBREG))
6994*38fd1498Szrj 	    check_reg = reload_override_in[r];
6995*38fd1498Szrj 	  else
6996*38fd1498Szrj 	    continue;
6997*38fd1498Szrj 	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6998*38fd1498Szrj 				  rld[r].opnum, rld[r].when_needed, rld[r].in,
6999*38fd1498Szrj 				  (reload_inherited[r]
7000*38fd1498Szrj 				   ? rld[r].out : const0_rtx),
7001*38fd1498Szrj 				  r, 1))
7002*38fd1498Szrj 	    {
7003*38fd1498Szrj 	      if (pass)
7004*38fd1498Szrj 		continue;
7005*38fd1498Szrj 	      reload_inherited[r] = 0;
7006*38fd1498Szrj 	      reload_override_in[r] = 0;
7007*38fd1498Szrj 	    }
7008*38fd1498Szrj 	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
7009*38fd1498Szrj 	     reload_override_in, then we do not need its related
7010*38fd1498Szrj 	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
7011*38fd1498Szrj 	     likewise for other reload types.
7012*38fd1498Szrj 	     We handle this by removing a reload when its only replacement
7013*38fd1498Szrj 	     is mentioned in reload_in of the reload we are going to inherit.
7014*38fd1498Szrj 	     A special case is auto_inc expressions; even if the input is
7015*38fd1498Szrj 	     inherited, we still need the address for the output.  We can
7016*38fd1498Szrj 	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
7017*38fd1498Szrj 	     If we succeeded removing some reload and we are doing a preliminary
7018*38fd1498Szrj 	     pass just to remove such reloads, make another pass, since the
7019*38fd1498Szrj 	     removal of one reload might allow us to inherit another one.  */
7020*38fd1498Szrj 	  else if (rld[r].in
7021*38fd1498Szrj 		   && rld[r].out != rld[r].in
7022*38fd1498Szrj 		   && remove_address_replacements (rld[r].in))
7023*38fd1498Szrj 	    {
7024*38fd1498Szrj 	      if (pass)
7025*38fd1498Szrj 	        pass = 2;
7026*38fd1498Szrj 	    }
7027*38fd1498Szrj 	  /* If we needed a memory location for the reload, we also have to
7028*38fd1498Szrj 	     remove its related reloads.  */
7029*38fd1498Szrj 	  else if (rld[r].in
7030*38fd1498Szrj 		   && rld[r].out != rld[r].in
7031*38fd1498Szrj 		   && (tem = replaced_subreg (rld[r].in), REG_P (tem))
7032*38fd1498Szrj 		   && REGNO (tem) < FIRST_PSEUDO_REGISTER
7033*38fd1498Szrj 		   && (targetm.secondary_memory_needed
7034*38fd1498Szrj 		       (rld[r].inmode, REGNO_REG_CLASS (REGNO (tem)),
7035*38fd1498Szrj 			rld[r].rclass))
7036*38fd1498Szrj 		   && remove_address_replacements
7037*38fd1498Szrj 		      (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
7038*38fd1498Szrj 					  rld[r].when_needed)))
7039*38fd1498Szrj 	    {
7040*38fd1498Szrj 	      if (pass)
7041*38fd1498Szrj 	        pass = 2;
7042*38fd1498Szrj 	    }
7043*38fd1498Szrj 	}
7044*38fd1498Szrj     }
7045*38fd1498Szrj 
7046*38fd1498Szrj   /* Now that reload_override_in is known valid,
7047*38fd1498Szrj      actually override reload_in.  */
7048*38fd1498Szrj   for (j = 0; j < n_reloads; j++)
7049*38fd1498Szrj     if (reload_override_in[j])
7050*38fd1498Szrj       rld[j].in = reload_override_in[j];
7051*38fd1498Szrj 
7052*38fd1498Szrj   /* If this reload won't be done because it has been canceled or is
7053*38fd1498Szrj      optional and not inherited, clear reload_reg_rtx so other
7054*38fd1498Szrj      routines (such as subst_reloads) don't get confused.  */
7055*38fd1498Szrj   for (j = 0; j < n_reloads; j++)
7056*38fd1498Szrj     if (rld[j].reg_rtx != 0
7057*38fd1498Szrj 	&& ((rld[j].optional && ! reload_inherited[j])
7058*38fd1498Szrj 	    || (rld[j].in == 0 && rld[j].out == 0
7059*38fd1498Szrj 		&& ! rld[j].secondary_p)))
7060*38fd1498Szrj       {
7061*38fd1498Szrj 	int regno = true_regnum (rld[j].reg_rtx);
7062*38fd1498Szrj 
7063*38fd1498Szrj 	if (spill_reg_order[regno] >= 0)
7064*38fd1498Szrj 	  clear_reload_reg_in_use (regno, rld[j].opnum,
7065*38fd1498Szrj 				   rld[j].when_needed, rld[j].mode);
7066*38fd1498Szrj 	rld[j].reg_rtx = 0;
7067*38fd1498Szrj 	reload_spill_index[j] = -1;
7068*38fd1498Szrj       }
7069*38fd1498Szrj 
7070*38fd1498Szrj   /* Record which pseudos and which spill regs have output reloads.  */
7071*38fd1498Szrj   for (j = 0; j < n_reloads; j++)
7072*38fd1498Szrj     {
7073*38fd1498Szrj       int r = reload_order[j];
7074*38fd1498Szrj 
7075*38fd1498Szrj       i = reload_spill_index[r];
7076*38fd1498Szrj 
7077*38fd1498Szrj       /* I is nonneg if this reload uses a register.
7078*38fd1498Szrj 	 If rld[r].reg_rtx is 0, this is an optional reload
7079*38fd1498Szrj 	 that we opted to ignore.  */
7080*38fd1498Szrj       if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
7081*38fd1498Szrj 	  && rld[r].reg_rtx != 0)
7082*38fd1498Szrj 	{
7083*38fd1498Szrj 	  int nregno = REGNO (rld[r].out_reg);
7084*38fd1498Szrj 	  int nr = 1;
7085*38fd1498Szrj 
7086*38fd1498Szrj 	  if (nregno < FIRST_PSEUDO_REGISTER)
7087*38fd1498Szrj 	    nr = hard_regno_nregs (nregno, rld[r].mode);
7088*38fd1498Szrj 
7089*38fd1498Szrj 	  while (--nr >= 0)
7090*38fd1498Szrj 	    SET_REGNO_REG_SET (&reg_has_output_reload,
7091*38fd1498Szrj 			       nregno + nr);
7092*38fd1498Szrj 
7093*38fd1498Szrj 	  if (i >= 0)
7094*38fd1498Szrj 	    add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
7095*38fd1498Szrj 
7096*38fd1498Szrj 	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
7097*38fd1498Szrj 		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
7098*38fd1498Szrj 		      || rld[r].when_needed == RELOAD_FOR_INSN);
7099*38fd1498Szrj 	}
7100*38fd1498Szrj     }
7101*38fd1498Szrj }
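
/* Illustrative sketch, not part of the original sources: the final loop
   above records output reloads in two places -- reg_has_output_reload, a
   regset indexed by (pseudo or hard) register number, and
   reg_is_output_reload, a HARD_REG_SET covering hard reload registers.
   A later query could look roughly like the hypothetical helper below;
   the helper's name and its SPILL_INDEX parameter are assumptions made
   for illustration, mirroring reload_spill_index[] above.  */
#if 0
static bool
regno_written_by_output_reload_p (int regno, int spill_index)
{
  /* Pseudos and hard regs alike are tracked in the regset...  */
  if (REGNO_REG_SET_P (&reg_has_output_reload, regno))
    return true;
  /* ...while hard reload regs are additionally tracked in the
     HARD_REG_SET, so they can be tested without a regset lookup.  */
  return (spill_index >= 0
	  && TEST_HARD_REG_BIT (reg_is_output_reload, spill_index));
}
#endif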
7102*38fd1498Szrj 
7103*38fd1498Szrj /* Deallocate the reload register for reload R.  This is called from
7104*38fd1498Szrj    remove_address_replacements.  */
7105*38fd1498Szrj 
7106*38fd1498Szrj void
7107*38fd1498Szrj deallocate_reload_reg (int r)
7108*38fd1498Szrj {
7109*38fd1498Szrj   int regno;
7110*38fd1498Szrj 
7111*38fd1498Szrj   if (! rld[r].reg_rtx)
7112*38fd1498Szrj     return;
7113*38fd1498Szrj   regno = true_regnum (rld[r].reg_rtx);
7114*38fd1498Szrj   rld[r].reg_rtx = 0;
7115*38fd1498Szrj   if (spill_reg_order[regno] >= 0)
7116*38fd1498Szrj     clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7117*38fd1498Szrj 			     rld[r].mode);
7118*38fd1498Szrj   reload_spill_index[r] = -1;
7119*38fd1498Szrj }
7120*38fd1498Szrj 
7121*38fd1498Szrj /* These arrays are filled by emit_reload_insns and its subroutines.  */
7122*38fd1498Szrj static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
7123*38fd1498Szrj static rtx_insn *other_input_address_reload_insns = 0;
7124*38fd1498Szrj static rtx_insn *other_input_reload_insns = 0;
7125*38fd1498Szrj static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
7126*38fd1498Szrj static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7127*38fd1498Szrj static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
7128*38fd1498Szrj static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
7129*38fd1498Szrj static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7130*38fd1498Szrj static rtx_insn *operand_reload_insns = 0;
7131*38fd1498Szrj static rtx_insn *other_operand_reload_insns = 0;
7132*38fd1498Szrj static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];
7133*38fd1498Szrj 
7134*38fd1498Szrj /* Values to be put in spill_reg_store are put here first.  Instructions
7135*38fd1498Szrj    must only be placed here if the associated reload register reaches
7136*38fd1498Szrj    the end of the instruction's reload sequence.  */
7137*38fd1498Szrj static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
7138*38fd1498Szrj static HARD_REG_SET reg_reloaded_died;
7139*38fd1498Szrj 
7140*38fd1498Szrj /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7141*38fd1498Szrj    of class NEW_CLASS with mode NEW_MODE, or, if ALT_RELOAD_REG is nonzero,
7142*38fd1498Szrj    whether that register is suitable instead.  On success, change *RELOAD_REG
7143*38fd1498Szrj    to the adjusted register, and return true.  Otherwise, return false.  */
7144*38fd1498Szrj static bool
7145*38fd1498Szrj reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7146*38fd1498Szrj 			    enum reg_class new_class,
7147*38fd1498Szrj 			    machine_mode new_mode)
7148*38fd1498Szrj 
7149*38fd1498Szrj {
7150*38fd1498Szrj   rtx reg;
7151*38fd1498Szrj 
7152*38fd1498Szrj   for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7153*38fd1498Szrj     {
7154*38fd1498Szrj       unsigned regno = REGNO (reg);
7155*38fd1498Szrj 
7156*38fd1498Szrj       if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7157*38fd1498Szrj 	continue;
7158*38fd1498Szrj       if (GET_MODE (reg) != new_mode)
7159*38fd1498Szrj 	{
7160*38fd1498Szrj 	  if (!targetm.hard_regno_mode_ok (regno, new_mode))
7161*38fd1498Szrj 	    continue;
7162*38fd1498Szrj 	  if (hard_regno_nregs (regno, new_mode) > REG_NREGS (reg))
7163*38fd1498Szrj 	    continue;
7164*38fd1498Szrj 	  reg = reload_adjust_reg_for_mode (reg, new_mode);
7165*38fd1498Szrj 	}
7166*38fd1498Szrj       *reload_reg = reg;
7167*38fd1498Szrj       return true;
7168*38fd1498Szrj     }
7169*38fd1498Szrj   return false;
7170*38fd1498Szrj }
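
/* Usage sketch (illustrative only; SOME_RELOAD_REG is a hypothetical
   stand-in, not a name from this file): a caller holding a wide reload
   register can ask for it to be narrowed and class-checked before
   reusing it as a temporary.  */
#if 0
{
  rtx scratch = some_reload_reg;	/* e.g. (reg:DI 3), hypothetical  */

  if (reload_adjust_reg_for_temp (&scratch, NULL_RTX, GENERAL_REGS, SImode))
    /* SCRATCH now names the same hard register, re-moded to SImode via
       reload_adjust_reg_for_mode when the modes differed.  */
    emit_move_insn (scratch, const0_rtx);
}
#endif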
7171*38fd1498Szrj 
7172*38fd1498Szrj /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7173*38fd1498Szrj    pattern with insn_code ICODE, or, if ALT_RELOAD_REG is nonzero, whether
7174*38fd1498Szrj    that register is suitable instead.  On success, change *RELOAD_REG to the
7175*38fd1498Szrj    adjusted register, and return true.  Otherwise, return false.  */
7176*38fd1498Szrj static bool
7177*38fd1498Szrj reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7178*38fd1498Szrj 			     enum insn_code icode)
7179*38fd1498Szrj 
7180*38fd1498Szrj {
7181*38fd1498Szrj   enum reg_class new_class = scratch_reload_class (icode);
7182*38fd1498Szrj   machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7183*38fd1498Szrj 
7184*38fd1498Szrj   return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7185*38fd1498Szrj 				     new_class, new_mode);
7186*38fd1498Szrj }
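
/* Illustrative note (an assumption about target conventions, not text
   from this file): ICODE names a target "reload_in<mode>" or
   "reload_out<mode>" pattern, and operand 2 of such a pattern is its
   scratch register, so the class and mode fetched above describe that
   scratch.  A hypothetical caller could recover the same information
   directly, as sketched here.  */
#if 0
{
  enum insn_code icode = code_for_some_reload_pattern;	/* hypothetical  */
  enum reg_class scratch_class = scratch_reload_class (icode);
  machine_mode scratch_mode = insn_data[(int) icode].operand[2].mode;
}
#endif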
7187*38fd1498Szrj 
7188*38fd1498Szrj /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7189*38fd1498Szrj    has the number J.  OLD contains the value to be used as input.  */
7190*38fd1498Szrj 
7191*38fd1498Szrj static void
7192*38fd1498Szrj emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
7193*38fd1498Szrj 			 rtx old, int j)
7194*38fd1498Szrj {
7195*38fd1498Szrj   rtx_insn *insn = chain->insn;
7196*38fd1498Szrj   rtx reloadreg;
7197*38fd1498Szrj   rtx oldequiv_reg = 0;
7198*38fd1498Szrj   rtx oldequiv = 0;
7199*38fd1498Szrj   int special = 0;
7200*38fd1498Szrj   machine_mode mode;
7201*38fd1498Szrj   rtx_insn **where;
7202*38fd1498Szrj 
7203*38fd1498Szrj   /* delete_output_reload is only invoked properly if old contains
7204*38fd1498Szrj      the original pseudo register.  Since this is replaced with a
7205*38fd1498Szrj      hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7206*38fd1498Szrj      find the pseudo in RELOAD_IN_REG.  This is also used to
7207*38fd1498Szrj      determine whether a secondary reload is needed.  */
7208*38fd1498Szrj   if (reload_override_in[j]
7209*38fd1498Szrj       && (REG_P (rl->in_reg)
7210*38fd1498Szrj 	  || (GET_CODE (rl->in_reg) == SUBREG
7211*38fd1498Szrj 	      && REG_P (SUBREG_REG (rl->in_reg)))))
7212*38fd1498Szrj     {
7213*38fd1498Szrj       oldequiv = old;
7214*38fd1498Szrj       old = rl->in_reg;
7215*38fd1498Szrj     }
7216*38fd1498Szrj   if (oldequiv == 0)
7217*38fd1498Szrj     oldequiv = old;
7218*38fd1498Szrj   else if (REG_P (oldequiv))
7219*38fd1498Szrj     oldequiv_reg = oldequiv;
7220*38fd1498Szrj   else if (GET_CODE (oldequiv) == SUBREG)
7221*38fd1498Szrj     oldequiv_reg = SUBREG_REG (oldequiv);
7222*38fd1498Szrj 
7223*38fd1498Szrj   reloadreg = reload_reg_rtx_for_input[j];
7224*38fd1498Szrj   mode = GET_MODE (reloadreg);
7225*38fd1498Szrj 
7226*38fd1498Szrj   /* If we are reloading from a register that was recently stored in
7227*38fd1498Szrj      with an output-reload, see if we can prove there was
7228*38fd1498Szrj      actually no need to store the old value in it.  */
7229*38fd1498Szrj 
7230*38fd1498Szrj   if (optimize && REG_P (oldequiv)
7231*38fd1498Szrj       && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7232*38fd1498Szrj       && spill_reg_store[REGNO (oldequiv)]
7233*38fd1498Szrj       && REG_P (old)
7234*38fd1498Szrj       && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
7235*38fd1498Szrj 	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7236*38fd1498Szrj 			  rl->out_reg)))
7237*38fd1498Szrj     delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7238*38fd1498Szrj 
7239*38fd1498Szrj   /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7240*38fd1498Szrj      OLDEQUIV.  */
7241*38fd1498Szrj 
7242*38fd1498Szrj   while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
7243*38fd1498Szrj     oldequiv = SUBREG_REG (oldequiv);
7244*38fd1498Szrj   if (GET_MODE (oldequiv) != VOIDmode
7245*38fd1498Szrj       && mode != GET_MODE (oldequiv))
7246*38fd1498Szrj     oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
7247*38fd1498Szrj 
7248*38fd1498Szrj   /* Switch to the right place to emit the reload insns.  */
7249*38fd1498Szrj   switch (rl->when_needed)
7250*38fd1498Szrj     {
7251*38fd1498Szrj     case RELOAD_OTHER:
7252*38fd1498Szrj       where = &other_input_reload_insns;
7253*38fd1498Szrj       break;
7254*38fd1498Szrj     case RELOAD_FOR_INPUT:
7255*38fd1498Szrj       where = &input_reload_insns[rl->opnum];
7256*38fd1498Szrj       break;
7257*38fd1498Szrj     case RELOAD_FOR_INPUT_ADDRESS:
7258*38fd1498Szrj       where = &input_address_reload_insns[rl->opnum];
7259*38fd1498Szrj       break;
7260*38fd1498Szrj     case RELOAD_FOR_INPADDR_ADDRESS:
7261*38fd1498Szrj       where = &inpaddr_address_reload_insns[rl->opnum];
7262*38fd1498Szrj       break;
7263*38fd1498Szrj     case RELOAD_FOR_OUTPUT_ADDRESS:
7264*38fd1498Szrj       where = &output_address_reload_insns[rl->opnum];
7265*38fd1498Szrj       break;
7266*38fd1498Szrj     case RELOAD_FOR_OUTADDR_ADDRESS:
7267*38fd1498Szrj       where = &outaddr_address_reload_insns[rl->opnum];
7268*38fd1498Szrj       break;
7269*38fd1498Szrj     case RELOAD_FOR_OPERAND_ADDRESS:
7270*38fd1498Szrj       where = &operand_reload_insns;
7271*38fd1498Szrj       break;
7272*38fd1498Szrj     case RELOAD_FOR_OPADDR_ADDR:
7273*38fd1498Szrj       where = &other_operand_reload_insns;
7274*38fd1498Szrj       break;
7275*38fd1498Szrj     case RELOAD_FOR_OTHER_ADDRESS:
7276*38fd1498Szrj       where = &other_input_address_reload_insns;
7277*38fd1498Szrj       break;
7278*38fd1498Szrj     default:
7279*38fd1498Szrj       gcc_unreachable ();
7280*38fd1498Szrj     }
7281*38fd1498Szrj 
7282*38fd1498Szrj   push_to_sequence (*where);
7283*38fd1498Szrj 
7284*38fd1498Szrj   /* Auto-increment addresses must be reloaded in a special way.  */
7285*38fd1498Szrj   if (rl->out && ! rl->out_reg)
7286*38fd1498Szrj     {
7287*38fd1498Szrj       /* We are not going to bother supporting the case where an
7288*38fd1498Szrj 	 incremented register can't be copied directly from
7289*38fd1498Szrj 	 OLDEQUIV since this seems highly unlikely.  */
7290*38fd1498Szrj       gcc_assert (rl->secondary_in_reload < 0);
7291*38fd1498Szrj 
7292*38fd1498Szrj       if (reload_inherited[j])
7293*38fd1498Szrj 	oldequiv = reloadreg;
7294*38fd1498Szrj 
7295*38fd1498Szrj       old = XEXP (rl->in_reg, 0);
7296*38fd1498Szrj 
7297*38fd1498Szrj       /* Prevent normal processing of this reload.  */
7298*38fd1498Szrj       special = 1;
7299*38fd1498Szrj       /* Output a special code sequence for this case.  */
7300*38fd1498Szrj       inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
7301*38fd1498Szrj     }
7302*38fd1498Szrj 
7303*38fd1498Szrj   /* If we are reloading a pseudo-register that was set by the previous
7304*38fd1498Szrj      insn, see if we can get rid of that pseudo-register entirely
7305*38fd1498Szrj      by redirecting the previous insn into our reload register.  */
7306*38fd1498Szrj 
7307*38fd1498Szrj   else if (optimize && REG_P (old)
7308*38fd1498Szrj 	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
7309*38fd1498Szrj 	   && dead_or_set_p (insn, old)
7310*38fd1498Szrj 	   /* This is unsafe if some other reload
7311*38fd1498Szrj 	      uses the same reg first.  */
7312*38fd1498Szrj 	   && ! conflicts_with_override (reloadreg)
7313*38fd1498Szrj 	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7314*38fd1498Szrj 				rl->when_needed, old, rl->out, j, 0))
7315*38fd1498Szrj     {
7316*38fd1498Szrj       rtx_insn *temp = PREV_INSN (insn);
7317*38fd1498Szrj       while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7318*38fd1498Szrj 	temp = PREV_INSN (temp);
7319*38fd1498Szrj       if (temp
7320*38fd1498Szrj 	  && NONJUMP_INSN_P (temp)
7321*38fd1498Szrj 	  && GET_CODE (PATTERN (temp)) == SET
7322*38fd1498Szrj 	  && SET_DEST (PATTERN (temp)) == old
7323*38fd1498Szrj 	  /* Make sure we can access insn_operand_constraint.  */
7324*38fd1498Szrj 	  && asm_noperands (PATTERN (temp)) < 0
7325*38fd1498Szrj 	  /* This is unsafe if the operand occurs more than once in the current
7326*38fd1498Szrj 	     insn.  Perhaps some occurrences aren't reloaded.  */
7327*38fd1498Szrj 	  && count_occurrences (PATTERN (insn), old, 0) == 1)
7328*38fd1498Szrj 	{
7329*38fd1498Szrj 	  rtx old = SET_DEST (PATTERN (temp));
7330*38fd1498Szrj 	  /* Store into the reload register instead of the pseudo.  */
7331*38fd1498Szrj 	  SET_DEST (PATTERN (temp)) = reloadreg;
7332*38fd1498Szrj 
7333*38fd1498Szrj 	  /* Verify that resulting insn is valid.
7334*38fd1498Szrj 
7335*38fd1498Szrj 	     Note that we have replaced the destination of TEMP with
7336*38fd1498Szrj 	     RELOADREG.  If TEMP references RELOADREG within an
7337*38fd1498Szrj 	     autoincrement addressing mode, then the resulting insn
7338*38fd1498Szrj 	     is ill-formed and we must reject this optimization.  */
7339*38fd1498Szrj 	  extract_insn (temp);
7340*38fd1498Szrj 	  if (constrain_operands (1, get_enabled_alternatives (temp))
7341*38fd1498Szrj 	      && (!AUTO_INC_DEC || ! find_reg_note (temp, REG_INC, reloadreg)))
7342*38fd1498Szrj 	    {
7343*38fd1498Szrj 	      /* If the previous insn is an output reload, the source is
7344*38fd1498Szrj 		 a reload register, and its spill_reg_store entry will
7345*38fd1498Szrj 		 contain the previous destination.  This is now
7346*38fd1498Szrj 		 invalid.  */
7347*38fd1498Szrj 	      if (REG_P (SET_SRC (PATTERN (temp)))
7348*38fd1498Szrj 		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7349*38fd1498Szrj 		{
7350*38fd1498Szrj 		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7351*38fd1498Szrj 		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7352*38fd1498Szrj 		}
7353*38fd1498Szrj 
7354*38fd1498Szrj 	      /* If these are the only uses of the pseudo reg,
7355*38fd1498Szrj 		 pretend for GDB it lives in the reload reg we used.  */
7356*38fd1498Szrj 	      if (REG_N_DEATHS (REGNO (old)) == 1
7357*38fd1498Szrj 		  && REG_N_SETS (REGNO (old)) == 1)
7358*38fd1498Szrj 		{
7359*38fd1498Szrj 		  reg_renumber[REGNO (old)] = REGNO (reloadreg);
7360*38fd1498Szrj 		  if (ira_conflicts_p)
7361*38fd1498Szrj 		    /* Inform IRA about the change.  */
7362*38fd1498Szrj 		    ira_mark_allocation_change (REGNO (old));
7363*38fd1498Szrj 		  alter_reg (REGNO (old), -1, false);
7364*38fd1498Szrj 		}
7365*38fd1498Szrj 	      special = 1;
7366*38fd1498Szrj 
7367*38fd1498Szrj 	      /* Adjust any debug insns between temp and insn.  */
7368*38fd1498Szrj 	      while ((temp = NEXT_INSN (temp)) != insn)
7369*38fd1498Szrj 		if (DEBUG_BIND_INSN_P (temp))
7370*38fd1498Szrj 		  INSN_VAR_LOCATION_LOC (temp)
7371*38fd1498Szrj 		    = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (temp),
7372*38fd1498Szrj 					    old, reloadreg);
7373*38fd1498Szrj 		else
7374*38fd1498Szrj 		  gcc_assert (DEBUG_INSN_P (temp) || NOTE_P (temp));
7375*38fd1498Szrj 	    }
7376*38fd1498Szrj 	  else
7377*38fd1498Szrj 	    {
7378*38fd1498Szrj 	      SET_DEST (PATTERN (temp)) = old;
7379*38fd1498Szrj 	    }
7380*38fd1498Szrj 	}
7381*38fd1498Szrj     }
7382*38fd1498Szrj 
7383*38fd1498Szrj   /* We can't do that, so output an insn to load RELOADREG.  */
7384*38fd1498Szrj 
7385*38fd1498Szrj   /* If we have a secondary reload, pick up the secondary register
7386*38fd1498Szrj      and icode, if any.  If OLDEQUIV and OLD are different or
7387*38fd1498Szrj      if this is an in-out reload, recompute whether or not we
7388*38fd1498Szrj      still need a secondary register and what the icode should
7389*38fd1498Szrj      be.  If we still need a secondary register and the class or
7390*38fd1498Szrj      icode is different, go back to reloading from OLD if using
7391*38fd1498Szrj      OLDEQUIV means that we got the wrong type of register.  We
7392*38fd1498Szrj      cannot have different class or icode due to an in-out reload
7393*38fd1498Szrj      because we don't make such reloads when both the input and
7394*38fd1498Szrj      output need secondary reload registers.  */
7395*38fd1498Szrj 
7396*38fd1498Szrj   if (! special && rl->secondary_in_reload >= 0)
7397*38fd1498Szrj     {
7398*38fd1498Szrj       rtx second_reload_reg = 0;
7399*38fd1498Szrj       rtx third_reload_reg = 0;
7400*38fd1498Szrj       int secondary_reload = rl->secondary_in_reload;
7401*38fd1498Szrj       rtx real_oldequiv = oldequiv;
7402*38fd1498Szrj       rtx real_old = old;
7403*38fd1498Szrj       rtx tmp;
7404*38fd1498Szrj       enum insn_code icode;
7405*38fd1498Szrj       enum insn_code tertiary_icode = CODE_FOR_nothing;
7406*38fd1498Szrj 
7407*38fd1498Szrj       /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7408*38fd1498Szrj 	 and similarly for OLD.
7409*38fd1498Szrj 	 See comments in get_secondary_reload in reload.c.  */
7410*38fd1498Szrj       /* If it is a pseudo that cannot be replaced with its
7411*38fd1498Szrj 	 equivalent MEM, we must fall back to reload_in, which
7412*38fd1498Szrj 	 will have all the necessary substitutions registered.
7413*38fd1498Szrj 	 Likewise for a pseudo that can't be replaced with its
7414*38fd1498Szrj 	 equivalent constant.
7415*38fd1498Szrj 
7416*38fd1498Szrj 	 Take extra care for subregs of such pseudos.  Note that
7417*38fd1498Szrj 	 we cannot use reg_equiv_mem in this case because it is
7418*38fd1498Szrj 	 not in the right mode.  */
7419*38fd1498Szrj 
7420*38fd1498Szrj       tmp = oldequiv;
7421*38fd1498Szrj       if (GET_CODE (tmp) == SUBREG)
7422*38fd1498Szrj 	tmp = SUBREG_REG (tmp);
7423*38fd1498Szrj       if (REG_P (tmp)
7424*38fd1498Szrj 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7425*38fd1498Szrj 	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7426*38fd1498Szrj 	      || reg_equiv_constant (REGNO (tmp)) != 0))
7427*38fd1498Szrj 	{
7428*38fd1498Szrj 	  if (! reg_equiv_mem (REGNO (tmp))
7429*38fd1498Szrj 	      || num_not_at_initial_offset
7430*38fd1498Szrj 	      || GET_CODE (oldequiv) == SUBREG)
7431*38fd1498Szrj 	    real_oldequiv = rl->in;
7432*38fd1498Szrj 	  else
7433*38fd1498Szrj 	    real_oldequiv = reg_equiv_mem (REGNO (tmp));
7434*38fd1498Szrj 	}
7435*38fd1498Szrj 
7436*38fd1498Szrj       tmp = old;
7437*38fd1498Szrj       if (GET_CODE (tmp) == SUBREG)
7438*38fd1498Szrj 	tmp = SUBREG_REG (tmp);
7439*38fd1498Szrj       if (REG_P (tmp)
7440*38fd1498Szrj 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7441*38fd1498Szrj 	  && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7442*38fd1498Szrj 	      || reg_equiv_constant (REGNO (tmp)) != 0))
7443*38fd1498Szrj 	{
7444*38fd1498Szrj 	  if (! reg_equiv_mem (REGNO (tmp))
7445*38fd1498Szrj 	      || num_not_at_initial_offset
7446*38fd1498Szrj 	      || GET_CODE (old) == SUBREG)
7447*38fd1498Szrj 	    real_old = rl->in;
7448*38fd1498Szrj 	  else
7449*38fd1498Szrj 	    real_old = reg_equiv_mem (REGNO (tmp));
7450*38fd1498Szrj 	}
7451*38fd1498Szrj 
7452*38fd1498Szrj       second_reload_reg = rld[secondary_reload].reg_rtx;
7453*38fd1498Szrj       if (rld[secondary_reload].secondary_in_reload >= 0)
7454*38fd1498Szrj 	{
7455*38fd1498Szrj 	  int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7456*38fd1498Szrj 
7457*38fd1498Szrj 	  third_reload_reg = rld[tertiary_reload].reg_rtx;
7458*38fd1498Szrj 	  tertiary_icode = rld[secondary_reload].secondary_in_icode;
7459*38fd1498Szrj 	  /* We'd have to add more code for quaternary reloads.  */
7460*38fd1498Szrj 	  gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7461*38fd1498Szrj 	}
7462*38fd1498Szrj       icode = rl->secondary_in_icode;
7463*38fd1498Szrj 
7464*38fd1498Szrj       if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7465*38fd1498Szrj 	  || (rl->in != 0 && rl->out != 0))
7466*38fd1498Szrj 	{
7467*38fd1498Szrj 	  secondary_reload_info sri, sri2;
7468*38fd1498Szrj 	  enum reg_class new_class, new_t_class;
7469*38fd1498Szrj 
7470*38fd1498Szrj 	  sri.icode = CODE_FOR_nothing;
7471*38fd1498Szrj 	  sri.prev_sri = NULL;
7472*38fd1498Szrj 	  new_class
7473*38fd1498Szrj 	    = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7474*38fd1498Szrj 							 rl->rclass, mode,
7475*38fd1498Szrj 							 &sri);
7476*38fd1498Szrj 
7477*38fd1498Szrj 	  if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7478*38fd1498Szrj 	    second_reload_reg = 0;
7479*38fd1498Szrj 	  else if (new_class == NO_REGS)
7480*38fd1498Szrj 	    {
7481*38fd1498Szrj 	      if (reload_adjust_reg_for_icode (&second_reload_reg,
7482*38fd1498Szrj 					       third_reload_reg,
7483*38fd1498Szrj 					       (enum insn_code) sri.icode))
7484*38fd1498Szrj 		{
7485*38fd1498Szrj 		  icode = (enum insn_code) sri.icode;
7486*38fd1498Szrj 		  third_reload_reg = 0;
7487*38fd1498Szrj 		}
7488*38fd1498Szrj 	      else
7489*38fd1498Szrj 		{
7490*38fd1498Szrj 		  oldequiv = old;
7491*38fd1498Szrj 		  real_oldequiv = real_old;
7492*38fd1498Szrj 		}
7493*38fd1498Szrj 	    }
7494*38fd1498Szrj 	  else if (sri.icode != CODE_FOR_nothing)
7495*38fd1498Szrj 	    /* We currently lack a way to express this in reloads.  */
7496*38fd1498Szrj 	    gcc_unreachable ();
7497*38fd1498Szrj 	  else
7498*38fd1498Szrj 	    {
7499*38fd1498Szrj 	      sri2.icode = CODE_FOR_nothing;
7500*38fd1498Szrj 	      sri2.prev_sri = &sri;
7501*38fd1498Szrj 	      new_t_class
7502*38fd1498Szrj 		= (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7503*38fd1498Szrj 							     new_class, mode,
7504*38fd1498Szrj 							     &sri);
7505*38fd1498Szrj 	      if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7506*38fd1498Szrj 		{
7507*38fd1498Szrj 		  if (reload_adjust_reg_for_temp (&second_reload_reg,
7508*38fd1498Szrj 						  third_reload_reg,
7509*38fd1498Szrj 						  new_class, mode))
7510*38fd1498Szrj 		    {
7511*38fd1498Szrj 		      third_reload_reg = 0;
7512*38fd1498Szrj 		      tertiary_icode = (enum insn_code) sri2.icode;
7513*38fd1498Szrj 		    }
7514*38fd1498Szrj 		  else
7515*38fd1498Szrj 		    {
7516*38fd1498Szrj 		      oldequiv = old;
7517*38fd1498Szrj 		      real_oldequiv = real_old;
7518*38fd1498Szrj 		    }
7519*38fd1498Szrj 		}
7520*38fd1498Szrj 	      else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7521*38fd1498Szrj 		{
7522*38fd1498Szrj 		  rtx intermediate = second_reload_reg;
7523*38fd1498Szrj 
7524*38fd1498Szrj 		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
7525*38fd1498Szrj 						  new_class, mode)
7526*38fd1498Szrj 		      && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7527*38fd1498Szrj 						      ((enum insn_code)
7528*38fd1498Szrj 						       sri2.icode)))
7529*38fd1498Szrj 		    {
7530*38fd1498Szrj 		      second_reload_reg = intermediate;
7531*38fd1498Szrj 		      tertiary_icode = (enum insn_code) sri2.icode;
7532*38fd1498Szrj 		    }
7533*38fd1498Szrj 		  else
7534*38fd1498Szrj 		    {
7535*38fd1498Szrj 		      oldequiv = old;
7536*38fd1498Szrj 		      real_oldequiv = real_old;
7537*38fd1498Szrj 		    }
7538*38fd1498Szrj 		}
7539*38fd1498Szrj 	      else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7540*38fd1498Szrj 		{
7541*38fd1498Szrj 		  rtx intermediate = second_reload_reg;
7542*38fd1498Szrj 
7543*38fd1498Szrj 		  if (reload_adjust_reg_for_temp (&intermediate, NULL,
7544*38fd1498Szrj 						  new_class, mode)
7545*38fd1498Szrj 		      && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7546*38fd1498Szrj 						      new_t_class, mode))
7547*38fd1498Szrj 		    {
7548*38fd1498Szrj 		      second_reload_reg = intermediate;
7549*38fd1498Szrj 		      tertiary_icode = (enum insn_code) sri2.icode;
7550*38fd1498Szrj 		    }
7551*38fd1498Szrj 		  else
7552*38fd1498Szrj 		    {
7553*38fd1498Szrj 		      oldequiv = old;
7554*38fd1498Szrj 		      real_oldequiv = real_old;
7555*38fd1498Szrj 		    }
7556*38fd1498Szrj 		}
7557*38fd1498Szrj 	      else
7558*38fd1498Szrj 		{
7559*38fd1498Szrj 		  /* This could be handled more intelligently too.  */
7560*38fd1498Szrj 		  oldequiv = old;
7561*38fd1498Szrj 		  real_oldequiv = real_old;
7562*38fd1498Szrj 		}
7563*38fd1498Szrj 	    }
7564*38fd1498Szrj 	}
7565*38fd1498Szrj 
7566*38fd1498Szrj       /* If we still need a secondary reload register, check
7567*38fd1498Szrj 	 to see if it is being used as a scratch or intermediate
7568*38fd1498Szrj 	 register and generate code appropriately.  If we need
7569*38fd1498Szrj 	 a scratch register, use REAL_OLDEQUIV since the form of
7570*38fd1498Szrj 	 the insn may depend on the actual address if it is
7571*38fd1498Szrj 	 a MEM.  */
7572*38fd1498Szrj 
7573*38fd1498Szrj       if (second_reload_reg)
7574*38fd1498Szrj 	{
7575*38fd1498Szrj 	  if (icode != CODE_FOR_nothing)
7576*38fd1498Szrj 	    {
7577*38fd1498Szrj 	      /* We'd have to add extra code to handle this case.  */
7578*38fd1498Szrj 	      gcc_assert (!third_reload_reg);
7579*38fd1498Szrj 
7580*38fd1498Szrj 	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7581*38fd1498Szrj 					  second_reload_reg));
7582*38fd1498Szrj 	      special = 1;
7583*38fd1498Szrj 	    }
7584*38fd1498Szrj 	  else
7585*38fd1498Szrj 	    {
7586*38fd1498Szrj 	      /* See if we need a scratch register to load the
7587*38fd1498Szrj 		 intermediate register (a tertiary reload).  */
7588*38fd1498Szrj 	      if (tertiary_icode != CODE_FOR_nothing)
7589*38fd1498Szrj 		{
7590*38fd1498Szrj 		  emit_insn ((GEN_FCN (tertiary_icode)
7591*38fd1498Szrj 			      (second_reload_reg, real_oldequiv,
7592*38fd1498Szrj 			       third_reload_reg)));
7593*38fd1498Szrj 		}
7594*38fd1498Szrj 	      else if (third_reload_reg)
7595*38fd1498Szrj 		{
7596*38fd1498Szrj 		  gen_reload (third_reload_reg, real_oldequiv,
7597*38fd1498Szrj 			      rl->opnum,
7598*38fd1498Szrj 			      rl->when_needed);
7599*38fd1498Szrj 		  gen_reload (second_reload_reg, third_reload_reg,
7600*38fd1498Szrj 			      rl->opnum,
7601*38fd1498Szrj 			      rl->when_needed);
7602*38fd1498Szrj 		}
7603*38fd1498Szrj 	      else
7604*38fd1498Szrj 		gen_reload (second_reload_reg, real_oldequiv,
7605*38fd1498Szrj 			    rl->opnum,
7606*38fd1498Szrj 			    rl->when_needed);
7607*38fd1498Szrj 
7608*38fd1498Szrj 	      oldequiv = second_reload_reg;
7609*38fd1498Szrj 	    }
7610*38fd1498Szrj 	}
7611*38fd1498Szrj     }
7612*38fd1498Szrj 
7613*38fd1498Szrj   if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7614*38fd1498Szrj     {
7615*38fd1498Szrj       rtx real_oldequiv = oldequiv;
7616*38fd1498Szrj 
7617*38fd1498Szrj       if ((REG_P (oldequiv)
7618*38fd1498Szrj 	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7619*38fd1498Szrj 	   && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
7620*38fd1498Szrj 	       || reg_equiv_constant (REGNO (oldequiv)) != 0))
7621*38fd1498Szrj 	  || (GET_CODE (oldequiv) == SUBREG
7622*38fd1498Szrj 	      && REG_P (SUBREG_REG (oldequiv))
7623*38fd1498Szrj 	      && (REGNO (SUBREG_REG (oldequiv))
7624*38fd1498Szrj 		  >= FIRST_PSEUDO_REGISTER)
7625*38fd1498Szrj 	      && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
7626*38fd1498Szrj 		  || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
7627*38fd1498Szrj 	  || (CONSTANT_P (oldequiv)
7628*38fd1498Szrj 	      && (targetm.preferred_reload_class (oldequiv,
7629*38fd1498Szrj 						  REGNO_REG_CLASS (REGNO (reloadreg)))
7630*38fd1498Szrj 		  == NO_REGS)))
7631*38fd1498Szrj 	real_oldequiv = rl->in;
7632*38fd1498Szrj       gen_reload (reloadreg, real_oldequiv, rl->opnum,
7633*38fd1498Szrj 		  rl->when_needed);
7634*38fd1498Szrj     }
7635*38fd1498Szrj 
7636*38fd1498Szrj   if (cfun->can_throw_non_call_exceptions)
7637*38fd1498Szrj     copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7638*38fd1498Szrj 
7639*38fd1498Szrj   /* End this sequence.  */
7640*38fd1498Szrj   *where = get_insns ();
7641*38fd1498Szrj   end_sequence ();
7642*38fd1498Szrj 
7643*38fd1498Szrj   /* Update reload_override_in so that delete_address_reloads_1
7644*38fd1498Szrj      can see the actual register usage.  */
7645*38fd1498Szrj   if (oldequiv_reg)
7646*38fd1498Szrj     reload_override_in[j] = oldequiv;
7647*38fd1498Szrj }
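
/* Illustrative sketch of the sequence-bucket idiom used above (a
   restatement for clarity, not new behaviour; the USE insn emitted
   here is only a placeholder).  Reload insns are generated onto one
   of the static per-type sequences and the grown chain is stored
   back, so emit_reload_insns can later splice the buckets into the
   insn stream in a fixed order.  */
#if 0
{
  rtx_insn **where = &operand_reload_insns;	/* one bucket from above  */

  push_to_sequence (*where);		/* continue that pending sequence  */
  emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));	/* placeholder insn  */
  *where = get_insns ();		/* remember the grown sequence  */
  end_sequence ();			/* return to the outer insn stream  */
}
#endif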
7648*38fd1498Szrj 
7649*38fd1498Szrj /* Generate insns for the output reload RL, which is for the insn described
7650*38fd1498Szrj    by CHAIN and has the number J.  */
7651*38fd1498Szrj static void
7652*38fd1498Szrj emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7653*38fd1498Szrj 			  int j)
7654*38fd1498Szrj {
7655*38fd1498Szrj   rtx reloadreg;
7656*38fd1498Szrj   rtx_insn *insn = chain->insn;
7657*38fd1498Szrj   int special = 0;
7658*38fd1498Szrj   rtx old = rl->out;
7659*38fd1498Szrj   machine_mode mode;
7660*38fd1498Szrj   rtx_insn *p;
7661*38fd1498Szrj   rtx rl_reg_rtx;
7662*38fd1498Szrj 
7663*38fd1498Szrj   if (rl->when_needed == RELOAD_OTHER)
7664*38fd1498Szrj     start_sequence ();
7665*38fd1498Szrj   else
7666*38fd1498Szrj     push_to_sequence (output_reload_insns[rl->opnum]);
7667*38fd1498Szrj 
7668*38fd1498Szrj   rl_reg_rtx = reload_reg_rtx_for_output[j];
7669*38fd1498Szrj   mode = GET_MODE (rl_reg_rtx);
7670*38fd1498Szrj 
7671*38fd1498Szrj   reloadreg = rl_reg_rtx;
7672*38fd1498Szrj 
7673*38fd1498Szrj   /* If we need two reload regs, set RELOADREG to the intermediate
7674*38fd1498Szrj      one, since it will be stored into OLD.  We might need a secondary
7675*38fd1498Szrj      register only for an input reload, so check again here.  */
7676*38fd1498Szrj 
7677*38fd1498Szrj   if (rl->secondary_out_reload >= 0)
7678*38fd1498Szrj     {
7679*38fd1498Szrj       rtx real_old = old;
7680*38fd1498Szrj       int secondary_reload = rl->secondary_out_reload;
7681*38fd1498Szrj       int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7682*38fd1498Szrj 
7683*38fd1498Szrj       if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7684*38fd1498Szrj 	  && reg_equiv_mem (REGNO (old)) != 0)
7685*38fd1498Szrj 	real_old = reg_equiv_mem (REGNO (old));
7686*38fd1498Szrj 
7687*38fd1498Szrj       if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7688*38fd1498Szrj 	{
7689*38fd1498Szrj 	  rtx second_reloadreg = reloadreg;
7690*38fd1498Szrj 	  reloadreg = rld[secondary_reload].reg_rtx;
7691*38fd1498Szrj 
7692*38fd1498Szrj 	  /* See if RELOADREG is to be used as a scratch register
7693*38fd1498Szrj 	     or as an intermediate register.  */
7694*38fd1498Szrj 	  if (rl->secondary_out_icode != CODE_FOR_nothing)
7695*38fd1498Szrj 	    {
7696*38fd1498Szrj 	      /* We'd have to add extra code to handle this case.  */
7697*38fd1498Szrj 	      gcc_assert (tertiary_reload < 0);
7698*38fd1498Szrj 
7699*38fd1498Szrj 	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
7700*38fd1498Szrj 			  (real_old, second_reloadreg, reloadreg)));
7701*38fd1498Szrj 	      special = 1;
7702*38fd1498Szrj 	    }
7703*38fd1498Szrj 	  else
7704*38fd1498Szrj 	    {
7705*38fd1498Szrj 	      /* See if we need both a scratch and intermediate reload
7706*38fd1498Szrj 		 register.  */
7707*38fd1498Szrj 
7708*38fd1498Szrj 	      enum insn_code tertiary_icode
7709*38fd1498Szrj 		= rld[secondary_reload].secondary_out_icode;
7710*38fd1498Szrj 
7711*38fd1498Szrj 	      /* We'd have to add more code for quaternary reloads.  */
7712*38fd1498Szrj 	      gcc_assert (tertiary_reload < 0
7713*38fd1498Szrj 			  || rld[tertiary_reload].secondary_out_reload < 0);
7714*38fd1498Szrj 
7715*38fd1498Szrj 	      if (GET_MODE (reloadreg) != mode)
7716*38fd1498Szrj 		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7717*38fd1498Szrj 
7718*38fd1498Szrj 	      if (tertiary_icode != CODE_FOR_nothing)
7719*38fd1498Szrj 		{
7720*38fd1498Szrj 		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7721*38fd1498Szrj 
7722*38fd1498Szrj 		  /* Copy primary reload reg to secondary reload reg, then
7723*38fd1498Szrj 		     secondary reload reg to OLD using our insn.  (Note that
7724*38fd1498Szrj 		     these have been swapped above.)  */
7725*38fd1498Szrj 
7726*38fd1498Szrj 		  /* If REAL_OLD is a paradoxical SUBREG, remove it
7727*38fd1498Szrj 		     and try to put the opposite SUBREG on
7728*38fd1498Szrj 		     RELOADREG.  */
7729*38fd1498Szrj 		  strip_paradoxical_subreg (&real_old, &reloadreg);
7730*38fd1498Szrj 
7731*38fd1498Szrj 		  gen_reload (reloadreg, second_reloadreg,
7732*38fd1498Szrj 			      rl->opnum, rl->when_needed);
7733*38fd1498Szrj 		  emit_insn ((GEN_FCN (tertiary_icode)
7734*38fd1498Szrj 			      (real_old, reloadreg, third_reloadreg)));
7735*38fd1498Szrj 		  special = 1;
7736*38fd1498Szrj 		}
7737*38fd1498Szrj 
7738*38fd1498Szrj 	      else
7739*38fd1498Szrj 		{
7740*38fd1498Szrj 		  /* Copy between the reload regs here and then to
7741*38fd1498Szrj 		     OUT later.  */
7742*38fd1498Szrj 
7743*38fd1498Szrj 		  gen_reload (reloadreg, second_reloadreg,
7744*38fd1498Szrj 			      rl->opnum, rl->when_needed);
7745*38fd1498Szrj 		  if (tertiary_reload >= 0)
7746*38fd1498Szrj 		    {
7747*38fd1498Szrj 		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7748*38fd1498Szrj 
7749*38fd1498Szrj 		      gen_reload (third_reloadreg, reloadreg,
7750*38fd1498Szrj 				  rl->opnum, rl->when_needed);
7751*38fd1498Szrj 		      reloadreg = third_reloadreg;
7752*38fd1498Szrj 		    }
7753*38fd1498Szrj 		}
7754*38fd1498Szrj 	    }
7755*38fd1498Szrj 	}
7756*38fd1498Szrj     }
7757*38fd1498Szrj 
7758*38fd1498Szrj   /* Output the last reload insn.  */
7759*38fd1498Szrj   if (! special)
7760*38fd1498Szrj     {
7761*38fd1498Szrj       rtx set;
7762*38fd1498Szrj 
7763*38fd1498Szrj       /* Don't output the last reload if OLD is not the dest of
7764*38fd1498Szrj 	 INSN and is in the src and is clobbered by INSN.  */
7765*38fd1498Szrj       if (! flag_expensive_optimizations
7766*38fd1498Szrj 	  || !REG_P (old)
7767*38fd1498Szrj 	  || !(set = single_set (insn))
7768*38fd1498Szrj 	  || rtx_equal_p (old, SET_DEST (set))
7769*38fd1498Szrj 	  || !reg_mentioned_p (old, SET_SRC (set))
7770*38fd1498Szrj 	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7771*38fd1498Szrj 	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7772*38fd1498Szrj 	gen_reload (old, reloadreg, rl->opnum,
7773*38fd1498Szrj 		    rl->when_needed);
7774*38fd1498Szrj     }
7775*38fd1498Szrj 
7776*38fd1498Szrj   /* Look at all insns we emitted, just to be safe.  */
7777*38fd1498Szrj   for (p = get_insns (); p; p = NEXT_INSN (p))
7778*38fd1498Szrj     if (INSN_P (p))
7779*38fd1498Szrj       {
7780*38fd1498Szrj 	rtx pat = PATTERN (p);
7781*38fd1498Szrj 
7782*38fd1498Szrj 	/* If this output reload doesn't come from a spill reg,
7783*38fd1498Szrj 	   clear any memory of reloaded copies of the pseudo reg.
7784*38fd1498Szrj 	   If this output reload comes from a spill reg,
7785*38fd1498Szrj 	   reg_has_output_reload will make this do nothing.  */
7786*38fd1498Szrj 	note_stores (pat, forget_old_reloads_1, NULL);
7787*38fd1498Szrj 
7788*38fd1498Szrj 	if (reg_mentioned_p (rl_reg_rtx, pat))
7789*38fd1498Szrj 	  {
7790*38fd1498Szrj 	    rtx set = single_set (insn);
7791*38fd1498Szrj 	    if (reload_spill_index[j] < 0
7792*38fd1498Szrj 		&& set
7793*38fd1498Szrj 		&& SET_SRC (set) == rl_reg_rtx)
7794*38fd1498Szrj 	      {
7795*38fd1498Szrj 		int src = REGNO (SET_SRC (set));
7796*38fd1498Szrj 
7797*38fd1498Szrj 		reload_spill_index[j] = src;
7798*38fd1498Szrj 		SET_HARD_REG_BIT (reg_is_output_reload, src);
7799*38fd1498Szrj 		if (find_regno_note (insn, REG_DEAD, src))
7800*38fd1498Szrj 		  SET_HARD_REG_BIT (reg_reloaded_died, src);
7801*38fd1498Szrj 	      }
7802*38fd1498Szrj 	    if (HARD_REGISTER_P (rl_reg_rtx))
7803*38fd1498Szrj 	      {
7804*38fd1498Szrj 		int s = rl->secondary_out_reload;
7805*38fd1498Szrj 		set = single_set (p);
7806*38fd1498Szrj 		/* If this reload copies only to the secondary reload
7807*38fd1498Szrj 		   register, the secondary reload does the actual
7808*38fd1498Szrj 		   store.  */
7809*38fd1498Szrj 		if (s >= 0 && set == NULL_RTX)
7810*38fd1498Szrj 		  /* We can't tell what function the secondary reload
7811*38fd1498Szrj 		     has and where the actual store to the pseudo is
7812*38fd1498Szrj 		     made; leave new_spill_reg_store alone.  */
7813*38fd1498Szrj 		  ;
7814*38fd1498Szrj 		else if (s >= 0
7815*38fd1498Szrj 			 && SET_SRC (set) == rl_reg_rtx
7816*38fd1498Szrj 			 && SET_DEST (set) == rld[s].reg_rtx)
7817*38fd1498Szrj 		  {
7818*38fd1498Szrj 		    /* Usually the next instruction will be the
7819*38fd1498Szrj 		       secondary reload insn;  if we can confirm
7820*38fd1498Szrj 		       that it is, setting new_spill_reg_store to
7821*38fd1498Szrj 		       that insn will allow an extra optimization.  */
7822*38fd1498Szrj 		    rtx s_reg = rld[s].reg_rtx;
7823*38fd1498Szrj 		    rtx_insn *next = NEXT_INSN (p);
7824*38fd1498Szrj 		    rld[s].out = rl->out;
7825*38fd1498Szrj 		    rld[s].out_reg = rl->out_reg;
7826*38fd1498Szrj 		    set = single_set (next);
7827*38fd1498Szrj 		    if (set && SET_SRC (set) == s_reg
7828*38fd1498Szrj 			&& reload_reg_rtx_reaches_end_p (s_reg, s))
7829*38fd1498Szrj 		      {
7830*38fd1498Szrj 			SET_HARD_REG_BIT (reg_is_output_reload,
7831*38fd1498Szrj 					  REGNO (s_reg));
7832*38fd1498Szrj 			new_spill_reg_store[REGNO (s_reg)] = next;
7833*38fd1498Szrj 		      }
7834*38fd1498Szrj 		  }
7835*38fd1498Szrj 		else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
7836*38fd1498Szrj 		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7837*38fd1498Szrj 	      }
7838*38fd1498Szrj 	  }
7839*38fd1498Szrj       }
7840*38fd1498Szrj 
7841*38fd1498Szrj   if (rl->when_needed == RELOAD_OTHER)
7842*38fd1498Szrj     {
7843*38fd1498Szrj       emit_insn (other_output_reload_insns[rl->opnum]);
7844*38fd1498Szrj       other_output_reload_insns[rl->opnum] = get_insns ();
7845*38fd1498Szrj     }
7846*38fd1498Szrj   else
7847*38fd1498Szrj     output_reload_insns[rl->opnum] = get_insns ();
7848*38fd1498Szrj 
7849*38fd1498Szrj   if (cfun->can_throw_non_call_exceptions)
7850*38fd1498Szrj     copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7851*38fd1498Szrj 
7852*38fd1498Szrj   end_sequence ();
7853*38fd1498Szrj }
7854*38fd1498Szrj 
7855*38fd1498Szrj /* Do input reloading for reload RL, which is for the insn described by CHAIN
7856*38fd1498Szrj    and has the number J.  */
7857*38fd1498Szrj static void
7858*38fd1498Szrj do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7859*38fd1498Szrj {
7860*38fd1498Szrj   rtx_insn *insn = chain->insn;
7861*38fd1498Szrj   rtx old = (rl->in && MEM_P (rl->in)
7862*38fd1498Szrj 	     ? rl->in_reg : rl->in);
7863*38fd1498Szrj   rtx reg_rtx = rl->reg_rtx;
7864*38fd1498Szrj 
7865*38fd1498Szrj   if (old && reg_rtx)
7866*38fd1498Szrj     {
7867*38fd1498Szrj       machine_mode mode;
7868*38fd1498Szrj 
7869*38fd1498Szrj       /* Determine the mode to reload in.
7870*38fd1498Szrj 	 This is very tricky because we have three to choose from.
7871*38fd1498Szrj 	 There is the mode the insn operand wants (rl->inmode).
7872*38fd1498Szrj 	 There is the mode of the reload register RELOADREG.
7873*38fd1498Szrj 	 There is the intrinsic mode of the operand, which we could find
7874*38fd1498Szrj 	 by stripping some SUBREGs.
7875*38fd1498Szrj 	 It turns out that RELOADREG's mode is irrelevant:
7876*38fd1498Szrj 	 we can change that arbitrarily.
7877*38fd1498Szrj 
7878*38fd1498Szrj 	 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7879*38fd1498Szrj 	 then the reload reg may not support QImode moves, so use SImode.
7880*38fd1498Szrj 	 If foo is in memory due to spilling a pseudo reg, this is safe,
7881*38fd1498Szrj 	 because the QImode value is in the least significant part of a
7882*38fd1498Szrj 	 slot big enough for a SImode.  If foo is some other sort of
7883*38fd1498Szrj 	 memory reference, then it is impossible to reload this case,
7884*38fd1498Szrj 	 so previous passes had better make sure this never happens.
7885*38fd1498Szrj 
7886*38fd1498Szrj 	 Then consider a one-word union which has SImode and one of its
7887*38fd1498Szrj 	 members is a float, being fetched as (SUBREG:SF union:SI).
7888*38fd1498Szrj 	 We must fetch that as SFmode because we could be loading into
7889*38fd1498Szrj 	 a float-only register.  In this case OLD's mode is correct.
7890*38fd1498Szrj 
7891*38fd1498Szrj 	 Consider an immediate integer: it has VOIDmode.  Here we need
7892*38fd1498Szrj 	 to get a mode from something else.
7893*38fd1498Szrj 
7894*38fd1498Szrj 	 In some cases, there is a fourth mode, the operand's
7895*38fd1498Szrj 	 containing mode.  If the insn specifies a containing mode for
7896*38fd1498Szrj 	 this operand, it overrides all others.
7897*38fd1498Szrj 
7898*38fd1498Szrj 	 I am not sure whether the algorithm here is always right,
7899*38fd1498Szrj 	 but it does the right things in those cases.  */
7900*38fd1498Szrj 
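      /* A small illustration: if OLD is a constant such as (const_int 10),
	 GET_MODE returns VOIDmode, so we fall back on the operand's
	 declared mode rl->inmode; if OLD is (subreg:SF (reg:SI 65) 0),
	 the one-word union case above, its own SFmode is what we keep.  */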
7901*38fd1498Szrj       mode = GET_MODE (old);
7902*38fd1498Szrj       if (mode == VOIDmode)
7903*38fd1498Szrj 	mode = rl->inmode;
7904*38fd1498Szrj 
7905*38fd1498Szrj       /* We cannot use gen_lowpart_common since it can do the wrong thing
7906*38fd1498Szrj 	 when REG_RTX has a multi-word mode.  Note that REG_RTX must
7907*38fd1498Szrj 	 always be a REG here.  */
7908*38fd1498Szrj       if (GET_MODE (reg_rtx) != mode)
7909*38fd1498Szrj 	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7910*38fd1498Szrj     }
7911*38fd1498Szrj   reload_reg_rtx_for_input[j] = reg_rtx;
7912*38fd1498Szrj 
7913*38fd1498Szrj   if (old != 0
7914*38fd1498Szrj       /* AUTO_INC reloads need to be handled even if inherited.  We got an
7915*38fd1498Szrj 	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
7916*38fd1498Szrj       && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7917*38fd1498Szrj       && ! rtx_equal_p (reg_rtx, old)
7918*38fd1498Szrj       && reg_rtx != 0)
7919*38fd1498Szrj     emit_input_reload_insns (chain, rld + j, old, j);
7920*38fd1498Szrj 
7921*38fd1498Szrj   /* When inheriting a wider reload, we have a MEM in rl->in,
7922*38fd1498Szrj      e.g. inheriting a SImode output reload for
7923*38fd1498Szrj      (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
7924*38fd1498Szrj   if (optimize && reload_inherited[j] && rl->in
7925*38fd1498Szrj       && MEM_P (rl->in)
7926*38fd1498Szrj       && MEM_P (rl->in_reg)
7927*38fd1498Szrj       && reload_spill_index[j] >= 0
7928*38fd1498Szrj       && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7929*38fd1498Szrj     rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7930*38fd1498Szrj 
7931*38fd1498Szrj   /* If we are reloading a register that was recently stored in with an
7932*38fd1498Szrj      output-reload, see if we can prove there was
7933*38fd1498Szrj      actually no need to store the old value in it.  */
7934*38fd1498Szrj 
7935*38fd1498Szrj   if (optimize
7936*38fd1498Szrj       && (reload_inherited[j] || reload_override_in[j])
7937*38fd1498Szrj       && reg_rtx
7938*38fd1498Szrj       && REG_P (reg_rtx)
7939*38fd1498Szrj       && spill_reg_store[REGNO (reg_rtx)] != 0
7940*38fd1498Szrj #if 0
7941*38fd1498Szrj       /* There doesn't seem to be any reason to restrict this to pseudos
7942*38fd1498Szrj 	 and doing so loses in the case where we are copying from a
7943*38fd1498Szrj 	 register of the wrong class.  */
7944*38fd1498Szrj       && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7945*38fd1498Szrj #endif
7946*38fd1498Szrj       /* The insn might have already some references to stackslots
7947*38fd1498Szrj 	 replaced by MEMs, while reload_out_reg still names the
7948*38fd1498Szrj 	 original pseudo.  */
7949*38fd1498Szrj       && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7950*38fd1498Szrj 	  || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7951*38fd1498Szrj     delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7952*38fd1498Szrj }
7953*38fd1498Szrj 
7954*38fd1498Szrj /* Do output reloading for reload RL, which is for the insn described by
7955*38fd1498Szrj    CHAIN and has the number J.
7956*38fd1498Szrj    ??? At some point we need to support handling output reloads of
7957*38fd1498Szrj    JUMP_INSNs or insns that set cc0.  */
7958*38fd1498Szrj static void
7959*38fd1498Szrj do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
7960*38fd1498Szrj {
7961*38fd1498Szrj   rtx note, old;
7962*38fd1498Szrj   rtx_insn *insn = chain->insn;
7963*38fd1498Szrj   /* If this is an output reload that stores something that is
7964*38fd1498Szrj      not loaded in this same reload, see if we can eliminate a previous
7965*38fd1498Szrj      store.  */
7966*38fd1498Szrj   rtx pseudo = rl->out_reg;
7967*38fd1498Szrj   rtx reg_rtx = rl->reg_rtx;
7968*38fd1498Szrj 
7969*38fd1498Szrj   if (rl->out && reg_rtx)
7970*38fd1498Szrj     {
7971*38fd1498Szrj       machine_mode mode;
7972*38fd1498Szrj 
7973*38fd1498Szrj       /* Determine the mode to reload in.
7974*38fd1498Szrj 	 See comments above (for input reloading).  */
7975*38fd1498Szrj       mode = GET_MODE (rl->out);
7976*38fd1498Szrj       if (mode == VOIDmode)
7977*38fd1498Szrj 	{
7978*38fd1498Szrj 	  /* VOIDmode should never happen for an output.  */
7979*38fd1498Szrj 	  if (asm_noperands (PATTERN (insn)) < 0)
7980*38fd1498Szrj 	    /* It's the compiler's fault.  */
7981*38fd1498Szrj 	    fatal_insn ("VOIDmode on an output", insn);
7982*38fd1498Szrj 	  error_for_asm (insn, "output operand is constant in %<asm%>");
7983*38fd1498Szrj 	  /* Prevent crash--use something we know is valid.  */
7984*38fd1498Szrj 	  mode = word_mode;
7985*38fd1498Szrj 	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
7986*38fd1498Szrj 	}
7987*38fd1498Szrj       if (GET_MODE (reg_rtx) != mode)
7988*38fd1498Szrj 	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7989*38fd1498Szrj     }
7990*38fd1498Szrj   reload_reg_rtx_for_output[j] = reg_rtx;
7991*38fd1498Szrj 
7992*38fd1498Szrj   if (pseudo
7993*38fd1498Szrj       && optimize
7994*38fd1498Szrj       && REG_P (pseudo)
7995*38fd1498Szrj       && ! rtx_equal_p (rl->in_reg, pseudo)
7996*38fd1498Szrj       && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7997*38fd1498Szrj       && reg_last_reload_reg[REGNO (pseudo)])
7998*38fd1498Szrj     {
7999*38fd1498Szrj       int pseudo_no = REGNO (pseudo);
8000*38fd1498Szrj       int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
8001*38fd1498Szrj 
8002*38fd1498Szrj       /* We don't need to test full validity of last_regno for
8003*38fd1498Szrj 	 inheritance here; we only want to know if the store actually
8004*38fd1498Szrj 	 matches the pseudo.  */
8005*38fd1498Szrj       if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
8006*38fd1498Szrj 	  && reg_reloaded_contents[last_regno] == pseudo_no
8007*38fd1498Szrj 	  && spill_reg_store[last_regno]
8008*38fd1498Szrj 	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
8009*38fd1498Szrj 	delete_output_reload (insn, j, last_regno, reg_rtx);
8010*38fd1498Szrj     }
8011*38fd1498Szrj 
8012*38fd1498Szrj   old = rl->out_reg;
8013*38fd1498Szrj   if (old == 0
8014*38fd1498Szrj       || reg_rtx == 0
8015*38fd1498Szrj       || rtx_equal_p (old, reg_rtx))
8016*38fd1498Szrj     return;
8017*38fd1498Szrj 
8018*38fd1498Szrj   /* An output operand that dies right away does need a reload,
8019*38fd1498Szrj      but need not be copied from it.  Show the new location in the
8020*38fd1498Szrj      REG_UNUSED note.  */
8021*38fd1498Szrj   if ((REG_P (old) || GET_CODE (old) == SCRATCH)
8022*38fd1498Szrj       && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
8023*38fd1498Szrj     {
8024*38fd1498Szrj       XEXP (note, 0) = reg_rtx;
8025*38fd1498Szrj       return;
8026*38fd1498Szrj     }
8027*38fd1498Szrj   /* Likewise for a SUBREG of an operand that dies.  */
8028*38fd1498Szrj   else if (GET_CODE (old) == SUBREG
8029*38fd1498Szrj 	   && REG_P (SUBREG_REG (old))
8030*38fd1498Szrj 	   && (note = find_reg_note (insn, REG_UNUSED,
8031*38fd1498Szrj 				     SUBREG_REG (old))) != 0)
8032*38fd1498Szrj     {
8033*38fd1498Szrj       XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
8034*38fd1498Szrj       return;
8035*38fd1498Szrj     }
8036*38fd1498Szrj   else if (GET_CODE (old) == SCRATCH)
8037*38fd1498Szrj     /* If we aren't optimizing, there won't be a REG_UNUSED note,
8038*38fd1498Szrj        but we don't want to make an output reload.  */
8039*38fd1498Szrj     return;
8040*38fd1498Szrj 
8041*38fd1498Szrj   /* If this is a JUMP_INSN, we can't support output reloads yet.  */
8042*38fd1498Szrj   gcc_assert (NONJUMP_INSN_P (insn));
8043*38fd1498Szrj 
8044*38fd1498Szrj   emit_output_reload_insns (chain, rld + j, j);
8045*38fd1498Szrj }
8046*38fd1498Szrj 
8047*38fd1498Szrj /* A reload copies values of MODE from register SRC to register DEST.
8048*38fd1498Szrj    Return true if it can be treated for inheritance purposes like a
8049*38fd1498Szrj    group of reloads, each one reloading a single hard register.  The
8050*38fd1498Szrj    caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8051*38fd1498Szrj    occupy the same number of hard registers.  */
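/* For instance, on a hypothetical target where a double-word value held
   in a register pair cannot be accessed as two independent single-word
   registers, REG_CAN_CHANGE_MODE_P refuses the mode change, and the
   caller then invalidates the trailing registers instead of recording
   the copy register by register.  */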
8052*38fd1498Szrj 
8053*38fd1498Szrj static bool
8054*38fd1498Szrj inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8055*38fd1498Szrj 		     int src ATTRIBUTE_UNUSED,
8056*38fd1498Szrj 		     machine_mode mode ATTRIBUTE_UNUSED)
8057*38fd1498Szrj {
8058*38fd1498Szrj   return (REG_CAN_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8059*38fd1498Szrj 	  && REG_CAN_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8060*38fd1498Szrj }
8061*38fd1498Szrj 
8062*38fd1498Szrj /* Output insns to reload values in and out of the chosen reload regs.  */
8063*38fd1498Szrj 
8064*38fd1498Szrj static void
8065*38fd1498Szrj emit_reload_insns (struct insn_chain *chain)
8066*38fd1498Szrj {
8067*38fd1498Szrj   rtx_insn *insn = chain->insn;
8068*38fd1498Szrj 
8069*38fd1498Szrj   int j;
8070*38fd1498Szrj 
8071*38fd1498Szrj   CLEAR_HARD_REG_SET (reg_reloaded_died);
8072*38fd1498Szrj 
8073*38fd1498Szrj   for (j = 0; j < reload_n_operands; j++)
8074*38fd1498Szrj     input_reload_insns[j] = input_address_reload_insns[j]
8075*38fd1498Szrj       = inpaddr_address_reload_insns[j]
8076*38fd1498Szrj       = output_reload_insns[j] = output_address_reload_insns[j]
8077*38fd1498Szrj       = outaddr_address_reload_insns[j]
8078*38fd1498Szrj       = other_output_reload_insns[j] = 0;
8079*38fd1498Szrj   other_input_address_reload_insns = 0;
8080*38fd1498Szrj   other_input_reload_insns = 0;
8081*38fd1498Szrj   operand_reload_insns = 0;
8082*38fd1498Szrj   other_operand_reload_insns = 0;
8083*38fd1498Szrj 
8084*38fd1498Szrj   /* Dump reloads into the dump file.  */
8085*38fd1498Szrj   if (dump_file)
8086*38fd1498Szrj     {
8087*38fd1498Szrj       fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
8088*38fd1498Szrj       debug_reload_to_stream (dump_file);
8089*38fd1498Szrj     }
8090*38fd1498Szrj 
8091*38fd1498Szrj   for (j = 0; j < n_reloads; j++)
8092*38fd1498Szrj     if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
8093*38fd1498Szrj       {
8094*38fd1498Szrj 	unsigned int i;
8095*38fd1498Szrj 
8096*38fd1498Szrj 	for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
8097*38fd1498Szrj 	  new_spill_reg_store[i] = 0;
8098*38fd1498Szrj       }
8099*38fd1498Szrj 
8100*38fd1498Szrj   /* Now output the instructions to copy the data into and out of the
8101*38fd1498Szrj      reload registers.  Do these in the order that the reloads were reported,
8102*38fd1498Szrj      since reloads of base and index registers precede reloads of operands
8103*38fd1498Szrj      and the operands may need the base and index registers reloaded.  */
8104*38fd1498Szrj 
8105*38fd1498Szrj   for (j = 0; j < n_reloads; j++)
8106*38fd1498Szrj     {
8107*38fd1498Szrj       do_input_reload (chain, rld + j, j);
8108*38fd1498Szrj       do_output_reload (chain, rld + j, j);
8109*38fd1498Szrj     }
8110*38fd1498Szrj 
8111*38fd1498Szrj   /* Now write all the insns we made for reloads in the order expected by
8112*38fd1498Szrj      the allocation functions.  Prior to the insn being reloaded, we write
8113*38fd1498Szrj      the following reloads:
8114*38fd1498Szrj 
8115*38fd1498Szrj      RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8116*38fd1498Szrj 
8117*38fd1498Szrj      RELOAD_OTHER reloads.
8118*38fd1498Szrj 
8119*38fd1498Szrj      For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8120*38fd1498Szrj      by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8121*38fd1498Szrj      RELOAD_FOR_INPUT reload for the operand.
8122*38fd1498Szrj 
8123*38fd1498Szrj      RELOAD_FOR_OPADDR_ADDRS reloads.
8124*38fd1498Szrj 
8125*38fd1498Szrj      RELOAD_FOR_OPERAND_ADDRESS reloads.
8126*38fd1498Szrj 
8127*38fd1498Szrj      After the insn being reloaded, we write the following:
8128*38fd1498Szrj 
8129*38fd1498Szrj      For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8130*38fd1498Szrj      by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8131*38fd1498Szrj      RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8132*38fd1498Szrj      reloads for the operand.  The RELOAD_OTHER output reloads are
8133*38fd1498Szrj      output in descending order by reload number.  */
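
  /* As a rough illustration, for an insn with one reloaded input operand,
     one reloaded output operand and no address reloads, the stream ends
     up as

	 <RELOAD_FOR_INPUT insn(s)>
	 INSN
	 <RELOAD_FOR_OUTPUT insn(s)>  */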
8134*38fd1498Szrj 
8135*38fd1498Szrj   emit_insn_before (other_input_address_reload_insns, insn);
8136*38fd1498Szrj   emit_insn_before (other_input_reload_insns, insn);
8137*38fd1498Szrj 
8138*38fd1498Szrj   for (j = 0; j < reload_n_operands; j++)
8139*38fd1498Szrj     {
8140*38fd1498Szrj       emit_insn_before (inpaddr_address_reload_insns[j], insn);
8141*38fd1498Szrj       emit_insn_before (input_address_reload_insns[j], insn);
8142*38fd1498Szrj       emit_insn_before (input_reload_insns[j], insn);
8143*38fd1498Szrj     }
8144*38fd1498Szrj 
8145*38fd1498Szrj   emit_insn_before (other_operand_reload_insns, insn);
8146*38fd1498Szrj   emit_insn_before (operand_reload_insns, insn);
8147*38fd1498Szrj 
8148*38fd1498Szrj   for (j = 0; j < reload_n_operands; j++)
8149*38fd1498Szrj     {
8150*38fd1498Szrj       rtx_insn *x = emit_insn_after (outaddr_address_reload_insns[j], insn);
8151*38fd1498Szrj       x = emit_insn_after (output_address_reload_insns[j], x);
8152*38fd1498Szrj       x = emit_insn_after (output_reload_insns[j], x);
8153*38fd1498Szrj       emit_insn_after (other_output_reload_insns[j], x);
8154*38fd1498Szrj     }
8155*38fd1498Szrj 
8156*38fd1498Szrj   /* For all the spill regs newly reloaded in this instruction,
8157*38fd1498Szrj      record what they were reloaded from, so subsequent instructions
8158*38fd1498Szrj      can inherit the reloads.
8159*38fd1498Szrj 
8160*38fd1498Szrj      Update spill_reg_store for the reloads of this insn.
8161*38fd1498Szrj      Copy the elements that were updated in the loop above.  */
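
  /* Roughly: reg_last_reload_reg[P] names the hard register rtx that most
     recently received register P's value, while reg_reloaded_contents[H]
     records which register hard reg H currently holds; keeping the two in
     sync is what allows a later insn to inherit the value instead of
     reloading it again.  */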
8162*38fd1498Szrj 
8163*38fd1498Szrj   for (j = 0; j < n_reloads; j++)
8164*38fd1498Szrj     {
8165*38fd1498Szrj       int r = reload_order[j];
8166*38fd1498Szrj       int i = reload_spill_index[r];
8167*38fd1498Szrj 
8168*38fd1498Szrj       /* If this is a non-inherited input reload from a pseudo, we must
8169*38fd1498Szrj 	 clear any memory of a previous store to the same pseudo.  Only do
8170*38fd1498Szrj 	 something if there will not be an output reload for the pseudo
8171*38fd1498Szrj 	 being reloaded.  */
8172*38fd1498Szrj       if (rld[r].in_reg != 0
8173*38fd1498Szrj 	  && ! (reload_inherited[r] || reload_override_in[r]))
8174*38fd1498Szrj 	{
8175*38fd1498Szrj 	  rtx reg = rld[r].in_reg;
8176*38fd1498Szrj 
8177*38fd1498Szrj 	  if (GET_CODE (reg) == SUBREG)
8178*38fd1498Szrj 	    reg = SUBREG_REG (reg);
8179*38fd1498Szrj 
8180*38fd1498Szrj 	  if (REG_P (reg)
8181*38fd1498Szrj 	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
8182*38fd1498Szrj 	      && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
8183*38fd1498Szrj 	    {
8184*38fd1498Szrj 	      int nregno = REGNO (reg);
8185*38fd1498Szrj 
8186*38fd1498Szrj 	      if (reg_last_reload_reg[nregno])
8187*38fd1498Szrj 		{
8188*38fd1498Szrj 		  int last_regno = REGNO (reg_last_reload_reg[nregno]);
8189*38fd1498Szrj 
8190*38fd1498Szrj 		  if (reg_reloaded_contents[last_regno] == nregno)
8191*38fd1498Szrj 		    spill_reg_store[last_regno] = 0;
8192*38fd1498Szrj 		}
8193*38fd1498Szrj 	    }
8194*38fd1498Szrj 	}
8195*38fd1498Szrj 
8196*38fd1498Szrj       /* I is nonneg if this reload used a register.
8197*38fd1498Szrj 	 If rld[r].reg_rtx is 0, this is an optional reload
8198*38fd1498Szrj 	 that we opted to ignore.  */
8199*38fd1498Szrj 
8200*38fd1498Szrj       if (i >= 0 && rld[r].reg_rtx != 0)
8201*38fd1498Szrj 	{
8202*38fd1498Szrj 	  int nr = hard_regno_nregs (i, GET_MODE (rld[r].reg_rtx));
8203*38fd1498Szrj 	  int k;
8204*38fd1498Szrj 
8205*38fd1498Szrj 	  /* For a multi register reload, we need to check if all or part
8206*38fd1498Szrj 	     of the value lives to the end.  */
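	  /* E.g. a two-word reload register where only one of the two
	     words is still needed after INSN.  */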
8207*38fd1498Szrj 	  for (k = 0; k < nr; k++)
8208*38fd1498Szrj 	    if (reload_reg_reaches_end_p (i + k, r))
8209*38fd1498Szrj 	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
8210*38fd1498Szrj 
8211*38fd1498Szrj 	  /* Maybe the spill reg contains a copy of reload_out.  */
8212*38fd1498Szrj 	  if (rld[r].out != 0
8213*38fd1498Szrj 	      && (REG_P (rld[r].out)
8214*38fd1498Szrj 		  || (rld[r].out_reg
8215*38fd1498Szrj 		      ? REG_P (rld[r].out_reg)
8216*38fd1498Szrj 		      /* The reload value is an auto-modification of
8217*38fd1498Szrj 			 some kind.  For PRE_INC, POST_INC, PRE_DEC
8218*38fd1498Szrj 			 and POST_DEC, we record an equivalence
8219*38fd1498Szrj 			 between the reload register and the operand
8220*38fd1498Szrj 			 on the optimistic assumption that we can make
8221*38fd1498Szrj 			 the equivalence hold.  reload_as_needed must
8222*38fd1498Szrj 			 then either make it hold or invalidate the
8223*38fd1498Szrj 			 equivalence.
8224*38fd1498Szrj 
8225*38fd1498Szrj 			 PRE_MODIFY and POST_MODIFY addresses are reloaded
8226*38fd1498Szrj 			 somewhat differently, and allowing them here leads
8227*38fd1498Szrj 			 to problems.  */
8228*38fd1498Szrj 		      : (GET_CODE (rld[r].out) != POST_MODIFY
8229*38fd1498Szrj 			 && GET_CODE (rld[r].out) != PRE_MODIFY))))
8230*38fd1498Szrj 	    {
8231*38fd1498Szrj 	      rtx reg;
8232*38fd1498Szrj 
8233*38fd1498Szrj 	      reg = reload_reg_rtx_for_output[r];
8234*38fd1498Szrj 	      if (reload_reg_rtx_reaches_end_p (reg, r))
8235*38fd1498Szrj 		{
8236*38fd1498Szrj 		  machine_mode mode = GET_MODE (reg);
8237*38fd1498Szrj 		  int regno = REGNO (reg);
8238*38fd1498Szrj 		  int nregs = REG_NREGS (reg);
8239*38fd1498Szrj 		  rtx out = (REG_P (rld[r].out)
8240*38fd1498Szrj 			     ? rld[r].out
8241*38fd1498Szrj 			     : rld[r].out_reg
8242*38fd1498Szrj 			     ? rld[r].out_reg
8243*38fd1498Szrj /* AUTO_INC */		     : XEXP (rld[r].in_reg, 0));
8244*38fd1498Szrj 		  int out_regno = REGNO (out);
8245*38fd1498Szrj 		  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
8246*38fd1498Szrj 				   : hard_regno_nregs (out_regno, mode));
8247*38fd1498Szrj 		  bool piecemeal;
8248*38fd1498Szrj 
8249*38fd1498Szrj 		  spill_reg_store[regno] = new_spill_reg_store[regno];
8250*38fd1498Szrj 		  spill_reg_stored_to[regno] = out;
8251*38fd1498Szrj 		  reg_last_reload_reg[out_regno] = reg;
8252*38fd1498Szrj 
8253*38fd1498Szrj 		  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
8254*38fd1498Szrj 			       && nregs == out_nregs
8255*38fd1498Szrj 			       && inherit_piecemeal_p (out_regno, regno, mode));
8256*38fd1498Szrj 
8257*38fd1498Szrj 		  /* If OUT_REGNO is a hard register, it may occupy more than
8258*38fd1498Szrj 		     one register.  If it does, say what is in the
8259*38fd1498Szrj 		     rest of the registers assuming that both registers
8260*38fd1498Szrj 		     agree on how many words the object takes.  If not,
8261*38fd1498Szrj 		     invalidate the subsequent registers.  */
8262*38fd1498Szrj 
8263*38fd1498Szrj 		  if (HARD_REGISTER_NUM_P (out_regno))
8264*38fd1498Szrj 		    for (k = 1; k < out_nregs; k++)
8265*38fd1498Szrj 		      reg_last_reload_reg[out_regno + k]
8266*38fd1498Szrj 			= (piecemeal ? regno_reg_rtx[regno + k] : 0);
8267*38fd1498Szrj 
8268*38fd1498Szrj 		  /* Now do the inverse operation.  */
8269*38fd1498Szrj 		  for (k = 0; k < nregs; k++)
8270*38fd1498Szrj 		    {
8271*38fd1498Szrj 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8272*38fd1498Szrj 		      reg_reloaded_contents[regno + k]
8273*38fd1498Szrj 			= (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
8274*38fd1498Szrj 			   ? out_regno
8275*38fd1498Szrj 			   : out_regno + k);
8276*38fd1498Szrj 		      reg_reloaded_insn[regno + k] = insn;
8277*38fd1498Szrj 		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8278*38fd1498Szrj 		      if (targetm.hard_regno_call_part_clobbered (regno + k,
8279*38fd1498Szrj 								  mode))
8280*38fd1498Szrj 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8281*38fd1498Szrj 					  regno + k);
8282*38fd1498Szrj 		      else
8283*38fd1498Szrj 			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8284*38fd1498Szrj 					    regno + k);
8285*38fd1498Szrj 		    }
8286*38fd1498Szrj 		}
8287*38fd1498Szrj 	    }
8288*38fd1498Szrj 	  /* Maybe the spill reg contains a copy of reload_in.  Only do
8289*38fd1498Szrj 	     something if there will not be an output reload for
8290*38fd1498Szrj 	     the register being reloaded.  */
8291*38fd1498Szrj 	  else if (rld[r].out_reg == 0
8292*38fd1498Szrj 		   && rld[r].in != 0
8293*38fd1498Szrj 		   && ((REG_P (rld[r].in)
8294*38fd1498Szrj 			&& !HARD_REGISTER_P (rld[r].in)
8295*38fd1498Szrj 			&& !REGNO_REG_SET_P (&reg_has_output_reload,
8296*38fd1498Szrj 					     REGNO (rld[r].in)))
8297*38fd1498Szrj 		       || (REG_P (rld[r].in_reg)
8298*38fd1498Szrj 			   && !REGNO_REG_SET_P (&reg_has_output_reload,
8299*38fd1498Szrj 						REGNO (rld[r].in_reg))))
8300*38fd1498Szrj 		   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
8301*38fd1498Szrj 	    {
8302*38fd1498Szrj 	      rtx reg;
8303*38fd1498Szrj 
8304*38fd1498Szrj 	      reg = reload_reg_rtx_for_input[r];
8305*38fd1498Szrj 	      if (reload_reg_rtx_reaches_end_p (reg, r))
8306*38fd1498Szrj 		{
8307*38fd1498Szrj 		  machine_mode mode;
8308*38fd1498Szrj 		  int regno;
8309*38fd1498Szrj 		  int nregs;
8310*38fd1498Szrj 		  int in_regno;
8311*38fd1498Szrj 		  int in_nregs;
8312*38fd1498Szrj 		  rtx in;
8313*38fd1498Szrj 		  bool piecemeal;
8314*38fd1498Szrj 
8315*38fd1498Szrj 		  mode = GET_MODE (reg);
8316*38fd1498Szrj 		  regno = REGNO (reg);
8317*38fd1498Szrj 		  nregs = REG_NREGS (reg);
8318*38fd1498Szrj 		  if (REG_P (rld[r].in)
8319*38fd1498Szrj 		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8320*38fd1498Szrj 		    in = rld[r].in;
8321*38fd1498Szrj 		  else if (REG_P (rld[r].in_reg))
8322*38fd1498Szrj 		    in = rld[r].in_reg;
8323*38fd1498Szrj 		  else
8324*38fd1498Szrj 		    in = XEXP (rld[r].in_reg, 0);
8325*38fd1498Szrj 		  in_regno = REGNO (in);
8326*38fd1498Szrj 
8327*38fd1498Szrj 		  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8328*38fd1498Szrj 			      : hard_regno_nregs (in_regno, mode));
8329*38fd1498Szrj 
8330*38fd1498Szrj 		  reg_last_reload_reg[in_regno] = reg;
8331*38fd1498Szrj 
8332*38fd1498Szrj 		  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8333*38fd1498Szrj 			       && nregs == in_nregs
8334*38fd1498Szrj 			       && inherit_piecemeal_p (regno, in_regno, mode));
8335*38fd1498Szrj 
8336*38fd1498Szrj 		  if (HARD_REGISTER_NUM_P (in_regno))
8337*38fd1498Szrj 		    for (k = 1; k < in_nregs; k++)
8338*38fd1498Szrj 		      reg_last_reload_reg[in_regno + k]
8339*38fd1498Szrj 			= (piecemeal ? regno_reg_rtx[regno + k] : 0);
8340*38fd1498Szrj 
8341*38fd1498Szrj 		  /* Unless we inherited this reload, show we haven't
8342*38fd1498Szrj 		     recently done a store.
8343*38fd1498Szrj 		     Previous stores of inherited auto_inc expressions
8344*38fd1498Szrj 		     also have to be discarded.  */
8345*38fd1498Szrj 		  if (! reload_inherited[r]
8346*38fd1498Szrj 		      || (rld[r].out && ! rld[r].out_reg))
8347*38fd1498Szrj 		    spill_reg_store[regno] = 0;
8348*38fd1498Szrj 
8349*38fd1498Szrj 		  for (k = 0; k < nregs; k++)
8350*38fd1498Szrj 		    {
8351*38fd1498Szrj 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8352*38fd1498Szrj 		      reg_reloaded_contents[regno + k]
8353*38fd1498Szrj 			= (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8354*38fd1498Szrj 			   ? in_regno
8355*38fd1498Szrj 			   : in_regno + k);
8356*38fd1498Szrj 		      reg_reloaded_insn[regno + k] = insn;
8357*38fd1498Szrj 		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8358*38fd1498Szrj 		      if (targetm.hard_regno_call_part_clobbered (regno + k,
8359*38fd1498Szrj 								  mode))
8360*38fd1498Szrj 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8361*38fd1498Szrj 					  regno + k);
8362*38fd1498Szrj 		      else
8363*38fd1498Szrj 			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8364*38fd1498Szrj 					    regno + k);
8365*38fd1498Szrj 		    }
8366*38fd1498Szrj 		}
8367*38fd1498Szrj 	    }
8368*38fd1498Szrj 	}
8369*38fd1498Szrj 
8370*38fd1498Szrj       /* The following if-statement was #if 0'd in 1.34 (or before...).
8371*38fd1498Szrj 	 It's reenabled in 1.35 because supposedly nothing else
8372*38fd1498Szrj 	 deals with this problem.  */
8373*38fd1498Szrj 
8374*38fd1498Szrj       /* If a register gets output-reloaded from a non-spill register,
8375*38fd1498Szrj 	 that invalidates any previous reloaded copy of it.
8376*38fd1498Szrj 	 But forget_old_reloads_1 won't get to see it, because
8377*38fd1498Szrj 	 it thinks only about the original insn.  So invalidate it here.
8378*38fd1498Szrj 	 Also do the same thing for RELOAD_OTHER constraints where the
8379*38fd1498Szrj 	 output is discarded.  */
8380*38fd1498Szrj       if (i < 0
8381*38fd1498Szrj 	  && ((rld[r].out != 0
8382*38fd1498Szrj 	       && (REG_P (rld[r].out)
8383*38fd1498Szrj 		   || (MEM_P (rld[r].out)
8384*38fd1498Szrj 		       && REG_P (rld[r].out_reg))))
8385*38fd1498Szrj 	      || (rld[r].out == 0 && rld[r].out_reg
8386*38fd1498Szrj 		  && REG_P (rld[r].out_reg))))
8387*38fd1498Szrj 	{
8388*38fd1498Szrj 	  rtx out = ((rld[r].out && REG_P (rld[r].out))
8389*38fd1498Szrj 		     ? rld[r].out : rld[r].out_reg);
8390*38fd1498Szrj 	  int out_regno = REGNO (out);
8391*38fd1498Szrj 	  machine_mode mode = GET_MODE (out);
8392*38fd1498Szrj 
8393*38fd1498Szrj 	  /* REG_RTX is now set or clobbered by the main instruction.
8394*38fd1498Szrj 	     As the comment above explains, forget_old_reloads_1 only
8395*38fd1498Szrj 	     sees the original instruction, and there is no guarantee
8396*38fd1498Szrj 	     that the original instruction also clobbered REG_RTX.
8397*38fd1498Szrj 	     For example, if find_reloads sees that the input side of
8398*38fd1498Szrj 	     a matched operand pair dies in this instruction, it may
8399*38fd1498Szrj 	     use the input register as the reload register.
8400*38fd1498Szrj 
8401*38fd1498Szrj 	     Calling forget_old_reloads_1 is a waste of effort if
8402*38fd1498Szrj 	     REG_RTX is also the output register.
8403*38fd1498Szrj 
8404*38fd1498Szrj 	     If we know that REG_RTX holds the value of a pseudo
8405*38fd1498Szrj 	     register, the code after the call will record that fact.  */
8406*38fd1498Szrj 	  if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8407*38fd1498Szrj 	    forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8408*38fd1498Szrj 
8409*38fd1498Szrj 	  if (!HARD_REGISTER_NUM_P (out_regno))
8410*38fd1498Szrj 	    {
8411*38fd1498Szrj 	      rtx src_reg;
8412*38fd1498Szrj 	      rtx_insn *store_insn = NULL;
8413*38fd1498Szrj 
8414*38fd1498Szrj 	      reg_last_reload_reg[out_regno] = 0;
8415*38fd1498Szrj 
8416*38fd1498Szrj 	      /* If we can find a hard register that is stored, record
8417*38fd1498Szrj 		 the storing insn so that we may delete this insn with
8418*38fd1498Szrj 		 delete_output_reload.  */
8419*38fd1498Szrj 	      src_reg = reload_reg_rtx_for_output[r];
8420*38fd1498Szrj 
8421*38fd1498Szrj 	      if (src_reg)
8422*38fd1498Szrj 		{
8423*38fd1498Szrj 		  if (reload_reg_rtx_reaches_end_p (src_reg, r))
8424*38fd1498Szrj 		    store_insn = new_spill_reg_store[REGNO (src_reg)];
8425*38fd1498Szrj 		  else
8426*38fd1498Szrj 		    src_reg = NULL_RTX;
8427*38fd1498Szrj 		}
8428*38fd1498Szrj 	      else
8429*38fd1498Szrj 		{
8430*38fd1498Szrj 		  /* If this is an optional reload, try to find the
8431*38fd1498Szrj 		     source reg from an input reload.  */
8432*38fd1498Szrj 		  rtx set = single_set (insn);
8433*38fd1498Szrj 		  if (set && SET_DEST (set) == rld[r].out)
8434*38fd1498Szrj 		    {
8435*38fd1498Szrj 		      int k;
8436*38fd1498Szrj 
8437*38fd1498Szrj 		      src_reg = SET_SRC (set);
8438*38fd1498Szrj 		      store_insn = insn;
8439*38fd1498Szrj 		      for (k = 0; k < n_reloads; k++)
8440*38fd1498Szrj 			{
8441*38fd1498Szrj 			  if (rld[k].in == src_reg)
8442*38fd1498Szrj 			    {
8443*38fd1498Szrj 			      src_reg = reload_reg_rtx_for_input[k];
8444*38fd1498Szrj 			      break;
8445*38fd1498Szrj 			    }
8446*38fd1498Szrj 			}
8447*38fd1498Szrj 		    }
8448*38fd1498Szrj 		}
8449*38fd1498Szrj 	      if (src_reg && REG_P (src_reg)
8450*38fd1498Szrj 		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8451*38fd1498Szrj 		{
8452*38fd1498Szrj 		  int src_regno, src_nregs, k;
8453*38fd1498Szrj 		  rtx note;
8454*38fd1498Szrj 
8455*38fd1498Szrj 		  gcc_assert (GET_MODE (src_reg) == mode);
8456*38fd1498Szrj 		  src_regno = REGNO (src_reg);
8457*38fd1498Szrj 		  src_nregs = hard_regno_nregs (src_regno, mode);
8458*38fd1498Szrj 		  /* Where to find a death note varies with
8459*38fd1498Szrj 		     PRESERVE_DEATH_INFO_REGNO_P.  The condition is not
8460*38fd1498Szrj 		     necessarily checked exactly in the code that moves
8461*38fd1498Szrj 		     notes, so just check both locations.  */
8462*38fd1498Szrj 		  note = find_regno_note (insn, REG_DEAD, src_regno);
8463*38fd1498Szrj 		  if (! note && store_insn)
8464*38fd1498Szrj 		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
8465*38fd1498Szrj 		  for (k = 0; k < src_nregs; k++)
8466*38fd1498Szrj 		    {
8467*38fd1498Szrj 		      spill_reg_store[src_regno + k] = store_insn;
8468*38fd1498Szrj 		      spill_reg_stored_to[src_regno + k] = out;
8469*38fd1498Szrj 		      reg_reloaded_contents[src_regno + k] = out_regno;
8470*38fd1498Szrj 		      reg_reloaded_insn[src_regno + k] = store_insn;
8471*38fd1498Szrj 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8472*38fd1498Szrj 		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8473*38fd1498Szrj 		      if (targetm.hard_regno_call_part_clobbered
8474*38fd1498Szrj 			  (src_regno + k, mode))
8475*38fd1498Szrj 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8476*38fd1498Szrj 					  src_regno + k);
8477*38fd1498Szrj 		      else
8478*38fd1498Szrj 			CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8479*38fd1498Szrj 					    src_regno + k);
8480*38fd1498Szrj 		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8481*38fd1498Szrj 		      if (note)
8482*38fd1498Szrj 			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8483*38fd1498Szrj 		      else
8484*38fd1498Szrj 			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8485*38fd1498Szrj 		    }
8486*38fd1498Szrj 		  reg_last_reload_reg[out_regno] = src_reg;
8487*38fd1498Szrj 		  /* We have to set reg_has_output_reload here, or else
8488*38fd1498Szrj 		     forget_old_reloads_1 will clear reg_last_reload_reg
8489*38fd1498Szrj 		     right away.  */
8490*38fd1498Szrj 		  SET_REGNO_REG_SET (&reg_has_output_reload,
8491*38fd1498Szrj 				     out_regno);
8492*38fd1498Szrj 		}
8493*38fd1498Szrj 	    }
8494*38fd1498Szrj 	  else
8495*38fd1498Szrj 	    {
8496*38fd1498Szrj 	      int k, out_nregs = hard_regno_nregs (out_regno, mode);
8497*38fd1498Szrj 
8498*38fd1498Szrj 	      for (k = 0; k < out_nregs; k++)
8499*38fd1498Szrj 		reg_last_reload_reg[out_regno + k] = 0;
8500*38fd1498Szrj 	    }
8501*38fd1498Szrj 	}
8502*38fd1498Szrj     }
8503*38fd1498Szrj   IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8504*38fd1498Szrj }
8505*38fd1498Szrj 
8506*38fd1498Szrj /* Go through the motions to emit INSN and test if it is strictly valid.
8507*38fd1498Szrj    Return the emitted insn if valid, else return NULL.  */
8508*38fd1498Szrj 
8509*38fd1498Szrj static rtx_insn *
8510*38fd1498Szrj emit_insn_if_valid_for_reload (rtx pat)
8511*38fd1498Szrj {
8512*38fd1498Szrj   rtx_insn *last = get_last_insn ();
8513*38fd1498Szrj   int code;
8514*38fd1498Szrj 
8515*38fd1498Szrj   rtx_insn *insn = emit_insn (pat);
8516*38fd1498Szrj   code = recog_memoized (insn);
8517*38fd1498Szrj 
8518*38fd1498Szrj   if (code >= 0)
8519*38fd1498Szrj     {
8520*38fd1498Szrj       extract_insn (insn);
8521*38fd1498Szrj       /* We want constrain_operands to treat this insn strictly in its
8522*38fd1498Szrj 	 validity determination, i.e., the way it would after reload has
8523*38fd1498Szrj 	 completed.  */
8524*38fd1498Szrj       if (constrain_operands (1, get_enabled_alternatives (insn)))
8525*38fd1498Szrj 	return insn;
8526*38fd1498Szrj     }
8527*38fd1498Szrj 
8528*38fd1498Szrj   delete_insns_since (last);
8529*38fd1498Szrj   return NULL;
8530*38fd1498Szrj }
8531*38fd1498Szrj 
8532*38fd1498Szrj /* Emit code to perform a reload from IN (which may be a reload register) to
8533*38fd1498Szrj    OUT (which may also be a reload register).  IN or OUT is from operand
8534*38fd1498Szrj    OPNUM with reload type TYPE.
8535*38fd1498Szrj 
8536*38fd1498Szrj    Returns first insn emitted.  */
8537*38fd1498Szrj 
8538*38fd1498Szrj static rtx_insn *
8539*38fd1498Szrj gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
8540*38fd1498Szrj {
8541*38fd1498Szrj   rtx_insn *last = get_last_insn ();
8542*38fd1498Szrj   rtx_insn *tem;
8543*38fd1498Szrj   rtx tem1, tem2;
8544*38fd1498Szrj 
8545*38fd1498Szrj   /* If IN is a paradoxical SUBREG, remove it and try to put the
8546*38fd1498Szrj      opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
8547*38fd1498Szrj   if (!strip_paradoxical_subreg (&in, &out))
8548*38fd1498Szrj     strip_paradoxical_subreg (&out, &in);
8549*38fd1498Szrj 
8550*38fd1498Szrj   /* How to do this reload can get quite tricky.  Normally, we are being
8551*38fd1498Szrj      asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8552*38fd1498Szrj      register that didn't get a hard register.  In that case we can just
8553*38fd1498Szrj      call emit_move_insn.
8554*38fd1498Szrj 
8555*38fd1498Szrj      We can also be asked to reload a PLUS that adds a register or a MEM to
8556*38fd1498Szrj      another register, constant or MEM.  This can occur during frame pointer
8557*38fd1498Szrj      elimination and while reloading addresses.  This case is handled by
8558*38fd1498Szrj      trying to emit a single insn to perform the add.  If it is not valid,
8559*38fd1498Szrj      we use a two insn sequence.
8560*38fd1498Szrj 
8561*38fd1498Szrj      Or we can be asked to reload a unary operand that was a fragment of
8562*38fd1498Szrj      an addressing mode, into a register.  If it isn't recognized as-is,
8563*38fd1498Szrj      we try making the unop operand and the reload-register the same:
8564*38fd1498Szrj      (set reg:X (unop:X expr:Y))
8565*38fd1498Szrj      -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8566*38fd1498Szrj 
8567*38fd1498Szrj      Finally, we could be called to handle an 'o' constraint by putting
8568*38fd1498Szrj      an address into a register.  In that case, we first try to do this
8569*38fd1498Szrj      with a named pattern of "reload_load_address".  If no such pattern
8570*38fd1498Szrj      exists, we just emit a SET insn and hope for the best (it will normally
8571*38fd1498Szrj      be valid on machines that use 'o').
8572*38fd1498Szrj 
8573*38fd1498Szrj      This entire process is made complex because reload will never
8574*38fd1498Szrj      process the insns we generate here and so we must ensure that
8575*38fd1498Szrj      they will fit their constraints and also by the fact that parts of
8576*38fd1498Szrj      IN might be being reloaded separately and replaced with spill registers.
8577*38fd1498Szrj      Because of this, we are, in some sense, just guessing the right approach
8578*38fd1498Szrj      here.  The one listed above seems to work.
8579*38fd1498Szrj 
8580*38fd1498Szrj      ??? At some point, this whole thing needs to be rethought.  */
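
  /* As a concrete illustration (register numbers are arbitrary): reloading
     IN = (plus:SI (reg:SI 14 fp) (const_int 400)) into OUT = (reg:SI 3)
     first tries the single insn

	 (set (reg:SI 3) (plus:SI (reg:SI 14 fp) (const_int 400)))

     and, if no add pattern accepts that form, normally falls back to the
     two-insn sequence

	 (set (reg:SI 3) (const_int 400))
	 (set (reg:SI 3) (plus:SI (reg:SI 3) (reg:SI 14 fp)))  */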
8581*38fd1498Szrj 
8582*38fd1498Szrj   if (GET_CODE (in) == PLUS
8583*38fd1498Szrj       && (REG_P (XEXP (in, 0))
8584*38fd1498Szrj 	  || GET_CODE (XEXP (in, 0)) == SUBREG
8585*38fd1498Szrj 	  || MEM_P (XEXP (in, 0)))
8586*38fd1498Szrj       && (REG_P (XEXP (in, 1))
8587*38fd1498Szrj 	  || GET_CODE (XEXP (in, 1)) == SUBREG
8588*38fd1498Szrj 	  || CONSTANT_P (XEXP (in, 1))
8589*38fd1498Szrj 	  || MEM_P (XEXP (in, 1))))
8590*38fd1498Szrj     {
8591*38fd1498Szrj       /* We need to compute the sum of a register or a MEM and another
8592*38fd1498Szrj 	 register, constant, or MEM, and put it into the reload
8593*38fd1498Szrj 	 register.  The best possible way of doing this is if the machine
8594*38fd1498Szrj 	 has a three-operand ADD insn that accepts the required operands.
8595*38fd1498Szrj 
8596*38fd1498Szrj 	 The simplest approach is to try to generate such an insn and see if it
8597*38fd1498Szrj 	 is recognized and matches its constraints.  If so, it can be used.
8598*38fd1498Szrj 
8599*38fd1498Szrj 	 It might be better not to actually emit the insn unless it is valid,
8600*38fd1498Szrj 	 but we need to pass the insn as an operand to `recog' and
8601*38fd1498Szrj 	 `extract_insn' and it is simpler to emit and then delete the insn if
8602*38fd1498Szrj 	 not valid than to dummy things up.  */
8603*38fd1498Szrj 
8604*38fd1498Szrj       rtx op0, op1, tem;
8605*38fd1498Szrj       rtx_insn *insn;
8606*38fd1498Szrj       enum insn_code code;
8607*38fd1498Szrj 
8608*38fd1498Szrj       op0 = find_replacement (&XEXP (in, 0));
8609*38fd1498Szrj       op1 = find_replacement (&XEXP (in, 1));
8610*38fd1498Szrj 
8611*38fd1498Szrj       /* Since constraint checking is strict, commutativity won't be
8612*38fd1498Szrj 	 checked, so we need to do that here to avoid spurious failure
8613*38fd1498Szrj 	 if the add instruction is two-address and the second operand
8614*38fd1498Szrj 	 of the add is the same as the reload reg, which is frequently
8615*38fd1498Szrj 	 the case.  If the insn would be A = B + A, rearrange it so
8616*38fd1498Szrj 	 it will be A = A + B as constrain_operands expects.  */
8617*38fd1498Szrj 
8618*38fd1498Szrj       if (REG_P (XEXP (in, 1))
8619*38fd1498Szrj 	  && REGNO (out) == REGNO (XEXP (in, 1)))
8620*38fd1498Szrj 	tem = op0, op0 = op1, op1 = tem;
8621*38fd1498Szrj 
8622*38fd1498Szrj       if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8623*38fd1498Szrj 	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8624*38fd1498Szrj 
8625*38fd1498Szrj       insn = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
8626*38fd1498Szrj       if (insn)
8627*38fd1498Szrj 	return insn;
8628*38fd1498Szrj 
8629*38fd1498Szrj       /* If that failed, we must use a conservative two-insn sequence.
8630*38fd1498Szrj 
8631*38fd1498Szrj 	 Use a move to copy one operand into the reload register.  Prefer
8632*38fd1498Szrj 	 to reload a constant, MEM or pseudo since the move patterns can
8633*38fd1498Szrj 	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
8634*38fd1498Szrj 	 pseudo and OP1 is not a valid operand for an add instruction, then
8635*38fd1498Szrj 	 reload OP1.
8636*38fd1498Szrj 
8637*38fd1498Szrj 	 After reloading one of the operands into the reload register, add
8638*38fd1498Szrj 	 the reload register to the output register.
8639*38fd1498Szrj 
8640*38fd1498Szrj 	 If there is another way to do this for a specific machine, a
8641*38fd1498Szrj 	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8642*38fd1498Szrj 	 we emit below.  */
8643*38fd1498Szrj 
8644*38fd1498Szrj       code = optab_handler (add_optab, GET_MODE (out));
8645*38fd1498Szrj 
8646*38fd1498Szrj       if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8647*38fd1498Szrj 	  || (REG_P (op1)
8648*38fd1498Szrj 	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8649*38fd1498Szrj 	  || (code != CODE_FOR_nothing
8650*38fd1498Szrj 	      && !insn_operand_matches (code, 2, op1)))
8651*38fd1498Szrj 	tem = op0, op0 = op1, op1 = tem;
8652*38fd1498Szrj 
8653*38fd1498Szrj       gen_reload (out, op0, opnum, type);
8654*38fd1498Szrj 
8655*38fd1498Szrj       /* If OP0 and OP1 are the same, we can use OUT for OP1.
8656*38fd1498Szrj 	 This fixes a problem on the 32K where the stack pointer cannot
8657*38fd1498Szrj 	 be used as an operand of an add insn.  */
8658*38fd1498Szrj 
8659*38fd1498Szrj       if (rtx_equal_p (op0, op1))
8660*38fd1498Szrj 	op1 = out;
8661*38fd1498Szrj 
8662*38fd1498Szrj       insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8663*38fd1498Szrj       if (insn)
8664*38fd1498Szrj 	{
8665*38fd1498Szrj 	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
8666*38fd1498Szrj 	  set_dst_reg_note (insn, REG_EQUIV, in, out);
8667*38fd1498Szrj 	  return insn;
8668*38fd1498Szrj 	}
8669*38fd1498Szrj 
8670*38fd1498Szrj       /* If that failed, copy the address register to the reload register.
8671*38fd1498Szrj 	 Then add the constant to the reload register.  */
8672*38fd1498Szrj 
8673*38fd1498Szrj       gcc_assert (!reg_overlap_mentioned_p (out, op0));
8674*38fd1498Szrj       gen_reload (out, op1, opnum, type);
8675*38fd1498Szrj       insn = emit_insn (gen_add2_insn (out, op0));
8676*38fd1498Szrj       set_dst_reg_note (insn, REG_EQUIV, in, out);
8677*38fd1498Szrj     }
8678*38fd1498Szrj 
8679*38fd1498Szrj   /* If we need a memory location to do the move, do it that way.  */
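  /* For instance, on a target whose general and floating-point registers
     cannot be copied directly, the value is bounced through a stack slot:
     gen_reload first stores IN into the secondary memory location and
     then loads OUT from it.  */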
8680*38fd1498Szrj   else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
8681*38fd1498Szrj 	    (REG_P (tem1) && REG_P (tem2)))
8682*38fd1498Szrj 	   && REGNO (tem1) < FIRST_PSEUDO_REGISTER
8683*38fd1498Szrj 	   && REGNO (tem2) < FIRST_PSEUDO_REGISTER
8684*38fd1498Szrj 	   && targetm.secondary_memory_needed (GET_MODE (out),
8685*38fd1498Szrj 					       REGNO_REG_CLASS (REGNO (tem1)),
8686*38fd1498Szrj 					       REGNO_REG_CLASS (REGNO (tem2))))
8687*38fd1498Szrj     {
8688*38fd1498Szrj       /* Get the memory to use and rewrite both registers to its mode.  */
8689*38fd1498Szrj       rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8690*38fd1498Szrj 
8691*38fd1498Szrj       if (GET_MODE (loc) != GET_MODE (out))
8692*38fd1498Szrj 	out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));
8693*38fd1498Szrj 
8694*38fd1498Szrj       if (GET_MODE (loc) != GET_MODE (in))
8695*38fd1498Szrj 	in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));
8696*38fd1498Szrj 
8697*38fd1498Szrj       gen_reload (loc, in, opnum, type);
8698*38fd1498Szrj       gen_reload (out, loc, opnum, type);
8699*38fd1498Szrj     }
8700*38fd1498Szrj   else if (REG_P (out) && UNARY_P (in))
8701*38fd1498Szrj     {
8702*38fd1498Szrj       rtx op1;
8703*38fd1498Szrj       rtx out_moded;
8704*38fd1498Szrj       rtx_insn *set;
8705*38fd1498Szrj 
8706*38fd1498Szrj       op1 = find_replacement (&XEXP (in, 0));
8707*38fd1498Szrj       if (op1 != XEXP (in, 0))
8708*38fd1498Szrj 	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8709*38fd1498Szrj 
8710*38fd1498Szrj       /* First, try a plain SET.  */
8711*38fd1498Szrj       set = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
8712*38fd1498Szrj       if (set)
8713*38fd1498Szrj 	return set;
8714*38fd1498Szrj 
8715*38fd1498Szrj       /* If that failed, move the inner operand to the reload
8716*38fd1498Szrj 	 register, and try the same unop with the inner expression
8717*38fd1498Szrj 	 replaced with the reload register.  */
8718*38fd1498Szrj 
8719*38fd1498Szrj       if (GET_MODE (op1) != GET_MODE (out))
8720*38fd1498Szrj 	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8721*38fd1498Szrj       else
8722*38fd1498Szrj 	out_moded = out;
8723*38fd1498Szrj 
8724*38fd1498Szrj       gen_reload (out_moded, op1, opnum, type);
8725*38fd1498Szrj 
8726*38fd1498Szrj       rtx temp = gen_rtx_SET (out, gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8727*38fd1498Szrj 						  out_moded));
8728*38fd1498Szrj       rtx_insn *insn = emit_insn_if_valid_for_reload (temp);
8729*38fd1498Szrj       if (insn)
8730*38fd1498Szrj 	{
8731*38fd1498Szrj 	  set_unique_reg_note (insn, REG_EQUIV, in);
8732*38fd1498Szrj 	  return insn;
8733*38fd1498Szrj 	}
8734*38fd1498Szrj 
8735*38fd1498Szrj       fatal_insn ("failure trying to reload:", set);
8736*38fd1498Szrj     }
8737*38fd1498Szrj   /* If IN is a simple operand, use gen_move_insn.  */
8738*38fd1498Szrj   else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8739*38fd1498Szrj     {
8740*38fd1498Szrj       tem = emit_insn (gen_move_insn (out, in));
8741*38fd1498Szrj       /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
8742*38fd1498Szrj       mark_jump_label (in, tem, 0);
8743*38fd1498Szrj     }
8744*38fd1498Szrj 
8745*38fd1498Szrj   else if (targetm.have_reload_load_address ())
8746*38fd1498Szrj     emit_insn (targetm.gen_reload_load_address (out, in));
8747*38fd1498Szrj 
8748*38fd1498Szrj   /* Otherwise, just write (set OUT IN) and hope for the best.  */
8749*38fd1498Szrj   else
8750*38fd1498Szrj     emit_insn (gen_rtx_SET (out, in));
8751*38fd1498Szrj 
8752*38fd1498Szrj   /* Return the first insn emitted.
8753*38fd1498Szrj      We cannot just return get_last_insn, because there may have
8754*38fd1498Szrj      been multiple instructions emitted.  Also note that gen_move_insn may
8755*38fd1498Szrj      emit more than one insn itself, so we cannot assume that there is one
8756*38fd1498Szrj      insn emitted per emit_insn_before call.  */
8757*38fd1498Szrj 
8758*38fd1498Szrj   return last ? NEXT_INSN (last) : get_insns ();
8759*38fd1498Szrj }
8760*38fd1498Szrj 
8761*38fd1498Szrj /* Delete a previously made output-reload whose result we now believe
8762*38fd1498Szrj    is not needed.  First we double-check.
8763*38fd1498Szrj 
8764*38fd1498Szrj    INSN is the insn now being processed.
8765*38fd1498Szrj    LAST_RELOAD_REG is the hard register number for which we want to delete
8766*38fd1498Szrj    the last output reload.
8767*38fd1498Szrj    J is the reload-number that originally used REG.  The caller has made
8768*38fd1498Szrj    certain that reload J doesn't use REG any longer for input.
8769*38fd1498Szrj    NEW_RELOAD_REG is the reload register that reload J is using for REG.  */
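
/* For instance, if the previous output reload stored a pseudo from reload
   register (reg:SI 3) back into its stack slot, and the insn now being
   processed obtains that pseudo's value from (reg:SI 3) again by
   inheritance, the store may be dead; the checks below verify this and,
   if they succeed, delete it together with any address reloads that only
   fed it.  */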
8770*38fd1498Szrj 
8771*38fd1498Szrj static void
8772*38fd1498Szrj delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
8773*38fd1498Szrj 		      rtx new_reload_reg)
8774*38fd1498Szrj {
8775*38fd1498Szrj   rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
8776*38fd1498Szrj   rtx reg = spill_reg_stored_to[last_reload_reg];
8777*38fd1498Szrj   int k;
8778*38fd1498Szrj   int n_occurrences;
8779*38fd1498Szrj   int n_inherited = 0;
8780*38fd1498Szrj   rtx substed;
8781*38fd1498Szrj   unsigned regno;
8782*38fd1498Szrj   int nregs;
8783*38fd1498Szrj 
8784*38fd1498Szrj   /* It is possible that this reload was only used to set another reload
8785*38fd1498Szrj      that we eliminated earlier, which also deleted this instruction.  */
8786*38fd1498Szrj   if (output_reload_insn->deleted ())
8787*38fd1498Szrj     return;
8788*38fd1498Szrj 
8789*38fd1498Szrj   /* Get the raw pseudo-register referred to.  */
8790*38fd1498Szrj 
8791*38fd1498Szrj   while (GET_CODE (reg) == SUBREG)
8792*38fd1498Szrj     reg = SUBREG_REG (reg);
8793*38fd1498Szrj   substed = reg_equiv_memory_loc (REGNO (reg));
8794*38fd1498Szrj 
8795*38fd1498Szrj   /* This is unsafe if the operand occurs more often in the current
8796*38fd1498Szrj      insn than it is inherited.  */
8797*38fd1498Szrj   for (k = n_reloads - 1; k >= 0; k--)
8798*38fd1498Szrj     {
8799*38fd1498Szrj       rtx reg2 = rld[k].in;
8800*38fd1498Szrj       if (! reg2)
8801*38fd1498Szrj 	continue;
8802*38fd1498Szrj       if (MEM_P (reg2) || reload_override_in[k])
8803*38fd1498Szrj 	reg2 = rld[k].in_reg;
8804*38fd1498Szrj 
8805*38fd1498Szrj       if (AUTO_INC_DEC && rld[k].out && ! rld[k].out_reg)
8806*38fd1498Szrj 	reg2 = XEXP (rld[k].in_reg, 0);
8807*38fd1498Szrj 
8808*38fd1498Szrj       while (GET_CODE (reg2) == SUBREG)
8809*38fd1498Szrj 	reg2 = SUBREG_REG (reg2);
8810*38fd1498Szrj       if (rtx_equal_p (reg2, reg))
8811*38fd1498Szrj 	{
8812*38fd1498Szrj 	  if (reload_inherited[k] || reload_override_in[k] || k == j)
8813*38fd1498Szrj 	    n_inherited++;
8814*38fd1498Szrj 	  else
8815*38fd1498Szrj 	    return;
8816*38fd1498Szrj 	}
8817*38fd1498Szrj     }
8818*38fd1498Szrj   n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8819*38fd1498Szrj   if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8820*38fd1498Szrj     n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8821*38fd1498Szrj 					reg, 0);
8822*38fd1498Szrj   if (substed)
8823*38fd1498Szrj     n_occurrences += count_occurrences (PATTERN (insn),
8824*38fd1498Szrj 					eliminate_regs (substed, VOIDmode,
8825*38fd1498Szrj 							NULL_RTX), 0);
8826*38fd1498Szrj   for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
8827*38fd1498Szrj     {
8828*38fd1498Szrj       gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8829*38fd1498Szrj       n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8830*38fd1498Szrj     }
8831*38fd1498Szrj   if (n_occurrences > n_inherited)
8832*38fd1498Szrj     return;
8833*38fd1498Szrj 
8834*38fd1498Szrj   regno = REGNO (reg);
8835*38fd1498Szrj   nregs = REG_NREGS (reg);
8836*38fd1498Szrj 
8837*38fd1498Szrj   /* If the pseudo-reg we are reloading is no longer referenced
8838*38fd1498Szrj      anywhere between the store into it and here,
8839*38fd1498Szrj      and we're within the same basic block, then the value can only
8840*38fd1498Szrj      pass through the reload reg and end up here.
8841*38fd1498Szrj      Otherwise, give up--return.  */
8842*38fd1498Szrj   for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
8843*38fd1498Szrj        i1 != insn; i1 = NEXT_INSN (i1))
8844*38fd1498Szrj     {
8845*38fd1498Szrj       if (NOTE_INSN_BASIC_BLOCK_P (i1))
8846*38fd1498Szrj 	return;
8847*38fd1498Szrj       if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8848*38fd1498Szrj 	  && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
8849*38fd1498Szrj 	{
8850*38fd1498Szrj 	  /* If this is a USE in front of INSN, we only have to check that
8851*38fd1498Szrj 	     there are no more references than accounted for by inheritance.  */
8852*38fd1498Szrj 	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8853*38fd1498Szrj 	    {
8854*38fd1498Szrj 	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8855*38fd1498Szrj 	      i1 = NEXT_INSN (i1);
8856*38fd1498Szrj 	    }
8857*38fd1498Szrj 	  if (n_occurrences <= n_inherited && i1 == insn)
8858*38fd1498Szrj 	    break;
8859*38fd1498Szrj 	  return;
8860*38fd1498Szrj 	}
8861*38fd1498Szrj     }
8862*38fd1498Szrj 
8863*38fd1498Szrj   /* We will be deleting the insn.  Remove the spill reg information.  */
8864*38fd1498Szrj   for (k = hard_regno_nregs (last_reload_reg, GET_MODE (reg)); k-- > 0; )
8865*38fd1498Szrj     {
8866*38fd1498Szrj       spill_reg_store[last_reload_reg + k] = 0;
8867*38fd1498Szrj       spill_reg_stored_to[last_reload_reg + k] = 0;
8868*38fd1498Szrj     }
8869*38fd1498Szrj 
8870*38fd1498Szrj   /* The caller has already checked that REG dies or is set in INSN.
8871*38fd1498Szrj      It has also checked that we are optimizing, and thus some
8872*38fd1498Szrj      inaccuracies in the debugging information are acceptable.
8873*38fd1498Szrj      So we could just delete output_reload_insn.  But in some cases
8874*38fd1498Szrj      we can improve the debugging information without sacrificing
8875*38fd1498Szrj      optimization - maybe even improving the code: See if the pseudo
8876*38fd1498Szrj      reg has been completely replaced with reload regs.  If so, delete
8877*38fd1498Szrj      the store insn and forget we had a stack slot for the pseudo.  */
8878*38fd1498Szrj   if (rld[j].out != rld[j].in
8879*38fd1498Szrj       && REG_N_DEATHS (REGNO (reg)) == 1
8880*38fd1498Szrj       && REG_N_SETS (REGNO (reg)) == 1
8881*38fd1498Szrj       && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8882*38fd1498Szrj       && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8883*38fd1498Szrj     {
8884*38fd1498Szrj       rtx_insn *i2;
8885*38fd1498Szrj 
8886*38fd1498Szrj       /* We know that it was used only between here and the beginning of
8887*38fd1498Szrj 	 the current basic block.  (We also know that the last use before
8888*38fd1498Szrj 	 INSN was the output reload we are thinking of deleting, but never
8889*38fd1498Szrj 	 mind that.)  Search that range; see if any ref remains.  */
8890*38fd1498Szrj       for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8891*38fd1498Szrj 	{
8892*38fd1498Szrj 	  rtx set = single_set (i2);
8893*38fd1498Szrj 
8894*38fd1498Szrj 	  /* Uses which just store in the pseudo don't count,
8895*38fd1498Szrj 	     since if they are the only uses, they are dead.  */
8896*38fd1498Szrj 	  if (set != 0 && SET_DEST (set) == reg)
8897*38fd1498Szrj 	    continue;
8898*38fd1498Szrj 	  if (LABEL_P (i2) || JUMP_P (i2))
8899*38fd1498Szrj 	    break;
8900*38fd1498Szrj 	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8901*38fd1498Szrj 	      && reg_mentioned_p (reg, PATTERN (i2)))
8902*38fd1498Szrj 	    {
8903*38fd1498Szrj 	      /* Some other ref remains; just delete the output reload we
8904*38fd1498Szrj 		 know to be dead.  */
8905*38fd1498Szrj 	      delete_address_reloads (output_reload_insn, insn);
8906*38fd1498Szrj 	      delete_insn (output_reload_insn);
8907*38fd1498Szrj 	      return;
8908*38fd1498Szrj 	    }
8909*38fd1498Szrj 	}
8910*38fd1498Szrj 
8911*38fd1498Szrj       /* Delete the now-dead stores into this pseudo.  Note that this
8912*38fd1498Szrj 	 loop also takes care of deleting output_reload_insn.  */
8913*38fd1498Szrj       for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8914*38fd1498Szrj 	{
8915*38fd1498Szrj 	  rtx set = single_set (i2);
8916*38fd1498Szrj 
8917*38fd1498Szrj 	  if (set != 0 && SET_DEST (set) == reg)
8918*38fd1498Szrj 	    {
8919*38fd1498Szrj 	      delete_address_reloads (i2, insn);
8920*38fd1498Szrj 	      delete_insn (i2);
8921*38fd1498Szrj 	    }
8922*38fd1498Szrj 	  if (LABEL_P (i2) || JUMP_P (i2))
8923*38fd1498Szrj 	    break;
8924*38fd1498Szrj 	}
8925*38fd1498Szrj 
8926*38fd1498Szrj       /* For the debugging info, say the pseudo lives in this reload reg.  */
8927*38fd1498Szrj       reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
8928*38fd1498Szrj       if (ira_conflicts_p)
8929*38fd1498Szrj 	/* Inform IRA about the change.  */
8930*38fd1498Szrj 	ira_mark_allocation_change (REGNO (reg));
8931*38fd1498Szrj       alter_reg (REGNO (reg), -1, false);
8932*38fd1498Szrj     }
8933*38fd1498Szrj   else
8934*38fd1498Szrj     {
8935*38fd1498Szrj       delete_address_reloads (output_reload_insn, insn);
8936*38fd1498Szrj       delete_insn (output_reload_insn);
8937*38fd1498Szrj     }
8938*38fd1498Szrj }
8939*38fd1498Szrj 
8940*38fd1498Szrj /* We are going to delete DEAD_INSN.  Recursively delete loads of
8941*38fd1498Szrj    reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8942*38fd1498Szrj    CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
8943*38fd1498Szrj static void
8944*38fd1498Szrj delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
8945*38fd1498Szrj {
8946*38fd1498Szrj   rtx set = single_set (dead_insn);
8947*38fd1498Szrj   rtx set2, dst;
8948*38fd1498Szrj   rtx_insn *prev, *next;
8949*38fd1498Szrj   if (set)
8950*38fd1498Szrj     {
8951*38fd1498Szrj       rtx dst = SET_DEST (set);
8952*38fd1498Szrj       if (MEM_P (dst))
8953*38fd1498Szrj 	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8954*38fd1498Szrj     }
8955*38fd1498Szrj   /* If we deleted the store from a reloaded post_{in,de}c expression,
8956*38fd1498Szrj      we can delete the matching adds.  */
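  /* For illustration only (a sketch with made-up register numbers and
     offsets): after reload, the store being deleted can be bracketed by a
     pair of compensating adds, e.g.

	 (set (reg 3) (plus (reg 3) (const_int 4)))
	 (set (mem:SI (reg 3)) ...)			<- DEAD_INSN
	 (set (reg 3) (plus (reg 3) (const_int -4)))

     Once the store is gone the two adds cancel, so they are deleted below
     after checking that they really are exact inverses on one and the same
     register.  */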
8957*38fd1498Szrj   prev = PREV_INSN (dead_insn);
8958*38fd1498Szrj   next = NEXT_INSN (dead_insn);
8959*38fd1498Szrj   if (! prev || ! next)
8960*38fd1498Szrj     return;
8961*38fd1498Szrj   set = single_set (next);
8962*38fd1498Szrj   set2 = single_set (prev);
8963*38fd1498Szrj   if (! set || ! set2
8964*38fd1498Szrj       || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8965*38fd1498Szrj       || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8966*38fd1498Szrj       || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8967*38fd1498Szrj     return;
8968*38fd1498Szrj   dst = SET_DEST (set);
8969*38fd1498Szrj   if (! rtx_equal_p (dst, SET_DEST (set2))
8970*38fd1498Szrj       || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8971*38fd1498Szrj       || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8972*38fd1498Szrj       || (INTVAL (XEXP (SET_SRC (set), 1))
8973*38fd1498Szrj 	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
8974*38fd1498Szrj     return;
8975*38fd1498Szrj   delete_related_insns (prev);
8976*38fd1498Szrj   delete_related_insns (next);
8977*38fd1498Szrj }
8978*38fd1498Szrj 
8979*38fd1498Szrj /* Subfunction of delete_address_reloads: process registers found in X.  */
8980*38fd1498Szrj static void
8981*38fd1498Szrj delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
8982*38fd1498Szrj {
8983*38fd1498Szrj   rtx_insn *prev, *i2;
8984*38fd1498Szrj   rtx set, dst;
8985*38fd1498Szrj   int i, j;
8986*38fd1498Szrj   enum rtx_code code = GET_CODE (x);
8987*38fd1498Szrj 
8988*38fd1498Szrj   if (code != REG)
8989*38fd1498Szrj     {
8990*38fd1498Szrj       const char *fmt = GET_RTX_FORMAT (code);
8991*38fd1498Szrj       for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8992*38fd1498Szrj 	{
8993*38fd1498Szrj 	  if (fmt[i] == 'e')
8994*38fd1498Szrj 	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8995*38fd1498Szrj 	  else if (fmt[i] == 'E')
8996*38fd1498Szrj 	    {
8997*38fd1498Szrj 	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8998*38fd1498Szrj 		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8999*38fd1498Szrj 					  current_insn);
9000*38fd1498Szrj 	    }
9001*38fd1498Szrj 	}
9002*38fd1498Szrj       return;
9003*38fd1498Szrj     }
9004*38fd1498Szrj 
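  /* X is a hard register at this point.  If it is not one of the registers
     currently in use as reload registers, no reload insn loaded it, so
     there is nothing here for us to delete.  */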
9005*38fd1498Szrj   if (spill_reg_order[REGNO (x)] < 0)
9006*38fd1498Szrj     return;
9007*38fd1498Szrj 
9008*38fd1498Szrj   /* Scan backwards for the insn that sets X.  Because of inheritance, that
9009*38fd1498Szrj      insn might be quite a way back.  */
9010*38fd1498Szrj   for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
9011*38fd1498Szrj     {
9012*38fd1498Szrj       code = GET_CODE (prev);
9013*38fd1498Szrj       if (code == CODE_LABEL || code == JUMP_INSN)
9014*38fd1498Szrj 	return;
9015*38fd1498Szrj       if (!INSN_P (prev))
9016*38fd1498Szrj 	continue;
9017*38fd1498Szrj       if (reg_set_p (x, PATTERN (prev)))
9018*38fd1498Szrj 	break;
9019*38fd1498Szrj       if (reg_referenced_p (x, PATTERN (prev)))
9020*38fd1498Szrj 	return;
9021*38fd1498Szrj     }
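  /* If no setter was found, or if the setter predates the insns generated
     by reload (its UID is below reload_first_uid), it is not a reload insn
     of ours and must be left alone.  */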
9022*38fd1498Szrj   if (! prev || INSN_UID (prev) < reload_first_uid)
9023*38fd1498Szrj     return;
9024*38fd1498Szrj   /* Check that PREV only sets the reload register.  */
9025*38fd1498Szrj   set = single_set (prev);
9026*38fd1498Szrj   if (! set)
9027*38fd1498Szrj     return;
9028*38fd1498Szrj   dst = SET_DEST (set);
9029*38fd1498Szrj   if (!REG_P (dst)
9030*38fd1498Szrj       || ! rtx_equal_p (dst, x))
9031*38fd1498Szrj     return;
9032*38fd1498Szrj   if (! reg_set_p (dst, PATTERN (dead_insn)))
9033*38fd1498Szrj     {
9034*38fd1498Szrj       /* Check if DST was used in a later insn -
9035*38fd1498Szrj 	 it might have been inherited.  */
9036*38fd1498Szrj       for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
9037*38fd1498Szrj 	{
9038*38fd1498Szrj 	  if (LABEL_P (i2))
9039*38fd1498Szrj 	    break;
9040*38fd1498Szrj 	  if (! INSN_P (i2))
9041*38fd1498Szrj 	    continue;
9042*38fd1498Szrj 	  if (reg_referenced_p (dst, PATTERN (i2)))
9043*38fd1498Szrj 	    {
9044*38fd1498Szrj 	      /* If the current insn references the register, it might be
9045*38fd1498Szrj 		 loaded there by a non-inherited reload.  If no other reload
9046*38fd1498Szrj 		 uses it, that means the register is set before it is
9047*38fd1498Szrj 		 referenced.  */
9048*38fd1498Szrj 	      if (i2 == current_insn)
9049*38fd1498Szrj 		{
9050*38fd1498Szrj 		  for (j = n_reloads - 1; j >= 0; j--)
9051*38fd1498Szrj 		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
9052*38fd1498Szrj 			|| reload_override_in[j] == dst)
9053*38fd1498Szrj 		      return;
9054*38fd1498Szrj 		  for (j = n_reloads - 1; j >= 0; j--)
9055*38fd1498Szrj 		    if (rld[j].in && rld[j].reg_rtx == dst)
9056*38fd1498Szrj 		      break;
9057*38fd1498Szrj 		  if (j >= 0)
9058*38fd1498Szrj 		    break;
9059*38fd1498Szrj 		}
9060*38fd1498Szrj 	      return;
9061*38fd1498Szrj 	    }
9062*38fd1498Szrj 	  if (JUMP_P (i2))
9063*38fd1498Szrj 	    break;
9064*38fd1498Szrj 	  /* If DST is still live at CURRENT_INSN, check if it is used for
9065*38fd1498Szrj 	     any reload.  Note that even if CURRENT_INSN sets DST, we still
9066*38fd1498Szrj 	     have to check the reloads.  */
9067*38fd1498Szrj 	  if (i2 == current_insn)
9068*38fd1498Szrj 	    {
9069*38fd1498Szrj 	      for (j = n_reloads - 1; j >= 0; j--)
9070*38fd1498Szrj 		if ((rld[j].reg_rtx == dst && reload_inherited[j])
9071*38fd1498Szrj 		    || reload_override_in[j] == dst)
9072*38fd1498Szrj 		  return;
9073*38fd1498Szrj 	      /* ??? We can't finish the loop here, because dst might be
9074*38fd1498Szrj 		 allocated to a pseudo in this block if no reload in this
9075*38fd1498Szrj 		 block needs any of the classes containing DST - see
9076*38fd1498Szrj 		 spill_hard_reg.  There is no easy way to tell this, so we
9077*38fd1498Szrj 		 have to scan till the end of the basic block.  */
9078*38fd1498Szrj 	    }
9079*38fd1498Szrj 	  if (reg_set_p (dst, PATTERN (i2)))
9080*38fd1498Szrj 	    break;
9081*38fd1498Szrj 	}
9082*38fd1498Szrj     }
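  /* PREV is a load of the reload register that is no longer needed.
     Recurse into its source in case that address itself used reload
     registers, forget what the reload register contains, and delete
     the load.  */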
9083*38fd1498Szrj   delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
9084*38fd1498Szrj   reg_reloaded_contents[REGNO (dst)] = -1;
9085*38fd1498Szrj   delete_insn (prev);
9086*38fd1498Szrj }
9087*38fd1498Szrj 
9088*38fd1498Szrj /* Output reload-insns to reload VALUE into RELOADREG.
9089*38fd1498Szrj    VALUE is an autoincrement or autodecrement RTX whose operand
9090*38fd1498Szrj    is a register or memory location;
9091*38fd1498Szrj    so reloading involves incrementing that location.
9092*38fd1498Szrj    IN is either identical to VALUE, or some cheaper place to reload from.
9093*38fd1498Szrj 
9094*38fd1498Szrj    INC_AMOUNT is the number to increment or decrement by (always positive).
9095*38fd1498Szrj    This cannot be deduced from VALUE.  */
9096*38fd1498Szrj 
9097*38fd1498Szrj static void
9098*38fd1498Szrj inc_for_reload (rtx reloadreg, rtx in, rtx value, poly_int64 inc_amount)
9099*38fd1498Szrj {
9100*38fd1498Szrj   /* REG or MEM to be copied and incremented.  */
9101*38fd1498Szrj   rtx incloc = find_replacement (&XEXP (value, 0));
9102*38fd1498Szrj   /* Nonzero if increment after copying.  */
9103*38fd1498Szrj   int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
9104*38fd1498Szrj 	      || GET_CODE (value) == POST_MODIFY);
9105*38fd1498Szrj   rtx_insn *last;
9106*38fd1498Szrj   rtx inc;
9107*38fd1498Szrj   rtx_insn *add_insn;
9108*38fd1498Szrj   int code;
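  /* Where the value really comes from: if IN is just VALUE itself, the
     source is the location INCLOC that is being incremented.  */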
9109*38fd1498Szrj   rtx real_in = in == value ? incloc : in;
9110*38fd1498Szrj 
9111*38fd1498Szrj   /* No hard register is equivalent to this register after
9112*38fd1498Szrj      inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
9113*38fd1498Szrj      we could inc/dec that register as well (maybe even using it for
9114*38fd1498Szrj      the source), but I'm not sure it's worth worrying about.  */
9115*38fd1498Szrj   if (REG_P (incloc))
9116*38fd1498Szrj     reg_last_reload_reg[REGNO (incloc)] = 0;
9117*38fd1498Szrj 
9118*38fd1498Szrj   if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
9119*38fd1498Szrj     {
9120*38fd1498Szrj       gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
9121*38fd1498Szrj       inc = find_replacement (&XEXP (XEXP (value, 1), 1));
9122*38fd1498Szrj     }
9123*38fd1498Szrj   else
9124*38fd1498Szrj     {
9125*38fd1498Szrj       if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
9126*38fd1498Szrj 	inc_amount = -inc_amount;
9127*38fd1498Szrj 
9128*38fd1498Szrj       inc = gen_int_mode (inc_amount, Pmode);
9129*38fd1498Szrj     }
9130*38fd1498Szrj 
9131*38fd1498Szrj   /* If this is post-increment, first copy the location to the reload reg.  */
9132*38fd1498Szrj   if (post && real_in != reloadreg)
9133*38fd1498Szrj     emit_insn (gen_move_insn (reloadreg, real_in));
9134*38fd1498Szrj 
9135*38fd1498Szrj   if (in == value)
9136*38fd1498Szrj     {
9137*38fd1498Szrj       /* See if we can directly increment INCLOC.  Use a method similar to
9138*38fd1498Szrj 	 that in gen_reload.  */
9139*38fd1498Szrj 
9140*38fd1498Szrj       last = get_last_insn ();
9141*38fd1498Szrj       add_insn = emit_insn (gen_rtx_SET (incloc,
9142*38fd1498Szrj 					 gen_rtx_PLUS (GET_MODE (incloc),
9143*38fd1498Szrj 						       incloc, inc)));
9144*38fd1498Szrj 
9145*38fd1498Szrj       code = recog_memoized (add_insn);
9146*38fd1498Szrj       if (code >= 0)
9147*38fd1498Szrj 	{
9148*38fd1498Szrj 	  extract_insn (add_insn);
9149*38fd1498Szrj 	  if (constrain_operands (1, get_enabled_alternatives (add_insn)))
9150*38fd1498Szrj 	    {
9151*38fd1498Szrj 	      /* If this is a pre-increment and we have incremented the value
9152*38fd1498Szrj 		 where it lives, copy the incremented value to RELOADREG to
9153*38fd1498Szrj 		 be used as an address.  */
9154*38fd1498Szrj 
9155*38fd1498Szrj 	      if (! post)
9156*38fd1498Szrj 		emit_insn (gen_move_insn (reloadreg, incloc));
9157*38fd1498Szrj 	      return;
9158*38fd1498Szrj 	    }
9159*38fd1498Szrj 	}
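      /* The add was not recognized, or its constraints were not satisfied;
	 discard the tentative insn and fall back to doing the increment
	 in RELOADREG below.  */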
9160*38fd1498Szrj       delete_insns_since (last);
9161*38fd1498Szrj     }
9162*38fd1498Szrj 
9163*38fd1498Szrj   /* If we couldn't do the increment directly, we must increment the value
9164*38fd1498Szrj      in RELOADREG.  How we do this depends on whether this is pre- or
9165*38fd1498Szrj      post-increment.  For pre-increment, copy INCLOC to the reload register,
9166*38fd1498Szrj      increment it there, then save back.  */
9167*38fd1498Szrj 
9168*38fd1498Szrj   if (! post)
9169*38fd1498Szrj     {
9170*38fd1498Szrj       if (in != reloadreg)
9171*38fd1498Szrj 	emit_insn (gen_move_insn (reloadreg, real_in));
9172*38fd1498Szrj       emit_insn (gen_add2_insn (reloadreg, inc));
9173*38fd1498Szrj       emit_insn (gen_move_insn (incloc, reloadreg));
9174*38fd1498Szrj     }
9175*38fd1498Szrj   else
9176*38fd1498Szrj     {
9177*38fd1498Szrj       /* Postincrement.
9178*38fd1498Szrj 	 Because this might be a jump insn or a compare, and because RELOADREG
9179*38fd1498Szrj 	 may not be available after the insn in an input reload, we must do
9180*38fd1498Szrj 	 the increment before the insn for which the reload is being done.
9181*38fd1498Szrj 
9182*38fd1498Szrj 	 We have already copied IN to RELOADREG.  Increment the copy in
9183*38fd1498Szrj 	 RELOADREG, save that back, then decrement RELOADREG so it has
9184*38fd1498Szrj 	 the original value.  */
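      /* Roughly, the code below amounts to

	     RELOADREG := RELOADREG + INC
	     INCLOC    := RELOADREG
	     RELOADREG := RELOADREG - INC

	 with RELOADREG already holding the original value of INCLOC, so
	 the reloaded insn still sees the pre-increment value.  */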
9185*38fd1498Szrj 
9186*38fd1498Szrj       emit_insn (gen_add2_insn (reloadreg, inc));
9187*38fd1498Szrj       emit_insn (gen_move_insn (incloc, reloadreg));
9188*38fd1498Szrj       if (CONST_INT_P (inc))
9189*38fd1498Szrj 	emit_insn (gen_add2_insn (reloadreg,
9190*38fd1498Szrj 				  gen_int_mode (-INTVAL (inc),
9191*38fd1498Szrj 						GET_MODE (reloadreg))));
9192*38fd1498Szrj       else
9193*38fd1498Szrj 	emit_insn (gen_sub2_insn (reloadreg, inc));
9194*38fd1498Szrj     }
9195*38fd1498Szrj }
9196*38fd1498Szrj 
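/* Add a REG_INC note to INSN for every register that is automatically
   incremented or decremented within a MEM address in X.  */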
9197*38fd1498Szrj static void
9198*38fd1498Szrj add_auto_inc_notes (rtx_insn *insn, rtx x)
9199*38fd1498Szrj {
9200*38fd1498Szrj   enum rtx_code code = GET_CODE (x);
9201*38fd1498Szrj   const char *fmt;
9202*38fd1498Szrj   int i, j;
9203*38fd1498Szrj 
9204*38fd1498Szrj   if (code == MEM && auto_inc_p (XEXP (x, 0)))
9205*38fd1498Szrj     {
9206*38fd1498Szrj       add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9207*38fd1498Szrj       return;
9208*38fd1498Szrj     }
9209*38fd1498Szrj 
9210*38fd1498Szrj   /* Scan all the operand sub-expressions.  */
9211*38fd1498Szrj   fmt = GET_RTX_FORMAT (code);
9212*38fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9213*38fd1498Szrj     {
9214*38fd1498Szrj       if (fmt[i] == 'e')
9215*38fd1498Szrj 	add_auto_inc_notes (insn, XEXP (x, i));
9216*38fd1498Szrj       else if (fmt[i] == 'E')
9217*38fd1498Szrj 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9218*38fd1498Szrj 	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
9219*38fd1498Szrj     }
9220*38fd1498Szrj }
9221