1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 #include "config.h"
23 #include "system.h"
24 
25 #include "machmode.h"
26 #include "hard-reg-set.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "obstack.h"
30 #include "insn-config.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "expr.h"
34 #include "optabs.h"
35 #include "regs.h"
36 #include "basic-block.h"
37 #include "reload.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "cselib.h"
41 #include "real.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "tree.h"
45 #include "protector.h"
46 
47 /* This file contains the reload pass of the compiler, which is
48    run after register allocation has been done.  It checks that
49    each insn is valid (operands required to be in registers really
50    are in registers of the proper class) and fixes up invalid ones
51    by copying values temporarily into registers for the insns
52    that need them.
53 
54    The results of register allocation are described by the vector
55    reg_renumber; the insns still contain pseudo regs, but reg_renumber
56    can be used to find which hard reg, if any, a pseudo reg is in.
57 
58    The technique we always use is to free up a few hard regs that are
59    called ``reload regs'', and for each place where a pseudo reg
60    must be in a hard reg, copy it temporarily into one of the reload regs.
61 
62    Reload regs are allocated locally for every instruction that needs
63    reloads.  When there are pseudos which are allocated to a register that
64    has been chosen as a reload reg, such pseudos must be ``spilled''.
65    This means that they go to other hard regs, or to stack slots if no other
66    available hard regs can be found.  Spilling can invalidate more
67    insns, creating additional reload needs, so we must keep checking
68    until the process stabilizes.
69 
70    For machines with different classes of registers, we must keep track
71    of the register class needed for each reload, and make sure that
72    we allocate enough reload registers of each class.
73 
74    The file reload.c contains the code that checks one insn for
75    validity and reports the reloads that it needs.  This file
76    is in charge of scanning the entire rtl code, accumulating the
77    reload needs, spilling, assigning reload registers to use for
78    fixing up each insn, and generating the new insns to copy values
79    into the reload registers.  */
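
/* As an illustrative sketch only (the precise RTL depends on the target
   and on the reloads that find_reloads records): suppose pseudo 66 got no
   hard register and lives in a stack slot, but appears in

       (set (reg:SI 66) (plus:SI (reg:SI 66) (const_int 4)))

   which needs its operands in registers.  Reload would pick a reload
   register, say hard reg R, and rewrite this along the lines of

       (set (reg:SI R) (mem:SI <stack slot of 66>))    ;; input reload
       (set (reg:SI R) (plus:SI (reg:SI R) (const_int 4)))
       (set (mem:SI <stack slot of 66>) (reg:SI R))    ;; output reload  */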
80 
81 #ifndef REGISTER_MOVE_COST
82 #define REGISTER_MOVE_COST(m, x, y) 2
83 #endif
84 
85 #ifndef LOCAL_REGNO
86 #define LOCAL_REGNO(REGNO)  0
87 #endif
88 
89 /* During reload_as_needed, element N contains a REG rtx for the hard reg
90    into which reg N has been reloaded (perhaps for a previous insn).  */
91 static rtx *reg_last_reload_reg;
92 
93 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
94    for an output reload that stores into reg N.  */
95 static char *reg_has_output_reload;
96 
97 /* Indicates which hard regs are reload-registers for an output reload
98    in the current insn.  */
99 static HARD_REG_SET reg_is_output_reload;
100 
101 /* Element N is the constant value to which pseudo reg N is equivalent,
102    or zero if pseudo reg N is not equivalent to a constant.
103    find_reloads looks at this in order to replace pseudo reg N
104    with the constant it stands for.  */
105 rtx *reg_equiv_constant;
106 
107 /* Element N is a memory location to which pseudo reg N is equivalent,
108    prior to any register elimination (such as frame pointer to stack
109    pointer).  Depending on whether or not it is a valid address, this value
110    is transferred to either reg_equiv_address or reg_equiv_mem.  */
111 rtx *reg_equiv_memory_loc;
112 
113 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
114    This is used when the address is not valid as a memory address
115    (because its displacement is too big for the machine).  */
116 rtx *reg_equiv_address;
117 
118 /* Element N is the memory slot to which pseudo reg N is equivalent,
119    or zero if pseudo reg N is not equivalent to a memory slot.  */
120 rtx *reg_equiv_mem;
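
/* A sketch of how the reg_equiv_* arrays relate (values illustrative):
   if pseudo 70 got no hard reg and is equivalent to a frame slot, then
   before elimination we might have

       reg_equiv_memory_loc[70] = (mem:SI (plus:SI (reg fp) (const_int -8)))

   After register elimination, reload () stores the resulting MEM in
   reg_equiv_mem[70] when its address is directly valid, or stores just
   the address in reg_equiv_address[70] when it is not (for instance the
   displacement is out of range), leaving the other field zero.  */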
121 
122 /* Widest width in which each pseudo reg is referred to (via subreg).  */
123 static unsigned int *reg_max_ref_width;
124 
125 /* Element N is the list of insns that initialized reg N from its equivalent
126    constant or memory slot.  */
127 static rtx *reg_equiv_init;
128 
129 /* Vector to remember old contents of reg_renumber before spilling.  */
130 static short *reg_old_renumber;
131 
132 /* During reload_as_needed, element N contains the last pseudo regno reloaded
133    into hard register N.  If that pseudo reg occupied more than one register,
134    reg_reloaded_contents points to that pseudo for each spill register in
135    use; all of these must remain set for an inheritance to occur.  */
136 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
137 
138 /* During reload_as_needed, element N contains the insn for which
139    hard register N was last used.   Its contents are significant only
140    when reg_reloaded_valid is set for this register.  */
141 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
142 
143 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
144 static HARD_REG_SET reg_reloaded_valid;
145 /* Indicate if the register was dead at the end of the reload.
146    This is only valid if reg_reloaded_contents is set and valid.  */
147 static HARD_REG_SET reg_reloaded_dead;
148 
149 /* Number of spill-regs so far; number of valid elements of spill_regs.  */
150 static int n_spills;
151 
152 /* In parallel with spill_regs, contains REG rtx's for those regs.
153    Holds the last rtx used for any given reg, or 0 if it has never
154    been used for spilling yet.  This rtx is reused, provided it has
155    the proper mode.  */
156 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
157 
158 /* In parallel with spill_regs, contains nonzero for a spill reg
159    that was stored after the last time it was used.
160    The precise value is the insn generated to do the store.  */
161 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
162 
163 /* This is the register that was stored with spill_reg_store.  This is a
164    copy of reload_out / reload_out_reg when the value was stored; if
165    reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
166 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
167 
168 /* This table is the inverse mapping of spill_regs:
169    indexed by hard reg number,
170    it contains the position of that reg in spill_regs,
171    or -1 for something that is not in spill_regs.
172 
173    ?!?  This is no longer accurate.  */
174 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
175 
176 /* This reg set indicates registers that can't be used as spill registers for
177    the currently processed insn.  These are the hard registers which are live
178    during the insn, but not allocated to pseudos, as well as fixed
179    registers.  */
180 static HARD_REG_SET bad_spill_regs;
181 
182 /* These are the hard registers that can't be used as a spill register for any
183    insn.  This includes registers used for user variables and registers that
184    we can't eliminate.  A register that appears in this set also can't be used
185    to retry register allocation.  */
186 static HARD_REG_SET bad_spill_regs_global;
187 
188 /* Describes order of use of registers for reloading
189    of spilled pseudo-registers.  `n_spills' is the number of
190    elements that are actually valid; new ones are added at the end.
191 
192    Both spill_regs and spill_reg_order are used on two occasions:
193    once during find_reload_regs, where they keep track of the spill registers
194    for a single insn, but also during reload_as_needed where they show all
195    the registers ever used by reload.  For the latter case, the information
196    is calculated during finish_spills.  */
197 static short spill_regs[FIRST_PSEUDO_REGISTER];
198 
199 /* This vector of reg sets indicates, for each pseudo, which hard registers
200    may not be used for retrying global allocation because the register was
201    formerly spilled from one of them.  If we allowed reallocating a pseudo to
202    a register that it was already allocated to, reload might not
203    terminate.  */
204 static HARD_REG_SET *pseudo_previous_regs;
205 
206 /* This vector of reg sets indicates, for each pseudo, which hard
207    registers may not be used for retrying global allocation because they
208    are used as spill registers during one of the insns in which the
209    pseudo is live.  */
210 static HARD_REG_SET *pseudo_forbidden_regs;
211 
212 /* All hard regs that have been used as spill registers for any insn are
213    marked in this set.  */
214 static HARD_REG_SET used_spill_regs;
215 
216 /* Index of last register assigned as a spill register.  We allocate in
217    a round-robin fashion.  */
218 static int last_spill_reg;
219 
220 /* Nonzero if indirect addressing is supported on the machine; this means
221    that spilling (REG n) does not require reloading it into a register in
222    order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
223    value indicates the level of indirect addressing supported, e.g., two
224    means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
225    a hard register.  */
226 static char spill_indirect_levels;
227 
228 /* Nonzero if indirect addressing is supported when the innermost MEM is
229    of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
230    which these are valid is the same as spill_indirect_levels, above.  */
231 char indirect_symref_ok;
232 
233 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
234 char double_reg_address_ok;
235 
236 /* Record the stack slot for each spilled hard register.  */
237 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
238 
239 /* Width allocated so far for that stack slot.  */
240 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
241 
242 /* Record which pseudos needed to be spilled.  */
243 static regset_head spilled_pseudos;
244 
245 /* Used for communication between order_regs_for_reload and count_pseudo.
246    Used to avoid counting one pseudo twice.  */
247 static regset_head pseudos_counted;
248 
249 /* First uid used by insns created by reload in this function.
250    Used in find_equiv_reg.  */
251 int reload_first_uid;
252 
253 /* Flag set by local-alloc or global-alloc if anything is live in
254    a call-clobbered reg across calls.  */
255 int caller_save_needed;
256 
257 /* Set to 1 while reload_as_needed is operating.
258    Required by some machines to handle any generated moves differently.  */
259 int reload_in_progress = 0;
260 
261 /* These arrays record the insn_code of insns that may be needed to
262    perform input and output reloads of special objects.  They provide a
263    place to pass a scratch register.  */
264 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
265 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
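
/* These arrays are typically filled in from a target's optional standard
   patterns named `reload_inM' and `reload_outM' (e.g. `reload_outqi'),
   which a port provides when moving certain values in mode M into or out
   of a reload register requires an extra scratch register.  */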
266 
267 /* This obstack is used for allocation of rtl during register elimination.
268    The allocated storage can be freed once find_reloads has processed the
269    insn.  */
270 struct obstack reload_obstack;
271 
272 /* Points to the beginning of the reload_obstack.  All insn_chain structures
273    are allocated first.  */
274 char *reload_startobj;
275 
276 /* The point after all insn_chain structures.  Used to quickly deallocate
277    memory allocated in copy_reloads during calculate_needs_all_insns.  */
278 char *reload_firstobj;
279 
280 /* This points before all local rtl generated by register elimination.
281    Used to quickly free all memory after processing one insn.  */
282 static char *reload_insn_firstobj;
283 
284 /* List of insn_chain instructions, one for every insn that reload needs to
285    examine.  */
286 struct insn_chain *reload_insn_chain;
287 
288 #ifdef TREE_CODE
289 extern tree current_function_decl;
290 #else
291 extern union tree_node *current_function_decl;
292 #endif
293 
294 /* List of all insns needing reloads.  */
295 static struct insn_chain *insns_need_reload;
296 
297 /* This structure is used to record information about register eliminations.
298    Each array entry describes one possible way of eliminating a register
299    in favor of another.   If there is more than one way of eliminating a
300    particular register, the most preferred should be specified first.  */
301 
302 struct elim_table
303 {
304   int from;			/* Register number to be eliminated.  */
305   int to;			/* Register number used as replacement.  */
306   int initial_offset;		/* Initial difference between values.  */
307   int can_eliminate;		/* Non-zero if this elimination can be done.  */
308   int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
309 				   insns made by reload.  */
310   int offset;			/* Current offset between the two regs.  */
311   int previous_offset;		/* Offset at end of previous insn.  */
312   int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
313   rtx from_rtx;			/* REG rtx for the register to be eliminated.
314 				   We cannot simply compare the number since
315 				   we might then spuriously replace a hard
316 				   register corresponding to a pseudo
317 				   assigned to the reg to be eliminated.  */
318   rtx to_rtx;			/* REG rtx for the replacement.  */
319 };
320 
321 static struct elim_table *reg_eliminate = 0;
322 
323 /* This is an intermediate structure to initialize the table.  It has
324    exactly the members provided by ELIMINABLE_REGS.  */
325 static const struct elim_table_1
326 {
327   const int from;
328   const int to;
329 } reg_eliminate_1[] =
330 
331 /* If a set of eliminable registers was specified, define the table from it.
332    Otherwise, default to the normal case of the frame pointer being
333    replaced by the stack pointer.  */
334 
335 #ifdef ELIMINABLE_REGS
336   ELIMINABLE_REGS;
337 #else
338   {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
339 #endif
340 
341 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
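
/* For illustration only (the real macro comes from the target header),
   an ELIMINABLE_REGS definition commonly lists several candidate pairs
   in order of preference, for example:

       #define ELIMINABLE_REGS                                   \
       {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },          \
        { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },     \
        { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },          \
        { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   together with CAN_ELIMINATE and INITIAL_ELIMINATION_OFFSET, which the
   code below uses to decide whether each elimination is possible and to
   compute its offset.  */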
342 
343 /* Record the number of pending eliminations that have an offset not equal
344    to their initial offset.  If nonzero, we use a new copy of each
345    replacement result in any insns encountered.  */
346 int num_not_at_initial_offset;
347 
348 /* Count the number of registers that we may be able to eliminate.  */
349 static int num_eliminable;
350 /* And the number of registers that are equivalent to a constant that
351    can be eliminated to frame_pointer / arg_pointer + constant.  */
352 static int num_eliminable_invariants;
353 
354 /* For each label, we record the offset of each elimination.  If we reach
355    a label by more than one path and an offset differs, we cannot do the
356    elimination.  This information is indexed by the difference of the
357    number of the label and the first label number.  We can't offset the
358    pointer itself as this can cause problems on machines with segmented
359    memory.  The first table is an array of flags that records whether we
360    have yet encountered a label and the second table is an array of arrays,
361    one entry in the latter array for each elimination.  */
362 
363 static int first_label_num;
364 static char *offsets_known_at;
365 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
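
/* For example, the recorded offset of elimination I at label L is found
   as

       offsets_at[CODE_LABEL_NUMBER (L) - first_label_num][I]

   and offsets_known_at is indexed by the same label-number difference
   (a sketch of the indexing scheme described above, not new state).  */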
366 
367 /* Number of labels in the current function.  */
368 
369 static int num_labels;
370 
371 static void replace_pseudos_in_call_usage	PARAMS ((rtx *,
372 							 enum machine_mode,
373 							 rtx));
374 static void maybe_fix_stack_asms	PARAMS ((void));
375 static void copy_reloads		PARAMS ((struct insn_chain *));
376 static void calculate_needs_all_insns	PARAMS ((int));
377 static int find_reg			PARAMS ((struct insn_chain *, int));
378 static void find_reload_regs		PARAMS ((struct insn_chain *));
379 static void select_reload_regs		PARAMS ((void));
380 static void delete_caller_save_insns	PARAMS ((void));
381 
382 static void spill_failure		PARAMS ((rtx, enum reg_class));
383 static void count_spilled_pseudo	PARAMS ((int, int, int));
384 static void delete_dead_insn		PARAMS ((rtx));
385 static void alter_reg			PARAMS ((int, int));
386 static void set_label_offsets		PARAMS ((rtx, rtx, int));
387 static void check_eliminable_occurrences	PARAMS ((rtx));
388 static void elimination_effects		PARAMS ((rtx, enum machine_mode));
389 static int eliminate_regs_in_insn	PARAMS ((rtx, int));
390 static void update_eliminable_offsets	PARAMS ((void));
391 static void mark_not_eliminable		PARAMS ((rtx, rtx, void *));
392 static void set_initial_elim_offsets	PARAMS ((void));
393 static void verify_initial_elim_offsets	PARAMS ((void));
394 static void set_initial_label_offsets	PARAMS ((void));
395 static void set_offsets_for_label	PARAMS ((rtx));
396 static void init_elim_table		PARAMS ((void));
397 static void update_eliminables		PARAMS ((HARD_REG_SET *));
398 static void spill_hard_reg		PARAMS ((unsigned int, int));
399 static int finish_spills		PARAMS ((int));
400 static void ior_hard_reg_set		PARAMS ((HARD_REG_SET *, HARD_REG_SET *));
401 static void scan_paradoxical_subregs	PARAMS ((rtx));
402 static void count_pseudo		PARAMS ((int));
403 static void order_regs_for_reload	PARAMS ((struct insn_chain *));
404 static void reload_as_needed		PARAMS ((int));
405 static void forget_old_reloads_1	PARAMS ((rtx, rtx, void *));
406 static int reload_reg_class_lower	PARAMS ((const PTR, const PTR));
407 static void mark_reload_reg_in_use	PARAMS ((unsigned int, int,
408 						 enum reload_type,
409 						 enum machine_mode));
410 static void clear_reload_reg_in_use	PARAMS ((unsigned int, int,
411 						 enum reload_type,
412 						 enum machine_mode));
413 static int reload_reg_free_p		PARAMS ((unsigned int, int,
414 						 enum reload_type));
415 static int reload_reg_free_for_value_p	PARAMS ((int, int, int,
416 						 enum reload_type,
417 						 rtx, rtx, int, int));
418 static int free_for_value_p		PARAMS ((int, enum machine_mode, int,
419 						 enum reload_type, rtx, rtx,
420 						 int, int));
421 static int reload_reg_reaches_end_p	PARAMS ((unsigned int, int,
422 						 enum reload_type));
423 static int allocate_reload_reg		PARAMS ((struct insn_chain *, int,
424 						 int));
425 static int conflicts_with_override	PARAMS ((rtx));
426 static void failed_reload		PARAMS ((rtx, int));
427 static int set_reload_reg		PARAMS ((int, int));
428 static void choose_reload_regs_init	PARAMS ((struct insn_chain *, rtx *));
429 static void choose_reload_regs		PARAMS ((struct insn_chain *));
430 static void merge_assigned_reloads	PARAMS ((rtx));
431 static void emit_input_reload_insns	PARAMS ((struct insn_chain *,
432 						 struct reload *, rtx, int));
433 static void emit_output_reload_insns	PARAMS ((struct insn_chain *,
434 						 struct reload *, int));
435 static void do_input_reload		PARAMS ((struct insn_chain *,
436 						 struct reload *, int));
437 static void do_output_reload		PARAMS ((struct insn_chain *,
438 						 struct reload *, int));
439 static void emit_reload_insns		PARAMS ((struct insn_chain *));
440 static void delete_output_reload	PARAMS ((rtx, int, int));
441 static void delete_address_reloads	PARAMS ((rtx, rtx));
442 static void delete_address_reloads_1	PARAMS ((rtx, rtx, rtx));
443 static rtx inc_for_reload		PARAMS ((rtx, rtx, rtx, int));
444 static void reload_cse_regs_1		PARAMS ((rtx));
445 static int reload_cse_noop_set_p	PARAMS ((rtx));
446 static int reload_cse_simplify_set	PARAMS ((rtx, rtx));
447 static int reload_cse_simplify_operands	PARAMS ((rtx, rtx));
448 static void reload_combine		PARAMS ((void));
449 static void reload_combine_note_use	PARAMS ((rtx *, rtx));
450 static void reload_combine_note_store	PARAMS ((rtx, rtx, void *));
451 static void reload_cse_move2add		PARAMS ((rtx));
452 static void move2add_note_store		PARAMS ((rtx, rtx, void *));
453 #ifdef AUTO_INC_DEC
454 static void add_auto_inc_notes		PARAMS ((rtx, rtx));
455 #endif
456 static void copy_eh_notes		PARAMS ((rtx, rtx));
457 static HOST_WIDE_INT sext_for_mode	PARAMS ((enum machine_mode,
458 						 HOST_WIDE_INT));
459 static void failed_reload		PARAMS ((rtx, int));
460 static int set_reload_reg		PARAMS ((int, int));
461 static void reload_cse_simplify		PARAMS ((rtx, rtx));
462 void fixup_abnormal_edges		PARAMS ((void));
463 extern void dump_needs			PARAMS ((struct insn_chain *));
464 
465 /* Initialize the reload pass once per compilation.  */
466 
467 void
468 init_reload ()
469 {
470   int i;
471 
472   /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
473      Set spill_indirect_levels to the number of levels such addressing is
474      permitted, zero if it is not permitted at all.  */
475 
476   rtx tem
477     = gen_rtx_MEM (Pmode,
478 		   gen_rtx_PLUS (Pmode,
479 				 gen_rtx_REG (Pmode,
480 					      LAST_VIRTUAL_REGISTER + 1),
481 				 GEN_INT (4)));
482   spill_indirect_levels = 0;
483 
484   while (memory_address_p (QImode, tem))
485     {
486       spill_indirect_levels++;
487       tem = gen_rtx_MEM (Pmode, tem);
488     }
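
  /* Illustrative example: on a target with no memory-indirect addressing,
     (mem (plus (reg) (const_int 4))) is not itself a valid address, so the
     loop above never iterates and spill_indirect_levels stays 0; on a
     machine that allows one level of memory indirection it ends up as 1.  */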
489 
490   /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */
491 
492   tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
493   indirect_symref_ok = memory_address_p (QImode, tem);
494 
495   /* See if reg+reg is a valid (and offsettable) address.  */
496 
497   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
498     {
499       tem = gen_rtx_PLUS (Pmode,
500 			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
501 			  gen_rtx_REG (Pmode, i));
502 
503       /* This way, we make sure that reg+reg is an offsettable address.  */
504       tem = plus_constant (tem, 4);
505 
506       if (memory_address_p (QImode, tem))
507 	{
508 	  double_reg_address_ok = 1;
509 	  break;
510 	}
511     }
512 
513   /* Initialize obstack for our rtl allocation.  */
514   gcc_obstack_init (&reload_obstack);
515   reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
516 
517   INIT_REG_SET (&spilled_pseudos);
518   INIT_REG_SET (&pseudos_counted);
519 }
520 
521 /* List of insn chains that are currently unused.  */
522 static struct insn_chain *unused_insn_chains = 0;
523 
524 /* Allocate an empty insn_chain structure.  */
525 struct insn_chain *
526 new_insn_chain ()
527 {
528   struct insn_chain *c;
529 
530   if (unused_insn_chains == 0)
531     {
532       c = (struct insn_chain *)
533 	obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
534       INIT_REG_SET (&c->live_throughout);
535       INIT_REG_SET (&c->dead_or_set);
536     }
537   else
538     {
539       c = unused_insn_chains;
540       unused_insn_chains = c->next;
541     }
542   c->is_caller_save_insn = 0;
543   c->need_operand_change = 0;
544   c->need_reload = 0;
545   c->need_elim = 0;
546   return c;
547 }
548 
549 /* Small utility function to set all regs in hard reg set TO which are
550    allocated to pseudos in regset FROM.  */
551 
552 void
553 compute_use_by_pseudos (to, from)
554      HARD_REG_SET *to;
555      regset from;
556 {
557   unsigned int regno;
558 
559   EXECUTE_IF_SET_IN_REG_SET
560     (from, FIRST_PSEUDO_REGISTER, regno,
561      {
562        int r = reg_renumber[regno];
563        int nregs;
564 
565        if (r < 0)
566 	 {
567 	   /* reload_combine uses the information from
568 	      BASIC_BLOCK->global_live_at_start, which might still
569 	      contain registers that have not actually been allocated
570 	      since they have an equivalence.  */
571 	   if (! reload_completed)
572 	     abort ();
573 	 }
574        else
575 	 {
576 	   nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
577 	   while (nregs-- > 0)
578 	     SET_HARD_REG_BIT (*to, r + nregs);
579 	 }
580      });
581 }
582 
583 /* Replace all pseudos found in LOC with their corresponding
584    equivalences.  */
585 
586 static void
587 replace_pseudos_in_call_usage (loc, mem_mode, usage)
588      rtx *loc;
589      enum machine_mode mem_mode;
590      rtx usage;
591 {
592   rtx x = *loc;
593   enum rtx_code code;
594   const char *fmt;
595   int i, j;
596 
597   if (! x)
598     return;
599 
600   code = GET_CODE (x);
601   if (code == REG)
602     {
603       unsigned int regno = REGNO (x);
604 
605       if (regno < FIRST_PSEUDO_REGISTER)
606 	return;
607 
608       x = eliminate_regs (x, mem_mode, usage);
609       if (x != *loc)
610 	{
611 	  *loc = x;
612 	  replace_pseudos_in_call_usage (loc, mem_mode, usage);
613 	  return;
614 	}
615 
616       if (reg_equiv_constant[regno])
617 	*loc = reg_equiv_constant[regno];
618       else if (reg_equiv_mem[regno])
619 	*loc = reg_equiv_mem[regno];
620       else if (reg_equiv_address[regno])
621 	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
622       else if (GET_CODE (regno_reg_rtx[regno]) != REG
623 	       || REGNO (regno_reg_rtx[regno]) != regno)
624 	*loc = regno_reg_rtx[regno];
625       else
626 	abort ();
627 
628       return;
629     }
630   else if (code == MEM)
631     {
632       replace_pseudos_in_call_usage (& XEXP (x, 0), GET_MODE (x), usage);
633       return;
634     }
635 
636   /* Process each of our operands recursively.  */
637   fmt = GET_RTX_FORMAT (code);
638   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
639     if (*fmt == 'e')
640       replace_pseudos_in_call_usage (&XEXP (x, i), mem_mode, usage);
641     else if (*fmt == 'E')
642       for (j = 0; j < XVECLEN (x, i); j++)
643 	replace_pseudos_in_call_usage (& XVECEXP (x, i, j), mem_mode, usage);
644 }
645 
646 
647 /* Global variables used by reload and its subroutines.  */
648 
649 /* Set during calculate_needs if an insn needs register elimination.  */
650 static int something_needs_elimination;
651 /* Set during calculate_needs if an insn needs an operand changed.  */
652 int something_needs_operands_changed;
653 
654 /* Nonzero means we couldn't get enough spill regs.  */
655 static int failure;
656 
657 /* Main entry point for the reload pass.
658 
659    FIRST is the first insn of the function being compiled.
660 
661    GLOBAL nonzero means we were called from global_alloc
662    and should attempt to reallocate any pseudoregs that we
663    displace from hard regs we will use for reloads.
664    If GLOBAL is zero, we do not have enough information to do that,
665    so any pseudo reg that is spilled must go to the stack.
666 
667    Return value is nonzero if reload failed
668    and we must not do any more for this function.  */
669 
670 int
671 reload (first, global)
672      rtx first;
673      int global;
674 {
675   int i;
676   rtx insn;
677   struct elim_table *ep;
678   basic_block bb;
679 
680   /* Make sure even insns with volatile mem refs are recognizable.  */
681   init_recog ();
682 
683   failure = 0;
684 
685   reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
686 
687   /* Make sure that the last insn in the chain
688      is not something that needs reloading.  */
689   emit_note (NULL, NOTE_INSN_DELETED);
690 
691   /* Enable find_equiv_reg to distinguish insns made by reload.  */
692   reload_first_uid = get_max_uid ();
693 
694 #ifdef SECONDARY_MEMORY_NEEDED
695   /* Initialize the secondary memory table.  */
696   clear_secondary_mem ();
697 #endif
698 
699   /* We don't have a stack slot for any spill reg yet.  */
700   memset ((char *) spill_stack_slot, 0, sizeof spill_stack_slot);
701   memset ((char *) spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
702 
703   /* Initialize the save area information for caller-save, in case some
704      are needed.  */
705   init_save_areas ();
706 
707   /* Compute which hard registers are now in use
708      as homes for pseudo registers.
709      This is done here rather than (eg) in global_alloc
710      because this point is reached even if not optimizing.  */
711   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
712     mark_home_live (i);
713 
714   /* A function that receives a nonlocal goto must save all call-saved
715      registers.  */
716   if (current_function_has_nonlocal_label)
717     for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
718       if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
719 	regs_ever_live[i] = 1;
720 
721   /* Find all the pseudo registers that didn't get hard regs
722      but do have known equivalent constants or memory slots.
723      These include parameters (known equivalent to parameter slots)
724      and cse'd or loop-moved constant memory addresses.
725 
726      Record constant equivalents in reg_equiv_constant
727      so they will be substituted by find_reloads.
728      Record memory equivalents in reg_mem_equiv so they can
729      be substituted eventually by altering the REG-rtx's.  */
730 
731   reg_equiv_constant = (rtx *) xcalloc (max_regno, sizeof (rtx));
732   reg_equiv_mem = (rtx *) xcalloc (max_regno, sizeof (rtx));
733   reg_equiv_init = (rtx *) xcalloc (max_regno, sizeof (rtx));
734   reg_equiv_address = (rtx *) xcalloc (max_regno, sizeof (rtx));
735   reg_max_ref_width = (unsigned int *) xcalloc (max_regno, sizeof (int));
736   reg_old_renumber = (short *) xcalloc (max_regno, sizeof (short));
737   memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
738   pseudo_forbidden_regs
739     = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
740   pseudo_previous_regs
741     = (HARD_REG_SET *) xcalloc (max_regno, sizeof (HARD_REG_SET));
742 
743   CLEAR_HARD_REG_SET (bad_spill_regs_global);
744 
745   /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
746      Also find all paradoxical subregs and find largest such for each pseudo.
747      On machines with small register classes, record hard registers that
748      are used for user variables.  These can never be used for spills.
749    Also look for a REG_SETJMP note on a call insn; if one is found, all
750    call-saved registers must be marked live.  */
751 
752   num_eliminable_invariants = 0;
753   for (insn = first; insn; insn = NEXT_INSN (insn))
754     {
755       rtx set = single_set (insn);
756 
757       /* We may introduce USEs that we want to remove at the end, so
758 	 we'll mark them with QImode.  Make sure there are no
759 	 previously-marked insns left by, say, regmove.  */
760       if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
761 	  && GET_MODE (insn) != VOIDmode)
762 	PUT_MODE (insn, VOIDmode);
763 
764       if (GET_CODE (insn) == CALL_INSN
765 	  && find_reg_note (insn, REG_SETJMP, NULL))
766 	for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
767 	  if (! call_used_regs[i])
768 	    regs_ever_live[i] = 1;
769 
770       if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
771 	{
772 	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
773 	  if (note
774 #ifdef LEGITIMATE_PIC_OPERAND_P
775 	      && (! function_invariant_p (XEXP (note, 0))
776 		  || ! flag_pic
777 		  /* A function invariant is often CONSTANT_P but may
778 		     include a register.  We promise to only pass
779 		     CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P.  */
780 		  || (CONSTANT_P (XEXP (note, 0))
781 		      && LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0))))
782 #endif
783 	      )
784 	    {
785 	      rtx x = XEXP (note, 0);
786 	      i = REGNO (SET_DEST (set));
787 	      if (i > LAST_VIRTUAL_REGISTER)
788 		{
789 		  /* It can happen that a REG_EQUIV note contains a MEM
790 		     that is not a legitimate memory operand.  As later
791 		     stages of reload assume that all addresses found
792 		     in the reg_equiv_* arrays were originally legitimate,
793 		     we ignore such REG_EQUIV notes.
794 
795 		     It also can happen that a REG_EQUIV note contains a MEM
796 		     that carries the /u flag, for example when GCSE turns
797 		     the load of a constant into a move from a pseudo that
798 		     already contains the constant and attaches a REG_EQUAL
799 		     note to the insn, which is later promoted to REG_EQUIV
800 		     by local-alloc.  If the destination pseudo happens not
801 		     to be assigned to a hard reg, it will be replaced by
802 		     the MEM as the destination of the move, thus generating
803 		     a store to a possibly read-only memory location.  */
804 		  if (memory_operand (x, VOIDmode) && ! RTX_UNCHANGING_P (x))
805 		    {
806 		      /* Always unshare the equivalence, so we can
807 			 substitute into this insn without touching the
808 			 equivalence.  */
809 		      reg_equiv_memory_loc[i] = copy_rtx (x);
810 		    }
811 		  else if (function_invariant_p (x))
812 		    {
813 		      if (GET_CODE (x) == PLUS)
814 			{
815 			  /* This is PLUS of frame pointer and a constant,
816 			     and might be shared.  Unshare it.  */
817 			  reg_equiv_constant[i] = copy_rtx (x);
818 			  num_eliminable_invariants++;
819 			}
820 		      else if (x == frame_pointer_rtx
821 			       || x == arg_pointer_rtx)
822 			{
823 			  reg_equiv_constant[i] = x;
824 			  num_eliminable_invariants++;
825 			}
826 		      else if (LEGITIMATE_CONSTANT_P (x))
827 			reg_equiv_constant[i] = x;
828 		      else
829 			{
830 			  reg_equiv_memory_loc[i]
831 			    = force_const_mem (GET_MODE (SET_DEST (set)), x);
832 			  if (!reg_equiv_memory_loc[i])
833 			    continue;
834 			}
835 		    }
836 		  else
837 		    continue;
838 
839 		  /* If this register is being made equivalent to a MEM
840 		     and the MEM is not SET_SRC, the equivalencing insn
841 		     is one with the MEM as a SET_DEST and it occurs later.
842 		     So don't mark this insn now.  */
843 		  if (GET_CODE (x) != MEM
844 		      || rtx_equal_p (SET_SRC (set), x))
845 		    reg_equiv_init[i]
846 		      = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
847 		}
848 	    }
849 	}
850 
851       /* If this insn is setting a MEM from a register equivalent to it,
852 	 this is the equivalencing insn.  */
853       else if (set && GET_CODE (SET_DEST (set)) == MEM
854 	       && GET_CODE (SET_SRC (set)) == REG
855 	       && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
856 	       && rtx_equal_p (SET_DEST (set),
857 			       reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
858 	reg_equiv_init[REGNO (SET_SRC (set))]
859 	  = gen_rtx_INSN_LIST (VOIDmode, insn,
860 			       reg_equiv_init[REGNO (SET_SRC (set))]);
861 
862       if (INSN_P (insn))
863 	scan_paradoxical_subregs (PATTERN (insn));
864     }
865 
866   init_elim_table ();
867 
868   first_label_num = get_first_label_num ();
869   num_labels = max_label_num () - first_label_num;
870 
871   /* Allocate the tables used to store offset information at labels.  */
872   /* We used to use alloca here, but the size of what it would try to
873      allocate would occasionally cause it to exceed the stack limit and
874      cause a core dump.  */
875   offsets_known_at = xmalloc (num_labels);
876   offsets_at
877     = (int (*)[NUM_ELIMINABLE_REGS])
878     xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
879 
880   /* Alter each pseudo-reg rtx to contain its hard reg number.
881      Assign stack slots to the pseudos that lack hard regs or equivalents.
882      Do not touch virtual registers.  */
883 
884   for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
885     alter_reg (i, -1);
886 
887   /* If we have some registers we think can be eliminated, scan all insns to
888      see if there is an insn that sets one of these registers to something
889      other than itself plus a constant.  If so, the register cannot be
890      eliminated.  Doing this scan here eliminates an extra pass through the
891      main reload loop in the most common case where register elimination
892      cannot be done.  */
893   for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
894     if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
895 	|| GET_CODE (insn) == CALL_INSN)
896       note_stores (PATTERN (insn), mark_not_eliminable, NULL);
897 
898   maybe_fix_stack_asms ();
899 
900   insns_need_reload = 0;
901   something_needs_elimination = 0;
902 
903   /* Initialize to -1, which means take the first spill register.  */
904   last_spill_reg = -1;
905 
906   /* Spill any hard regs that we know we can't eliminate.  */
907   CLEAR_HARD_REG_SET (used_spill_regs);
908   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
909     if (! ep->can_eliminate)
910       spill_hard_reg (ep->from, 1);
911 
912 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
913   if (frame_pointer_needed)
914     spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
915 #endif
916   finish_spills (global);
917 
918   /* From now on, we may need to generate moves differently.  We may also
919      allow modifications of insns which cause them to not be recognized.
920      Any such modifications will be cleaned up during reload itself.  */
921   reload_in_progress = 1;
922 
923   /* This loop scans the entire function each go-round
924      and repeats until one repetition spills no additional hard regs.  */
925   for (;;)
926     {
927       int something_changed;
928       int did_spill;
929 
930       HOST_WIDE_INT starting_frame_size;
931 
932       /* Round size of stack frame to stack_alignment_needed.  This must be done
933 	 here because the stack size may be a part of the offset computation
934 	 for register elimination, and there might have been new stack slots
935 	 created in the last iteration of this loop.  */
936       if (cfun->stack_alignment_needed)
937         assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
938 
939       starting_frame_size = get_frame_size () - get_frame_free_size ();
940 
941       set_initial_elim_offsets ();
942       set_initial_label_offsets ();
943 
944       /* For each pseudo register that has an equivalent location defined,
945 	 try to eliminate any eliminable registers (such as the frame pointer)
946 	 assuming initial offsets for the replacement register, which
947 	 is the normal case.
948 
949 	 If the resulting location is directly addressable, substitute
950 	 the MEM we just got directly for the old REG.
951 
952 	 If it is not addressable but is a constant or the sum of a hard reg
953 	 and constant, it is probably not addressable because the constant is
954 	 out of range, in that case record the address; we will generate
955 	 hairy code to compute the address in a register each time it is
956 	 needed.  Similarly if it is a hard register, but one that is not
957 	 valid as an address register.
958 
959 	 If the location is not addressable, but does not have one of the
960 	 above forms, assign a stack slot.  We have to do this to avoid the
961 	 potential of producing lots of reloads if, e.g., a location involves
962 	 a pseudo that didn't get a hard register and has an equivalent memory
963 	 location that also involves a pseudo that didn't get a hard register.
964 
965 	 Perhaps at some point we will improve reload_when_needed handling
966 	 so this problem goes away.  But that's very hairy.  */
967 
968       for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
969 	if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
970 	  {
971 	    rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
972 
973 	    if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
974 					 XEXP (x, 0)))
975 	      reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
976 	    else if (CONSTANT_P (XEXP (x, 0))
977 		     || (GET_CODE (XEXP (x, 0)) == REG
978 			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
979 		     || (GET_CODE (XEXP (x, 0)) == PLUS
980 			 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
981 			 && (REGNO (XEXP (XEXP (x, 0), 0))
982 			     < FIRST_PSEUDO_REGISTER)
983 			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
984 	      reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
985 	    else
986 	      {
987 		/* Make a new stack slot.  Then indicate that something
988 		   changed so we go back and recompute offsets for
989 		   eliminable registers because the allocation of memory
990 		   below might change some offset.  reg_equiv_{mem,address}
991 		   will be set up for this pseudo on the next pass around
992 		   the loop.  */
993 		reg_equiv_memory_loc[i] = 0;
994 		reg_equiv_init[i] = 0;
995 		alter_reg (i, -1);
996 	      }
997 	  }
998 
999       if (caller_save_needed)
1000 	setup_save_areas ();
1001 
1002       /* If we allocated another stack slot, redo elimination bookkeeping.  */
1003       if (starting_frame_size != get_frame_size () - get_frame_free_size ())
1004 	continue;
1005 
1006       if (caller_save_needed)
1007 	{
1008 	  save_call_clobbered_regs ();
1009 	  /* That might have allocated new insn_chain structures.  */
1010 	  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1011 	}
1012 
1013       calculate_needs_all_insns (global);
1014 
1015       CLEAR_REG_SET (&spilled_pseudos);
1016       did_spill = 0;
1017 
1018       something_changed = 0;
1019 
1020       /* If we allocated any new memory locations, make another pass
1021 	 since it might have changed elimination offsets.  */
1022       if (starting_frame_size != get_frame_size () - get_frame_free_size ())
1023 	something_changed = 1;
1024 
1025       {
1026 	HARD_REG_SET to_spill;
1027 	CLEAR_HARD_REG_SET (to_spill);
1028 	update_eliminables (&to_spill);
1029 	for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1030 	  if (TEST_HARD_REG_BIT (to_spill, i))
1031 	    {
1032 	      spill_hard_reg (i, 1);
1033 	      did_spill = 1;
1034 
1035 	      /* Regardless of the state of spills, if we previously had
1036 		 a register that we thought we could eliminate, but now can
1037 		 not eliminate, we must run another pass.
1038 
1039 		 Consider pseudos which have an entry in reg_equiv_* which
1040 		 reference an eliminable register.  We must make another pass
1041 		 to update reg_equiv_* so that we do not substitute in the
1042 		 old value from when we thought the elimination could be
1043 		 performed.  */
1044 	      something_changed = 1;
1045 	    }
1046       }
1047 
1048       select_reload_regs ();
1049       if (failure)
1050 	goto failed;
1051 
1052       if (insns_need_reload != 0 || did_spill)
1053 	something_changed |= finish_spills (global);
1054 
1055       if (! something_changed)
1056 	break;
1057 
1058       if (caller_save_needed)
1059 	delete_caller_save_insns ();
1060 
1061       obstack_free (&reload_obstack, reload_firstobj);
1062     }
1063 
1064   /* If global-alloc was run, notify it of any register eliminations we have
1065      done.  */
1066   if (global)
1067     for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1068       if (ep->can_eliminate)
1069 	mark_elimination (ep->from, ep->to);
1070 
1071   /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1072      If that insn didn't set the register (i.e., it copied the register to
1073      memory), just delete that insn instead of the equivalencing insn plus
1074      anything now dead.  If we call delete_dead_insn on that insn, we may
1075      delete the insn that actually sets the register if the register dies
1076      there and that is incorrect.  */
1077 
1078   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1079     {
1080       if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1081 	{
1082 	  rtx list;
1083 	  for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1084 	    {
1085 	      rtx equiv_insn = XEXP (list, 0);
1086 
1087 	      /* If we already deleted the insn or if it may trap, we can't
1088 		 delete it.  The latter case shouldn't happen, but can
1089 		 if an insn has a variable address, gets a REG_EH_REGION
1090 		 note added to it, and then gets converted into a load
1091 		 from a constant address.  */
1092 	      if (GET_CODE (equiv_insn) == NOTE
1093 		  || can_throw_internal (equiv_insn))
1094 		;
1095 	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1096 		delete_dead_insn (equiv_insn);
1097 	      else
1098 		{
1099 		  PUT_CODE (equiv_insn, NOTE);
1100 		  NOTE_SOURCE_FILE (equiv_insn) = 0;
1101 		  NOTE_LINE_NUMBER (equiv_insn) = NOTE_INSN_DELETED;
1102 		}
1103 	    }
1104 	}
1105     }
1106 
1107   /* Use the reload registers where necessary
1108      by generating move instructions to move the must-be-register
1109      values into or out of the reload registers.  */
1110 
1111   if (insns_need_reload != 0 || something_needs_elimination
1112       || something_needs_operands_changed)
1113     {
1114       HOST_WIDE_INT old_frame_size = get_frame_size () - get_frame_free_size ();
1115 
1116       reload_as_needed (global);
1117 
1118       if (old_frame_size != get_frame_size () - get_frame_free_size ())
1119 	abort ();
1120 
1121       if (num_eliminable)
1122 	verify_initial_elim_offsets ();
1123     }
1124 
1125   /* If we were able to eliminate the frame pointer, show that it is no
1126      longer live at the start of any basic block.  If it is live by
1127      virtue of being in a pseudo, that pseudo will be marked live
1128      and hence the frame pointer will be known to be live via that
1129      pseudo.  */
1130 
1131   if (! frame_pointer_needed)
1132     FOR_EACH_BB (bb)
1133       CLEAR_REGNO_REG_SET (bb->global_live_at_start,
1134 			   HARD_FRAME_POINTER_REGNUM);
1135 
1136   /* Come here (with failure set nonzero) if we can't get enough spill regs
1137      and we decide not to abort because of it.  */
1138  failed:
1139 
1140   CLEAR_REG_SET (&spilled_pseudos);
1141   reload_in_progress = 0;
1142 
1143   /* Now eliminate all pseudo regs by modifying them into
1144      their equivalent memory references.
1145      The REG-rtx's for the pseudos are modified in place,
1146      so all insns that used to refer to them now refer to memory.
1147 
1148      For a reg that has a reg_equiv_address, all those insns
1149      were changed by reloading so that no insns refer to it any longer;
1150      but the DECL_RTL of a variable decl may refer to it,
1151      and if so this causes the debugging info to mention the variable.  */
1152 
1153   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1154     {
1155       rtx addr = 0;
1156 
1157       if (reg_equiv_mem[i])
1158 	addr = XEXP (reg_equiv_mem[i], 0);
1159 
1160       if (reg_equiv_address[i])
1161 	addr = reg_equiv_address[i];
1162 
1163       if (addr)
1164 	{
1165 	  if (reg_renumber[i] < 0)
1166 	    {
1167 	      rtx reg = regno_reg_rtx[i];
1168 
1169 	      REG_USERVAR_P (reg) = 0;
1170 	      PUT_CODE (reg, MEM);
1171 	      XEXP (reg, 0) = addr;
1172 	      if (reg_equiv_memory_loc[i])
1173 		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1174 	      else
1175 		{
1176 		  RTX_UNCHANGING_P (reg) = MEM_IN_STRUCT_P (reg)
1177 		    = MEM_SCALAR_P (reg) = 0;
1178 		  MEM_ATTRS (reg) = 0;
1179 		}
1180 	    }
1181 	  else if (reg_equiv_mem[i])
1182 	    XEXP (reg_equiv_mem[i], 0) = addr;
1183 	}
1184     }
1185 
1186   /* We must set reload_completed now since the cleanup_subreg_operands call
1187      below will re-recognize each insn and reload may have generated insns
1188      which are only valid during and after reload.  */
1189   reload_completed = 1;
1190 
1191   /* Make a pass over all the insns and delete all USEs which we inserted
1192      only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
1193      notes.  Delete all CLOBBER insns, except those that refer to the return
1194      value and the special mem:BLK CLOBBERs added to prevent the scheduler
1195      from misarranging variable-array code, and simplify (subreg (reg))
1196      operands.  Also remove all REG_RETVAL and REG_LIBCALL notes since they
1197      are no longer useful or accurate.  Strip and regenerate REG_INC notes
1198      that may have been moved around.  */
1199 
1200   for (insn = first; insn; insn = NEXT_INSN (insn))
1201     if (INSN_P (insn))
1202       {
1203 	rtx *pnote;
1204 
1205 	if (GET_CODE (insn) == CALL_INSN)
1206 	  replace_pseudos_in_call_usage (& CALL_INSN_FUNCTION_USAGE (insn),
1207 					 VOIDmode,
1208 					 CALL_INSN_FUNCTION_USAGE (insn));
1209 
1210 	if ((GET_CODE (PATTERN (insn)) == USE
1211 	     /* We mark with QImode USEs introduced by reload itself.  */
1212 	     && (GET_MODE (insn) == QImode
1213 		 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1214 	    || (GET_CODE (PATTERN (insn)) == CLOBBER
1215 		&& (GET_CODE (XEXP (PATTERN (insn), 0)) != MEM
1216 		    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1217 		    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1218 			&& XEXP (XEXP (PATTERN (insn), 0), 0)
1219 				!= stack_pointer_rtx))
1220 		&& (GET_CODE (XEXP (PATTERN (insn), 0)) != REG
1221 		    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1222 	  {
1223 	    delete_insn (insn);
1224 	    continue;
1225 	  }
1226 
1227 	pnote = &REG_NOTES (insn);
1228 	while (*pnote != 0)
1229 	  {
1230 	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
1231 		|| REG_NOTE_KIND (*pnote) == REG_UNUSED
1232 		|| REG_NOTE_KIND (*pnote) == REG_INC
1233 		|| REG_NOTE_KIND (*pnote) == REG_RETVAL
1234 		|| REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1235 	      *pnote = XEXP (*pnote, 1);
1236 	    else
1237 	      pnote = &XEXP (*pnote, 1);
1238 	  }
1239 
1240 #ifdef AUTO_INC_DEC
1241 	add_auto_inc_notes (insn, PATTERN (insn));
1242 #endif
1243 
1244 	/* And simplify (subreg (reg)) if it appears as an operand.  */
1245 	cleanup_subreg_operands (insn);
1246       }
1247 
1248   /* If we are doing stack checking, give a warning if this function's
1249      frame size is larger than we expect.  */
1250   if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1251     {
1252       HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1253       static int verbose_warned = 0;
1254 
1255       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1256 	if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1257 	  size += UNITS_PER_WORD;
1258 
1259       if (size > STACK_CHECK_MAX_FRAME_SIZE)
1260 	{
1261 	  warning ("frame size too large for reliable stack checking");
1262 	  if (! verbose_warned)
1263 	    {
1264 	      warning ("try reducing the number of local variables");
1265 	      verbose_warned = 1;
1266 	    }
1267 	}
1268     }
1269 
1270   /* Indicate that we no longer have known memory locations or constants.  */
1271   if (reg_equiv_constant)
1272     free (reg_equiv_constant);
1273   reg_equiv_constant = 0;
1274   if (reg_equiv_memory_loc)
1275     free (reg_equiv_memory_loc);
1276   reg_equiv_memory_loc = 0;
1277 
1278   if (offsets_known_at)
1279     free (offsets_known_at);
1280   if (offsets_at)
1281     free (offsets_at);
1282 
1283   free (reg_equiv_mem);
1284   free (reg_equiv_init);
1285   free (reg_equiv_address);
1286   free (reg_max_ref_width);
1287   free (reg_old_renumber);
1288   free (pseudo_previous_regs);
1289   free (pseudo_forbidden_regs);
1290 
1291   CLEAR_HARD_REG_SET (used_spill_regs);
1292   for (i = 0; i < n_spills; i++)
1293     SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1294 
1295   /* Free all the insn_chain structures at once.  */
1296   obstack_free (&reload_obstack, reload_startobj);
1297   unused_insn_chains = 0;
1298   fixup_abnormal_edges ();
1299 
1300   /* Replacing pseudos with their memory equivalents might have
1301      created shared rtx.  Subsequent passes would get confused
1302      by this, so unshare everything here.  */
1303   unshare_all_rtl_again (first);
1304 
1305 #ifdef STACK_BOUNDARY
1306   /* init_emit has set the alignment of the hard frame pointer
1307      to STACK_BOUNDARY.  It is very likely no longer valid if
1308      the hard frame pointer was used for register allocation.  */
1309   if (!frame_pointer_needed)
1310     REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1311 #endif
1312 
1313   return failure;
1314 }
1315 
1316 /* Yet another special case.  Unfortunately, reg-stack forces people to
1317    write incorrect clobbers in asm statements.  These clobbers must not
1318    cause the register to appear in bad_spill_regs, otherwise we'll call
1319    fatal_insn later.  We clear the corresponding regnos in the live
1320    register sets to avoid this.
1321    The whole thing is rather sick, I'm afraid.  */
1322 
1323 static void
1324 maybe_fix_stack_asms ()
1325 {
1326 #ifdef STACK_REGS
1327   const char *constraints[MAX_RECOG_OPERANDS];
1328   enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1329   struct insn_chain *chain;
1330 
1331   for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1332     {
1333       int i, noperands;
1334       HARD_REG_SET clobbered, allowed;
1335       rtx pat;
1336 
1337       if (! INSN_P (chain->insn)
1338 	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1339 	continue;
1340       pat = PATTERN (chain->insn);
1341       if (GET_CODE (pat) != PARALLEL)
1342 	continue;
1343 
1344       CLEAR_HARD_REG_SET (clobbered);
1345       CLEAR_HARD_REG_SET (allowed);
1346 
1347       /* First, make a mask of all stack regs that are clobbered.  */
1348       for (i = 0; i < XVECLEN (pat, 0); i++)
1349 	{
1350 	  rtx t = XVECEXP (pat, 0, i);
1351 	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1352 	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1353 	}
1354 
1355       /* Get the operand values and constraints out of the insn.  */
1356       decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1357 			   constraints, operand_mode);
1358 
1359       /* For every operand, see what registers are allowed.  */
1360       for (i = 0; i < noperands; i++)
1361 	{
1362 	  const char *p = constraints[i];
1363 	  /* For every alternative, we compute the class of registers allowed
1364 	     for reloading in CLS, and merge its contents into the reg set
1365 	     ALLOWED.  */
1366 	  int cls = (int) NO_REGS;
1367 
1368 	  for (;;)
1369 	    {
1370 	      char c = *p++;
1371 
1372 	      if (c == '\0' || c == ',' || c == '#')
1373 		{
1374 		  /* End of one alternative - mark the regs in the current
1375 		     class, and reset the class.  */
1376 		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1377 		  cls = NO_REGS;
1378 		  if (c == '#')
1379 		    do {
1380 		      c = *p++;
1381 		    } while (c != '\0' && c != ',');
1382 		  if (c == '\0')
1383 		    break;
1384 		  continue;
1385 		}
1386 
1387 	      switch (c)
1388 		{
1389 		case '=': case '+': case '*': case '%': case '?': case '!':
1390 		case '0': case '1': case '2': case '3': case '4': case 'm':
1391 		case '<': case '>': case 'V': case 'o': case '&': case 'E':
1392 		case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1393 		case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1394 		case 'P':
1395 		  break;
1396 
1397 		case 'p':
1398 		  cls = (int) reg_class_subunion[cls]
1399 		    [(int) MODE_BASE_REG_CLASS (VOIDmode)];
1400 		  break;
1401 
1402 		case 'g':
1403 		case 'r':
1404 		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1405 		  break;
1406 
1407 		default:
1408 		  if (EXTRA_ADDRESS_CONSTRAINT (c))
1409 		    cls = (int) reg_class_subunion[cls]
1410 		      [(int) MODE_BASE_REG_CLASS (VOIDmode)];
1411 		  else
1412 		    cls = (int) reg_class_subunion[cls]
1413 		      [(int) REG_CLASS_FROM_LETTER (c)];
1414 		}
1415 	    }
1416 	}
1417       /* Those of the registers which are clobbered, but allowed by the
1418 	 constraints, must be usable as reload registers.  So clear them
1419 	 out of the life information.  */
1420       AND_HARD_REG_SET (allowed, clobbered);
1421       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1422 	if (TEST_HARD_REG_BIT (allowed, i))
1423 	  {
1424 	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1425 	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1426 	  }
1427     }
1428 
1429 #endif
1430 }
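/* Added illustration, not part of the original sources: on a STACK_REGS
   target (the x87 register stack is the typical case), an asm statement can
   produce a PARALLEL of roughly this shape:

       (parallel [(set (reg:DF st0) (asm_operands ...))
                  (clobber (reg:DF st1))])

   If the operand constraints also allow st1, the loop above removes st1
   from live_throughout and dead_or_set so it stays available as a reload
   register instead of ending up in bad_spill_regs.  The register names are
   chosen only for the example.  */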
1431 
1432 /* Copy the global variables n_reloads and rld into the corresponding elts
1433    of CHAIN.  */
1434 static void
1435 copy_reloads (chain)
1436      struct insn_chain *chain;
1437 {
1438   chain->n_reloads = n_reloads;
1439   chain->rld
1440     = (struct reload *) obstack_alloc (&reload_obstack,
1441 				       n_reloads * sizeof (struct reload));
1442   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1443   reload_insn_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1444 }
1445 
1446 /* Walk the chain of insns, and determine for each whether it needs reloads
1447    and/or eliminations.  Build the corresponding insns_need_reload list, and
1448    set something_needs_elimination as appropriate.  */
1449 static void
1450 calculate_needs_all_insns (global)
1451      int global;
1452 {
1453   struct insn_chain **pprev_reload = &insns_need_reload;
1454   struct insn_chain *chain, *next = 0;
1455 
1456   something_needs_elimination = 0;
1457 
1458   reload_insn_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1459   for (chain = reload_insn_chain; chain != 0; chain = next)
1460     {
1461       rtx insn = chain->insn;
1462 
1463       next = chain->next;
1464 
1465       /* Clear out the shortcuts.  */
1466       chain->n_reloads = 0;
1467       chain->need_elim = 0;
1468       chain->need_reload = 0;
1469       chain->need_operand_change = 0;
1470 
1471       /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1472 	 include REG_LABEL), we need to see what effects this has on the
1473 	 known offsets at labels.  */
1474 
1475       if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1476 	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
1477 	set_label_offsets (insn, insn, 0);
1478 
1479       if (INSN_P (insn))
1480 	{
1481 	  rtx old_body = PATTERN (insn);
1482 	  int old_code = INSN_CODE (insn);
1483 	  rtx old_notes = REG_NOTES (insn);
1484 	  int did_elimination = 0;
1485 	  int operands_changed = 0;
1486 	  rtx set = single_set (insn);
1487 
1488 	  /* Skip insns that only set an equivalence.  */
1489 	  if (set && GET_CODE (SET_DEST (set)) == REG
1490 	      && reg_renumber[REGNO (SET_DEST (set))] < 0
1491 	      && reg_equiv_constant[REGNO (SET_DEST (set))])
1492 	    continue;
1493 
1494 	  /* If needed, eliminate any eliminable registers.  */
1495 	  if (num_eliminable || num_eliminable_invariants)
1496 	    did_elimination = eliminate_regs_in_insn (insn, 0);
1497 
1498 	  /* Analyze the instruction.  */
1499 	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1500 					   global, spill_reg_order);
1501 
1502 	  /* If a no-op set needs more than one reload, this is likely
1503 	     to be something that needs input address reloads.  We
1504 	     can't get rid of this cleanly later, and it is of no use
1505 	     anyway, so discard it now.
1506 	     We only do this when expensive_optimizations is enabled,
1507 	     since this complements reload inheritance / output
1508 	     reload deletion, and it can make debugging harder.  */
1509 	  if (flag_expensive_optimizations && n_reloads > 1)
1510 	    {
1511 	      rtx set = single_set (insn);
1512 	      if (set
1513 		  && SET_SRC (set) == SET_DEST (set)
1514 		  && GET_CODE (SET_SRC (set)) == REG
1515 		  && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1516 		{
1517 		  delete_insn (insn);
1518 		  /* Delete it from the reload chain.  */
1519 		  if (chain->prev)
1520 		    chain->prev->next = next;
1521 		  else
1522 		    reload_insn_chain = next;
1523 		  if (next)
1524 		    next->prev = chain->prev;
1525 		  chain->next = unused_insn_chains;
1526 		  unused_insn_chains = chain;
1527 		  continue;
1528 		}
1529 	    }
1530 	  if (num_eliminable)
1531 	    update_eliminable_offsets ();
1532 
1533 	  /* Remember for later shortcuts which insns had any reloads or
1534 	     register eliminations.  */
1535 	  chain->need_elim = did_elimination;
1536 	  chain->need_reload = n_reloads > 0;
1537 	  chain->need_operand_change = operands_changed;
1538 
1539 	  /* Discard any register replacements done.  */
1540 	  if (did_elimination)
1541 	    {
1542 	      obstack_free (&reload_obstack, reload_insn_firstobj);
1543 	      PATTERN (insn) = old_body;
1544 	      INSN_CODE (insn) = old_code;
1545 	      REG_NOTES (insn) = old_notes;
1546 	      something_needs_elimination = 1;
1547 	    }
1548 
1549 	  something_needs_operands_changed |= operands_changed;
1550 
1551 	  if (n_reloads != 0)
1552 	    {
1553 	      copy_reloads (chain);
1554 	      *pprev_reload = chain;
1555 	      pprev_reload = &chain->next_need_reload;
1556 	    }
1557 	}
1558     }
1559   *pprev_reload = 0;
1560 }
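/* Added illustration, not from the original sources: the no-op move test
   above matches insns of roughly this shape, where pseudo 123 received no
   hard register and reloading it would only generate useless input address
   reloads:

       (insn 42 41 43 (set (reg:SI 123) (reg:SI 123)) ...)

   The insn is deleted and its element is unlinked from reload_insn_chain
   and returned to unused_insn_chains.  The insn numbers are invented for
   the example.  */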
1561 
1562 /* Comparison function for qsort to decide which of two reloads
1563    should be handled first.  *P1 and *P2 are the reload numbers.  */
1564 
1565 static int
1566 reload_reg_class_lower (r1p, r2p)
1567      const PTR r1p;
1568      const PTR r2p;
1569 {
1570   int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1571   int t;
1572 
1573   /* Consider required reloads before optional ones.  */
1574   t = rld[r1].optional - rld[r2].optional;
1575   if (t != 0)
1576     return t;
1577 
1578   /* Count all solitary classes before non-solitary ones.  */
1579   t = ((reg_class_size[(int) rld[r2].class] == 1)
1580        - (reg_class_size[(int) rld[r1].class] == 1));
1581   if (t != 0)
1582     return t;
1583 
1584   /* Aside from solitaires, consider all multi-reg groups first.  */
1585   t = rld[r2].nregs - rld[r1].nregs;
1586   if (t != 0)
1587     return t;
1588 
1589   /* Consider reloads in order of increasing reg-class number.  */
1590   t = (int) rld[r1].class - (int) rld[r2].class;
1591   if (t != 0)
1592     return t;
1593 
1594   /* If reloads are equally urgent, sort by reload number,
1595      so that the results of qsort leave nothing to chance.  */
1596   return r1 - r2;
1597 }
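/* Added illustration, not from the original sources: given three reloads

       rld[0]  optional,  GENERAL_REGS, 1 reg
       rld[1]  required,  a class of size 1
       rld[2]  required,  GENERAL_REGS, 2 regs

   the comparator above sorts them as 1, 2, 0: required before optional,
   solitary classes first, wider groups next, with the class number and
   finally the reload number breaking ties so the qsort result is
   deterministic.  */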
1598 
1599 /* The cost of spilling each hard reg.  */
1600 static int spill_cost[FIRST_PSEUDO_REGISTER];
1601 
1602 /* When spilling multiple hard registers, we use SPILL_COST for the first
1603    spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
1604    is only updated for the first hard reg of a multi-reg pseudo.  */
1605 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1606 
1607 /* Update the spill cost arrays, considering that pseudo REG is live.  */
1608 
1609 static void
1610 count_pseudo (reg)
1611      int reg;
1612 {
1613   int freq = REG_FREQ (reg);
1614   int r = reg_renumber[reg];
1615   int nregs;
1616 
1617   if (REGNO_REG_SET_P (&pseudos_counted, reg)
1618       || REGNO_REG_SET_P (&spilled_pseudos, reg))
1619     return;
1620 
1621   SET_REGNO_REG_SET (&pseudos_counted, reg);
1622 
1623   if (r < 0)
1624     abort ();
1625 
1626   spill_add_cost[r] += freq;
1627 
1628   nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1629   while (nregs-- > 0)
1630     spill_cost[r + nregs] += freq;
1631 }
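/* Added worked example, not from the original sources: a DImode pseudo
   with REG_FREQ 5 that was allocated to hard regs 6 and 7 contributes

       spill_add_cost[6] += 5;
       spill_cost[6] += 5;
       spill_cost[7] += 5;

   i.e. evicting either half of the pair costs the pseudo's frequency,
   while the additional-register cost is recorded only for the first hard
   reg.  The numbers are invented for the example.  */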
1632 
1633 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1634    contents of BAD_SPILL_REGS for the insn described by CHAIN.  */
1635 
1636 static void
1637 order_regs_for_reload (chain)
1638      struct insn_chain *chain;
1639 {
1640   int i;
1641   HARD_REG_SET used_by_pseudos;
1642   HARD_REG_SET used_by_pseudos2;
1643 
1644   COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1645 
1646   memset (spill_cost, 0, sizeof spill_cost);
1647   memset (spill_add_cost, 0, sizeof spill_add_cost);
1648 
1649   /* Count number of uses of each hard reg by pseudo regs allocated to it
1650      and then order them by decreasing use.  First exclude hard registers
1651      that are live in or across this insn.  */
1652 
1653   REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1654   REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1655   IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1656   IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1657 
1658   /* Now find out which pseudos are live here, and update the spill
1659      cost arrays for the hard regs they occupy.  */
1660   CLEAR_REG_SET (&pseudos_counted);
1661 
1662   EXECUTE_IF_SET_IN_REG_SET
1663     (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i,
1664      {
1665        count_pseudo (i);
1666      });
1667   EXECUTE_IF_SET_IN_REG_SET
1668     (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i,
1669      {
1670        count_pseudo (i);
1671      });
1672   CLEAR_REG_SET (&pseudos_counted);
1673 }
1674 
1675 /* Vector of reload-numbers showing the order in which the reloads should
1676    be processed.  */
1677 static short reload_order[MAX_RELOADS];
1678 
1679 /* This is used to keep track of the spill regs used in one insn.  */
1680 static HARD_REG_SET used_spill_regs_local;
1681 
1682 /* We decided to spill hard register SPILLED, which has a size of
1683    SPILLED_NREGS.  Determine how pseudo REG, which is live during the insn,
1684    is affected.  We will add it to SPILLED_PSEUDOS if necessary, and we will
1685    update SPILL_COST/SPILL_ADD_COST.  */
1686 
1687 static void
1688 count_spilled_pseudo (spilled, spilled_nregs, reg)
1689      int spilled, spilled_nregs, reg;
1690 {
1691   int r = reg_renumber[reg];
1692   int nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1693 
1694   if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1695       || spilled + spilled_nregs <= r || r + nregs <= spilled)
1696     return;
1697 
1698   SET_REGNO_REG_SET (&spilled_pseudos, reg);
1699 
1700   spill_add_cost[r] -= REG_FREQ (reg);
1701   while (nregs-- > 0)
1702     spill_cost[r + nregs] -= REG_FREQ (reg);
1703 }
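/* Added worked example, not from the original sources: with SPILLED == 6
   and SPILLED_NREGS == 2, hard regs 6 and 7 are being taken for reloads.
   A live pseudo renumbered to hard reg 7 overlaps that range, so it is
   added to spilled_pseudos and its frequency is subtracted from the cost
   arrays again; a pseudo sitting in hard reg 8 is not affected.  Register
   numbers are invented for the example.  */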
1704 
1705 /* Find reload register to use for reload number ORDER.  */
1706 
1707 static int
1708 find_reg (chain, order)
1709      struct insn_chain *chain;
1710      int order;
1711 {
1712   int rnum = reload_order[order];
1713   struct reload *rl = rld + rnum;
1714   int best_cost = INT_MAX;
1715   int best_reg = -1;
1716   unsigned int i, j;
1717   int k;
1718   HARD_REG_SET not_usable;
1719   HARD_REG_SET used_by_other_reload;
1720 
1721   COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1722   IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1723   IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1724 
1725   CLEAR_HARD_REG_SET (used_by_other_reload);
1726   for (k = 0; k < order; k++)
1727     {
1728       int other = reload_order[k];
1729 
1730       if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1731 	for (j = 0; j < rld[other].nregs; j++)
1732 	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1733     }
1734 
1735   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1736     {
1737       unsigned int regno = i;
1738 
1739       if (! TEST_HARD_REG_BIT (not_usable, regno)
1740 	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1741 	  && HARD_REGNO_MODE_OK (regno, rl->mode))
1742 	{
1743 	  int this_cost = spill_cost[regno];
1744 	  int ok = 1;
1745 	  unsigned int this_nregs = HARD_REGNO_NREGS (regno, rl->mode);
1746 
1747 	  for (j = 1; j < this_nregs; j++)
1748 	    {
1749 	      this_cost += spill_add_cost[regno + j];
1750 	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1751 		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1752 		ok = 0;
1753 	    }
1754 	  if (! ok)
1755 	    continue;
1756 	  if (rl->in && GET_CODE (rl->in) == REG && REGNO (rl->in) == regno)
1757 	    this_cost--;
1758 	  if (rl->out && GET_CODE (rl->out) == REG && REGNO (rl->out) == regno)
1759 	    this_cost--;
1760 	  if (this_cost < best_cost
1761 	      /* Among registers with equal cost, prefer caller-saved ones, or
1762 		 use REG_ALLOC_ORDER if it is defined.  */
1763 	      || (this_cost == best_cost
1764 #ifdef REG_ALLOC_ORDER
1765 		  && (inv_reg_alloc_order[regno]
1766 		      < inv_reg_alloc_order[best_reg])
1767 #else
1768 		  && call_used_regs[regno]
1769 		  && ! call_used_regs[best_reg]
1770 #endif
1771 		  ))
1772 	    {
1773 	      best_reg = regno;
1774 	      best_cost = this_cost;
1775 	    }
1776 	}
1777     }
1778   if (best_reg == -1)
1779     return 0;
1780 
1781   if (rtl_dump_file)
1782     fprintf (rtl_dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1783 
1784   rl->nregs = HARD_REGNO_NREGS (best_reg, rl->mode);
1785   rl->regno = best_reg;
1786 
1787   EXECUTE_IF_SET_IN_REG_SET
1788     (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j,
1789      {
1790        count_spilled_pseudo (best_reg, rl->nregs, j);
1791      });
1792 
1793   EXECUTE_IF_SET_IN_REG_SET
1794     (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j,
1795      {
1796        count_spilled_pseudo (best_reg, rl->nregs, j);
1797      });
1798 
1799   for (i = 0; i < rl->nregs; i++)
1800     {
1801       if (spill_cost[best_reg + i] != 0
1802 	  || spill_add_cost[best_reg + i] != 0)
1803 	abort ();
1804       SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1805     }
1806   return 1;
1807 }
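/* Added worked example, not from the original sources: for a two-register
   DImode reload, a candidate starting at hard reg 4 is costed as

       spill_cost[4] + spill_add_cost[5]

   less one if the reload's in or out value already sits in reg 4.  Ties
   are broken in favor of caller-saved registers, or by REG_ALLOC_ORDER
   when the target defines one.  The register numbers are invented for the
   example.  */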
1808 
1809 /* Find more reload regs to satisfy the remaining need of an insn, which
1810    is given by CHAIN.
1811    Do it by ascending class number, since otherwise a reg
1812    might be spilled for a big class and might fail to count
1813    for a smaller class even though it belongs to that class.  */
1814 
1815 static void
1816 find_reload_regs (chain)
1817      struct insn_chain *chain;
1818 {
1819   int i;
1820 
1821   /* In order to be certain of getting the registers we need,
1822      we must sort the reloads into order of increasing register class.
1823      Then our grabbing of reload registers will parallel the process
1824      that provided the reload registers.  */
1825   for (i = 0; i < chain->n_reloads; i++)
1826     {
1827       /* Show whether this reload already has a hard reg.  */
1828       if (chain->rld[i].reg_rtx)
1829 	{
1830 	  int regno = REGNO (chain->rld[i].reg_rtx);
1831 	  chain->rld[i].regno = regno;
1832 	  chain->rld[i].nregs
1833 	    = HARD_REGNO_NREGS (regno, GET_MODE (chain->rld[i].reg_rtx));
1834 	}
1835       else
1836 	chain->rld[i].regno = -1;
1837       reload_order[i] = i;
1838     }
1839 
1840   n_reloads = chain->n_reloads;
1841   memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1842 
1843   CLEAR_HARD_REG_SET (used_spill_regs_local);
1844 
1845   if (rtl_dump_file)
1846     fprintf (rtl_dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1847 
1848   qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1849 
1850   /* Compute the order of preference for hard registers to spill.  */
1851 
1852   order_regs_for_reload (chain);
1853 
1854   for (i = 0; i < n_reloads; i++)
1855     {
1856       int r = reload_order[i];
1857 
1858       /* Ignore reloads that got marked inoperative.  */
1859       if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1860 	  && ! rld[r].optional
1861 	  && rld[r].regno == -1)
1862 	if (! find_reg (chain, i))
1863 	  {
1864 	    spill_failure (chain->insn, rld[r].class);
1865 	    failure = 1;
1866 	    return;
1867 	  }
1868     }
1869 
1870   COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1871   IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
1872 
1873   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1874 }
1875 
1876 static void
1877 select_reload_regs ()
1878 {
1879   struct insn_chain *chain;
1880 
1881   /* Try to satisfy the needs for each insn.  */
1882   for (chain = insns_need_reload; chain != 0;
1883        chain = chain->next_need_reload)
1884     find_reload_regs (chain);
1885 }
1886 
1887 /* Delete all insns that were inserted by emit_caller_save_insns during
1888    this iteration.  */
1889 static void
1890 delete_caller_save_insns ()
1891 {
1892   struct insn_chain *c = reload_insn_chain;
1893 
1894   while (c != 0)
1895     {
1896       while (c != 0 && c->is_caller_save_insn)
1897 	{
1898 	  struct insn_chain *next = c->next;
1899 	  rtx insn = c->insn;
1900 
1901 	  if (c == reload_insn_chain)
1902 	    reload_insn_chain = next;
1903 	  delete_insn (insn);
1904 
1905 	  if (next)
1906 	    next->prev = c->prev;
1907 	  if (c->prev)
1908 	    c->prev->next = next;
1909 	  c->next = unused_insn_chains;
1910 	  unused_insn_chains = c;
1911 	  c = next;
1912 	}
1913       if (c != 0)
1914 	c = c->next;
1915     }
1916 }
1917 
1918 /* Handle the failure to find a register to spill.
1919    INSN should be one of the insns which needed this particular spill reg.  */
1920 
1921 static void
1922 spill_failure (insn, class)
1923      rtx insn;
1924      enum reg_class class;
1925 {
1926   static const char *const reg_class_names[] = REG_CLASS_NAMES;
1927   if (asm_noperands (PATTERN (insn)) >= 0)
1928     error_for_asm (insn, "can't find a register in class `%s' while reloading `asm'",
1929 		   reg_class_names[class]);
1930   else
1931     {
1932       error ("unable to find a register to spill in class `%s'",
1933 	     reg_class_names[class]);
1934       fatal_insn ("this is the insn:", insn);
1935     }
1936 }
1937 
1938 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
1939    data that is dead in INSN.  */
1940 
1941 static void
1942 delete_dead_insn (insn)
1943      rtx insn;
1944 {
1945   rtx prev = prev_real_insn (insn);
1946   rtx prev_dest;
1947 
1948   /* If the previous insn sets a register that dies in our insn, delete it
1949      too.  */
1950   if (prev && GET_CODE (PATTERN (prev)) == SET
1951       && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
1952       && reg_mentioned_p (prev_dest, PATTERN (insn))
1953       && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
1954       && ! side_effects_p (SET_SRC (PATTERN (prev))))
1955     delete_dead_insn (prev);
1956 
1957   PUT_CODE (insn, NOTE);
1958   NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1959   NOTE_SOURCE_FILE (insn) = 0;
1960 }
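/* Added illustration, not from the original sources: given a pair such as

       (insn 10 ... (set (reg:SI 70) (mem:SI ...)))
       (insn 11 ... (set (reg:SI fp) (reg:SI 70)))   ; reg 70 dies here

   deleting insn 11 through this function also deletes insn 10, whose only
   purpose was to load the value that dies in insn 11; both are turned into
   NOTE_INSN_DELETED notes.  The insn numbers and the use of fp are
   invented for the example.  */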
1961 
1962 /* Modify the home of pseudo-reg I.
1963    The new home is present in reg_renumber[I].
1964 
1965    FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
1966    or it may be -1, meaning there is none or it is not relevant.
1967    This is used so that all pseudos spilled from a given hard reg
1968    can share one stack slot.  */
1969 
1970 static void
1971 alter_reg (i, from_reg)
1972      int i;
1973      int from_reg;
1974 {
1975   /* When outputting an inline function, this can happen
1976      for a reg that isn't actually used.  */
1977   if (regno_reg_rtx[i] == 0)
1978     return;
1979 
1980   /* If the reg got changed to a MEM at rtl-generation time,
1981      ignore it.  */
1982   if (GET_CODE (regno_reg_rtx[i]) != REG)
1983     return;
1984 
1985   /* Modify the reg-rtx to contain the new hard reg
1986      number or else to contain its pseudo reg number.  */
1987   REGNO (regno_reg_rtx[i])
1988     = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
1989 
1990   /* If we have a pseudo that is needed but has no hard reg or equivalent,
1991      allocate a stack slot for it.  */
1992 
1993   if (reg_renumber[i] < 0
1994       && REG_N_REFS (i) > 0
1995       && reg_equiv_constant[i] == 0
1996       && reg_equiv_memory_loc[i] == 0)
1997     {
1998       rtx x;
1999       unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2000       unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2001       int adjust = 0;
2002 
2003       /* Each pseudo reg has an inherent size which comes from its own mode,
2004 	 and a total size which provides room for paradoxical subregs
2005 	 which refer to the pseudo reg in wider modes.
2006 
2007 	 We can use a slot already allocated if it provides both
2008 	 enough inherent space and enough total space.
2009 	 Otherwise, we allocate a new slot, making sure that it has no less
2010 	 inherent space, and no less total space, than the previous slot.  */
2011       if (from_reg == -1)
2012 	{
2013 	  /* No known place to spill from => no slot to reuse.  */
2014 	  x = assign_stack_local_for_pseudo_reg (GET_MODE (regno_reg_rtx[i]), total_size,
2015 				  inherent_size == total_size ? 0 : -1);
2016 	  if (BYTES_BIG_ENDIAN)
2017 	    /* Cancel the big-endian correction done in assign_stack_local.
2018 	       Get the address of the beginning of the slot.
2019 	       This is so we can do a big-endian correction unconditionally
2020 	       below.  */
2021 	    adjust = inherent_size - total_size;
2022 
2023 	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2024 
2025 	  /* Nothing can alias this slot except this pseudo.  */
2026 	  set_mem_alias_set (x, new_alias_set ());
2027 	}
2028 
2029       /* Reuse a stack slot if possible.  */
2030       else if (spill_stack_slot[from_reg] != 0
2031 	       && spill_stack_slot_width[from_reg] >= total_size
2032 	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2033 		   >= inherent_size))
2034 	x = spill_stack_slot[from_reg];
2035 
2036       /* Allocate a bigger slot.  */
2037       else
2038 	{
2039 	  /* Compute maximum size needed, both for inherent size
2040 	     and for total size.  */
2041 	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2042 	  rtx stack_slot;
2043 
2044 	  if (spill_stack_slot[from_reg])
2045 	    {
2046 	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2047 		  > inherent_size)
2048 		mode = GET_MODE (spill_stack_slot[from_reg]);
2049 	      if (spill_stack_slot_width[from_reg] > total_size)
2050 		total_size = spill_stack_slot_width[from_reg];
2051 	    }
2052 
2053 	  /* Make a slot with that size.  */
2054 	  x = assign_stack_local (mode, total_size,
2055 				  inherent_size == total_size ? 0 : -1);
2056 	  stack_slot = x;
2057 
2058 	  /* All pseudos mapped to this slot can alias each other.  */
2059 	  if (spill_stack_slot[from_reg])
2060 	    set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
2061 	  else
2062 	    set_mem_alias_set (x, new_alias_set ());
2063 
2064 	  if (BYTES_BIG_ENDIAN)
2065 	    {
2066 	      /* Cancel the big-endian correction done in assign_stack_local.
2067 		 Get the address of the beginning of the slot.
2068 		 This is so we can do a big-endian correction unconditionally
2069 		 below.  */
2070 	      adjust = GET_MODE_SIZE (mode) - total_size;
2071 	      if (adjust)
2072 		stack_slot
2073 		  = adjust_address_nv (x, mode_for_size (total_size
2074 							 * BITS_PER_UNIT,
2075 							 MODE_INT, 1),
2076 				       adjust);
2077 	    }
2078 
2079 	  spill_stack_slot[from_reg] = stack_slot;
2080 	  spill_stack_slot_width[from_reg] = total_size;
2081 	}
2082 
2083       /* On a big endian machine, the "address" of the slot
2084 	 is the address of the low part that fits its inherent mode.  */
2085       if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2086 	adjust += (total_size - inherent_size);
2087 
2088       /* If we have any adjustment to make, or if the stack slot is the
2089 	 wrong mode, make a new stack slot.  */
2090       x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2091 
2092       /* If we have a decl for the original register, set it for the
2093 	 memory.  If this is a shared MEM, make a copy.  */
2094       if (REGNO_DECL (i))
2095 	{
2096 	  rtx decl = DECL_RTL_IF_SET (REGNO_DECL (i));
2097 
2098 	  /* We can do this only for the DECL's home pseudo, not for
2099 	     any copies of it, since otherwise when the stack slot
2100 	     is reused, nonoverlapping_memrefs_p might think they
2101 	     cannot overlap.  */
2102 	  if (decl && GET_CODE (decl) == REG && REGNO (decl) == (unsigned) i)
2103 	    {
2104 	      if (from_reg != -1 && spill_stack_slot[from_reg] == x)
2105 		x = copy_rtx (x);
2106 
2107 	      set_mem_expr (x, REGNO_DECL (i));
2108 	    }
2109 	}
2110 
2111       /* Save the stack slot for later.  */
2112       reg_equiv_memory_loc[i] = x;
2113     }
2114 }
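/* Added worked example, not from the original sources: an SImode pseudo
   that is also referenced through a paradoxical DImode subreg has
   inherent_size == 4 but total_size == 8 (from reg_max_ref_width), so its
   slot must provide the full 8 bytes.  On a big-endian target the
   adjustments above leave the recorded address pointing at the part of the
   slot that holds the SImode value in its own mode, so narrow and wide
   references agree on where the data lives.  The sizes assume 32-bit
   SImode and 64-bit DImode.  */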
2115 
2116 /* Mark the slots in regs_ever_live for the hard regs
2117    used by pseudo-reg number REGNO.  */
2118 
2119 void
2120 mark_home_live (regno)
2121      int regno;
2122 {
2123   int i, lim;
2124 
2125   i = reg_renumber[regno];
2126   if (i < 0)
2127     return;
2128   lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2129   while (i < lim)
2130     regs_ever_live[i++] = 1;
2131 }
2132 
2133 /* This function handles the tracking of elimination offsets around branches.
2134 
2135    X is a piece of RTL being scanned.
2136 
2137    INSN is the insn that it came from, if any.
2138 
2139    INITIAL_P is nonzero if we are to set the offset to be the initial
2140    offset and zero if we are setting the offset of the label to be the
2141    current offset.  */
2142 
2143 static void
2144 set_label_offsets (x, insn, initial_p)
2145      rtx x;
2146      rtx insn;
2147      int initial_p;
2148 {
2149   enum rtx_code code = GET_CODE (x);
2150   rtx tem;
2151   unsigned int i;
2152   struct elim_table *p;
2153 
2154   switch (code)
2155     {
2156     case LABEL_REF:
2157       if (LABEL_REF_NONLOCAL_P (x))
2158 	return;
2159 
2160       x = XEXP (x, 0);
2161 
2162       /* ... fall through ...  */
2163 
2164     case CODE_LABEL:
2165       /* If we know nothing about this label, set the desired offsets.  Note
2166 	 that this sets the offset at a label to be the offset before a label
2167 	 if we don't know anything about the label.  This is not correct for
2168 	 the label after a BARRIER, but is the best guess we can make.  If
2169 	 we guessed wrong, we will suppress an elimination that might have
2170 	 been possible had we been able to guess correctly.  */
2171 
2172       if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2173 	{
2174 	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2175 	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2176 	      = (initial_p ? reg_eliminate[i].initial_offset
2177 		 : reg_eliminate[i].offset);
2178 	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2179 	}
2180 
2181       /* Otherwise, if this is the definition of a label and it is
2182 	 preceded by a BARRIER, set our offsets to the known offset of
2183 	 that label.  */
2184 
2185       else if (x == insn
2186 	       && (tem = prev_nonnote_insn (insn)) != 0
2187 	       && GET_CODE (tem) == BARRIER)
2188 	set_offsets_for_label (insn);
2189       else
2190 	/* If neither of the above cases is true, compare each offset
2191 	   with those previously recorded and suppress any eliminations
2192 	   where the offsets disagree.  */
2193 
2194 	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2195 	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2196 	      != (initial_p ? reg_eliminate[i].initial_offset
2197 		  : reg_eliminate[i].offset))
2198 	    reg_eliminate[i].can_eliminate = 0;
2199 
2200       return;
2201 
2202     case JUMP_INSN:
2203       set_label_offsets (PATTERN (insn), insn, initial_p);
2204 
2205       /* ... fall through ...  */
2206 
2207     case INSN:
2208     case CALL_INSN:
2209       /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2210 	 and hence must have all eliminations at their initial offsets.  */
2211       for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2212 	if (REG_NOTE_KIND (tem) == REG_LABEL)
2213 	  set_label_offsets (XEXP (tem, 0), insn, 1);
2214       return;
2215 
2216     case PARALLEL:
2217     case ADDR_VEC:
2218     case ADDR_DIFF_VEC:
2219       /* Each of the labels in the parallel or address vector must be
2220 	 at their initial offsets.  We want the first field for PARALLEL
2221 	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */
2222 
2223       for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2224 	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2225 			   insn, initial_p);
2226       return;
2227 
2228     case SET:
2229       /* We only care about setting PC.  If the source is not RETURN,
2230 	 IF_THEN_ELSE, or a label, disable any eliminations not at
2231 	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
2232 	 isn't one of those possibilities.  For branches to a label,
2233 	 call ourselves recursively.
2234 
2235 	 Note that this can disable elimination unnecessarily when we have
2236 	 a non-local goto since it will look like a non-constant jump to
2237 	 someplace in the current function.  This isn't a significant
2238 	 problem since such jumps will normally be when all elimination
2239 	 pairs are back to their initial offsets.  */
2240 
2241       if (SET_DEST (x) != pc_rtx)
2242 	return;
2243 
2244       switch (GET_CODE (SET_SRC (x)))
2245 	{
2246 	case PC:
2247 	case RETURN:
2248 	  return;
2249 
2250 	case LABEL_REF:
2251 	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2252 	  return;
2253 
2254 	case IF_THEN_ELSE:
2255 	  tem = XEXP (SET_SRC (x), 1);
2256 	  if (GET_CODE (tem) == LABEL_REF)
2257 	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
2258 	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2259 	    break;
2260 
2261 	  tem = XEXP (SET_SRC (x), 2);
2262 	  if (GET_CODE (tem) == LABEL_REF)
2263 	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
2264 	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2265 	    break;
2266 	  return;
2267 
2268 	default:
2269 	  break;
2270 	}
2271 
2272       /* If we reach here, all eliminations must be at their initial
2273 	 offset because we are doing a jump to a variable address.  */
2274       for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2275 	if (p->offset != p->initial_offset)
2276 	  p->can_eliminate = 0;
2277       break;
2278 
2279     default:
2280       break;
2281     }
2282 }
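/* Added illustration, not from the original sources: if one path reaches a
   label with the frame-pointer elimination at offset 16 and another path
   reaches the same label with offset 24, the offsets recorded on the first
   visit disagree with the second, so can_eliminate is cleared for that
   elimination rather than guessing which offset holds at the label.  The
   offsets are invented for the example.  */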
2283 
2284 /* Scan X and replace any eliminable registers (such as fp) with a
2285    replacement (such as sp), plus an offset.
2286 
2287    MEM_MODE is the mode of an enclosing MEM.  We need this to know how
2288    much to adjust a register for, e.g., PRE_DEC.  Also, if we are inside a
2289    MEM, we are allowed to replace a sum of a register and the constant zero
2290    with the register, which we cannot do outside a MEM.  In addition, we need
2291    to record the fact that a register is referenced outside a MEM.
2292 
2293    If INSN is an insn, it is the insn containing X.  If we replace a REG
2294    in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2295    CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2296    the REG is being modified.
2297 
2298    Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2299    That's used when we eliminate in expressions stored in notes.
2300    This means, do not set ref_outside_mem even if the reference
2301    is outside of MEMs.
2302 
2303    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2304    replacements done assuming all offsets are at their initial values.  If
2305    they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2306    encounter, return the actual location so that find_reloads will do
2307    the proper thing.  */
2308 
2309 rtx
2310 eliminate_regs (x, mem_mode, insn)
2311      rtx x;
2312      enum machine_mode mem_mode;
2313      rtx insn;
2314 {
2315   enum rtx_code code = GET_CODE (x);
2316   struct elim_table *ep;
2317   int regno;
2318   rtx new;
2319   int i, j;
2320   const char *fmt;
2321   int copied = 0;
2322 
2323   if (! current_function_decl)
2324     return x;
2325 
2326   switch (code)
2327     {
2328     case CONST_INT:
2329     case CONST_DOUBLE:
2330     case CONST_VECTOR:
2331     case CONST:
2332     case SYMBOL_REF:
2333     case CODE_LABEL:
2334     case PC:
2335     case CC0:
2336     case ASM_INPUT:
2337     case ADDR_VEC:
2338     case ADDR_DIFF_VEC:
2339     case RETURN:
2340       return x;
2341 
2342     case ADDRESSOF:
2343       /* This is only for the benefit of the debugging backends, which call
2344 	 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2345 	 removed after CSE.  */
2346       new = eliminate_regs (XEXP (x, 0), 0, insn);
2347       if (GET_CODE (new) == MEM)
2348 	return XEXP (new, 0);
2349       return x;
2350 
2351     case REG:
2352       regno = REGNO (x);
2353 
2354       /* First handle the case where we encounter a bare register that
2355 	 is eliminable.  Replace it with a PLUS.  */
2356       if (regno < FIRST_PSEUDO_REGISTER)
2357 	{
2358 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2359 	       ep++)
2360 	    if (ep->from_rtx == x && ep->can_eliminate)
2361 	      return plus_constant (ep->to_rtx, ep->previous_offset);
2362 
2363 	}
2364       else if (reg_renumber && reg_renumber[regno] < 0
2365 	       && reg_equiv_constant && reg_equiv_constant[regno]
2366 	       && ! CONSTANT_P (reg_equiv_constant[regno]))
2367 	return eliminate_regs (copy_rtx (reg_equiv_constant[regno]),
2368 			       mem_mode, insn);
2369       return x;
2370 
2371     /* You might think handling MINUS in a manner similar to PLUS is a
2372        good idea.  It is not.  It has been tried multiple times and every
2373        time the change has had to have been reverted.
2374        time the change has had to be reverted.
2375        Other parts of reload know a PLUS is special (gen_reload for example)
2376        and require special code to handle a reloaded PLUS operand.
2377 
2378        Also consider backends where the flags register is clobbered by a
2379        MINUS, but we can emit a PLUS that does not clobber flags (ia32,
2380        lea instruction comes to mind).  If we try to reload a MINUS, we
2381        may kill the flags register that was holding a useful value.
2382 
2383        So, please before trying to handle MINUS, consider reload as a
2384        whole instead of this little section as well as the backend issues.  */
2385     case PLUS:
2386       /* If this is the sum of an eliminable register and a constant, rework
2387 	 the sum.  */
2388       if (GET_CODE (XEXP (x, 0)) == REG
2389 	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2390 	  && CONSTANT_P (XEXP (x, 1)))
2391 	{
2392 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2393 	       ep++)
2394 	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2395 	      {
2396 		/* The only time we want to replace a PLUS with a REG (this
2397 		   occurs when the constant operand of the PLUS is the negative
2398 		   of the offset) is when we are inside a MEM.  We won't want
2399 		   to do so at other times because that would change the
2400 		   structure of the insn in a way that reload can't handle.
2401 		   We special-case the commonest situation in
2402 		   eliminate_regs_in_insn, so just replace a PLUS with a
2403 		   PLUS here, unless inside a MEM.  */
2404 		if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2405 		    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2406 		  return ep->to_rtx;
2407 		else
2408 		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
2409 				       plus_constant (XEXP (x, 1),
2410 						      ep->previous_offset));
2411 	      }
2412 
2413 	  /* If the register is not eliminable, we are done since the other
2414 	     operand is a constant.  */
2415 	  return x;
2416 	}
2417 
2418       /* If this is part of an address, we want to bring any constant to the
2419 	 outermost PLUS.  We will do this by doing register replacement in
2420 	 our operands and seeing if a constant shows up in one of them.
2421 
2422 	 Note that there is no risk of modifying the structure of the insn,
2423 	 since we only get called for its operands, thus we are either
2424 	 modifying the address inside a MEM, or something like an address
2425 	 operand of a load-address insn.  */
2426 
2427       {
2428 	rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2429 	rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2430 
2431 	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2432 	  {
2433 	    /* If one side is a PLUS and the other side is a pseudo that
2434 	       didn't get a hard register but has a reg_equiv_constant,
2435 	       we must replace the constant here since it may no longer
2436 	       be in the position of any operand.  */
2437 	    if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2438 		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
2439 		&& reg_renumber[REGNO (new1)] < 0
2440 		&& reg_equiv_constant != 0
2441 		&& reg_equiv_constant[REGNO (new1)] != 0)
2442 	      new1 = reg_equiv_constant[REGNO (new1)];
2443 	    else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2444 		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2445 		     && reg_renumber[REGNO (new0)] < 0
2446 		     && reg_equiv_constant[REGNO (new0)] != 0)
2447 	      new0 = reg_equiv_constant[REGNO (new0)];
2448 
2449 	    new = form_sum (new0, new1);
2450 
2451 	    /* As above, if we are not inside a MEM we do not want to
2452 	       turn a PLUS into something else.  We might try to do so here
2453 	       for an addition of 0 if we aren't optimizing.  */
2454 	    if (! mem_mode && GET_CODE (new) != PLUS)
2455 	      return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2456 	    else
2457 	      return new;
2458 	  }
2459       }
2460       return x;
2461 
2462     case MULT:
2463       /* If this is the product of an eliminable register and a
2464 	 constant, apply the distributive law and move the constant out
2465 	 so that we have (plus (mult ..) ..).  This is needed in order
2466 	 to keep load-address insns valid.   This case is pathological.
2467 	 We ignore the possibility of overflow here.  */
2468       if (GET_CODE (XEXP (x, 0)) == REG
2469 	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2470 	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
2471 	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2472 	     ep++)
2473 	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2474 	    {
2475 	      if (! mem_mode
2476 		  /* Refs inside notes don't count for this purpose.  */
2477 		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2478 				      || GET_CODE (insn) == INSN_LIST)))
2479 		ep->ref_outside_mem = 1;
2480 
2481 	      return
2482 		plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2483 			       ep->previous_offset * INTVAL (XEXP (x, 1)));
2484 	    }
2485 
2486       /* ... fall through ...  */
2487 
2488     case CALL:
2489     case COMPARE:
2490     /* See comments before PLUS about handling MINUS.  */
2491     case MINUS:
2492     case DIV:      case UDIV:
2493     case MOD:      case UMOD:
2494     case AND:      case IOR:      case XOR:
2495     case ROTATERT: case ROTATE:
2496     case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2497     case NE:       case EQ:
2498     case GE:       case GT:       case GEU:    case GTU:
2499     case LE:       case LT:       case LEU:    case LTU:
2500       {
2501 	rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2502 	rtx new1
2503 	  = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2504 
2505 	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2506 	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2507       }
2508       return x;
2509 
2510     case EXPR_LIST:
2511       /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
2512       if (XEXP (x, 0))
2513 	{
2514 	  new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2515 	  if (new != XEXP (x, 0))
2516 	    {
2517 	      /* If this is a REG_DEAD note, it is not valid anymore.
2518 		 Using the eliminated version could result in creating a
2519 		 REG_DEAD note for the stack or frame pointer.  */
2520 	      if (GET_MODE (x) == REG_DEAD)
2521 		return (XEXP (x, 1)
2522 			? eliminate_regs (XEXP (x, 1), mem_mode, insn)
2523 			: NULL_RTX);
2524 
2525 	      x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2526 	    }
2527 	}
2528 
2529       /* ... fall through ...  */
2530 
2531     case INSN_LIST:
2532       /* Now do eliminations in the rest of the chain.  If this was
2533 	 an EXPR_LIST, this might result in allocating more memory than is
2534 	 strictly needed, but it simplifies the code.  */
2535       if (XEXP (x, 1))
2536 	{
2537 	  new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2538 	  if (new != XEXP (x, 1))
2539 	    return
2540 	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2541 	}
2542       return x;
2543 
2544     case PRE_INC:
2545     case POST_INC:
2546     case PRE_DEC:
2547     case POST_DEC:
2548     case STRICT_LOW_PART:
2549     case NEG:          case NOT:
2550     case SIGN_EXTEND:  case ZERO_EXTEND:
2551     case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2552     case FLOAT:        case FIX:
2553     case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2554     case ABS:
2555     case SQRT:
2556     case FFS:
2557       new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2558       if (new != XEXP (x, 0))
2559 	return gen_rtx_fmt_e (code, GET_MODE (x), new);
2560       return x;
2561 
2562     case SUBREG:
2563       /* Similar to above processing, but preserve SUBREG_BYTE.
2564 	 Convert (subreg (mem)) to (mem) if not paradoxical.
2565 	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2566 	 pseudo didn't get a hard reg, we must replace this with the
2567 	 eliminated version of the memory location because push_reloads
2568 	 may do the replacement in certain circumstances.  */
2569       if (GET_CODE (SUBREG_REG (x)) == REG
2570 	  && (GET_MODE_SIZE (GET_MODE (x))
2571 	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2572 	  && reg_equiv_memory_loc != 0
2573 	  && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2574 	{
2575 	  new = SUBREG_REG (x);
2576 	}
2577       else
2578 	new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2579 
2580       if (new != SUBREG_REG (x))
2581 	{
2582 	  int x_size = GET_MODE_SIZE (GET_MODE (x));
2583 	  int new_size = GET_MODE_SIZE (GET_MODE (new));
2584 
2585 	  if (GET_CODE (new) == MEM
2586 	      && ((x_size < new_size
2587 #ifdef WORD_REGISTER_OPERATIONS
2588 		   /* On these machines, combine can create rtl of the form
2589 		      (set (subreg:m1 (reg:m2 R) 0) ...)
2590 		      where m1 < m2, and expects something interesting to
2591 		      happen to the entire word.  Moreover, it will use the
2592 		      (reg:m2 R) later, expecting all bits to be preserved.
2593 		      So if the number of words is the same, preserve the
2594 		      subreg so that push_reloads can see it.  */
2595 		   && ! ((x_size - 1) / UNITS_PER_WORD
2596 			 == (new_size - 1) / UNITS_PER_WORD)
2597 #endif
2598 		   )
2599 		  || x_size == new_size)
2600 	      )
2601 	    return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
2602 	  else
2603 	    return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
2604 	}
2605 
2606       return x;
2607 
2608     case MEM:
2609       /* This is only for the benefit of the debugging backends, which call
2610 	 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2611 	 removed after CSE.  */
2612       if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
2613 	return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
2614 
2615       /* Our only special processing is to pass the mode of the MEM to our
2616 	 recursive call and copy the flags.  While we are here, handle this
2617 	 case more efficiently.  */
2618       return
2619 	replace_equiv_address_nv (x,
2620 				  eliminate_regs (XEXP (x, 0),
2621 						  GET_MODE (x), insn));
2622 
2623     case USE:
2624       /* Handle insn_list USE that a call to a pure function may generate.  */
2625       new = eliminate_regs (XEXP (x, 0), 0, insn);
2626       if (new != XEXP (x, 0))
2627 	return gen_rtx_USE (GET_MODE (x), new);
2628       return x;
2629 
2630     case CLOBBER:
2631     case ASM_OPERANDS:
2632     case SET:
2633       abort ();
2634 
2635     default:
2636       break;
2637     }
2638 
2639   /* Process each of our operands recursively.  If any have changed, make a
2640      copy of the rtx.  */
2641   fmt = GET_RTX_FORMAT (code);
2642   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2643     {
2644       if (*fmt == 'e')
2645 	{
2646 	  new = eliminate_regs (XEXP (x, i), mem_mode, insn);
2647 	  if (new != XEXP (x, i) && ! copied)
2648 	    {
2649 	      rtx new_x = rtx_alloc (code);
2650 	      memcpy (new_x, x,
2651 		      (sizeof (*new_x) - sizeof (new_x->fld)
2652 		       + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
2653 	      x = new_x;
2654 	      copied = 1;
2655 	    }
2656 	  XEXP (x, i) = new;
2657 	}
2658       else if (*fmt == 'E')
2659 	{
2660 	  int copied_vec = 0;
2661 	  for (j = 0; j < XVECLEN (x, i); j++)
2662 	    {
2663 	      new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2664 	      if (new != XVECEXP (x, i, j) && ! copied_vec)
2665 		{
2666 		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2667 					     XVEC (x, i)->elem);
2668 		  if (! copied)
2669 		    {
2670 		      rtx new_x = rtx_alloc (code);
2671 		      memcpy (new_x, x,
2672 			      (sizeof (*new_x) - sizeof (new_x->fld)
2673 			       + (sizeof (new_x->fld[0])
2674 				  * GET_RTX_LENGTH (code))));
2675 		      x = new_x;
2676 		      copied = 1;
2677 		    }
2678 		  XVEC (x, i) = new_v;
2679 		  copied_vec = 1;
2680 		}
2681 	      XVECEXP (x, i, j) = new;
2682 	    }
2683 	}
2684     }
2685 
2686   return x;
2687 }
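/* Added illustration, not from the original sources: assume a frame
   pointer to stack pointer elimination whose current offset is 12.  A
   reference such as

       (mem:SI (plus:SI (reg:SI fp) (const_int 4)))

   is rewritten by eliminate_regs into

       (mem:SI (plus:SI (reg:SI sp) (const_int 16)))

   and a sum whose constant exactly cancels the offset collapses to the
   bare replacement register, but only inside a MEM.  The offset and modes
   are invented for the example.  */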
2688 
2689 /* Scan rtx X for modifications of elimination target registers.  Update
2690    the table of eliminables to reflect the changed state.  MEM_MODE is
2691    the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.  */
2692 
2693 static void
2694 elimination_effects (x, mem_mode)
2695      rtx x;
2696      enum machine_mode mem_mode;
2697 
2698 {
2699   enum rtx_code code = GET_CODE (x);
2700   struct elim_table *ep;
2701   int regno;
2702   int i, j;
2703   const char *fmt;
2704 
2705   switch (code)
2706     {
2707     case CONST_INT:
2708     case CONST_DOUBLE:
2709     case CONST_VECTOR:
2710     case CONST:
2711     case SYMBOL_REF:
2712     case CODE_LABEL:
2713     case PC:
2714     case CC0:
2715     case ASM_INPUT:
2716     case ADDR_VEC:
2717     case ADDR_DIFF_VEC:
2718     case RETURN:
2719       return;
2720 
2721     case ADDRESSOF:
2722       abort ();
2723 
2724     case REG:
2725       regno = REGNO (x);
2726 
2727       /* First handle the case where we encounter a bare register that
2728 	 is eliminable; we only need to note whether it occurs outside a MEM.  */
2729       if (regno < FIRST_PSEUDO_REGISTER)
2730 	{
2731 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2732 	       ep++)
2733 	    if (ep->from_rtx == x && ep->can_eliminate)
2734 	      {
2735 		if (! mem_mode)
2736 		  ep->ref_outside_mem = 1;
2737 		return;
2738 	      }
2739 
2740 	}
2741       else if (reg_renumber[regno] < 0 && reg_equiv_constant
2742 	       && reg_equiv_constant[regno]
2743 	       && ! function_invariant_p (reg_equiv_constant[regno]))
2744 	elimination_effects (reg_equiv_constant[regno], mem_mode);
2745       return;
2746 
2747     case PRE_INC:
2748     case POST_INC:
2749     case PRE_DEC:
2750     case POST_DEC:
2751     case POST_MODIFY:
2752     case PRE_MODIFY:
2753       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2754 	if (ep->to_rtx == XEXP (x, 0))
2755 	  {
2756 	    int size = GET_MODE_SIZE (mem_mode);
2757 
2758 	    /* If more bytes than MEM_MODE are pushed, account for them.  */
2759 #ifdef PUSH_ROUNDING
2760 	    if (ep->to_rtx == stack_pointer_rtx)
2761 	      size = PUSH_ROUNDING (size);
2762 #endif
2763 	    if (code == PRE_DEC || code == POST_DEC)
2764 	      ep->offset += size;
2765 	    else if (code == PRE_INC || code == POST_INC)
2766 	      ep->offset -= size;
2767 	    else if ((code == PRE_MODIFY || code == POST_MODIFY)
2768 		     && GET_CODE (XEXP (x, 1)) == PLUS
2769 		     && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2770 		     && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
2771 	      ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
2772 	  }
2773 
2774       /* These two aren't unary operators.  */
2775       if (code == POST_MODIFY || code == PRE_MODIFY)
2776 	break;
2777 
2778       /* Fall through to generic unary operation case.  */
2779     case STRICT_LOW_PART:
2780     case NEG:          case NOT:
2781     case SIGN_EXTEND:  case ZERO_EXTEND:
2782     case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2783     case FLOAT:        case FIX:
2784     case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2785     case ABS:
2786     case SQRT:
2787     case FFS:
2788       elimination_effects (XEXP (x, 0), mem_mode);
2789       return;
2790 
2791     case SUBREG:
2792       if (GET_CODE (SUBREG_REG (x)) == REG
2793 	  && (GET_MODE_SIZE (GET_MODE (x))
2794 	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2795 	  && reg_equiv_memory_loc != 0
2796 	  && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2797 	return;
2798 
2799       elimination_effects (SUBREG_REG (x), mem_mode);
2800       return;
2801 
2802     case USE:
2803       /* If using a register that is the source of an elimination we still
2804 	 think can be performed, note it cannot be performed since we don't
2805 	 know how this register is used.  */
2806       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2807 	if (ep->from_rtx == XEXP (x, 0))
2808 	  ep->can_eliminate = 0;
2809 
2810       elimination_effects (XEXP (x, 0), mem_mode);
2811       return;
2812 
2813     case CLOBBER:
2814       /* If clobbering a register that is the replacement register for an
2815 	 elimination we still think can be performed, note that it cannot
2816 	 be performed.  Otherwise, we need not be concerned about it.  */
2817       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2818 	if (ep->to_rtx == XEXP (x, 0))
2819 	  ep->can_eliminate = 0;
2820 
2821       elimination_effects (XEXP (x, 0), mem_mode);
2822       return;
2823 
2824     case SET:
2825       /* Check for setting a register that we know about.  */
2826       if (GET_CODE (SET_DEST (x)) == REG)
2827 	{
2828 	  /* See if this is setting the replacement register for an
2829 	     elimination.
2830 
2831 	     If DEST is the hard frame pointer, we do nothing because we
2832 	     assume that all assignments to the frame pointer are for
2833 	     non-local gotos and are being done at a time when they are valid
2834 	     and do not disturb anything else.  Some machines want to
2835 	     eliminate a fake argument pointer (or even a fake frame pointer)
2836 	     with either the real frame pointer or the stack pointer.  Assignments to
2837 	     the hard frame pointer must not prevent this elimination.  */
2838 
2839 	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2840 	       ep++)
2841 	    if (ep->to_rtx == SET_DEST (x)
2842 		&& SET_DEST (x) != hard_frame_pointer_rtx)
2843 	      {
2844 		/* If it is being incremented, adjust the offset.  Otherwise,
2845 		   this elimination can't be done.  */
2846 		rtx src = SET_SRC (x);
2847 
2848 		if (GET_CODE (src) == PLUS
2849 		    && XEXP (src, 0) == SET_DEST (x)
2850 		    && GET_CODE (XEXP (src, 1)) == CONST_INT)
2851 		  ep->offset -= INTVAL (XEXP (src, 1));
2852 		else
2853 		  ep->can_eliminate = 0;
2854 	      }
2855 	}
2856 
2857       elimination_effects (SET_DEST (x), 0);
2858       elimination_effects (SET_SRC (x), 0);
2859       return;
2860 
2861     case MEM:
2862       if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
2863 	abort ();
2864 
2865       /* Our only special processing is to pass the mode of the MEM to our
2866 	 recursive call.  */
2867       elimination_effects (XEXP (x, 0), GET_MODE (x));
2868       return;
2869 
2870     default:
2871       break;
2872     }
2873 
2874   fmt = GET_RTX_FORMAT (code);
2875   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2876     {
2877       if (*fmt == 'e')
2878 	elimination_effects (XEXP (x, i), mem_mode);
2879       else if (*fmt == 'E')
2880 	for (j = 0; j < XVECLEN (x, i); j++)
2881 	  elimination_effects (XVECEXP (x, i, j), mem_mode);
2882     }
2883 }
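/* Added illustration, not from the original sources: scanning a push such
   as

       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 65))

   the PRE_DEC case above increases the offset of every elimination whose
   replacement register is sp by the size of the pushed mode (rounded by
   PUSH_ROUNDING where that applies), so later references through the
   elimination still reach the intended slot.  The RTL is a typical 32-bit
   push shown only for illustration.  */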
2884 
2885 /* Descend through rtx X and verify that no references to eliminable registers
2886    remain.  If any do remain, mark the involved register as not
2887    eliminable.  */
2888 
2889 static void
2890 check_eliminable_occurrences (x)
2891      rtx x;
2892 {
2893   const char *fmt;
2894   int i;
2895   enum rtx_code code;
2896 
2897   if (x == 0)
2898     return;
2899 
2900   code = GET_CODE (x);
2901 
2902   if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2903     {
2904       struct elim_table *ep;
2905 
2906       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2907 	if (ep->from_rtx == x && ep->can_eliminate)
2908 	  ep->can_eliminate = 0;
2909       return;
2910     }
2911 
2912   fmt = GET_RTX_FORMAT (code);
2913   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2914     {
2915       if (*fmt == 'e')
2916 	check_eliminable_occurrences (XEXP (x, i));
2917       else if (*fmt == 'E')
2918 	{
2919 	  int j;
2920 	  for (j = 0; j < XVECLEN (x, i); j++)
2921 	    check_eliminable_occurrences (XVECEXP (x, i, j));
2922 	}
2923     }
2924 }
2925 
2926 /* Scan INSN and eliminate all eliminable registers in it.
2927 
2928    If REPLACE is nonzero, do the replacement destructively.  Also
2929    delete the insn as dead if it is setting an eliminable register.
2930 
2931    If REPLACE is zero, do all our allocations in reload_obstack.
2932 
2933    If no eliminations were done and this insn doesn't require any elimination
2934    processing (these are not identical conditions: it might be updating sp,
2935    but not referencing fp; this needs to be seen during reload_as_needed so
2936    that the offset between fp and sp can be taken into consideration), zero
2937    is returned.  Otherwise, 1 is returned.  */
2938 
2939 static int
2940 eliminate_regs_in_insn (insn, replace)
2941      rtx insn;
2942      int replace;
2943 {
2944   int icode = recog_memoized (insn);
2945   rtx old_body = PATTERN (insn);
2946   int insn_is_asm = asm_noperands (old_body) >= 0;
2947   rtx old_set = single_set (insn);
2948   rtx new_body;
2949   int val = 0;
2950   int i, any_changes;
2951   rtx substed_operand[MAX_RECOG_OPERANDS];
2952   rtx orig_operand[MAX_RECOG_OPERANDS];
2953   struct elim_table *ep;
2954 
2955   if (! insn_is_asm && icode < 0)
2956     {
2957       if (GET_CODE (PATTERN (insn)) == USE
2958 	  || GET_CODE (PATTERN (insn)) == CLOBBER
2959 	  || GET_CODE (PATTERN (insn)) == ADDR_VEC
2960 	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
2961 	  || GET_CODE (PATTERN (insn)) == ASM_INPUT)
2962 	return 0;
2963       abort ();
2964     }
2965 
2966   if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
2967       && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
2968     {
2969       /* Check for setting an eliminable register.  */
2970       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2971 	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
2972 	  {
2973 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2974 	    /* If this is setting the frame pointer register to the
2975 	       hardware frame pointer register and this is an elimination
2976 	       that will be done (tested above), this insn is really
2977 	       adjusting the frame pointer downward to compensate for
2978 	       the adjustment done before a nonlocal goto.  */
2979 	    if (ep->from == FRAME_POINTER_REGNUM
2980 		&& ep->to == HARD_FRAME_POINTER_REGNUM)
2981 	      {
2982 		rtx base = SET_SRC (old_set);
2983 		rtx base_insn = insn;
2984 		int offset = 0;
2985 
2986 		while (base != ep->to_rtx)
2987 		  {
2988 		    rtx prev_insn, prev_set;
2989 
2990 		    if (GET_CODE (base) == PLUS
2991 		        && GET_CODE (XEXP (base, 1)) == CONST_INT)
2992 		      {
2993 		        offset += INTVAL (XEXP (base, 1));
2994 		        base = XEXP (base, 0);
2995 		      }
2996 		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
2997 			     && (prev_set = single_set (prev_insn)) != 0
2998 			     && rtx_equal_p (SET_DEST (prev_set), base))
2999 		      {
3000 		        base = SET_SRC (prev_set);
3001 		        base_insn = prev_insn;
3002 		      }
3003 		    else
3004 		      break;
3005 		  }
3006 
3007 		if (base == ep->to_rtx)
3008 		  {
3009 		    rtx src
3010 		      = plus_constant (ep->to_rtx, offset - ep->offset);
3011 
3012 		    new_body = old_body;
3013 		    if (! replace)
3014 		      {
3015 			new_body = copy_insn (old_body);
3016 			if (REG_NOTES (insn))
3017 			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3018 		      }
3019 		    PATTERN (insn) = new_body;
3020 		    old_set = single_set (insn);
3021 
3022 		    /* First see if this insn remains valid when we
3023 		       make the change.  If not, keep the INSN_CODE
3024 		       the same and let reload fix it up.  */
3025 		    validate_change (insn, &SET_SRC (old_set), src, 1);
3026 		    validate_change (insn, &SET_DEST (old_set),
3027 				     ep->to_rtx, 1);
3028 		    if (! apply_change_group ())
3029 		      {
3030 			SET_SRC (old_set) = src;
3031 			SET_DEST (old_set) = ep->to_rtx;
3032 		      }
3033 
3034 		    val = 1;
3035 		    goto done;
3036 		  }
3037 	      }
3038 #endif
3039 
3040 	    /* In this case this insn isn't serving a useful purpose.  We
3041 	       will delete it in reload_as_needed once we know that this
3042 	       elimination is, in fact, being done.
3043 
3044 	       If REPLACE isn't set, we can't delete this insn, but needn't
3045 	       process it since it won't be used unless something changes.  */
3046 	    if (replace)
3047 	      {
3048 		delete_dead_insn (insn);
3049 		return 1;
3050 	      }
3051 	    val = 1;
3052 	    goto done;
3053 	  }
3054     }
3055 
3056   /* We allow one special case which happens to work on all machines we
3057      currently support: a single set with the source being a PLUS of an
3058      eliminable register and a constant.  */
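  /* For example, with an fp-to-sp elimination whose current offset is 16,
     an insn (set (reg R) (plus (reg fp) (const_int -16))) gives a total
     offset of 0 and is rewritten below as the plain copy
     (set (reg R) (reg sp)) and re-recognized; a nonzero total offset keeps
     the PLUS and merely replaces its register and constant operands.  */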
3059   if (old_set
3060       && GET_CODE (SET_DEST (old_set)) == REG
3061       && GET_CODE (SET_SRC (old_set)) == PLUS
3062       && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3063       && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT
3064       && REGNO (XEXP (SET_SRC (old_set), 0)) < FIRST_PSEUDO_REGISTER)
3065     {
3066       rtx reg = XEXP (SET_SRC (old_set), 0);
3067       int offset = INTVAL (XEXP (SET_SRC (old_set), 1));
3068 
3069       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3070 	if (ep->from_rtx == reg && ep->can_eliminate)
3071 	  {
3072 	    offset += ep->offset;
3073 
3074 	    if (offset == 0)
3075 	      {
3076 		int num_clobbers;
3077 		/* We assume here that if we need a PARALLEL with
3078 		   CLOBBERs for this assignment, we can do with the
3079 		   MATCH_SCRATCHes that add_clobbers allocates.
3080 		   There's not much we can do if that doesn't work.  */
3081 		PATTERN (insn) = gen_rtx_SET (VOIDmode,
3082 					      SET_DEST (old_set),
3083 					      ep->to_rtx);
3084 		num_clobbers = 0;
3085 		INSN_CODE (insn) = recog (PATTERN (insn), insn, &num_clobbers);
3086 		if (num_clobbers)
3087 		  {
3088 		    rtvec vec = rtvec_alloc (num_clobbers + 1);
3089 
3090 		    vec->elem[0] = PATTERN (insn);
3091 		    PATTERN (insn) = gen_rtx_PARALLEL (VOIDmode, vec);
3092 		    add_clobbers (PATTERN (insn), INSN_CODE (insn));
3093 		  }
3094 		if (INSN_CODE (insn) < 0)
3095 		  abort ();
3096 	      }
3097 	    else
3098 	      {
3099 		new_body = old_body;
3100 		if (! replace)
3101 		  {
3102 		    new_body = copy_insn (old_body);
3103 		    if (REG_NOTES (insn))
3104 		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3105 		  }
3106 		PATTERN (insn) = new_body;
3107 		old_set = single_set (insn);
3108 
3109 		XEXP (SET_SRC (old_set), 0) = ep->to_rtx;
3110 		XEXP (SET_SRC (old_set), 1) = GEN_INT (offset);
3111 	      }
3112 	    val = 1;
3113 	    /* This can't have an effect on elimination offsets, so skip right
3114 	       to the end.  */
3115 	    goto done;
3116 	  }
3117     }
3118 
3119   /* Determine the effects of this insn on elimination offsets.  */
3120   elimination_effects (old_body, 0);
3121 
3122   /* Eliminate all eliminable registers occurring in operands that
3123      can be handled by reload.  */
3124   extract_insn (insn);
3125   any_changes = 0;
3126   for (i = 0; i < recog_data.n_operands; i++)
3127     {
3128       orig_operand[i] = recog_data.operand[i];
3129       substed_operand[i] = recog_data.operand[i];
3130 
3131       /* For an asm statement, every operand is eliminable.  */
3132       if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3133 	{
3134 	  /* Check for setting a register that we know about.  */
3135 	  if (recog_data.operand_type[i] != OP_IN
3136 	      && GET_CODE (orig_operand[i]) == REG)
3137 	    {
3138 	      /* If we are assigning to a register that can be eliminated, it
3139 		 must be as part of a PARALLEL, since the code above handles
3140 		 single SETs.  We must indicate that we can no longer
3141 		 eliminate this reg.  */
3142 	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3143 		   ep++)
3144 		if (ep->from_rtx == orig_operand[i] && ep->can_eliminate)
3145 		  ep->can_eliminate = 0;
3146 	    }
3147 
3148 	  substed_operand[i] = eliminate_regs (recog_data.operand[i], 0,
3149 					       replace ? insn : NULL_RTX);
3150 	  if (substed_operand[i] != orig_operand[i])
3151 	    val = any_changes = 1;
3152 	  /* Terminate the search in check_eliminable_occurrences at
3153 	     this point.  */
3154 	  *recog_data.operand_loc[i] = 0;
3155 
3156 	/* If an output operand changed from a REG to a MEM and INSN is an
3157 	   insn, write a CLOBBER insn.  */
3158 	  if (recog_data.operand_type[i] != OP_IN
3159 	      && GET_CODE (orig_operand[i]) == REG
3160 	      && GET_CODE (substed_operand[i]) == MEM
3161 	      && replace)
3162 	    emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3163 			     insn);
3164 	}
3165     }
3166 
3167   for (i = 0; i < recog_data.n_dups; i++)
3168     *recog_data.dup_loc[i]
3169       = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3170 
3171   /* If any eliminable registers remain, they aren't eliminable anymore.  */
3172   check_eliminable_occurrences (old_body);
3173 
3174   /* Substitute the operands; the new values are in the substed_operand
3175      array.  */
3176   for (i = 0; i < recog_data.n_operands; i++)
3177     *recog_data.operand_loc[i] = substed_operand[i];
3178   for (i = 0; i < recog_data.n_dups; i++)
3179     *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3180 
3181   /* If we are replacing a body that was a (set X (plus Y Z)), try to
3182      re-recognize the insn.  We do this in case we had a simple addition
3183      but now can do this as a load-address.  This saves an insn in this
3184      common case.
3185      If re-recognition fails, the old insn code number will still be used,
3186      and some register operands may have changed into PLUS expressions.
3187      These will be handled by find_reloads by loading them into a register
3188      again.  */
3189 
3190   if (val)
3191     {
3192       /* If we aren't replacing things permanently and we changed something,
3193 	 make another copy to ensure that all the RTL is new.  Otherwise
3194 	 things can go wrong if find_reloads swaps commutative operands
3195 	 and one is inside RTL that has been copied while the other is not.  */
3196       new_body = old_body;
3197       if (! replace)
3198 	{
3199 	  new_body = copy_insn (old_body);
3200 	  if (REG_NOTES (insn))
3201 	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3202 	}
3203       PATTERN (insn) = new_body;
3204 
3205       /* If we had a move insn but now we don't, rerecognize it.  This will
3206 	 cause spurious re-recognition if the old move had a PARALLEL since
3207 	 the new one still will, but we can't call single_set without
3208 	 having put NEW_BODY into the insn and the re-recognition won't
3209 	 hurt in this rare case.  */
3210       /* ??? Why this huge if statement - why don't we just rerecognize the
3211 	 thing always?  */
3212       if (! insn_is_asm
3213 	  && old_set != 0
3214 	  && ((GET_CODE (SET_SRC (old_set)) == REG
3215 	       && (GET_CODE (new_body) != SET
3216 		   || GET_CODE (SET_SRC (new_body)) != REG))
3217 	      /* If this was a load from or store to memory, compare
3218 		 the MEM in recog_data.operand to the one in the insn.
3219 		 If they are not equal, then rerecognize the insn.  */
3220 	      || (old_set != 0
3221 		  && ((GET_CODE (SET_SRC (old_set)) == MEM
3222 		       && SET_SRC (old_set) != recog_data.operand[1])
3223 		      || (GET_CODE (SET_DEST (old_set)) == MEM
3224 			  && SET_DEST (old_set) != recog_data.operand[0])))
3225 	      /* If this was an add insn before, rerecognize.  */
3226 	      || GET_CODE (SET_SRC (old_set)) == PLUS))
3227 	{
3228 	  int new_icode = recog (PATTERN (insn), insn, 0);
3229 	  if (new_icode < 0)
3230 	    INSN_CODE (insn) = icode;
3231 	}
3232     }
3233 
3234   /* Restore the old body.  If there were any changes to it, we made a copy
3235      of it while the changes were still in place, so we'll correctly return
3236      a modified insn below.  */
3237   if (! replace)
3238     {
3239       /* Restore the old body.  */
3240       for (i = 0; i < recog_data.n_operands; i++)
3241 	*recog_data.operand_loc[i] = orig_operand[i];
3242       for (i = 0; i < recog_data.n_dups; i++)
3243 	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3244     }
3245 
3246   /* Update all elimination pairs to reflect the status after the current
3247      insn.  The changes we make were determined by the earlier call to
3248      elimination_effects.
3249 
3250      We also detect cases where register elimination cannot be done,
3251      namely, if a register would be both changed and referenced outside a MEM
3252      in the resulting insn since such an insn is often undefined and, even if
3253      not, we cannot know what meaning will be given to it.  Note that it is
3254      valid to have a register used in an address in an insn that changes it
3255      (presumably with a pre- or post-increment or decrement).
3256 
3257      If anything changes, return nonzero.  */
3258 
3259   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3260     {
3261       if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3262 	ep->can_eliminate = 0;
3263 
3264       ep->ref_outside_mem = 0;
3265 
3266       if (ep->previous_offset != ep->offset)
3267 	val = 1;
3268     }
3269 
3270  done:
3271   /* If we changed something, perform elimination in REG_NOTES.  This is
3272      needed even when REPLACE is zero because a REG_DEAD note might refer
3273      to a register that we eliminate and could cause a different number
3274      of spill registers to be needed in the final reload pass than in
3275      the pre-passes.  */
3276   if (val && REG_NOTES (insn) != 0)
3277     REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3278 
3279   return val;
3280 }
3281 
3282 /* Loop through all elimination pairs.
3283    Recalculate the number not at initial offset.
3284 
3285    Record each pair's current offset as its previous offset, for use
3286    when processing subsequent insns.  */
3287 
3288 static void
3289 update_eliminable_offsets ()
3290 {
3291   struct elim_table *ep;
3292 
3293   num_not_at_initial_offset = 0;
3294   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3295     {
3296       ep->previous_offset = ep->offset;
3297       if (ep->can_eliminate && ep->offset != ep->initial_offset)
3298 	num_not_at_initial_offset++;
3299     }
3300 }
3301 
3302 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3303    replacement we currently believe is valid, mark it as not eliminable if X
3304    modifies DEST in any way other than by adding a constant integer to it.
3305 
3306    If DEST is the hard frame pointer, we do nothing because we assume that
3307    all assignments to the hard frame pointer are nonlocal gotos and are being
3308    done at a time when they are valid and do not disturb anything else.
3309    Some machines want to eliminate a fake argument pointer with either the
3310    frame or stack pointer.  Assignments to the hard frame pointer must not
3311    prevent this elimination.
3312 
3313    Called via note_stores from reload before starting its passes to scan
3314    the insns of the function.  */
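/* For example, (set (reg sp) (plus (reg sp) (const_int -32))) leaves
   eliminations whose to-register is the stack pointer usable, whereas an
   insn such as (set (reg sp) (reg R)) makes the loop below disable every
   currently-valid elimination targeting the stack pointer.  */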
3315 
3316 static void
3317 mark_not_eliminable (dest, x, data)
3318      rtx dest;
3319      rtx x;
3320      void *data ATTRIBUTE_UNUSED;
3321 {
3322   unsigned int i;
3323 
3324   /* A SUBREG of a hard register here is just changing its mode.  We should
3325      not see a SUBREG of an eliminable hard register, but check just in
3326      case.  */
3327   if (GET_CODE (dest) == SUBREG)
3328     dest = SUBREG_REG (dest);
3329 
3330   if (dest == hard_frame_pointer_rtx)
3331     return;
3332 
3333   for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3334     if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3335 	&& (GET_CODE (x) != SET
3336 	    || GET_CODE (SET_SRC (x)) != PLUS
3337 	    || XEXP (SET_SRC (x), 0) != dest
3338 	    || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3339       {
3340 	reg_eliminate[i].can_eliminate_previous
3341 	  = reg_eliminate[i].can_eliminate = 0;
3342 	num_eliminable--;
3343       }
3344 }
3345 
3346 /* Verify that the initial elimination offsets did not change since the
3347    last call to set_initial_elim_offsets.  This is used to catch cases
3348    where something illegal happened during reload_as_needed that could
3349    cause incorrect code to be generated if we did not check for it.  */
3350 
3351 static void
3352 verify_initial_elim_offsets ()
3353 {
3354   int t;
3355 
3356 #ifdef ELIMINABLE_REGS
3357   struct elim_table *ep;
3358 
3359   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3360     {
3361       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3362       if (t != ep->initial_offset)
3363 	abort ();
3364     }
3365 #else
3366   INITIAL_FRAME_POINTER_OFFSET (t);
3367   if (t != reg_eliminate[0].initial_offset)
3368     abort ();
3369 #endif
3370 }
3371 
3372 /* Reset all offsets on eliminable registers to their initial values.  */
3373 
3374 static void
3375 set_initial_elim_offsets ()
3376 {
3377   struct elim_table *ep = reg_eliminate;
3378 
3379 #ifdef ELIMINABLE_REGS
3380   for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3381     {
3382       INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3383       ep->previous_offset = ep->offset = ep->initial_offset;
3384     }
3385 #else
3386   INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3387   ep->previous_offset = ep->offset = ep->initial_offset;
3388 #endif
3389 
3390   num_not_at_initial_offset = 0;
3391 }
3392 
3393 /* Subroutine of set_initial_label_offsets called via for_each_eh_label.  */
3394 
3395 static void set_initial_eh_label_offset PARAMS ((rtx));
3396 static void
3397 set_initial_eh_label_offset (label)
3398      rtx label;
3399 {
3400   set_label_offsets (label, NULL_RTX, 1);
3401 }
3402 
3403 /* Initialize the known label offsets.
3404    Set a known offset for each forced label to be at the initial offset
3405    of each elimination.  We do this because we assume that all
3406    computed jumps occur from a location where each elimination is
3407    at its initial offset.
3408    For all other labels, show that we don't know the offsets.  */
3409 
3410 static void
3411 set_initial_label_offsets ()
3412 {
3413   rtx x;
3414   memset (offsets_known_at, 0, num_labels);
3415 
3416   for (x = forced_labels; x; x = XEXP (x, 1))
3417     if (XEXP (x, 0))
3418       set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3419 
3420   for_each_eh_label (set_initial_eh_label_offset);
3421 }
3422 
3423 /* Set all elimination offsets to the known values for the code label given
3424    by INSN.  */
3425 
3426 static void
3427 set_offsets_for_label (insn)
3428      rtx insn;
3429 {
3430   unsigned int i;
3431   int label_nr = CODE_LABEL_NUMBER (insn);
3432   struct elim_table *ep;
3433 
3434   num_not_at_initial_offset = 0;
3435   for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3436     {
3437       ep->offset = ep->previous_offset
3438 		 = offsets_at[label_nr - first_label_num][i];
3439       if (ep->can_eliminate && ep->offset != ep->initial_offset)
3440 	num_not_at_initial_offset++;
3441     }
3442 }
3443 
3444 /* See if anything that happened changes which eliminations are valid.
3445    For example, on the SPARC, whether or not the frame pointer can
3446    be eliminated can depend on what registers have been used.  We need
3447    not check some conditions again (such as flag_omit_frame_pointer)
3448    since they can't have changed.  */
3449 
3450 static void
3451 update_eliminables (pset)
3452      HARD_REG_SET *pset;
3453 {
3454   int previous_frame_pointer_needed = frame_pointer_needed;
3455   struct elim_table *ep;
3456 
3457   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3458     if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3459 #ifdef ELIMINABLE_REGS
3460 	|| ! CAN_ELIMINATE (ep->from, ep->to)
3461 #endif
3462 	)
3463       ep->can_eliminate = 0;
3464 
3465   /* Look for the case where we have discovered that we can't replace
3466      register A with register B and that means that we will now be
3467      trying to replace register A with register C.  This means we can
3468      no longer replace register C with register B and we need to disable
3469      such an elimination, if it exists.  This occurs often with A == ap,
3470      B == sp, and C == fp.  */
3471 
3472   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3473     {
3474       struct elim_table *op;
3475       int new_to = -1;
3476 
3477       if (! ep->can_eliminate && ep->can_eliminate_previous)
3478 	{
3479 	  /* Find the current elimination for ep->from, if there is a
3480 	     new one.  */
3481 	  for (op = reg_eliminate;
3482 	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3483 	    if (op->from == ep->from && op->can_eliminate)
3484 	      {
3485 		new_to = op->to;
3486 		break;
3487 	      }
3488 
3489 	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
3490 	     disable it.  */
3491 	  for (op = reg_eliminate;
3492 	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3493 	    if (op->from == new_to && op->to == ep->to)
3494 	      op->can_eliminate = 0;
3495 	}
3496     }
3497 
3498   /* See if any registers that we thought we could eliminate the previous
3499      time are no longer eliminable.  If so, something has changed and we
3500      must spill the register.  Also, recompute the number of eliminable
3501      registers and see if the frame pointer is needed; it is if there is
3502      no elimination of the frame pointer that we can perform.  */
3503 
3504   frame_pointer_needed = 1;
3505   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3506     {
3507       if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3508 	  && ep->to != HARD_FRAME_POINTER_REGNUM)
3509 	frame_pointer_needed = 0;
3510 
3511       if (! ep->can_eliminate && ep->can_eliminate_previous)
3512 	{
3513 	  ep->can_eliminate_previous = 0;
3514 	  SET_HARD_REG_BIT (*pset, ep->from);
3515 	  num_eliminable--;
3516 	}
3517     }
3518 
3519   /* If we didn't need a frame pointer last time, but we do now, spill
3520      the hard frame pointer.  */
3521   if (frame_pointer_needed && ! previous_frame_pointer_needed)
3522     SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3523 }
3524 
3525 /* Initialize the table of registers to eliminate.  */
3526 
3527 static void
3528 init_elim_table ()
3529 {
3530   struct elim_table *ep;
3531 #ifdef ELIMINABLE_REGS
3532   const struct elim_table_1 *ep1;
3533 #endif
3534 
3535   if (!reg_eliminate)
3536     reg_eliminate = (struct elim_table *)
3537       xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3538 
3539   /* Does this function require a frame pointer?  */
3540 
3541   frame_pointer_needed = (! flag_omit_frame_pointer
3542 #ifdef EXIT_IGNORE_STACK
3543 			  /* ?? If EXIT_IGNORE_STACK is set, we will not save
3544 			     and restore sp for alloca.  So we can't eliminate
3545 			     the frame pointer in that case.  At some point,
3546 			     we should improve this by emitting the
3547 			     sp-adjusting insns for this case.  */
3548 			  || (current_function_calls_alloca
3549 			      && EXIT_IGNORE_STACK)
3550 #endif
3551 			  || FRAME_POINTER_REQUIRED);
3552 
3553   num_eliminable = 0;
3554 
3555 #ifdef ELIMINABLE_REGS
3556   for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3557        ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3558     {
3559       ep->from = ep1->from;
3560       ep->to = ep1->to;
3561       ep->can_eliminate = ep->can_eliminate_previous
3562 	= (CAN_ELIMINATE (ep->from, ep->to)
3563 	   && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3564     }
3565 #else
3566   reg_eliminate[0].from = reg_eliminate_1[0].from;
3567   reg_eliminate[0].to = reg_eliminate_1[0].to;
3568   reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3569     = ! frame_pointer_needed;
3570 #endif
3571 
3572   /* Count the number of eliminable registers and build the FROM and TO
3573      REG rtx's.  Note that code in gen_rtx will cause, e.g.,
3574      gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3575      We depend on this.  */
3576   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3577     {
3578       num_eliminable += ep->can_eliminate;
3579       ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3580       ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3581     }
3582 }
3583 
3584 /* Kick all pseudos out of hard register REGNO.
3585 
3586    If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3587    because we found we can't eliminate some register.  In this case, no pseudos
3588    are allowed to be in the register, even if they are only in a block that
3589    doesn't require spill registers, unlike the case when we are spilling this
3590    hard reg to produce another spill register.
3591 
3592    Any pseudos kicked out are recorded in spilled_pseudos.  */
3593 
3594 static void
3595 spill_hard_reg (regno, cant_eliminate)
3596      unsigned int regno;
3597      int cant_eliminate;
3598 {
3599   int i;
3600 
3601   if (cant_eliminate)
3602     {
3603       SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3604       regs_ever_live[regno] = 1;
3605     }
3606 
3607   /* Spill every pseudo reg that was allocated to this reg
3608      or to something that overlaps this reg.  */
3609 
3610   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3611     if (reg_renumber[i] >= 0
3612 	&& (unsigned int) reg_renumber[i] <= regno
3613 	&& ((unsigned int) reg_renumber[i]
3614 	    + HARD_REGNO_NREGS ((unsigned int) reg_renumber[i],
3615 				PSEUDO_REGNO_MODE (i))
3616 	    > regno))
3617       SET_REGNO_REG_SET (&spilled_pseudos, i);
3618 }
3619 
3620 /* I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET
3621    from within EXECUTE_IF_SET_IN_REG_SET.  Hence this awkwardness.  */
3622 
3623 static void
3624 ior_hard_reg_set (set1, set2)
3625      HARD_REG_SET *set1, *set2;
3626 {
3627   IOR_HARD_REG_SET (*set1, *set2);
3628 }
3629 
3630 /* After find_reload_regs has been run for all insns that need reloads,
3631    and/or spill_hard_reg was called, this function is used to actually
3632    spill pseudo registers and try to reallocate them.  It also sets up the
3633    spill_regs array for use by choose_reload_regs.  */
3634 
3635 static int
3636 finish_spills (global)
3637      int global;
3638 {
3639   struct insn_chain *chain;
3640   int something_changed = 0;
3641   int i;
3642 
3643   /* Build the spill_regs array for the function.  */
3644   /* If there are some registers still to eliminate and one of the spill regs
3645      wasn't ever used before, additional stack space may have to be
3646      allocated to store this register.  Thus, we may have changed the offset
3647      between the stack and frame pointers, so mark that something has changed.
3648 
3649      One might think that we need only mark a change if this is a call-used
3650      register.  However, the set of registers that must be saved by the
3651      prologue is not identical to the call-used set.  For example, the
3652      register used by the call insn for the return PC is a call-used register,
3653      but must be saved by the prologue.  */
3654 
3655   n_spills = 0;
3656   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3657     if (TEST_HARD_REG_BIT (used_spill_regs, i))
3658       {
3659 	spill_reg_order[i] = n_spills;
3660 	spill_regs[n_spills++] = i;
3661 	if (num_eliminable && ! regs_ever_live[i])
3662 	  something_changed = 1;
3663 	regs_ever_live[i] = 1;
3664       }
3665     else
3666       spill_reg_order[i] = -1;
3667 
3668   EXECUTE_IF_SET_IN_REG_SET
3669     (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i,
3670      {
3671        /* Record the current hard register the pseudo is allocated to in
3672 	  pseudo_previous_regs so we avoid reallocating it to the same
3673 	  hard reg in a later pass.  */
3674        if (reg_renumber[i] < 0)
3675 	 abort ();
3676 
3677        SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3678        /* Mark it as no longer having a hard register home.  */
3679        reg_renumber[i] = -1;
3680        /* We will need to scan everything again.  */
3681        something_changed = 1;
3682      });
3683 
3684   /* Retry global register allocation if possible.  */
3685   if (global)
3686     {
3687       memset ((char *) pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3688       /* For every insn that needs reloads, set the registers used as spill
3689 	 regs in pseudo_forbidden_regs for every pseudo live across the
3690 	 insn.  */
3691       for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3692 	{
3693 	  EXECUTE_IF_SET_IN_REG_SET
3694 	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i,
3695 	     {
3696 	       ior_hard_reg_set (pseudo_forbidden_regs + i,
3697 				 &chain->used_spill_regs);
3698 	     });
3699 	  EXECUTE_IF_SET_IN_REG_SET
3700 	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i,
3701 	     {
3702 	       ior_hard_reg_set (pseudo_forbidden_regs + i,
3703 				 &chain->used_spill_regs);
3704 	     });
3705 	}
3706 
3707       /* Retry allocating the spilled pseudos.  For each reg, merge the
3708 	 various reg sets that indicate which hard regs can't be used,
3709 	 and call retry_global_alloc.
3710 	 We change spill_pseudos here to only contain pseudos that did not
3711 	 get a new hard register.  */
3712       for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3713 	if (reg_old_renumber[i] != reg_renumber[i])
3714 	  {
3715 	    HARD_REG_SET forbidden;
3716 	    COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3717 	    IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3718 	    IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3719 	    retry_global_alloc (i, forbidden);
3720 	    if (reg_renumber[i] >= 0)
3721 	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
3722 	  }
3723     }
3724 
3725   /* Fix up the register information in the insn chain.
3726      This involves deleting those of the spilled pseudos which did not get
3727      a new hard register home from the live_throughout and dead_or_set sets.  */
3728   for (chain = reload_insn_chain; chain; chain = chain->next)
3729     {
3730       HARD_REG_SET used_by_pseudos;
3731       HARD_REG_SET used_by_pseudos2;
3732 
3733       AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
3734       AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
3735 
3736       /* Mark any unallocated hard regs as available for spills.  That
3737 	 makes inheritance work somewhat better.  */
3738       if (chain->need_reload)
3739 	{
3740 	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
3741 	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
3742 	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3743 
3744 	  /* Save the old value for the sanity test below.  */
3745 	  COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3746 
3747 	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
3748 	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
3749 	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3750 	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3751 
3752 	  /* Make sure we only enlarge the set.  */
3753 	  GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3754 	  abort ();
3755 	ok:;
3756 	}
3757     }
3758 
3759   /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
3760   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3761     {
3762       int regno = reg_renumber[i];
3763       if (reg_old_renumber[i] == regno)
3764 	continue;
3765 
3766       alter_reg (i, reg_old_renumber[i]);
3767       reg_old_renumber[i] = regno;
3768       if (rtl_dump_file)
3769 	{
3770 	  if (regno == -1)
3771 	    fprintf (rtl_dump_file, " Register %d now on stack.\n\n", i);
3772 	  else
3773 	    fprintf (rtl_dump_file, " Register %d now in %d.\n\n",
3774 		     i, reg_renumber[i]);
3775 	}
3776     }
3777 
3778   return something_changed;
3779 }
3780 
3781 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3782    Also mark any hard registers used to store user variables as
3783    forbidden from being used for spill registers.  */
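/* A paradoxical subreg is one whose outer mode is wider than the mode of
   the register it contains, e.g. (subreg:DI (reg:SI N) 0); recording the
   wider width in reg_max_ref_width lets the stack slot eventually chosen
   for that pseudo be made large enough for the widest reference.  */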
3784 
3785 static void
3786 scan_paradoxical_subregs (x)
3787      rtx x;
3788 {
3789   int i;
3790   const char *fmt;
3791   enum rtx_code code = GET_CODE (x);
3792 
3793   switch (code)
3794     {
3795     case REG:
3796 #if 0
3797       if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
3798 	  && REG_USERVAR_P (x))
3799 	SET_HARD_REG_BIT (bad_spill_regs_global, REGNO (x));
3800 #endif
3801       return;
3802 
3803     case CONST_INT:
3804     case CONST:
3805     case SYMBOL_REF:
3806     case LABEL_REF:
3807     case CONST_DOUBLE:
3808     case CONST_VECTOR: /* shouldn't happen, but just in case.  */
3809     case CC0:
3810     case PC:
3811     case USE:
3812     case CLOBBER:
3813       return;
3814 
3815     case SUBREG:
3816       if (GET_CODE (SUBREG_REG (x)) == REG
3817 	  && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3818 	reg_max_ref_width[REGNO (SUBREG_REG (x))]
3819 	  = GET_MODE_SIZE (GET_MODE (x));
3820       return;
3821 
3822     default:
3823       break;
3824     }
3825 
3826   fmt = GET_RTX_FORMAT (code);
3827   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3828     {
3829       if (fmt[i] == 'e')
3830 	scan_paradoxical_subregs (XEXP (x, i));
3831       else if (fmt[i] == 'E')
3832 	{
3833 	  int j;
3834 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3835 	    scan_paradoxical_subregs (XVECEXP (x, i, j));
3836 	}
3837     }
3838 }
3839 
3840 /* Reload pseudo-registers into hard regs around each insn as needed.
3841    Additional register load insns are output before the insn that needs it
3842    and perhaps store insns after insns that modify the reloaded pseudo reg.
3843 
3844    reg_last_reload_reg and reg_reloaded_contents keep track of
3845    which registers are already available in reload registers.
3846    We update these for the reloads that we perform,
3847    as the insns are scanned.  */
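/* Roughly, for each entry of reload_insn_chain the loop below re-applies
   register elimination (eliminate_regs_in_insn), re-runs find_reloads when
   necessary, then picks reload registers (choose_reload_regs), emits the
   reload insns (emit_reload_insns) and substitutes the chosen registers
   into the insn body (subst_reloads).  */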
3848 
3849 static void
3850 reload_as_needed (live_known)
3851      int live_known;
3852 {
3853   struct insn_chain *chain;
3854 #if defined (AUTO_INC_DEC)
3855   int i;
3856 #endif
3857   rtx x;
3858 
3859   memset ((char *) spill_reg_rtx, 0, sizeof spill_reg_rtx);
3860   memset ((char *) spill_reg_store, 0, sizeof spill_reg_store);
3861   reg_last_reload_reg = (rtx *) xcalloc (max_regno, sizeof (rtx));
3862   reg_has_output_reload = (char *) xmalloc (max_regno);
3863   CLEAR_HARD_REG_SET (reg_reloaded_valid);
3864 
3865   set_initial_elim_offsets ();
3866 
3867   for (chain = reload_insn_chain; chain; chain = chain->next)
3868     {
3869       rtx prev;
3870       rtx insn = chain->insn;
3871       rtx old_next = NEXT_INSN (insn);
3872 
3873       /* If we pass a label, copy the offsets from the label information
3874 	 into the current offsets of each elimination.  */
3875       if (GET_CODE (insn) == CODE_LABEL)
3876 	set_offsets_for_label (insn);
3877 
3878       else if (INSN_P (insn))
3879 	{
3880 	  rtx oldpat = copy_rtx (PATTERN (insn));
3881 
3882 	  /* If this is a USE or CLOBBER of a MEM, ensure that any
3883 	     references to eliminable registers have been removed.  */
3884 
3885 	  if ((GET_CODE (PATTERN (insn)) == USE
3886 	       || GET_CODE (PATTERN (insn)) == CLOBBER)
3887 	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3888 	    XEXP (XEXP (PATTERN (insn), 0), 0)
3889 	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3890 				GET_MODE (XEXP (PATTERN (insn), 0)),
3891 				NULL_RTX);
3892 
3893 	  /* If we need to do register elimination processing, do so.
3894 	     This might delete the insn, in which case we are done.  */
3895 	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
3896 	    {
3897 	      eliminate_regs_in_insn (insn, 1);
3898 	      if (GET_CODE (insn) == NOTE)
3899 		{
3900 		  update_eliminable_offsets ();
3901 		  continue;
3902 		}
3903 	    }
3904 
3905 	  /* If need_elim is nonzero but need_reload is zero, one might think
3906 	     that we could simply set n_reloads to 0.  However, find_reloads
3907 	     could have done some manipulation of the insn (such as swapping
3908 	     commutative operands), and these manipulations are lost during
3909 	     the first pass for every insn that needs register elimination.
3910 	     So the actions of find_reloads must be redone here.  */
3911 
3912 	  if (! chain->need_elim && ! chain->need_reload
3913 	      && ! chain->need_operand_change)
3914 	    n_reloads = 0;
3915 	  /* First find the pseudo regs that must be reloaded for this insn.
3916 	     This info is returned in the tables reload_... (see reload.h).
3917 	     Also modify the body of INSN by substituting RELOAD
3918 	     rtx's for those pseudo regs.  */
3919 	  else
3920 	    {
3921 	      memset (reg_has_output_reload, 0, max_regno);
3922 	      CLEAR_HARD_REG_SET (reg_is_output_reload);
3923 
3924 	      find_reloads (insn, 1, spill_indirect_levels, live_known,
3925 			    spill_reg_order);
3926 	    }
3927 
3928 	  if (n_reloads > 0)
3929 	    {
3930 	      rtx next = NEXT_INSN (insn);
3931 	      rtx p;
3932 
3933 	      prev = PREV_INSN (insn);
3934 
3935 	      /* Now compute which reload regs to reload them into.  Perhaps
3936 		 reusing reload regs from previous insns, or else output
3937 		 load insns to reload them.  Maybe output store insns too.
3938 		 Record the choices of reload reg in reload_reg_rtx.  */
3939 	      choose_reload_regs (chain);
3940 
3941 	      /* Merge any reloads that we didn't combine for fear of
3942 		 increasing the number of spill registers needed but now
3943 		 discover can be safely merged.  */
3944 	      if (SMALL_REGISTER_CLASSES)
3945 		merge_assigned_reloads (insn);
3946 
3947 	      /* Generate the insns to reload operands into or out of
3948 		 their reload regs.  */
3949 	      emit_reload_insns (chain);
3950 
3951 	      /* Substitute the chosen reload regs from reload_reg_rtx
3952 		 into the insn's body (or perhaps into the bodies of other
3953 		 load and store insns that we just made for reloading
3954 		 and that we moved the structure into).  */
3955 	      subst_reloads (insn);
3956 
3957 	      /* If this was an ASM, make sure that all the reload insns
3958 		 we have generated are valid.  If not, give an error
3959 		 and delete them.  */
3960 
3961 	      if (asm_noperands (PATTERN (insn)) >= 0)
3962 		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3963 		  if (p != insn && INSN_P (p)
3964 		      && GET_CODE (PATTERN (p)) != USE
3965 		      && (recog_memoized (p) < 0
3966 			  || (extract_insn (p), ! constrain_operands (1))))
3967 		    {
3968 		      error_for_asm (insn,
3969 				     "`asm' operand requires impossible reload");
3970 		      delete_insn (p);
3971 		    }
3972 	    }
3973 
3974 	  if (num_eliminable && chain->need_elim)
3975 	    update_eliminable_offsets ();
3976 
3977 	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
3978 	     is no longer validly lying around to save a future reload.
3979 	     Note that this does not detect pseudos that were reloaded
3980 	     for this insn in order to be stored in
3981 	     (obeying register constraints).  That is correct; such reload
3982 	     registers ARE still valid.  */
3983 	  note_stores (oldpat, forget_old_reloads_1, NULL);
3984 
3985 	  /* There may have been CLOBBER insns placed after INSN.  So scan
3986 	     between INSN and NEXT and use them to forget old reloads.  */
3987 	  for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
3988 	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3989 	      note_stores (PATTERN (x), forget_old_reloads_1, NULL);
3990 
3991 #ifdef AUTO_INC_DEC
3992 	  /* Likewise for regs altered by auto-increment in this insn.
3993 	     REG_INC notes have been changed by reloading:
3994 	     find_reloads_address_1 records substitutions for them,
3995 	     which have been performed by subst_reloads above.  */
3996 	  for (i = n_reloads - 1; i >= 0; i--)
3997 	    {
3998 	      rtx in_reg = rld[i].in_reg;
3999 	      if (in_reg)
4000 		{
4001 		  enum rtx_code code = GET_CODE (in_reg);
4002 		  /* PRE_INC / PRE_DEC will have the reload register ending up
4003 		     with the same value as the stack slot, but that doesn't
4004 		     hold true for POST_INC / POST_DEC.  Either we have to
4005 		     convert the memory access to a true POST_INC / POST_DEC,
4006 		     or we can't use the reload register for inheritance.  */
4007 		  if ((code == POST_INC || code == POST_DEC)
4008 		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
4009 					    REGNO (rld[i].reg_rtx))
4010 		      /* Make sure it is the inc/dec pseudo, and not
4011 			 some other (e.g. output operand) pseudo.  */
4012 		      && (reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4013 			  == REGNO (XEXP (in_reg, 0))))
4014 
4015 		    {
4016 		      rtx reload_reg = rld[i].reg_rtx;
4017 		      enum machine_mode mode = GET_MODE (reload_reg);
4018 		      int n = 0;
4019 		      rtx p;
4020 
4021 		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4022 			{
4023 			  /* We really want to ignore REG_INC notes here, so
4024 			     use PATTERN (p) as argument to reg_set_p.  */
4025 			  if (reg_set_p (reload_reg, PATTERN (p)))
4026 			    break;
4027 			  n = count_occurrences (PATTERN (p), reload_reg, 0);
4028 			  if (! n)
4029 			    continue;
4030 			  if (n == 1)
4031 			    {
4032 			      n = validate_replace_rtx (reload_reg,
4033 							gen_rtx (code, mode,
4034 								 reload_reg),
4035 							p);
4036 
4037 			      /* We must also verify that the constraints
4038 				 are met after the replacement.  */
4039 			      extract_insn (p);
4040 			      if (n)
4041 				n = constrain_operands (1);
4042 			      else
4043 				break;
4044 
4045 			      /* If the constraints were not met, then
4046 				 undo the replacement.  */
4047 			      if (!n)
4048 				{
4049 				  validate_replace_rtx (gen_rtx (code, mode,
4050 								 reload_reg),
4051 							reload_reg, p);
4052 				  break;
4053 				}
4054 
4055 			    }
4056 			  break;
4057 			}
4058 		      if (n == 1)
4059 			{
4060 			  REG_NOTES (p)
4061 			    = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4062 						 REG_NOTES (p));
4063 			  /* Mark this as having an output reload so that the
4064 			     REG_INC processing code below won't invalidate
4065 			     the reload for inheritance.  */
4066 			  SET_HARD_REG_BIT (reg_is_output_reload,
4067 					    REGNO (reload_reg));
4068 			  reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4069 			}
4070 		      else
4071 			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4072 					      NULL);
4073 		    }
4074 		  else if ((code == PRE_INC || code == PRE_DEC)
4075 			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
4076 						 REGNO (rld[i].reg_rtx))
4077 			   /* Make sure it is the inc/dec pseudo, and not
4078 			      some other (e.g. output operand) pseudo.  */
4079 			   && (reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4080 			       == REGNO (XEXP (in_reg, 0))))
4081 		    {
4082 		      SET_HARD_REG_BIT (reg_is_output_reload,
4083 					REGNO (rld[i].reg_rtx));
4084 		      reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4085 		    }
4086 		}
4087 	    }
4088 	  /* If a pseudo that got a hard register is auto-incremented,
4089 	     we must purge records of copying it into pseudos without
4090 	     hard registers.  */
4091 	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4092 	    if (REG_NOTE_KIND (x) == REG_INC)
4093 	      {
4094 		/* See if this pseudo reg was reloaded in this insn.
4095 		   If so, its last-reload info is still valid
4096 		   because it is based on this insn's reload.  */
4097 		for (i = 0; i < n_reloads; i++)
4098 		  if (rld[i].out == XEXP (x, 0))
4099 		    break;
4100 
4101 		if (i == n_reloads)
4102 		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4103 	      }
4104 #endif
4105 	}
4106       /* A reload reg's contents are unknown after a label.  */
4107       if (GET_CODE (insn) == CODE_LABEL)
4108 	CLEAR_HARD_REG_SET (reg_reloaded_valid);
4109 
4110       /* Don't assume a reload reg is still good after a call insn
4111 	 if it is a call-used reg.  */
4112       else if (GET_CODE (insn) == CALL_INSN)
4113 	AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4114     }
4115 
4116   /* Clean up.  */
4117   free (reg_last_reload_reg);
4118   free (reg_has_output_reload);
4119 }
4120 
4121 /* Discard all record of any value reloaded from X,
4122    or reloaded in X from someplace else;
4123    unless X is an output reload reg of the current insn.
4124 
4125    X may be a hard reg (the reload reg)
4126    or it may be a pseudo reg that was reloaded from.  */
4127 
4128 static void
4129 forget_old_reloads_1 (x, ignored, data)
4130      rtx x;
4131      rtx ignored ATTRIBUTE_UNUSED;
4132      void *data ATTRIBUTE_UNUSED;
4133 {
4134   unsigned int regno;
4135   unsigned int nr;
4136 
4137   /* note_stores does give us subregs of hard regs;
4138      subreg_regno_offset will abort if it is not a hard reg.  */
4139   while (GET_CODE (x) == SUBREG)
4140     {
4141       /* We ignore the subreg offset when calculating the regno,
4142 	 because we are using the entire underlying hard register
4143 	 below.  */
4144       x = SUBREG_REG (x);
4145     }
4146 
4147   if (GET_CODE (x) != REG)
4148     return;
4149 
4150   regno = REGNO (x);
4151 
4152   if (regno >= FIRST_PSEUDO_REGISTER)
4153     nr = 1;
4154   else
4155     {
4156       unsigned int i;
4157 
4158       nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4159       /* Storing into a spilled-reg invalidates its contents.
4160 	 This can happen if a block-local pseudo is allocated to that reg
4161 	 and it wasn't spilled because this block's total need is 0.
4162 	 Then some insn might have an optional reload and use this reg.  */
4163       for (i = 0; i < nr; i++)
4164 	/* But don't do this if the reg actually serves as an output
4165 	   reload reg in the current instruction.  */
4166 	if (n_reloads == 0
4167 	    || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4168 	  {
4169 	    CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4170 	    spill_reg_store[regno + i] = 0;
4171 	  }
4172     }
4173 
4174   /* Since value of X has changed,
4175      forget any value previously copied from it.  */
4176 
4177   while (nr-- > 0)
4178     /* But don't forget a copy if this is the output reload
4179        that establishes the copy's validity.  */
4180     if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4181       reg_last_reload_reg[regno + nr] = 0;
4182 }
4183 
4184 /* The following HARD_REG_SETs indicate when each hard register is
4185    used for a reload of various parts of the current insn.  */
4186 
4187 /* If reg is unavailable for all reloads.  */
4188 static HARD_REG_SET reload_reg_unavailable;
4189 /* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
4190 static HARD_REG_SET reload_reg_used;
4191 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
4192 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4193 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
4194 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4195 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
4196 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4197 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
4198 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4199 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
4200 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4201 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
4202 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4203 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
4204 static HARD_REG_SET reload_reg_used_in_op_addr;
4205 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
4206 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4207 /* If reg is in use for a RELOAD_FOR_INSN reload.  */
4208 static HARD_REG_SET reload_reg_used_in_insn;
4209 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
4210 static HARD_REG_SET reload_reg_used_in_other_addr;
4211 
4212 /* If reg is in use as a reload reg for any sort of reload.  */
4213 static HARD_REG_SET reload_reg_used_at_all;
4214 
4215 /* If reg is in use as an inherited reload.  We just mark the first register
4216    in the group.  */
4217 static HARD_REG_SET reload_reg_used_for_inherit;
4218 
4219 /* Records which hard regs are used in any way, either as explicit use or
4220    by being allocated to a pseudo during any point of the current insn.  */
4221 static HARD_REG_SET reg_used_in_insn;
4222 
4223 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4224    TYPE. MODE is used to indicate how many consecutive regs are
4225    actually used.  */
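/* For example, on a typical 32-bit target a DImode value reloaded into
   hard reg 3 spans HARD_REGNO_NREGS == 2 registers, so both regs 3 and 4
   get recorded in the appropriate set below.  */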
4226 
4227 static void
4228 mark_reload_reg_in_use (regno, opnum, type, mode)
4229      unsigned int regno;
4230      int opnum;
4231      enum reload_type type;
4232      enum machine_mode mode;
4233 {
4234   unsigned int nregs = HARD_REGNO_NREGS (regno, mode);
4235   unsigned int i;
4236 
4237   for (i = regno; i < nregs + regno; i++)
4238     {
4239       switch (type)
4240 	{
4241 	case RELOAD_OTHER:
4242 	  SET_HARD_REG_BIT (reload_reg_used, i);
4243 	  break;
4244 
4245 	case RELOAD_FOR_INPUT_ADDRESS:
4246 	  SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4247 	  break;
4248 
4249 	case RELOAD_FOR_INPADDR_ADDRESS:
4250 	  SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4251 	  break;
4252 
4253 	case RELOAD_FOR_OUTPUT_ADDRESS:
4254 	  SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4255 	  break;
4256 
4257 	case RELOAD_FOR_OUTADDR_ADDRESS:
4258 	  SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4259 	  break;
4260 
4261 	case RELOAD_FOR_OPERAND_ADDRESS:
4262 	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4263 	  break;
4264 
4265 	case RELOAD_FOR_OPADDR_ADDR:
4266 	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4267 	  break;
4268 
4269 	case RELOAD_FOR_OTHER_ADDRESS:
4270 	  SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4271 	  break;
4272 
4273 	case RELOAD_FOR_INPUT:
4274 	  SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4275 	  break;
4276 
4277 	case RELOAD_FOR_OUTPUT:
4278 	  SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4279 	  break;
4280 
4281 	case RELOAD_FOR_INSN:
4282 	  SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4283 	  break;
4284 	}
4285 
4286       SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4287     }
4288 }
4289 
4290 /* Similarly, but show REGNO is no longer in use for a reload.  */
4291 
4292 static void
4293 clear_reload_reg_in_use (regno, opnum, type, mode)
4294      unsigned int regno;
4295      int opnum;
4296      enum reload_type type;
4297      enum machine_mode mode;
4298 {
4299   unsigned int nregs = HARD_REGNO_NREGS (regno, mode);
4300   unsigned int start_regno, end_regno, r;
4301   int i;
4302   /* A complication is that for some reload types, inheritance might
4303      allow multiple reloads of the same types to share a reload register.
4304      We set check_opnum if we have to check only reloads with the same
4305      operand number, and check_any if we have to check all reloads.  */
4306   int check_opnum = 0;
4307   int check_any = 0;
4308   HARD_REG_SET *used_in_set;
4309 
4310   switch (type)
4311     {
4312     case RELOAD_OTHER:
4313       used_in_set = &reload_reg_used;
4314       break;
4315 
4316     case RELOAD_FOR_INPUT_ADDRESS:
4317       used_in_set = &reload_reg_used_in_input_addr[opnum];
4318       break;
4319 
4320     case RELOAD_FOR_INPADDR_ADDRESS:
4321       check_opnum = 1;
4322       used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4323       break;
4324 
4325     case RELOAD_FOR_OUTPUT_ADDRESS:
4326       used_in_set = &reload_reg_used_in_output_addr[opnum];
4327       break;
4328 
4329     case RELOAD_FOR_OUTADDR_ADDRESS:
4330       check_opnum = 1;
4331       used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4332       break;
4333 
4334     case RELOAD_FOR_OPERAND_ADDRESS:
4335       used_in_set = &reload_reg_used_in_op_addr;
4336       break;
4337 
4338     case RELOAD_FOR_OPADDR_ADDR:
4339       check_any = 1;
4340       used_in_set = &reload_reg_used_in_op_addr_reload;
4341       break;
4342 
4343     case RELOAD_FOR_OTHER_ADDRESS:
4344       used_in_set = &reload_reg_used_in_other_addr;
4345       check_any = 1;
4346       break;
4347 
4348     case RELOAD_FOR_INPUT:
4349       used_in_set = &reload_reg_used_in_input[opnum];
4350       break;
4351 
4352     case RELOAD_FOR_OUTPUT:
4353       used_in_set = &reload_reg_used_in_output[opnum];
4354       break;
4355 
4356     case RELOAD_FOR_INSN:
4357       used_in_set = &reload_reg_used_in_insn;
4358       break;
4359     default:
4360       abort ();
4361     }
4362   /* We resolve conflicts with remaining reloads of the same type by
4363      excluding the intervals of reload registers used by them from the
4364      interval of freed reload registers.  Since we only keep track of
4365      one set of interval bounds, we might have to exclude somewhat
4366      more than what would be necessary if we used a HARD_REG_SET here.
4367      But this should only happen very infrequently, so there should
4368      be no reason to worry about it.  */
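  /* For instance, if regs 4..7 are being freed while another reload of the
     same type still occupies regs 5..6, the interval below shrinks to just
     reg 4; regs 5..7 stay marked in use even though reg 7 could in
     principle have been freed.  */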
4369 
4370   start_regno = regno;
4371   end_regno = regno + nregs;
4372   if (check_opnum || check_any)
4373     {
4374       for (i = n_reloads - 1; i >= 0; i--)
4375 	{
4376 	  if (rld[i].when_needed == type
4377 	      && (check_any || rld[i].opnum == opnum)
4378 	      && rld[i].reg_rtx)
4379 	    {
4380 	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4381 	      unsigned int conflict_end
4382 		= (conflict_start
4383 		   + HARD_REGNO_NREGS (conflict_start, rld[i].mode));
4384 
4385 	      /* If there is an overlap with the first to-be-freed register,
4386 		 adjust the interval start.  */
4387 	      if (conflict_start <= start_regno && conflict_end > start_regno)
4388 		start_regno = conflict_end;
4389 	      /* Otherwise, if there is a conflict with one of the other
4390 		 to-be-freed registers, adjust the interval end.  */
4391 	      if (conflict_start > start_regno && conflict_start < end_regno)
4392 		end_regno = conflict_start;
4393 	    }
4394 	}
4395     }
4396 
4397   for (r = start_regno; r < end_regno; r++)
4398     CLEAR_HARD_REG_BIT (*used_in_set, r);
4399 }
4400 
4401 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4402    specified by OPNUM and TYPE.  */
4403 
4404 static int
4405 reload_reg_free_p (regno, opnum, type)
4406      unsigned int regno;
4407      int opnum;
4408      enum reload_type type;
4409 {
4410   int i;
4411 
4412   /* In use for a RELOAD_OTHER means it's not available for anything.  */
4413   if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4414       || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4415     return 0;
4416 
4417   switch (type)
4418     {
4419     case RELOAD_OTHER:
4420       /* In use for anything means we can't use it for RELOAD_OTHER.  */
4421       if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4422 	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4423 	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4424 	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4425 	return 0;
4426 
4427       for (i = 0; i < reload_n_operands; i++)
4428 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4429 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4430 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4431 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4432 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4433 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4434 	  return 0;
4435 
4436       return 1;
4437 
4438     case RELOAD_FOR_INPUT:
4439       if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4440 	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4441 	return 0;
4442 
4443       if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4444 	return 0;
4445 
4446       /* If it is used for some other input, can't use it.  */
4447       for (i = 0; i < reload_n_operands; i++)
4448 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4449 	  return 0;
4450 
4451       /* If it is used in a later operand's address, can't use it.  */
4452       for (i = opnum + 1; i < reload_n_operands; i++)
4453 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4454 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4455 	  return 0;
4456 
4457       return 1;
4458 
4459     case RELOAD_FOR_INPUT_ADDRESS:
4460       /* Can't use a register if it is used for an input address for this
4461 	 operand or used as an input in an earlier one.  */
4462       if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4463 	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4464 	return 0;
4465 
4466       for (i = 0; i < opnum; i++)
4467 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4468 	  return 0;
4469 
4470       return 1;
4471 
4472     case RELOAD_FOR_INPADDR_ADDRESS:
4473       /* Can't use a register if it is used for an input address
4474 	 for this operand or used as an input in an earlier
4475 	 one.  */
4476       if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4477 	return 0;
4478 
4479       for (i = 0; i < opnum; i++)
4480 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4481 	  return 0;
4482 
4483       return 1;
4484 
4485     case RELOAD_FOR_OUTPUT_ADDRESS:
4486       /* Can't use a register if it is used for an output address for this
4487 	 operand or used as an output in this or a later operand.  Note
4488 	 that multiple output operands are emitted in reverse order, so
4489 	 the conflicting ones are those with lower indices.  */
4490       if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4491 	return 0;
4492 
4493       for (i = 0; i <= opnum; i++)
4494 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4495 	  return 0;
4496 
4497       return 1;
4498 
4499     case RELOAD_FOR_OUTADDR_ADDRESS:
4500       /* Can't use a register if it is used for an output address
4501 	 for this operand or used as an output in this or a
4502 	 later operand.  Note that multiple output operands are
4503 	 emitted in reverse order, so the conflicting ones are
4504 	 those with lower indices.  */
4505       if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4506 	return 0;
4507 
4508       for (i = 0; i <= opnum; i++)
4509 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4510 	  return 0;
4511 
4512       return 1;
4513 
4514     case RELOAD_FOR_OPERAND_ADDRESS:
4515       for (i = 0; i < reload_n_operands; i++)
4516 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4517 	  return 0;
4518 
4519       return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4520 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4521 
4522     case RELOAD_FOR_OPADDR_ADDR:
4523       for (i = 0; i < reload_n_operands; i++)
4524 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4525 	  return 0;
4526 
4527       return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4528 
4529     case RELOAD_FOR_OUTPUT:
4530       /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4531 	 outputs, or an operand address for this or an earlier output.
4532 	 Note that multiple output operands are emitted in reverse order,
4533 	 so the conflicting ones are those with higher indices.  */
4534       if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4535 	return 0;
4536 
4537       for (i = 0; i < reload_n_operands; i++)
4538 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4539 	  return 0;
4540 
4541       for (i = opnum; i < reload_n_operands; i++)
4542 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4543 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4544 	  return 0;
4545 
4546       return 1;
4547 
4548     case RELOAD_FOR_INSN:
4549       for (i = 0; i < reload_n_operands; i++)
4550 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4551 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4552 	  return 0;
4553 
4554       return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4555 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4556 
4557     case RELOAD_FOR_OTHER_ADDRESS:
4558       return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4559     }
4560   abort ();
4561 }
4562 
4563 /* Return 1 if the value in reload reg REGNO, as used by a reload
4564    needed for the part of the insn specified by OPNUM and TYPE,
4565    is still available in REGNO at the end of the insn.
4566 
4567    We can assume that the reload reg was already tested for availability
4568    at the time it is needed, and we should not check this again,
4569    in case the reg has already been marked in use.  */
4570 
4571 static int
4572 reload_reg_reaches_end_p (regno, opnum, type)
4573      unsigned int regno;
4574      int opnum;
4575      enum reload_type type;
4576 {
4577   int i;
4578 
4579   switch (type)
4580     {
4581     case RELOAD_OTHER:
4582       /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4583 	 its value must reach the end.  */
4584       return 1;
4585 
4586       /* If this use is for part of the insn,
4587 	 its value reaches if no subsequent part uses the same register.
4588 	 Just like the above function, don't try to do this with lots
4589 	 of fallthroughs.  */
4590 
4591     case RELOAD_FOR_OTHER_ADDRESS:
4592       /* Here we check for everything else, since these don't conflict
4593 	 with anything else and everything comes later.  */
4594 
4595       for (i = 0; i < reload_n_operands; i++)
4596 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4597 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4598 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4599 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4600 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4601 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4602 	  return 0;
4603 
4604       return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4605 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4606 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4607 	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4608 
4609     case RELOAD_FOR_INPUT_ADDRESS:
4610     case RELOAD_FOR_INPADDR_ADDRESS:
4611       /* Similar, except that we check only for this and subsequent inputs
4612 	 and the address of only subsequent inputs and we do not need
4613 	 to check for RELOAD_OTHER objects since they are known not to
4614 	 conflict.  */
4615 
4616       for (i = opnum; i < reload_n_operands; i++)
4617 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4618 	  return 0;
4619 
4620       for (i = opnum + 1; i < reload_n_operands; i++)
4621 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4622 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4623 	  return 0;
4624 
4625       for (i = 0; i < reload_n_operands; i++)
4626 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4627 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4628 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4629 	  return 0;
4630 
4631       if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4632 	return 0;
4633 
4634       return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4635 	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4636 	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4637 
4638     case RELOAD_FOR_INPUT:
4639       /* Similar to input address, except we start at the next operand for
4640 	 both input and input address and we do not check for
4641 	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4642 	 would conflict.  */
4643 
4644       for (i = opnum + 1; i < reload_n_operands; i++)
4645 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4646 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4647 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4648 	  return 0;
4649 
4650       /* ... fall through ...  */
4651 
4652     case RELOAD_FOR_OPERAND_ADDRESS:
4653       /* Check outputs and their addresses.  */
4654 
4655       for (i = 0; i < reload_n_operands; i++)
4656 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4657 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4658 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4659 	  return 0;
4660 
4661       return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
4662 
4663     case RELOAD_FOR_OPADDR_ADDR:
4664       for (i = 0; i < reload_n_operands; i++)
4665 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4666 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4667 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4668 	  return 0;
4669 
4670       return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4671 	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4672 	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4673 
4674     case RELOAD_FOR_INSN:
4675       /* These conflict with other outputs with RELOAD_OTHER.  So
4676 	 we need only check for output addresses.  */
4677 
4678       opnum = reload_n_operands;
4679 
4680       /* ... fall through ...  */
4681 
4682     case RELOAD_FOR_OUTPUT:
4683     case RELOAD_FOR_OUTPUT_ADDRESS:
4684     case RELOAD_FOR_OUTADDR_ADDRESS:
4685       /* We already know these can't conflict with a later output.  So the
4686 	 only things to check are later output addresses.
4687 	 Note that multiple output operands are emitted in reverse order,
4688 	 so the conflicting ones are those with lower indices.  */
4689       for (i = 0; i < opnum; i++)
4690 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4691 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4692 	  return 0;
4693 
4694       return 1;
4695     }
4696 
4697   abort ();
4698 }
4699 
4700 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4701    Return 0 otherwise.
4702 
4703    This function uses the same algorithm as reload_reg_free_p above.  */
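/* An illustrative note (annotation, not part of the original comment):
   by the table below, a RELOAD_FOR_INPUT conflicts with an input-address
   reload only for a later operand (r2_opnum > r1_opnum), which mirrors
   the "used in a later operand's address" loop in reload_reg_free_p.  */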
4704 
4705 int
4706 reloads_conflict (r1, r2)
4707      int r1, r2;
4708 {
4709   enum reload_type r1_type = rld[r1].when_needed;
4710   enum reload_type r2_type = rld[r2].when_needed;
4711   int r1_opnum = rld[r1].opnum;
4712   int r2_opnum = rld[r2].opnum;
4713 
4714   /* RELOAD_OTHER conflicts with everything.  */
4715   if (r2_type == RELOAD_OTHER)
4716     return 1;
4717 
4718   /* Otherwise, check conflicts differently for each type.  */
4719 
4720   switch (r1_type)
4721     {
4722     case RELOAD_FOR_INPUT:
4723       return (r2_type == RELOAD_FOR_INSN
4724 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4725 	      || r2_type == RELOAD_FOR_OPADDR_ADDR
4726 	      || r2_type == RELOAD_FOR_INPUT
4727 	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4728 		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4729 		  && r2_opnum > r1_opnum));
4730 
4731     case RELOAD_FOR_INPUT_ADDRESS:
4732       return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4733 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4734 
4735     case RELOAD_FOR_INPADDR_ADDRESS:
4736       return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4737 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4738 
4739     case RELOAD_FOR_OUTPUT_ADDRESS:
4740       return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4741 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
4742 
4743     case RELOAD_FOR_OUTADDR_ADDRESS:
4744       return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4745 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
4746 
4747     case RELOAD_FOR_OPERAND_ADDRESS:
4748       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4749 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4750 
4751     case RELOAD_FOR_OPADDR_ADDR:
4752       return (r2_type == RELOAD_FOR_INPUT
4753 	      || r2_type == RELOAD_FOR_OPADDR_ADDR);
4754 
4755     case RELOAD_FOR_OUTPUT:
4756       return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4757 	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4758 		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4759 		  && r2_opnum >= r1_opnum));
4760 
4761     case RELOAD_FOR_INSN:
4762       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4763 	      || r2_type == RELOAD_FOR_INSN
4764 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4765 
4766     case RELOAD_FOR_OTHER_ADDRESS:
4767       return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4768 
4769     case RELOAD_OTHER:
4770       return 1;
4771 
4772     default:
4773       abort ();
4774     }
4775 }
4776 
4777 /* Indexed by reload number, 1 if incoming value
4778    inherited from previous insns.  */
4779 char reload_inherited[MAX_RELOADS];
4780 
4781 /* For an inherited reload, this is the insn the reload was inherited from,
4782    if we know it.  Otherwise, this is 0.  */
4783 rtx reload_inheritance_insn[MAX_RELOADS];
4784 
4785 /* If nonzero, this is a place to get the value of the reload,
4786    rather than using reload_in.  */
4787 rtx reload_override_in[MAX_RELOADS];
4788 
4789 /* For each reload, the hard register number of the register used,
4790    or -1 if we did not need a register for this reload.  */
4791 int reload_spill_index[MAX_RELOADS];
4792 
4793 /* Subroutine of free_for_value_p, used to check a single register.
4794    START_REGNO is the starting regno of the full reload register
4795    (possibly comprising multiple hard registers) that we are considering.  */
4796 
4797 static int
4798 reload_reg_free_for_value_p (start_regno, regno, opnum, type, value, out,
4799 			     reloadnum, ignore_address_reloads)
4800      int start_regno, regno;
4801      int opnum;
4802      enum reload_type type;
4803      rtx value, out;
4804      int reloadnum;
4805      int ignore_address_reloads;
4806 {
4807   int time1;
4808   /* Set if we see an input reload that must not share its reload register
4809      with any new earlyclobber, but might otherwise share the reload
4810      register with an output or input-output reload.  */
4811   int check_earlyclobber = 0;
4812   int i;
4813   int copy = 0;
4814 
4815   if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4816     return 0;
4817 
4818   if (out == const0_rtx)
4819     {
4820       copy = 1;
4821       out = NULL_RTX;
4822     }
4823 
4824   /* We use some pseudo 'time' value to check if the lifetimes of the
4825      new register use would overlap with the one of a previous reload
4826      that is not read-only or uses a different value.
4827      The 'time' used doesn't have to be linear in any shape or form, just
4828      monotonic.
4829      Some reload types use different 'buckets' for each operand.
4830      So there are MAX_RECOG_OPERANDS different time values for each
4831      such reload type.
4832      We compute TIME1 as the time when the register for the prospective
4833      new reload ceases to be live, and TIME2 for each existing
4834      reload as the time when the reload register of that reload
4835      becomes live.
4836      Where there is little to be gained by exact lifetime calculations,
4837      we just make conservative assumptions, i.e. a longer lifetime;
4838      this is done in the 'default:' cases.  */
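  /* A worked example (annotation, derived from the switch below): for
     operand 1, a RELOAD_FOR_INPADDR_ADDRESS reload gets time 1 * 4 + 2 = 6,
     its RELOAD_FOR_INPUT_ADDRESS gets 7, and the RELOAD_FOR_INPUT itself
     gets 8 in the copy case -- all distinct from operand 0's values
     2, 3 and 4.  */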
4839   switch (type)
4840     {
4841     case RELOAD_FOR_OTHER_ADDRESS:
4842       /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
4843       time1 = copy ? 0 : 1;
4844       break;
4845     case RELOAD_OTHER:
4846       time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
4847       break;
4848       /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
4849 	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
4850 	 respectively, to the time values for these, we get distinct time
4851 	 values.  To get distinct time values for each operand, we have to
4852 	 multiply opnum by at least three.  We round that up to four because
4853 	 multiply by four is often cheaper.  */
4854     case RELOAD_FOR_INPADDR_ADDRESS:
4855       time1 = opnum * 4 + 2;
4856       break;
4857     case RELOAD_FOR_INPUT_ADDRESS:
4858       time1 = opnum * 4 + 3;
4859       break;
4860     case RELOAD_FOR_INPUT:
4861       /* All RELOAD_FOR_INPUT reloads remain live till the instruction
4862 	 executes (inclusive).  */
4863       time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
4864       break;
4865     case RELOAD_FOR_OPADDR_ADDR:
4866       /* opnum * 4 + 4
4867 	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
4868       time1 = MAX_RECOG_OPERANDS * 4 + 1;
4869       break;
4870     case RELOAD_FOR_OPERAND_ADDRESS:
4871       /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
4872 	 is executed.  */
4873       time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
4874       break;
4875     case RELOAD_FOR_OUTADDR_ADDRESS:
4876       time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
4877       break;
4878     case RELOAD_FOR_OUTPUT_ADDRESS:
4879       time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
4880       break;
4881     default:
4882       time1 = MAX_RECOG_OPERANDS * 5 + 5;
4883     }
4884 
4885   for (i = 0; i < n_reloads; i++)
4886     {
4887       rtx reg = rld[i].reg_rtx;
4888       if (reg && GET_CODE (reg) == REG
4889 	  && ((unsigned) regno - true_regnum (reg)
4890 	      <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned) 1)
4891 	  && i != reloadnum)
4892 	{
4893 	  rtx other_input = rld[i].in;
4894 
4895 	  /* If the other reload loads the same input value, that
4896 	     avoids a conflict only if it is loading it into the
4897 	     same register.  */
4898 	  if (true_regnum (reg) != start_regno)
4899 	    other_input = NULL_RTX;
4900 	  if (! other_input || ! rtx_equal_p (other_input, value)
4901 	      || rld[i].out || out)
4902 	    {
4903 	      int time2;
4904 	      switch (rld[i].when_needed)
4905 		{
4906 		case RELOAD_FOR_OTHER_ADDRESS:
4907 		  time2 = 0;
4908 		  break;
4909 		case RELOAD_FOR_INPADDR_ADDRESS:
4910 		  /* find_reloads makes sure that a
4911 		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
4912 		     by at most one - the first -
4913 		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
4914 		     address reload is inherited, the address address reload
4915 		     goes away, so we can ignore this conflict.  */
4916 		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
4917 		      && ignore_address_reloads
4918 		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
4919 			 Then the address address is still needed to store
4920 			 back the new address.  */
4921 		      && ! rld[reloadnum].out)
4922 		    continue;
4923 		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
4924 		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
4925 		     reloads go away.  */
4926 		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
4927 		      && ignore_address_reloads
4928 		      /* Unless we are reloading an auto_inc expression.  */
4929 		      && ! rld[reloadnum].out)
4930 		    continue;
4931 		  time2 = rld[i].opnum * 4 + 2;
4932 		  break;
4933 		case RELOAD_FOR_INPUT_ADDRESS:
4934 		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
4935 		      && ignore_address_reloads
4936 		      && ! rld[reloadnum].out)
4937 		    continue;
4938 		  time2 = rld[i].opnum * 4 + 3;
4939 		  break;
4940 		case RELOAD_FOR_INPUT:
4941 		  time2 = rld[i].opnum * 4 + 4;
4942 		  check_earlyclobber = 1;
4943 		  break;
4944 		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
4945 		     == MAX_RECOG_OPERANDS * 4  */
4946 		case RELOAD_FOR_OPADDR_ADDR:
4947 		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
4948 		      && ignore_address_reloads
4949 		      && ! rld[reloadnum].out)
4950 		    continue;
4951 		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
4952 		  break;
4953 		case RELOAD_FOR_OPERAND_ADDRESS:
4954 		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
4955 		  check_earlyclobber = 1;
4956 		  break;
4957 		case RELOAD_FOR_INSN:
4958 		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
4959 		  break;
4960 		case RELOAD_FOR_OUTPUT:
4961 		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
4962 		     instruction is executed.  */
4963 		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
4964 		  break;
4965 		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
4966 		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
4967 		     value.  */
4968 		case RELOAD_FOR_OUTADDR_ADDRESS:
4969 		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
4970 		      && ignore_address_reloads
4971 		      && ! rld[reloadnum].out)
4972 		    continue;
4973 		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
4974 		  break;
4975 		case RELOAD_FOR_OUTPUT_ADDRESS:
4976 		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
4977 		  break;
4978 		case RELOAD_OTHER:
4979 		  /* If there is no conflict in the input part, handle this
4980 		     like an output reload.  */
4981 		  if (! rld[i].in || rtx_equal_p (other_input, value))
4982 		    {
4983 		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
4984 		      /* Earlyclobbered outputs must conflict with inputs.  */
4985 		      if (earlyclobber_operand_p (rld[i].out))
4986 			time2 = MAX_RECOG_OPERANDS * 4 + 3;
4987 
4988 		      break;
4989 		    }
4990 		  time2 = 1;
4991 		  /* RELOAD_OTHER might be live beyond instruction execution,
4992 		     but this is not obvious when we set time2 = 1.  So check
4993 		     here if there might be a problem with the new reload
4994 		     clobbering the register used by the RELOAD_OTHER.  */
4995 		  if (out)
4996 		    return 0;
4997 		  break;
4998 		default:
4999 		  return 0;
5000 		}
5001 	      if ((time1 >= time2
5002 		   && (! rld[i].in || rld[i].out
5003 		       || ! rtx_equal_p (other_input, value)))
5004 		  || (out && rld[reloadnum].out_reg
5005 		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5006 		return 0;
5007 	    }
5008 	}
5009     }
5010 
5011   /* Earlyclobbered outputs must conflict with inputs.  */
5012   if (check_earlyclobber && out && earlyclobber_operand_p (out))
5013     return 0;
5014 
5015   return 1;
5016 }
5017 
5018 /* Return 1 if the value in reload reg REGNO, as used by a reload
5019    needed for the part of the insn specified by OPNUM and TYPE,
5020    may be used to load VALUE into it.
5021 
5022    MODE is the mode in which the register is used, this is needed to
5023    determine how many hard regs to test.
5024 
5025    Other read-only reloads with the same value do not conflict
5026    unless OUT is nonzero and these other reloads have to live while
5027    output reloads live.
5028    If OUT is CONST0_RTX, this is a special case: it means that the
5029    test should not be for using register REGNO as reload register, but
5030    for copying from register REGNO into the reload register.
5031 
5032    RELOADNUM is the number of the reload we want to load this value for;
5033    a reload does not conflict with itself.
5034 
5035    When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
5036    reloads that load an address for the very reload we are considering.
5037 
5038    The caller has to make sure that there is no conflict with the return
5039    register.  */
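/* An illustrative note: on a target where MODE occupies two hard registers,
   HARD_REGNO_NREGS returns 2 and the loop below checks REGNO + 1 and then
   REGNO itself.  */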
5040 
5041 static int
5042 free_for_value_p (regno, mode, opnum, type, value, out, reloadnum,
5043 		  ignore_address_reloads)
5044      int regno;
5045      enum machine_mode mode;
5046      int opnum;
5047      enum reload_type type;
5048      rtx value, out;
5049      int reloadnum;
5050      int ignore_address_reloads;
5051 {
5052   int nregs = HARD_REGNO_NREGS (regno, mode);
5053   while (nregs-- > 0)
5054     if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5055 				       value, out, reloadnum,
5056 				       ignore_address_reloads))
5057       return 0;
5058   return 1;
5059 }
5060 
5061 /* Determine whether the reload reg X overlaps any rtx'es used for
5062    overriding inheritance.  Return nonzero if so.  */
5063 
5064 static int
5065 conflicts_with_override (x)
5066      rtx x;
5067 {
5068   int i;
5069   for (i = 0; i < n_reloads; i++)
5070     if (reload_override_in[i]
5071 	&& reg_overlap_mentioned_p (x, reload_override_in[i]))
5072       return 1;
5073   return 0;
5074 }
5075 
5076 /* Give an error message saying we failed to find a reload for INSN,
5077    and clear out reload R.  */
5078 static void
5079 failed_reload (insn, r)
5080      rtx insn;
5081      int r;
5082 {
5083   if (asm_noperands (PATTERN (insn)) < 0)
5084     /* It's the compiler's fault.  */
5085     fatal_insn ("could not find a spill register", insn);
5086 
5087   /* It's the user's fault; the operand's mode and constraint
5088      don't match.  Disable this reload so we don't crash in final.  */
5089   error_for_asm (insn,
5090 		 "`asm' operand constraint incompatible with operand size");
5091   rld[r].in = 0;
5092   rld[r].out = 0;
5093   rld[r].reg_rtx = 0;
5094   rld[r].optional = 1;
5095   rld[r].secondary_p = 1;
5096 }
5097 
5098 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5099    for reload R.  If it's valid, get an rtx for it.  Return nonzero if
5100    successful.  */
5101 static int
5102 set_reload_reg (i, r)
5103      int i, r;
5104 {
5105   int regno;
5106   rtx reg = spill_reg_rtx[i];
5107 
5108   if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5109     spill_reg_rtx[i] = reg
5110       = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5111 
5112   regno = true_regnum (reg);
5113 
5114   /* Detect when the reload reg can't hold the reload mode.
5115      This used to be one `if', but Sequent compiler can't handle that.  */
5116   if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5117     {
5118       enum machine_mode test_mode = VOIDmode;
5119       if (rld[r].in)
5120 	test_mode = GET_MODE (rld[r].in);
5121       /* If rld[r].in has VOIDmode, it means we will load it
5122 	 in whatever mode the reload reg has: to wit, rld[r].mode.
5123 	 We have already tested that for validity.  */
5124       /* Aside from that, we need to test that the expressions
5125 	 to reload from or into have modes which are valid for this
5126 	 reload register.  Otherwise the reload insns would be invalid.  */
5127       if (! (rld[r].in != 0 && test_mode != VOIDmode
5128 	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5129 	if (! (rld[r].out != 0
5130 	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5131 	  {
5132 	    /* The reg is OK.  */
5133 	    last_spill_reg = i;
5134 
5135 	    /* Mark as in use for this insn the reload regs we use
5136 	       for this.  */
5137 	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5138 				    rld[r].when_needed, rld[r].mode);
5139 
5140 	    rld[r].reg_rtx = reg;
5141 	    reload_spill_index[r] = spill_regs[i];
5142 	    return 1;
5143 	  }
5144     }
5145   return 0;
5146 }
5147 
5148 /* Find a spill register to use as a reload register for reload R.
5149    LAST_RELOAD is nonzero if this is the last reload for the insn being
5150    processed.
5151 
5152    Set rld[R].reg_rtx to the register allocated.
5153 
5154    We return 1 if successful, or 0 if we couldn't find a spill reg and
5155    we didn't change anything.  */
5156 
5157 static int
5158 allocate_reload_reg (chain, r, last_reload)
5159      struct insn_chain *chain ATTRIBUTE_UNUSED;
5160      int r;
5161      int last_reload;
5162 {
5163   int i, pass, count;
5164 
5165   /* If we put this reload ahead, thinking it is a group,
5166      then insist on finding a group.  Otherwise we can grab a
5167      reg that some other reload needs.
5168      (That can happen when we have a 68000 DATA_OR_FP_REG
5169      which is a group of data regs or one fp reg.)
5170      We need not be so restrictive if there are no more reloads
5171      for this insn.
5172 
5173      ??? Really it would be nicer to have smarter handling
5174      for that kind of reg class, where a problem like this is normal.
5175      Perhaps those classes should be avoided for reloading
5176      by use of more alternatives.  */
5177 
5178   int force_group = rld[r].nregs > 1 && ! last_reload;
5179 
5180   /* If we want a single register and haven't yet found one,
5181      take any reg in the right class and not in use.
5182      If we want a consecutive group, here is where we look for it.
5183 
5184      We use two passes so we can first look for reload regs to
5185      reuse, which are already in use for other reloads in this insn,
5186      and only then use additional registers.
5187      I think that maximizing reuse is needed to make sure we don't
5188      run out of reload regs.  Suppose we have three reloads, and
5189      reloads A and B can share regs.  These need two regs.
5190      Suppose A and B are given different regs.
5191      That leaves none for C.  */
5192   for (pass = 0; pass < 2; pass++)
5193     {
5194       /* I is the index in spill_regs.
5195 	 We advance it round-robin between insns to use all spill regs
5196 	 equally, so that inherited reloads have a chance
5197 	 of leapfrogging each other.  */
5198 
5199       i = last_spill_reg;
5200 
5201       for (count = 0; count < n_spills; count++)
5202 	{
5203 	  int class = (int) rld[r].class;
5204 	  int regnum;
5205 
5206 	  i++;
5207 	  if (i >= n_spills)
5208 	    i -= n_spills;
5209 	  regnum = spill_regs[i];
5210 
5211 	  if ((reload_reg_free_p (regnum, rld[r].opnum,
5212 				  rld[r].when_needed)
5213 	       || (rld[r].in
5214 		   /* We check reload_reg_used to make sure we
5215 		      don't clobber the return register.  */
5216 		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5217 		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5218 					rld[r].when_needed, rld[r].in,
5219 					rld[r].out, r, 1)))
5220 	      && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5221 	      && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5222 	      /* Look first for regs to share, then for unshared.  But
5223 		 don't share regs used for inherited reloads; they are
5224 		 the ones we want to preserve.  */
5225 	      && (pass
5226 		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5227 					 regnum)
5228 		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5229 					      regnum))))
5230 	    {
5231 	      int nr = HARD_REGNO_NREGS (regnum, rld[r].mode);
5232 	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5233 		 (on 68000) got us two FP regs.  If NR is 1,
5234 		 we would reject both of them.  */
5235 	      if (force_group)
5236 		nr = rld[r].nregs;
5237 	      /* If we need only one reg, we have already won.  */
5238 	      if (nr == 1)
5239 		{
5240 		  /* But reject a single reg if we demand a group.  */
5241 		  if (force_group)
5242 		    continue;
5243 		  break;
5244 		}
5245 	      /* Otherwise check that as many consecutive regs as we need
5246 		 are available here.  */
5247 	      while (nr > 1)
5248 		{
5249 		  int regno = regnum + nr - 1;
5250 		  if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5251 			&& spill_reg_order[regno] >= 0
5252 			&& reload_reg_free_p (regno, rld[r].opnum,
5253 					      rld[r].when_needed)))
5254 		    break;
5255 		  nr--;
5256 		}
5257 	      if (nr == 1)
5258 		break;
5259 	    }
5260 	}
5261 
5262       /* If we found something on pass 1, omit pass 2.  */
5263       if (count < n_spills)
5264 	break;
5265     }
5266 
5267   /* We should have found a spill register by now.  */
5268   if (count >= n_spills)
5269     return 0;
5270 
5271   /* I is the index in SPILL_REG_RTX of the reload register we are to
5272      allocate.  Get an rtx for it and find its register number.  */
5273 
5274   return set_reload_reg (i, r);
5275 }
5276 
5277 /* Initialize all the tables needed to allocate reload registers.
5278    CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5279    is the array we use to restore the reg_rtx field for every reload.  */
5280 
5281 static void
5282 choose_reload_regs_init (chain, save_reload_reg_rtx)
5283      struct insn_chain *chain;
5284      rtx *save_reload_reg_rtx;
5285 {
5286   int i;
5287 
5288   for (i = 0; i < n_reloads; i++)
5289     rld[i].reg_rtx = save_reload_reg_rtx[i];
5290 
5291   memset (reload_inherited, 0, MAX_RELOADS);
5292   memset ((char *) reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5293   memset ((char *) reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5294 
5295   CLEAR_HARD_REG_SET (reload_reg_used);
5296   CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5297   CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5298   CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5299   CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5300   CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5301 
5302   CLEAR_HARD_REG_SET (reg_used_in_insn);
5303   {
5304     HARD_REG_SET tmp;
5305     REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5306     IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5307     REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5308     IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5309     compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5310     compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5311   }
5312 
5313   for (i = 0; i < reload_n_operands; i++)
5314     {
5315       CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5316       CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5317       CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5318       CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5319       CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5320       CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5321     }
5322 
5323   COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5324 
5325   CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5326 
5327   for (i = 0; i < n_reloads; i++)
5328     /* If we have already decided to use a certain register,
5329        don't use it in another way.  */
5330     if (rld[i].reg_rtx)
5331       mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5332 			      rld[i].when_needed, rld[i].mode);
5333 }
5334 
5335 /* Assign hard reg targets for the pseudo-registers we must reload
5336    into hard regs for this insn.
5337    Also output the instructions to copy them in and out of the hard regs.
5338 
5339    For machines with register classes, we are responsible for
5340    finding a reload reg in the proper class.  */
5341 
5342 static void
5343 choose_reload_regs (chain)
5344      struct insn_chain *chain;
5345 {
5346   rtx insn = chain->insn;
5347   int i, j;
5348   unsigned int max_group_size = 1;
5349   enum reg_class group_class = NO_REGS;
5350   int pass, win, inheritance;
5351 
5352   rtx save_reload_reg_rtx[MAX_RELOADS];
5353 
5354   /* In order to be certain of getting the registers we need,
5355      we must sort the reloads into order of increasing register class.
5356      Then our grabbing of reload registers will parallel the process
5357      that provided the reload registers.
5358 
5359      Also note whether any of the reloads wants a consecutive group of regs.
5360      If so, record the maximum size of the group desired and what
5361      register class contains all the groups needed by this insn.  */
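  /* An illustrative note (annotation): if one reload only accepts a
     single-register class while another accepts any general register,
     handling the tighter class first keeps the looser reload from taking
     the only register the tighter one can use.  */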
5362 
5363   for (j = 0; j < n_reloads; j++)
5364     {
5365       reload_order[j] = j;
5366       reload_spill_index[j] = -1;
5367 
5368       if (rld[j].nregs > 1)
5369 	{
5370 	  max_group_size = MAX (rld[j].nregs, max_group_size);
5371 	  group_class
5372 	    = reg_class_superunion[(int) rld[j].class][(int) group_class];
5373 	}
5374 
5375       save_reload_reg_rtx[j] = rld[j].reg_rtx;
5376     }
5377 
5378   if (n_reloads > 1)
5379     qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5380 
5381   /* If -O, try first with inheritance, then turning it off.
5382      If not -O, don't do inheritance.
5383      Using inheritance when not optimizing leads to paradoxes
5384      with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5385      because one side of the comparison might be inherited.  */
5386   win = 0;
5387   for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5388     {
5389       choose_reload_regs_init (chain, save_reload_reg_rtx);
5390 
5391       /* Process the reloads in order of preference just found.
5392 	 Beyond this point, subregs can be found in reload_reg_rtx.
5393 
5394 	 This used to look for an existing reloaded home for all of the
5395 	 reloads, and only then perform any new reloads.  But that could lose
5396 	 if the reloads were done out of reg-class order because a later
5397 	 reload with a looser constraint might have an old home in a register
5398 	 needed by an earlier reload with a tighter constraint.
5399 
5400 	 To solve this, we make two passes over the reloads, in the order
5401 	 described above.  In the first pass we try to inherit a reload
5402 	 from a previous insn.  If there is a later reload that needs a
5403 	 class that is a proper subset of the class being processed, we must
5404 	 also allocate a spill register during the first pass.
5405 
5406 	 Then make a second pass over the reloads to allocate any reloads
5407 	 that haven't been given registers yet.  */
5408 
5409       for (j = 0; j < n_reloads; j++)
5410 	{
5411 	  int r = reload_order[j];
5412 	  rtx search_equiv = NULL_RTX;
5413 
5414 	  /* Ignore reloads that got marked inoperative.  */
5415 	  if (rld[r].out == 0 && rld[r].in == 0
5416 	      && ! rld[r].secondary_p)
5417 	    continue;
5418 
5419 	  /* If find_reloads chose to use reload_in or reload_out as a reload
5420 	     register, we don't need to choose one.  Otherwise, try even if it
5421 	     found one since we might save an insn if we find the value lying
5422 	     around.
5423 	     Try also when reload_in is a pseudo without a hard reg.  */
5424 	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
5425 	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5426 		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5427 		      && GET_CODE (rld[r].in) != MEM
5428 		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5429 	    continue;
5430 
5431 #if 0 /* No longer needed for correct operation.
5432 	 It might give better code, or might not; worth an experiment?  */
5433 	  /* If this is an optional reload, we can't inherit from earlier insns
5434 	     until we are sure that any non-optional reloads have been allocated.
5435 	     The following code takes advantage of the fact that optional reloads
5436 	     are at the end of reload_order.  */
5437 	  if (rld[r].optional != 0)
5438 	    for (i = 0; i < j; i++)
5439 	      if ((rld[reload_order[i]].out != 0
5440 		   || rld[reload_order[i]].in != 0
5441 		   || rld[reload_order[i]].secondary_p)
5442 		  && ! rld[reload_order[i]].optional
5443 		  && rld[reload_order[i]].reg_rtx == 0)
5444 		allocate_reload_reg (chain, reload_order[i], 0);
5445 #endif
5446 
5447 	  /* First see if this pseudo is already available as reloaded
5448 	     for a previous insn.  We cannot try to inherit for reloads
5449 	     that are smaller than the maximum number of registers needed
5450 	     for groups unless the register we would allocate cannot be used
5451 	     for the groups.
5452 
5453 	     We could check here to see if this is a secondary reload for
5454 	     an object that is already in a register of the desired class.
5455 	     This would avoid the need for the secondary reload register.
5456 	     But this is complex because we can't easily determine what
5457 	     objects might want to be loaded via this reload.  So let a
5458 	     register be allocated here.  In `emit_reload_insns' we suppress
5459 	     one of the loads in the case described above.  */
5460 
5461 	  if (inheritance)
5462 	    {
5463 	      int byte = 0;
5464 	      int regno = -1;
5465 	      enum machine_mode mode = VOIDmode;
5466 
5467 	      if (rld[r].in == 0)
5468 		;
5469 	      else if (GET_CODE (rld[r].in) == REG)
5470 		{
5471 		  regno = REGNO (rld[r].in);
5472 		  mode = GET_MODE (rld[r].in);
5473 		}
5474 	      else if (GET_CODE (rld[r].in_reg) == REG)
5475 		{
5476 		  regno = REGNO (rld[r].in_reg);
5477 		  mode = GET_MODE (rld[r].in_reg);
5478 		}
5479 	      else if (GET_CODE (rld[r].in_reg) == SUBREG
5480 		       && GET_CODE (SUBREG_REG (rld[r].in_reg)) == REG)
5481 		{
5482 		  byte = SUBREG_BYTE (rld[r].in_reg);
5483 		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
5484 		  if (regno < FIRST_PSEUDO_REGISTER)
5485 		    regno = subreg_regno (rld[r].in_reg);
5486 		  mode = GET_MODE (rld[r].in_reg);
5487 		}
5488 #ifdef AUTO_INC_DEC
5489 	      else if ((GET_CODE (rld[r].in_reg) == PRE_INC
5490 			|| GET_CODE (rld[r].in_reg) == PRE_DEC
5491 			|| GET_CODE (rld[r].in_reg) == POST_INC
5492 			|| GET_CODE (rld[r].in_reg) == POST_DEC)
5493 		       && GET_CODE (XEXP (rld[r].in_reg, 0)) == REG)
5494 		{
5495 		  regno = REGNO (XEXP (rld[r].in_reg, 0));
5496 		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5497 		  rld[r].out = rld[r].in;
5498 		}
5499 #endif
5500 #if 0
5501 	      /* This won't work, since REGNO can be a pseudo reg number.
5502 		 Also, it takes much more hair to keep track of all the things
5503 		 that can invalidate an inherited reload of part of a pseudoreg.  */
5504 	      else if (GET_CODE (rld[r].in) == SUBREG
5505 		       && GET_CODE (SUBREG_REG (rld[r].in)) == REG)
5506 		regno = subreg_regno (rld[r].in);
5507 #endif
5508 
5509 	      if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5510 		{
5511 		  enum reg_class class = rld[r].class, last_class;
5512 		  rtx last_reg = reg_last_reload_reg[regno];
5513 		  enum machine_mode need_mode;
5514 
5515 		  i = REGNO (last_reg);
5516 		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
5517 		  last_class = REGNO_REG_CLASS (i);
5518 
5519 		  if (byte == 0)
5520 		    need_mode = mode;
5521 		  else
5522 		    need_mode
5523 		      = smallest_mode_for_size (GET_MODE_SIZE (mode) + byte,
5524 						GET_MODE_CLASS (mode));
5525 
5526 		  if (
5527 #ifdef CANNOT_CHANGE_MODE_CLASS
5528 		      (!REG_CANNOT_CHANGE_MODE_P (i, GET_MODE (last_reg),
5529 						  need_mode)
5530 		       &&
5531 #endif
5532 		      (GET_MODE_SIZE (GET_MODE (last_reg))
5533 		       >= GET_MODE_SIZE (need_mode))
5534 #ifdef CANNOT_CHANGE_MODE_CLASS
5535 		      )
5536 #endif
5537 		      && reg_reloaded_contents[i] == regno
5538 		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5539 		      && HARD_REGNO_MODE_OK (i, rld[r].mode)
5540 		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5541 			  /* Even if we can't use this register as a reload
5542 			     register, we might use it for reload_override_in,
5543 			     if copying it to the desired class is cheap
5544 			     enough.  */
5545 			  || ((REGISTER_MOVE_COST (mode, last_class, class)
5546 			       < MEMORY_MOVE_COST (mode, class, 1))
5547 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5548 			      && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
5549 								last_reg)
5550 				  == NO_REGS)
5551 #endif
5552 #ifdef SECONDARY_MEMORY_NEEDED
5553 			      && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5554 							    mode)
5555 #endif
5556 			      ))
5557 
5558 		      && (rld[r].nregs == max_group_size
5559 			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5560 						  i))
5561 		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
5562 					   rld[r].when_needed, rld[r].in,
5563 					   const0_rtx, r, 1))
5564 		    {
5565 		      /* If a group is needed, verify that all the subsequent
5566 			 registers still have their values intact.  */
5567 		      int nr = HARD_REGNO_NREGS (i, rld[r].mode);
5568 		      int k;
5569 
5570 		      for (k = 1; k < nr; k++)
5571 			if (reg_reloaded_contents[i + k] != regno
5572 			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5573 			  break;
5574 
5575 		      if (k == nr)
5576 			{
5577 			  int i1;
5578 			  int bad_for_class;
5579 
5580 			  last_reg = (GET_MODE (last_reg) == mode
5581 				      ? last_reg : gen_rtx_REG (mode, i));
5582 
5583 			  bad_for_class = 0;
5584 			  for (k = 0; k < nr; k++)
5585 			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5586 								  i+k);
5587 
5588 			  /* We found a register that contains the
5589 			     value we need.  If this register is the
5590 			     same as an `earlyclobber' operand of the
5591 			     current insn, just mark it as a place to
5592 			     reload from since we can't use it as the
5593 			     reload register itself.  */
5594 
5595 			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
5596 			    if (reg_overlap_mentioned_for_reload_p
5597 				(reg_last_reload_reg[regno],
5598 				 reload_earlyclobbers[i1]))
5599 			      break;
5600 
5601 			  if (i1 != n_earlyclobbers
5602 			      || ! (free_for_value_p (i, rld[r].mode,
5603 						      rld[r].opnum,
5604 						      rld[r].when_needed, rld[r].in,
5605 						      rld[r].out, r, 1))
5606 			      /* Don't use it if we'd clobber a pseudo reg.  */
5607 			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
5608 				  && rld[r].out
5609 				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5610 			      /* Don't clobber the frame pointer.  */
5611 			      || (i == HARD_FRAME_POINTER_REGNUM
5612 				  && frame_pointer_needed
5613 				  && rld[r].out)
5614 			      /* Don't really use the inherited spill reg
5615 				 if we need it wider than we've got it.  */
5616 			      || (GET_MODE_SIZE (rld[r].mode)
5617 				  > GET_MODE_SIZE (mode))
5618 			      || bad_for_class
5619 
5620 			      /* If find_reloads chose reload_out as reload
5621 				 register, stay with it - that leaves the
5622 				 inherited register for subsequent reloads.  */
5623 			      || (rld[r].out && rld[r].reg_rtx
5624 				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
5625 			    {
5626 			      if (! rld[r].optional)
5627 				{
5628 				  reload_override_in[r] = last_reg;
5629 				  reload_inheritance_insn[r]
5630 				    = reg_reloaded_insn[i];
5631 				}
5632 			    }
5633 			  else
5634 			    {
5635 			      int k;
5636 			      /* We can use this as a reload reg.  */
5637 			      /* Mark the register as in use for this part of
5638 				 the insn.  */
5639 			      mark_reload_reg_in_use (i,
5640 						      rld[r].opnum,
5641 						      rld[r].when_needed,
5642 						      rld[r].mode);
5643 			      rld[r].reg_rtx = last_reg;
5644 			      reload_inherited[r] = 1;
5645 			      reload_inheritance_insn[r]
5646 				= reg_reloaded_insn[i];
5647 			      reload_spill_index[r] = i;
5648 			      for (k = 0; k < nr; k++)
5649 				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5650 						  i + k);
5651 			    }
5652 			}
5653 		    }
5654 		}
5655 	    }
5656 
5657 	  /* Here's another way to see if the value is already lying around.  */
5658 	  if (inheritance
5659 	      && rld[r].in != 0
5660 	      && ! reload_inherited[r]
5661 	      && rld[r].out == 0
5662 	      && (CONSTANT_P (rld[r].in)
5663 		  || GET_CODE (rld[r].in) == PLUS
5664 		  || GET_CODE (rld[r].in) == REG
5665 		  || GET_CODE (rld[r].in) == MEM)
5666 	      && (rld[r].nregs == max_group_size
5667 		  || ! reg_classes_intersect_p (rld[r].class, group_class)))
5668 	    search_equiv = rld[r].in;
5669 	  /* If this is an output reload from a simple move insn, see
5670 	     whether an equivalence for the input is available.  */
5671 	  else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
5672 	    {
5673 	      rtx set = single_set (insn);
5674 
5675 	      if (set
5676 		  && rtx_equal_p (rld[r].out, SET_DEST (set))
5677 		  && CONSTANT_P (SET_SRC (set)))
5678 		search_equiv = SET_SRC (set);
5679 	    }
5680 
5681 	  if (search_equiv)
5682 	    {
5683 	      rtx equiv
5684 		= find_equiv_reg (search_equiv, insn, rld[r].class,
5685 				  -1, NULL, 0, rld[r].mode);
5686 	      int regno = 0;
5687 
5688 	      if (equiv != 0)
5689 		{
5690 		  if (GET_CODE (equiv) == REG)
5691 		    regno = REGNO (equiv);
5692 		  else if (GET_CODE (equiv) == SUBREG)
5693 		    {
5694 		      /* This must be a SUBREG of a hard register.
5695 			 Make a new REG since this might be used in an
5696 			 address and not all machines support SUBREGs
5697 			 there.  */
5698 		      regno = subreg_regno (equiv);
5699 		      equiv = gen_rtx_REG (rld[r].mode, regno);
5700 		    }
5701 		  else
5702 		    abort ();
5703 		}
5704 
5705 	      /* If we found a spill reg, reject it unless it is free
5706 		 and of the desired class.  */
5707 	      if (equiv != 0
5708 		  && ((TEST_HARD_REG_BIT (reload_reg_used_at_all, regno)
5709 		       && ! free_for_value_p (regno, rld[r].mode,
5710 					      rld[r].opnum, rld[r].when_needed,
5711 					      rld[r].in, rld[r].out, r, 1))
5712 		      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5713 					      regno)))
5714 		equiv = 0;
5715 
5716 	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
5717 		equiv = 0;
5718 
5719 	      /* We found a register that contains the value we need.
5720 		 If this register is the same as an `earlyclobber' operand
5721 		 of the current insn, just mark it as a place to reload from
5722 		 since we can't use it as the reload register itself.  */
5723 
5724 	      if (equiv != 0)
5725 		for (i = 0; i < n_earlyclobbers; i++)
5726 		  if (reg_overlap_mentioned_for_reload_p (equiv,
5727 							  reload_earlyclobbers[i]))
5728 		    {
5729 		      if (! rld[r].optional)
5730 			reload_override_in[r] = equiv;
5731 		      equiv = 0;
5732 		      break;
5733 		    }
5734 
5735 	      /* If the equiv register we have found is explicitly clobbered
5736 		 in the current insn, it depends on the reload type whether we
5737 		 can use it, use it for reload_override_in, or not at all.
5738 		 In particular, we then can't use EQUIV for a
5739 		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */
5740 
5741 	      if (equiv != 0)
5742 		{
5743 		  if (regno_clobbered_p (regno, insn, rld[r].mode, 0))
5744 		    switch (rld[r].when_needed)
5745 		      {
5746 		      case RELOAD_FOR_OTHER_ADDRESS:
5747 		      case RELOAD_FOR_INPADDR_ADDRESS:
5748 		      case RELOAD_FOR_INPUT_ADDRESS:
5749 		      case RELOAD_FOR_OPADDR_ADDR:
5750 			break;
5751 		      case RELOAD_OTHER:
5752 		      case RELOAD_FOR_INPUT:
5753 		      case RELOAD_FOR_OPERAND_ADDRESS:
5754 			if (! rld[r].optional)
5755 			  reload_override_in[r] = equiv;
5756 			/* Fall through.  */
5757 		      default:
5758 			equiv = 0;
5759 			break;
5760 		      }
5761 		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
5762 		    switch (rld[r].when_needed)
5763 		      {
5764 		      case RELOAD_FOR_OTHER_ADDRESS:
5765 		      case RELOAD_FOR_INPADDR_ADDRESS:
5766 		      case RELOAD_FOR_INPUT_ADDRESS:
5767 		      case RELOAD_FOR_OPADDR_ADDR:
5768 		      case RELOAD_FOR_OPERAND_ADDRESS:
5769 		      case RELOAD_FOR_INPUT:
5770 			break;
5771 		      case RELOAD_OTHER:
5772 			if (! rld[r].optional)
5773 			  reload_override_in[r] = equiv;
5774 			/* Fall through.  */
5775 		      default:
5776 			equiv = 0;
5777 			break;
5778 		      }
5779 		}
5780 
5781 	      /* If we found an equivalent reg, say no code need be generated
5782 		 to load it, and use it as our reload reg.  */
5783 	      if (equiv != 0
5784 		  && (regno != HARD_FRAME_POINTER_REGNUM
5785 		      || !frame_pointer_needed))
5786 		{
5787 		  int nr = HARD_REGNO_NREGS (regno, rld[r].mode);
5788 		  int k;
5789 		  rld[r].reg_rtx = equiv;
5790 		  reload_inherited[r] = 1;
5791 
5792 		  /* If reg_reloaded_valid is not set for this register,
5793 		     there might be a stale spill_reg_store lying around.
5794 		     We must clear it, since otherwise emit_reload_insns
5795 		     might delete the store.  */
5796 		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
5797 		    spill_reg_store[regno] = NULL_RTX;
5798 		  /* If any of the hard registers in EQUIV are spill
5799 		     registers, mark them as in use for this insn.  */
5800 		  for (k = 0; k < nr; k++)
5801 		    {
5802 		      i = spill_reg_order[regno + k];
5803 		      if (i >= 0)
5804 			{
5805 			  mark_reload_reg_in_use (regno, rld[r].opnum,
5806 						  rld[r].when_needed,
5807 						  rld[r].mode);
5808 			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5809 					    regno + k);
5810 			}
5811 		    }
5812 		}
5813 	    }
5814 
5815 	  /* If we found a register to use already, or if this is an optional
5816 	     reload, we are done.  */
5817 	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
5818 	    continue;
5819 
5820 #if 0
5821 	  /* No longer needed for correct operation.  Might or might
5822 	     not give better code on the average.  Want to experiment?  */
5823 
5824 	  /* See if there is a later reload that has a class different from our
5825 	     class that intersects our class or that requires fewer registers
5826 	     than our reload.  If so, we must allocate a register to this
5827 	     reload now, since that reload might inherit a previous reload
5828 	     and take the only available register in our class.  Don't do this
5829 	     for optional reloads since they will force all previous reloads
5830 	     to be allocated.  Also don't do this for reloads that have been
5831 	     turned off.  */
5832 
5833 	  for (i = j + 1; i < n_reloads; i++)
5834 	    {
5835 	      int s = reload_order[i];
5836 
5837 	      if ((rld[s].in == 0 && rld[s].out == 0
5838 		   && ! rld[s].secondary_p)
5839 		  || rld[s].optional)
5840 		continue;
5841 
5842 	      if ((rld[s].class != rld[r].class
5843 		   && reg_classes_intersect_p (rld[r].class,
5844 					       rld[s].class))
5845 		  || rld[s].nregs < rld[r].nregs)
5846 		break;
5847 	    }
5848 
5849 	  if (i == n_reloads)
5850 	    continue;
5851 
5852 	  allocate_reload_reg (chain, r, j == n_reloads - 1);
5853 #endif
5854 	}
5855 
5856       /* Now allocate reload registers for anything non-optional that
5857 	 didn't get one yet.  */
5858       for (j = 0; j < n_reloads; j++)
5859 	{
5860 	  int r = reload_order[j];
5861 
5862 	  /* Ignore reloads that got marked inoperative.  */
5863 	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
5864 	    continue;
5865 
5866 	  /* Skip reloads that already have a register allocated or are
5867 	     optional.  */
5868 	  if (rld[r].reg_rtx != 0 || rld[r].optional)
5869 	    continue;
5870 
5871 	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
5872 	    break;
5873 	}
5874 
5875       /* If that loop got all the way, we have won.  */
5876       if (j == n_reloads)
5877 	{
5878 	  win = 1;
5879 	  break;
5880 	}
5881 
5882       /* Loop around and try without any inheritance.  */
5883     }
5884 
5885   if (! win)
5886     {
5887       /* First undo everything done by the failed attempt
5888 	 to allocate with inheritance.  */
5889       choose_reload_regs_init (chain, save_reload_reg_rtx);
5890 
5891       /* Some sanity tests to verify that the reloads found in the first
5892 	 pass are identical to the ones we have now.  */
5893       if (chain->n_reloads != n_reloads)
5894 	abort ();
5895 
5896       for (i = 0; i < n_reloads; i++)
5897 	{
5898 	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
5899 	    continue;
5900 	  if (chain->rld[i].when_needed != rld[i].when_needed)
5901 	    abort ();
5902 	  for (j = 0; j < n_spills; j++)
5903 	    if (spill_regs[j] == chain->rld[i].regno)
5904 	      if (! set_reload_reg (j, i))
5905 		failed_reload (chain->insn, i);
5906 	}
5907     }
5908 
5909   /* If we thought we could inherit a reload, because it seemed that
5910      nothing else wanted the same reload register earlier in the insn,
5911      verify that assumption, now that all reloads have been assigned.
5912      Likewise for reloads where reload_override_in has been set.  */
5913 
5914   /* If doing expensive optimizations, do one preliminary pass that doesn't
5915      cancel any inheritance, but removes reloads that have been needed only
5916      for reloads that we know can be inherited.  */
5917   for (pass = flag_expensive_optimizations; pass >= 0; pass--)
5918     {
5919       for (j = 0; j < n_reloads; j++)
5920 	{
5921 	  int r = reload_order[j];
5922 	  rtx check_reg;
5923 	  if (reload_inherited[r] && rld[r].reg_rtx)
5924 	    check_reg = rld[r].reg_rtx;
5925 	  else if (reload_override_in[r]
5926 		   && (GET_CODE (reload_override_in[r]) == REG
5927 		       || GET_CODE (reload_override_in[r]) == SUBREG))
5928 	    check_reg = reload_override_in[r];
5929 	  else
5930 	    continue;
5931 	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
5932 				  rld[r].opnum, rld[r].when_needed, rld[r].in,
5933 				  (reload_inherited[r]
5934 				   ? rld[r].out : const0_rtx),
5935 				  r, 1))
5936 	    {
5937 	      if (pass)
5938 		continue;
5939 	      reload_inherited[r] = 0;
5940 	      reload_override_in[r] = 0;
5941 	    }
5942 	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
5943 	     reload_override_in, then we do not need its related
5944 	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
5945 	     likewise for other reload types.
5946 	     We handle this by removing a reload when its only replacement
5947 	     is mentioned in reload_in of the reload we are going to inherit.
5948 	     Auto_inc expressions are a special case; even if the input is
5949 	     inherited, we still need the address for the output.  We can
5950 	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
5951 	     If we succeeded removing some reload and we are doing a preliminary
5952 	     pass just to remove such reloads, make another pass, since the
5953 	     removal of one reload might allow us to inherit another one.  */
5954 	  else if (rld[r].in
5955 		   && rld[r].out != rld[r].in
5956 		   && remove_address_replacements (rld[r].in) && pass)
5957 	    pass = 2;
5958 	}
5959     }
5960 
5961   /* Now that reload_override_in is known valid,
5962      actually override reload_in.  */
5963   for (j = 0; j < n_reloads; j++)
5964     if (reload_override_in[j])
5965       rld[j].in = reload_override_in[j];
5966 
5967   /* If this reload won't be done because it has been cancelled or is
5968      optional and not inherited, clear reload_reg_rtx so other
5969      routines (such as subst_reloads) don't get confused.  */
5970   for (j = 0; j < n_reloads; j++)
5971     if (rld[j].reg_rtx != 0
5972 	&& ((rld[j].optional && ! reload_inherited[j])
5973 	    || (rld[j].in == 0 && rld[j].out == 0
5974 		&& ! rld[j].secondary_p)))
5975       {
5976 	int regno = true_regnum (rld[j].reg_rtx);
5977 
5978 	if (spill_reg_order[regno] >= 0)
5979 	  clear_reload_reg_in_use (regno, rld[j].opnum,
5980 				   rld[j].when_needed, rld[j].mode);
5981 	rld[j].reg_rtx = 0;
5982 	reload_spill_index[j] = -1;
5983       }
5984 
5985   /* Record which pseudos and which spill regs have output reloads.  */
5986   for (j = 0; j < n_reloads; j++)
5987     {
5988       int r = reload_order[j];
5989 
5990       i = reload_spill_index[r];
5991 
5992       /* I is nonneg if this reload uses a register.
5993 	 If rld[r].reg_rtx is 0, this is an optional reload
5994 	 that we opted to ignore.  */
5995       if (rld[r].out_reg != 0 && GET_CODE (rld[r].out_reg) == REG
5996 	  && rld[r].reg_rtx != 0)
5997 	{
5998 	  int nregno = REGNO (rld[r].out_reg);
5999 	  int nr = 1;
6000 
6001 	  if (nregno < FIRST_PSEUDO_REGISTER)
6002 	    nr = HARD_REGNO_NREGS (nregno, rld[r].mode);
6003 
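	  /* Mark every hard register the destination occupies as having an
	     output reload; a pseudo gets a single entry.  */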
6004 	  while (--nr >= 0)
6005 	    reg_has_output_reload[nregno + nr] = 1;
6006 
6007 	  if (i >= 0)
6008 	    {
6009 	      nr = HARD_REGNO_NREGS (i, rld[r].mode);
6010 	      while (--nr >= 0)
6011 		SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6012 	    }
6013 
6014 	  if (rld[r].when_needed != RELOAD_OTHER
6015 	      && rld[r].when_needed != RELOAD_FOR_OUTPUT
6016 	      && rld[r].when_needed != RELOAD_FOR_INSN)
6017 	    abort ();
6018 	}
6019     }
6020 }
6021 
6022 /* Deallocate the reload register for reload R.  This is called from
6023    remove_address_replacements.  */
6024 
6025 void
6026 deallocate_reload_reg (r)
6027      int r;
6028 {
6029   int regno;
6030 
6031   if (! rld[r].reg_rtx)
6032     return;
6033   regno = true_regnum (rld[r].reg_rtx);
6034   rld[r].reg_rtx = 0;
6035   if (spill_reg_order[regno] >= 0)
6036     clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
6037 			     rld[r].mode);
6038   reload_spill_index[r] = -1;
6039 }
6040 
6041 /* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
6042    reloads of the same item for fear that we might not have enough reload
6043    registers. However, normally they will get the same reload register
6044    and hence actually need not be loaded twice.
6045 
6046    Here we check for the most common case of this phenomenon: when we have
6047    a number of reloads for the same object, each of which was allocated
6048    the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6049    reload, and is not modified in the insn itself.  If we find such,
6050    merge all the reloads and set the resulting reload to RELOAD_OTHER.
6051    This will not increase the number of spill registers needed and will
6052    prevent redundant code.  */
6053 
6054 static void
6055 merge_assigned_reloads (insn)
6056      rtx insn;
6057 {
6058   int i, j;
6059 
6060   /* Scan all the reloads looking for ones that only load values and
6061      are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6062      assigned and not modified by INSN.  */
6063 
6064   for (i = 0; i < n_reloads; i++)
6065     {
6066       int conflicting_input = 0;
6067       int max_input_address_opnum = -1;
6068       int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
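      /* CONFLICTING_INPUT notes that some RELOAD_FOR_INPUT reload uses this
	 reload register for a different value, and
	 MIN_CONFLICTING_INPUT_OPNUM is the smallest operand number of such a
	 reload.  MAX_INPUT_ADDRESS_OPNUM is the largest operand number of a
	 RELOAD_FOR_INPUT_ADDRESS reload that uses the register.  The merge
	 below is only done when every such address reload belongs to an
	 operand no later than the first conflicting input.  */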
6069 
6070       if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6071 	  || rld[i].out != 0 || rld[i].reg_rtx == 0
6072 	  || reg_set_p (rld[i].reg_rtx, insn))
6073 	continue;
6074 
6075       /* Look at all other reloads.  Ensure that the only use of this
6076 	 reload_reg_rtx is in a reload that just loads the same value
6077 	 as we do.  Note that any secondary reloads must be of the identical
6078 	 class since the values, modes, and result registers are the
6079 	 same, so we need not do anything with any secondary reloads.  */
6080 
6081       for (j = 0; j < n_reloads; j++)
6082 	{
6083 	  if (i == j || rld[j].reg_rtx == 0
6084 	      || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6085 					    rld[i].reg_rtx))
6086 	    continue;
6087 
6088 	  if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6089 	      && rld[j].opnum > max_input_address_opnum)
6090 	    max_input_address_opnum = rld[j].opnum;
6091 
6092 	  /* If the reload regs aren't exactly the same (e.g., different modes)
6093 	     or if the values are different, we can't merge this reload.
6094 	     But if it is an input reload, we might still merge
6095 	     RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads.  */
6096 
6097 	  if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6098 	      || rld[j].out != 0 || rld[j].in == 0
6099 	      || ! rtx_equal_p (rld[i].in, rld[j].in))
6100 	    {
6101 	      if (rld[j].when_needed != RELOAD_FOR_INPUT
6102 		  || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6103 		       || rld[i].opnum > rld[j].opnum)
6104 		      && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6105 		break;
6106 	      conflicting_input = 1;
6107 	      if (min_conflicting_input_opnum > rld[j].opnum)
6108 		min_conflicting_input_opnum = rld[j].opnum;
6109 	    }
6110 	}
6111 
6112       /* If all is OK, merge the reloads.  Only set this to RELOAD_OTHER if
6113 	 we, in fact, found any matching reloads.  */
6114 
6115       if (j == n_reloads
6116 	  && max_input_address_opnum <= min_conflicting_input_opnum)
6117 	{
6118 	  for (j = 0; j < n_reloads; j++)
6119 	    if (i != j && rld[j].reg_rtx != 0
6120 		&& rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6121 		&& (! conflicting_input
6122 		    || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6123 		    || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6124 	      {
6125 		rld[i].when_needed = RELOAD_OTHER;
6126 		rld[j].in = 0;
6127 		reload_spill_index[j] = -1;
6128 		transfer_replacements (i, j);
6129 	      }
6130 
6131 	  /* If this is now RELOAD_OTHER, look for any reloads that load
6132 	     parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6133 	     if they were for inputs, RELOAD_OTHER for outputs.  Note that
6134 	     this test is equivalent to looking for reloads for this operand
6135 	     number.  */
6136 	  /* We must take special care when there are two or more reloads to
6137 	     be merged and a RELOAD_FOR_OUTPUT_ADDRESS reload that loads the
6138 	     same value or a part of it; we must not change its type if there
6139 	     is a conflicting input.  */
6140 
6141 	  if (rld[i].when_needed == RELOAD_OTHER)
6142 	    for (j = 0; j < n_reloads; j++)
6143 	      if (rld[j].in != 0
6144 		  && rld[j].when_needed != RELOAD_OTHER
6145 		  && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6146 		  && (! conflicting_input
6147 		      || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6148 		      || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6149 		  && reg_overlap_mentioned_for_reload_p (rld[j].in,
6150 							 rld[i].in))
6151 		{
6152 		  int k;
6153 
6154 		  rld[j].when_needed
6155 		    = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6156 			|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6157 		       ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6158 
6159 		  /* Check to see if we accidentally converted two reloads
6160 		     that use the same reload register with different inputs
6161 		     to the same type.  If so, the resulting code won't work,
6162 		     so abort.  */
6163 		  if (rld[j].reg_rtx)
6164 		    for (k = 0; k < j; k++)
6165 		      if (rld[k].in != 0 && rld[k].reg_rtx != 0
6166 			  && rld[k].when_needed == rld[j].when_needed
6167 			  && rtx_equal_p (rld[k].reg_rtx, rld[j].reg_rtx)
6168 			  && ! rtx_equal_p (rld[k].in, rld[j].in))
6169 			abort ();
6170 		}
6171 	}
6172     }
6173 }
6174 
6175 /* These arrays are filled by emit_reload_insns and its subroutines.  */
6176 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6177 static rtx other_input_address_reload_insns = 0;
6178 static rtx other_input_reload_insns = 0;
6179 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6180 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6181 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6182 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6183 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6184 static rtx operand_reload_insns = 0;
6185 static rtx other_operand_reload_insns = 0;
6186 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6187 
6188 /* Values to be put in spill_reg_store are put here first.  */
6189 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6190 static HARD_REG_SET reg_reloaded_died;
6191 
6192 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6193    has the number J.  OLD contains the value to be used as input.  */
6194 
6195 static void
6196 emit_input_reload_insns (chain, rl, old, j)
6197      struct insn_chain *chain;
6198      struct reload *rl;
6199      rtx old;
6200      int j;
6201 {
6202   rtx insn = chain->insn;
6203   rtx reloadreg = rl->reg_rtx;
6204   rtx oldequiv_reg = 0;
6205   rtx oldequiv = 0;
6206   int special = 0;
6207   enum machine_mode mode;
6208   rtx *where;
6209 
6210   /* Determine the mode to reload in.
6211      This is very tricky because we have three to choose from.
6212      There is the mode the insn operand wants (rl->inmode).
6213      There is the mode of the reload register RELOADREG.
6214      There is the intrinsic mode of the operand, which we could find
6215      by stripping some SUBREGs.
6216      It turns out that RELOADREG's mode is irrelevant:
6217      we can change that arbitrarily.
6218 
6219      Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6220      then the reload reg may not support QImode moves, so use SImode.
6221      If foo is in memory due to spilling a pseudo reg, this is safe,
6222      because the QImode value is in the least significant part of a
6223      slot big enough for a SImode.  If foo is some other sort of
6224      memory reference, then it is impossible to reload this case,
6225      so previous passes had better make sure this never happens.
6226 
6227      Then consider a one-word union which has SImode and one of whose
6228      members is a float, fetched as (SUBREG:SF union:SI).
6229      We must fetch that as SFmode because we could be loading into
6230      a float-only register.  In this case OLD's mode is correct.
6231 
6232      Consider an immediate integer: it has VOIDmode.  Here we need
6233      to get a mode from something else.
6234 
6235      In some cases, there is a fourth mode, the operand's
6236      containing mode.  If the insn specifies a containing mode for
6237      this operand, it overrides all others.
6238 
6239      I am not sure whether the algorithm here is always right,
6240      but it does the right things in those cases.  */
6241 
6242   mode = GET_MODE (old);
6243   if (mode == VOIDmode)
6244     mode = rl->inmode;
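  /* (For example, a CONST_INT input has VOIDmode, so the mode the insn
     operand wants is used instead.)  */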
6245 
6246 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6247   /* If we need a secondary register for this operation, see if
6248      the value is already in a register in that class.  Don't
6249      do this if the secondary register will be used as a scratch
6250      register.  */
6251 
6252   if (rl->secondary_in_reload >= 0
6253       && rl->secondary_in_icode == CODE_FOR_nothing
6254       && optimize)
6255     oldequiv
6256       = find_equiv_reg (old, insn,
6257 			rld[rl->secondary_in_reload].class,
6258 			-1, NULL, 0, mode);
6259 #endif
6260 
6261   /* If reloading from memory, see if there is a register
6262      that already holds the same value.  If so, reload from there.
6263      We can pass 0 as the reload_reg_p argument because
6264      any other reload has either already been emitted,
6265      in which case find_equiv_reg will see the reload-insn,
6266      or has yet to be emitted, in which case it doesn't matter
6267      because we will use this equiv reg right away.  */
6268 
6269   if (oldequiv == 0 && optimize
6270       && (GET_CODE (old) == MEM
6271 	  || (GET_CODE (old) == REG
6272 	      && REGNO (old) >= FIRST_PSEUDO_REGISTER
6273 	      && reg_renumber[REGNO (old)] < 0)))
6274     oldequiv = find_equiv_reg (old, insn, ALL_REGS, -1, NULL, 0, mode);
6275 
6276   if (oldequiv)
6277     {
6278       unsigned int regno = true_regnum (oldequiv);
6279 
6280       /* Don't use OLDEQUIV if any other reload changes it at an
6281 	 earlier stage of this insn or at this stage.  */
6282       if (! free_for_value_p (regno, rl->mode, rl->opnum, rl->when_needed,
6283 			      rl->in, const0_rtx, j, 0))
6284 	oldequiv = 0;
6285 
6286       /* If it is no cheaper to copy from OLDEQUIV into the
6287 	 reload register than it would be to move from memory,
6288 	 don't use it. Likewise, if we need a secondary register
6289 	 or memory.  */
6290 
6291       if (oldequiv != 0
6292 	  && ((REGNO_REG_CLASS (regno) != rl->class
6293 	       && (REGISTER_MOVE_COST (mode, REGNO_REG_CLASS (regno),
6294 				       rl->class)
6295 		   >= MEMORY_MOVE_COST (mode, rl->class, 1)))
6296 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6297 	      || (SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6298 						mode, oldequiv)
6299 		  != NO_REGS)
6300 #endif
6301 #ifdef SECONDARY_MEMORY_NEEDED
6302 	      || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6303 					  rl->class,
6304 					  mode)
6305 #endif
6306 	      ))
6307 	oldequiv = 0;
6308     }
6309 
6310   /* delete_output_reload is only invoked properly if old contains
6311      the original pseudo register.  Since this is replaced with a
6312      hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6313      find the pseudo in RELOAD_IN_REG.  */
6314   if (oldequiv == 0
6315       && reload_override_in[j]
6316       && GET_CODE (rl->in_reg) == REG)
6317     {
6318       oldequiv = old;
6319       old = rl->in_reg;
6320     }
6321   if (oldequiv == 0)
6322     oldequiv = old;
6323   else if (GET_CODE (oldequiv) == REG)
6324     oldequiv_reg = oldequiv;
6325   else if (GET_CODE (oldequiv) == SUBREG)
6326     oldequiv_reg = SUBREG_REG (oldequiv);
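  /* At this point OLDEQUIV is the rtx we will actually load from (OLD itself
     if no cheaper equivalent was found) and OLDEQUIV_REG is its underlying
     register, if any.  */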
6327 
6328   /* If we are reloading from a register that was recently stored in
6329      with an output-reload, see if we can prove there was
6330      actually no need to store the old value in it.  */
6331 
6332   if (optimize && GET_CODE (oldequiv) == REG
6333       && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6334       && spill_reg_store[REGNO (oldequiv)]
6335       && GET_CODE (old) == REG
6336       && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6337 	  || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6338 			  rl->out_reg)))
6339     delete_output_reload (insn, j, REGNO (oldequiv));
6340 
6341   /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6342      then load RELOADREG from OLDEQUIV.  Note that we cannot use
6343      gen_lowpart_common since it can do the wrong thing when
6344      RELOADREG has a multi-word mode.  Note that RELOADREG
6345      must always be a REG here.  */
6346 
6347   if (GET_MODE (reloadreg) != mode)
6348     reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6349   while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6350     oldequiv = SUBREG_REG (oldequiv);
6351   if (GET_MODE (oldequiv) != VOIDmode
6352       && mode != GET_MODE (oldequiv))
6353     oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6354 
6355   /* Switch to the right place to emit the reload insns.  */
6356   switch (rl->when_needed)
6357     {
6358     case RELOAD_OTHER:
6359       where = &other_input_reload_insns;
6360       break;
6361     case RELOAD_FOR_INPUT:
6362       where = &input_reload_insns[rl->opnum];
6363       break;
6364     case RELOAD_FOR_INPUT_ADDRESS:
6365       where = &input_address_reload_insns[rl->opnum];
6366       break;
6367     case RELOAD_FOR_INPADDR_ADDRESS:
6368       where = &inpaddr_address_reload_insns[rl->opnum];
6369       break;
6370     case RELOAD_FOR_OUTPUT_ADDRESS:
6371       where = &output_address_reload_insns[rl->opnum];
6372       break;
6373     case RELOAD_FOR_OUTADDR_ADDRESS:
6374       where = &outaddr_address_reload_insns[rl->opnum];
6375       break;
6376     case RELOAD_FOR_OPERAND_ADDRESS:
6377       where = &operand_reload_insns;
6378       break;
6379     case RELOAD_FOR_OPADDR_ADDR:
6380       where = &other_operand_reload_insns;
6381       break;
6382     case RELOAD_FOR_OTHER_ADDRESS:
6383       where = &other_input_address_reload_insns;
6384       break;
6385     default:
6386       abort ();
6387     }
6388 
6389   push_to_sequence (*where);
6390 
6391   /* Auto-increment addresses must be reloaded in a special way.  */
6392   if (rl->out && ! rl->out_reg)
6393     {
6394 	 We are not going to bother supporting the case where an
6395 	 incremented register can't be copied directly from
6396 	 OLDEQUIV since this seems highly unlikely.  */
6397       if (rl->secondary_in_reload >= 0)
6398 	abort ();
6399 
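      /* If the reload was inherited, the reload register already holds the
	 value, so use it as the source for the increment sequence below.  */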
6400       if (reload_inherited[j])
6401 	oldequiv = reloadreg;
6402 
6403       old = XEXP (rl->in_reg, 0);
6404 
6405       if (optimize && GET_CODE (oldequiv) == REG
6406 	  && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6407 	  && spill_reg_store[REGNO (oldequiv)]
6408 	  && GET_CODE (old) == REG
6409 	  && (dead_or_set_p (insn,
6410 			     spill_reg_stored_to[REGNO (oldequiv)])
6411 	      || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6412 			      old)))
6413 	delete_output_reload (insn, j, REGNO (oldequiv));
6414 
6415       /* Prevent normal processing of this reload.  */
6416       special = 1;
6417       /* Output a special code sequence for this case.  */
6418       new_spill_reg_store[REGNO (reloadreg)]
6419 	= inc_for_reload (reloadreg, oldequiv, rl->out,
6420 			  rl->inc);
6421     }
6422 
6423   /* If we are reloading a pseudo-register that was set by the previous
6424      insn, see if we can get rid of that pseudo-register entirely
6425      by redirecting the previous insn into our reload register.  */
6426 
6427   else if (optimize && GET_CODE (old) == REG
6428 	   && REGNO (old) >= FIRST_PSEUDO_REGISTER
6429 	   && dead_or_set_p (insn, old)
6430 	   /* This is unsafe if some other reload
6431 	      uses the same reg first.  */
6432 	   && ! conflicts_with_override (reloadreg)
6433 	   && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6434 				rl->when_needed, old, rl->out, j, 0))
6435     {
6436       rtx temp = PREV_INSN (insn);
6437       while (temp && GET_CODE (temp) == NOTE)
6438 	temp = PREV_INSN (temp);
6439       if (temp
6440 	  && GET_CODE (temp) == INSN
6441 	  && GET_CODE (PATTERN (temp)) == SET
6442 	  && SET_DEST (PATTERN (temp)) == old
6443 	  /* Make sure we can access insn_operand_constraint.  */
6444 	  && asm_noperands (PATTERN (temp)) < 0
6445 	  /* This is unsafe if the operand occurs more than once in the
6446 	     current insn.  Perhaps some occurrences aren't reloaded.  */
6447 	  && count_occurrences (PATTERN (insn), old, 0) == 1)
6448 	{
6449 	  rtx old = SET_DEST (PATTERN (temp));
6450 	  /* Store into the reload register instead of the pseudo.  */
6451 	  SET_DEST (PATTERN (temp)) = reloadreg;
6452 
6453 	  /* Verify that resulting insn is valid.  */
6454 	  extract_insn (temp);
6455 	  if (constrain_operands (1))
6456 	    {
6457 	      /* If the previous insn is an output reload, the source is
6458 		 a reload register, and its spill_reg_store entry will
6459 		 contain the previous destination.  This is now
6460 		 invalid.  */
6461 	      if (GET_CODE (SET_SRC (PATTERN (temp))) == REG
6462 		  && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6463 		{
6464 		  spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6465 		  spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6466 		}
6467 
6468 	      /* If these are the only uses of the pseudo reg,
6469 		 pretend for GDB it lives in the reload reg we used.  */
6470 	      if (REG_N_DEATHS (REGNO (old)) == 1
6471 		  && REG_N_SETS (REGNO (old)) == 1)
6472 		{
6473 		  reg_renumber[REGNO (old)] = REGNO (rl->reg_rtx);
6474 		  alter_reg (REGNO (old), -1);
6475 		}
6476 	      special = 1;
6477 	    }
6478 	  else
6479 	    {
6480 	      SET_DEST (PATTERN (temp)) = old;
6481 	    }
6482 	}
6483     }
6484 
6485   /* We can't do that, so output an insn to load RELOADREG.  */
6486 
6487 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6488   /* If we have a secondary reload, pick up the secondary register
6489      and icode, if any.  If OLDEQUIV and OLD are different or
6490      if this is an in-out reload, recompute whether or not we
6491      still need a secondary register and what the icode should
6492      be.  If we still need a secondary register and the class or
6493      icode is different, go back to reloading from OLD if using
6494      OLDEQUIV means that we got the wrong type of register.  We
6495      cannot have different class or icode due to an in-out reload
6496      because we don't make such reloads when both the input and
6497      output need secondary reload registers.  */
6498 
6499   if (! special && rl->secondary_in_reload >= 0)
6500     {
6501       rtx second_reload_reg = 0;
6502       int secondary_reload = rl->secondary_in_reload;
6503       rtx real_oldequiv = oldequiv;
6504       rtx real_old = old;
6505       rtx tmp;
6506       enum insn_code icode;
6507 
6508       /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6509 	 and similarly for OLD.
6510 	 See comments in get_secondary_reload in reload.c.  */
6511       /* If it is a pseudo that cannot be replaced with its
6512 	 equivalent MEM, we must fall back to reload_in, which
6513 	 will have all the necessary substitutions registered.
6514 	 Likewise for a pseudo that can't be replaced with its
6515 	 equivalent constant.
6516 
6517 	 Take extra care for subregs of such pseudos.  Note that
6518 	 we cannot use reg_equiv_mem in this case because it is
6519 	 not in the right mode.  */
6520 
6521       tmp = oldequiv;
6522       if (GET_CODE (tmp) == SUBREG)
6523 	tmp = SUBREG_REG (tmp);
6524       if (GET_CODE (tmp) == REG
6525 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6526 	  && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6527 	      || reg_equiv_constant[REGNO (tmp)] != 0))
6528 	{
6529 	  if (! reg_equiv_mem[REGNO (tmp)]
6530 	      || num_not_at_initial_offset
6531 	      || GET_CODE (oldequiv) == SUBREG)
6532 	    real_oldequiv = rl->in;
6533 	  else
6534 	    real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6535 	}
6536 
6537       tmp = old;
6538       if (GET_CODE (tmp) == SUBREG)
6539 	tmp = SUBREG_REG (tmp);
6540       if (GET_CODE (tmp) == REG
6541 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6542 	  && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6543 	      || reg_equiv_constant[REGNO (tmp)] != 0))
6544 	{
6545 	  if (! reg_equiv_mem[REGNO (tmp)]
6546 	      || num_not_at_initial_offset
6547 	      || GET_CODE (old) == SUBREG)
6548 	    real_old = rl->in;
6549 	  else
6550 	    real_old = reg_equiv_mem[REGNO (tmp)];
6551 	}
6552 
6553       second_reload_reg = rld[secondary_reload].reg_rtx;
6554       icode = rl->secondary_in_icode;
6555 
6556       if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6557 	  || (rl->in != 0 && rl->out != 0))
6558 	{
6559 	  enum reg_class new_class
6560 	    = SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6561 					    mode, real_oldequiv);
6562 
6563 	  if (new_class == NO_REGS)
6564 	    second_reload_reg = 0;
6565 	  else
6566 	    {
6567 	      enum insn_code new_icode;
6568 	      enum machine_mode new_mode;
6569 
6570 	      if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6571 				       REGNO (second_reload_reg)))
6572 		oldequiv = old, real_oldequiv = real_old;
6573 	      else
6574 		{
6575 		  new_icode = reload_in_optab[(int) mode];
6576 		  if (new_icode != CODE_FOR_nothing
6577 		      && ((insn_data[(int) new_icode].operand[0].predicate
6578 			   && ! ((*insn_data[(int) new_icode].operand[0].predicate)
6579 				 (reloadreg, mode)))
6580 			  || (insn_data[(int) new_icode].operand[1].predicate
6581 			      && ! ((*insn_data[(int) new_icode].operand[1].predicate)
6582 				    (real_oldequiv, mode)))))
6583 		    new_icode = CODE_FOR_nothing;
6584 
6585 		  if (new_icode == CODE_FOR_nothing)
6586 		    new_mode = mode;
6587 		  else
6588 		    new_mode = insn_data[(int) new_icode].operand[2].mode;
6589 
6590 		  if (GET_MODE (second_reload_reg) != new_mode)
6591 		    {
6592 		      if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6593 					       new_mode))
6594 			oldequiv = old, real_oldequiv = real_old;
6595 		      else
6596 			second_reload_reg
6597 			  = gen_rtx_REG (new_mode,
6598 					 REGNO (second_reload_reg));
6599 		    }
6600 		}
6601 	    }
6602 	}
6603 
6604       /* If we still need a secondary reload register, check
6605 	 to see if it is being used as a scratch or intermediate
6606 	 register and generate code appropriately.  If we need
6607 	 a scratch register, use REAL_OLDEQUIV since the form of
6608 	 the insn may depend on the actual address if it is
6609 	 a MEM.  */
6610 
6611       if (second_reload_reg)
6612 	{
6613 	  if (icode != CODE_FOR_nothing)
6614 	    {
6615 	      emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6616 					  second_reload_reg));
6617 	      special = 1;
6618 	    }
6619 	  else
6620 	    {
6621 	      /* See if we need a scratch register to load the
6622 		 intermediate register (a tertiary reload).  */
6623 	      enum insn_code tertiary_icode
6624 		= rld[secondary_reload].secondary_in_icode;
6625 
6626 	      if (tertiary_icode != CODE_FOR_nothing)
6627 		{
6628 		  rtx third_reload_reg
6629 		    = rld[rld[secondary_reload].secondary_in_reload].reg_rtx;
6630 
6631 		  emit_insn ((GEN_FCN (tertiary_icode)
6632 			      (second_reload_reg, real_oldequiv,
6633 			       third_reload_reg)));
6634 		}
6635 	      else
6636 		gen_reload (second_reload_reg, real_oldequiv,
6637 			    rl->opnum,
6638 			    rl->when_needed);
6639 
6640 	      oldequiv = second_reload_reg;
6641 	    }
6642 	}
6643     }
6644 #endif
6645 
6646   if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6647     {
6648       rtx real_oldequiv = oldequiv;
6649 
6650       if ((GET_CODE (oldequiv) == REG
6651 	   && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6652 	   && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
6653 	       || reg_equiv_constant[REGNO (oldequiv)] != 0))
6654 	  || (GET_CODE (oldequiv) == SUBREG
6655 	      && GET_CODE (SUBREG_REG (oldequiv)) == REG
6656 	      && (REGNO (SUBREG_REG (oldequiv))
6657 		  >= FIRST_PSEUDO_REGISTER)
6658 	      && ((reg_equiv_memory_loc
6659 		   [REGNO (SUBREG_REG (oldequiv))] != 0)
6660 		  || (reg_equiv_constant
6661 		      [REGNO (SUBREG_REG (oldequiv))] != 0)))
6662 	  || (CONSTANT_P (oldequiv)
6663 	      && (PREFERRED_RELOAD_CLASS (oldequiv,
6664 					  REGNO_REG_CLASS (REGNO (reloadreg)))
6665 		  == NO_REGS)))
6666 	real_oldequiv = rl->in;
6667       gen_reload (reloadreg, real_oldequiv, rl->opnum,
6668 		  rl->when_needed);
6669     }
6670 
6671   if (flag_non_call_exceptions)
6672     copy_eh_notes (insn, get_insns ());
6673 
6674   /* End this sequence.  */
6675   *where = get_insns ();
6676   end_sequence ();
6677 
6678   /* Update reload_override_in so that delete_address_reloads_1
6679      can see the actual register usage.  */
6680   if (oldequiv_reg)
6681     reload_override_in[j] = oldequiv;
6682 }
6683 
6684 /* Generate insns for the output reload RL, which is for the insn described
6685    by CHAIN and has the number J.  */
6686 static void
6687 emit_output_reload_insns (chain, rl, j)
6688      struct insn_chain *chain;
6689      struct reload *rl;
6690      int j;
6691 {
6692   rtx reloadreg = rl->reg_rtx;
6693   rtx insn = chain->insn;
6694   int special = 0;
6695   rtx old = rl->out;
6696   enum machine_mode mode = GET_MODE (old);
6697   rtx p;
6698 
6699   if (rl->when_needed == RELOAD_OTHER)
6700     start_sequence ();
6701   else
6702     push_to_sequence (output_reload_insns[rl->opnum]);
6703 
6704   /* Determine the mode to reload in.
6705      See comments above (for input reloading).  */
6706 
6707   if (mode == VOIDmode)
6708     {
6709       /* VOIDmode should never happen for an output.  */
6710       if (asm_noperands (PATTERN (insn)) < 0)
6711 	/* It's the compiler's fault.  */
6712 	fatal_insn ("VOIDmode on an output", insn);
6713       error_for_asm (insn, "output operand is constant in `asm'");
6714       /* Prevent crash--use something we know is valid.  */
6715       mode = word_mode;
6716       old = gen_rtx_REG (mode, REGNO (reloadreg));
6717     }
6718 
6719   if (GET_MODE (reloadreg) != mode)
6720     reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6721 
6722 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6723 
6724   /* If we need two reload regs, set RELOADREG to the intermediate
6725      one, since it will be stored into OLD.  We might need a secondary
6726      register only for an input reload, so check again here.  */
6727 
6728   if (rl->secondary_out_reload >= 0)
6729     {
6730       rtx real_old = old;
6731 
6732       if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6733 	  && reg_equiv_mem[REGNO (old)] != 0)
6734 	real_old = reg_equiv_mem[REGNO (old)];
6735 
6736       if ((SECONDARY_OUTPUT_RELOAD_CLASS (rl->class,
6737 					  mode, real_old)
6738 	   != NO_REGS))
6739 	{
6740 	  rtx second_reloadreg = reloadreg;
6741 	  reloadreg = rld[rl->secondary_out_reload].reg_rtx;
6742 
6743 	  /* See if RELOADREG is to be used as a scratch register
6744 	     or as an intermediate register.  */
6745 	  if (rl->secondary_out_icode != CODE_FOR_nothing)
6746 	    {
6747 	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
6748 			  (real_old, second_reloadreg, reloadreg)));
6749 	      special = 1;
6750 	    }
6751 	  else
6752 	    {
6753 	      /* See if we need both a scratch and intermediate reload
6754 		 register.  */
6755 
6756 	      int secondary_reload = rl->secondary_out_reload;
6757 	      enum insn_code tertiary_icode
6758 		= rld[secondary_reload].secondary_out_icode;
6759 
6760 	      if (GET_MODE (reloadreg) != mode)
6761 		reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6762 
6763 	      if (tertiary_icode != CODE_FOR_nothing)
6764 		{
6765 		  rtx third_reloadreg
6766 		    = rld[rld[secondary_reload].secondary_out_reload].reg_rtx;
6767 		  rtx tem;
6768 
6769 		  /* Copy the primary reload reg to the secondary reload reg
6770 		     (note that these have been swapped above), then copy the
6771 		     secondary reload reg to OLD using our insn.  */
6772 
6773 		  /* If REAL_OLD is a paradoxical SUBREG, remove it
6774 		     and try to put the opposite SUBREG on
6775 		     RELOADREG.  */
6776 		  if (GET_CODE (real_old) == SUBREG
6777 		      && (GET_MODE_SIZE (GET_MODE (real_old))
6778 			  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6779 		      && 0 != (tem = gen_lowpart_common
6780 			       (GET_MODE (SUBREG_REG (real_old)),
6781 				reloadreg)))
6782 		    real_old = SUBREG_REG (real_old), reloadreg = tem;
6783 
6784 		  gen_reload (reloadreg, second_reloadreg,
6785 			      rl->opnum, rl->when_needed);
6786 		  emit_insn ((GEN_FCN (tertiary_icode)
6787 			      (real_old, reloadreg, third_reloadreg)));
6788 		  special = 1;
6789 		}
6790 
6791 	      else
6792 		/* Copy between the reload regs here and then to
6793 		   OUT later.  */
6794 
6795 		gen_reload (reloadreg, second_reloadreg,
6796 			    rl->opnum, rl->when_needed);
6797 	    }
6798 	}
6799     }
6800 #endif
6801 
6802   /* Output the last reload insn.  */
6803   if (! special)
6804     {
6805       rtx set;
6806 
6807       /* Don't output the last reload if OLD is not the destination of
6808 	 INSN but appears in its source and is clobbered by INSN.  */
6809       if (! flag_expensive_optimizations
6810 	  || GET_CODE (old) != REG
6811 	  || !(set = single_set (insn))
6812 	  || rtx_equal_p (old, SET_DEST (set))
6813 	  || !reg_mentioned_p (old, SET_SRC (set))
6814 	  || !regno_clobbered_p (REGNO (old), insn, rl->mode, 0))
6815 	gen_reload (old, reloadreg, rl->opnum,
6816 		    rl->when_needed);
6817     }
6818 
6819   /* Look at all insns we emitted, just to be safe.  */
6820   for (p = get_insns (); p; p = NEXT_INSN (p))
6821     if (INSN_P (p))
6822       {
6823 	rtx pat = PATTERN (p);
6824 
6825 	/* If this output reload doesn't come from a spill reg,
6826 	   clear any memory of reloaded copies of the pseudo reg.
6827 	   If this output reload comes from a spill reg,
6828 	   reg_has_output_reload will make this do nothing.  */
6829 	note_stores (pat, forget_old_reloads_1, NULL);
6830 
6831 	if (reg_mentioned_p (rl->reg_rtx, pat))
6832 	  {
6833 	    rtx set = single_set (insn);
6834 	    if (reload_spill_index[j] < 0
6835 		&& set
6836 		&& SET_SRC (set) == rl->reg_rtx)
6837 	      {
6838 		int src = REGNO (SET_SRC (set));
6839 
6840 		reload_spill_index[j] = src;
6841 		SET_HARD_REG_BIT (reg_is_output_reload, src);
6842 		if (find_regno_note (insn, REG_DEAD, src))
6843 		  SET_HARD_REG_BIT (reg_reloaded_died, src);
6844 	      }
6845 	    if (REGNO (rl->reg_rtx) < FIRST_PSEUDO_REGISTER)
6846 	      {
6847 		int s = rl->secondary_out_reload;
6848 		set = single_set (p);
6849 		/* If this reload copies only to the secondary reload
6850 		   register, the secondary reload does the actual
6851 		   store.  */
6852 		if (s >= 0 && set == NULL_RTX)
6853 		  /* We can't tell what function the secondary reload
6854 		     has and where the actual store to the pseudo is
6855 		     made; leave new_spill_reg_store alone.  */
6856 		  ;
6857 		else if (s >= 0
6858 			 && SET_SRC (set) == rl->reg_rtx
6859 			 && SET_DEST (set) == rld[s].reg_rtx)
6860 		  {
6861 		    /* Usually the next instruction will be the
6862 		       secondary reload insn;  if we can confirm
6863 		       that it is, setting new_spill_reg_store to
6864 		       that insn will allow an extra optimization.  */
6865 		    rtx s_reg = rld[s].reg_rtx;
6866 		    rtx next = NEXT_INSN (p);
6867 		    rld[s].out = rl->out;
6868 		    rld[s].out_reg = rl->out_reg;
6869 		    set = single_set (next);
6870 		    if (set && SET_SRC (set) == s_reg
6871 			&& ! new_spill_reg_store[REGNO (s_reg)])
6872 		      {
6873 			SET_HARD_REG_BIT (reg_is_output_reload,
6874 					  REGNO (s_reg));
6875 			new_spill_reg_store[REGNO (s_reg)] = next;
6876 		      }
6877 		  }
6878 		else
6879 		  new_spill_reg_store[REGNO (rl->reg_rtx)] = p;
6880 	      }
6881 	  }
6882       }
6883 
6884   if (rl->when_needed == RELOAD_OTHER)
6885     {
6886       emit_insn (other_output_reload_insns[rl->opnum]);
6887       other_output_reload_insns[rl->opnum] = get_insns ();
6888     }
6889   else
6890     output_reload_insns[rl->opnum] = get_insns ();
6891 
6892   if (flag_non_call_exceptions)
6893     copy_eh_notes (insn, get_insns ());
6894 
6895   end_sequence ();
6896 }
6897 
6898 /* Do input reloading for reload RL, which is for the insn described by CHAIN
6899    and has the number J.  */
6900 static void
6901 do_input_reload (chain, rl, j)
6902      struct insn_chain *chain;
6903      struct reload *rl;
6904      int j;
6905 {
6906   int expect_occurrences = 1;
6907   rtx insn = chain->insn;
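  /* OLD is what we will reload from: when the input is a MEM, prefer the
     original register recorded in reload_in_reg.  */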
6908   rtx old = (rl->in && GET_CODE (rl->in) == MEM
6909 	     ? rl->in_reg : rl->in);
6910 
6911   if (old != 0
6912       /* AUTO_INC reloads need to be handled even if inherited.  We got an
6913 	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
6914       && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
6915       && ! rtx_equal_p (rl->reg_rtx, old)
6916       && rl->reg_rtx != 0)
6917     emit_input_reload_insns (chain, rld + j, old, j);
6918 
6919   /* When inheriting a wider reload, we have a MEM in rl->in,
6920      e.g. inheriting a SImode output reload for
6921      (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
6922   if (optimize && reload_inherited[j] && rl->in
6923       && GET_CODE (rl->in) == MEM
6924       && GET_CODE (rl->in_reg) == MEM
6925       && reload_spill_index[j] >= 0
6926       && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6927     {
6928       expect_occurrences
6929 	= count_occurrences (PATTERN (insn), rl->in, 0) == 1 ? 0 : -1;
6930       rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6931     }
6932 
6933   /* If we are reloading a register that was recently stored in with an
6934      output-reload, see if we can prove there was
6935      actually no need to store the old value in it.  */
6936 
6937   if (optimize
6938       /* Only attempt this for input reloads; for RELOAD_OTHER we miss
6939 	 that there may be multiple uses of the previous output reload.
6940 	 Restricting to RELOAD_FOR_INPUT is mostly paranoia.  */
6941       && rl->when_needed == RELOAD_FOR_INPUT
6942       && (reload_inherited[j] || reload_override_in[j])
6943       && rl->reg_rtx
6944       && GET_CODE (rl->reg_rtx) == REG
6945       && spill_reg_store[REGNO (rl->reg_rtx)] != 0
6946 #if 0
6947       /* There doesn't seem to be any reason to restrict this to pseudos
6948 	 and doing so loses in the case where we are copying from a
6949 	 register of the wrong class.  */
6950       && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
6951 	  >= FIRST_PSEUDO_REGISTER)
6952 #endif
6953       /* The insn might already have some references to stack slots
6954 	 replaced by MEMs, while reload_out_reg still names the
6955 	 original pseudo.  */
6956       && (dead_or_set_p (insn,
6957 			 spill_reg_stored_to[REGNO (rl->reg_rtx)])
6958 	  || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
6959 			  rl->out_reg)))
6960     delete_output_reload (insn, j, REGNO (rl->reg_rtx));
6961 }
6962 
6963 /* Do output reloading for reload RL, which is for the insn described by
6964    CHAIN and has the number J.
6965    ??? At some point we need to support handling output reloads of
6966    JUMP_INSNs or insns that set cc0.  */
6967 static void
6968 do_output_reload (chain, rl, j)
6969      struct insn_chain *chain;
6970      struct reload *rl;
6971      int j;
6972 {
6973   rtx note, old;
6974   rtx insn = chain->insn;
6975   /* If this is an output reload that stores something that is
6976      not loaded in this same reload, see if we can eliminate a previous
6977      store.  */
6978   rtx pseudo = rl->out_reg;
6979 
6980   if (pseudo
6981       && optimize
6982       && GET_CODE (pseudo) == REG
6983       && ! rtx_equal_p (rl->in_reg, pseudo)
6984       && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
6985       && reg_last_reload_reg[REGNO (pseudo)])
6986     {
6987       int pseudo_no = REGNO (pseudo);
6988       int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
6989 
6990       /* We don't need to test full validity of last_regno for
6991 	 inherit here; we only want to know if the store actually
6992 	 matches the pseudo.  */
6993       if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
6994 	  && reg_reloaded_contents[last_regno] == pseudo_no
6995 	  && spill_reg_store[last_regno]
6996 	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
6997 	delete_output_reload (insn, j, last_regno);
6998     }
6999 
7000   old = rl->out_reg;
7001   if (old == 0
7002       || rl->reg_rtx == old
7003       || rl->reg_rtx == 0)
7004     return;
7005 
7006   /* An output operand that dies right away does need a reload,
7007      but need not be copied from it.  Show the new location in the
7008      REG_UNUSED note.  */
7009   if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
7010       && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7011     {
7012       XEXP (note, 0) = rl->reg_rtx;
7013       return;
7014     }
7015   /* Likewise for a SUBREG of an operand that dies.  */
7016   else if (GET_CODE (old) == SUBREG
7017 	   && GET_CODE (SUBREG_REG (old)) == REG
7018 	   && 0 != (note = find_reg_note (insn, REG_UNUSED,
7019 					  SUBREG_REG (old))))
7020     {
7021       XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7022 					   rl->reg_rtx);
7023       return;
7024     }
7025   else if (GET_CODE (old) == SCRATCH)
7026     /* If we aren't optimizing, there won't be a REG_UNUSED note,
7027        but we don't want to make an output reload.  */
7028     return;
7029 
7030   /* If this is a JUMP_INSN, we can't support output reloads yet.  */
7031   if (GET_CODE (insn) == JUMP_INSN)
7032     abort ();
7033 
7034   emit_output_reload_insns (chain, rld + j, j);
7035 }
7036 
7037 /* Output insns to reload values in and out of the chosen reload regs.  */
7038 
7039 static void
7040 emit_reload_insns (chain)
7041      struct insn_chain *chain;
7042 {
7043   rtx insn = chain->insn;
7044 
7045   int j;
7046 
7047   CLEAR_HARD_REG_SET (reg_reloaded_died);
7048 
7049   for (j = 0; j < reload_n_operands; j++)
7050     input_reload_insns[j] = input_address_reload_insns[j]
7051       = inpaddr_address_reload_insns[j]
7052       = output_reload_insns[j] = output_address_reload_insns[j]
7053       = outaddr_address_reload_insns[j]
7054       = other_output_reload_insns[j] = 0;
7055   other_input_address_reload_insns = 0;
7056   other_input_reload_insns = 0;
7057   operand_reload_insns = 0;
7058   other_operand_reload_insns = 0;
7059 
7060   /* Dump reloads into the dump file.  */
7061   if (rtl_dump_file)
7062     {
7063       fprintf (rtl_dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7064       debug_reload_to_stream (rtl_dump_file);
7065     }
7066 
7067   /* Now output the instructions to copy the data into and out of the
7068      reload registers.  Do these in the order that the reloads were reported,
7069      since reloads of base and index registers precede reloads of operands
7070      and the operands may need the base and index registers reloaded.  */
7071 
7072   for (j = 0; j < n_reloads; j++)
7073     {
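      /* Clear any stale new_spill_reg_store entry for this reload's hard
	 register before this insn's output reloads record new stores.  */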
7074       if (rld[j].reg_rtx
7075 	  && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
7076 	new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
7077 
7078       do_input_reload (chain, rld + j, j);
7079       do_output_reload (chain, rld + j, j);
7080     }
7081 
7082   /* Now write all the insns we made for reloads in the order expected by
7083      the allocation functions.  Prior to the insn being reloaded, we write
7084      the following reloads:
7085 
7086      RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7087 
7088      RELOAD_OTHER reloads.
7089 
7090      For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7091      by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7092      RELOAD_FOR_INPUT reload for the operand.
7093 
7094      RELOAD_FOR_OPADDR_ADDRS reloads.
7095 
7096      RELOAD_FOR_OPERAND_ADDRESS reloads.
7097 
7098      After the insn being reloaded, we write the following:
7099 
7100      For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7101      by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7102      RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7103      reloads for the operand.  The RELOAD_OTHER output reloads are
7104      output in descending order by reload number.  */
7105 
7106   emit_insn_before (other_input_address_reload_insns, insn);
7107   emit_insn_before (other_input_reload_insns, insn);
7108 
7109   for (j = 0; j < reload_n_operands; j++)
7110     {
7111       emit_insn_before (inpaddr_address_reload_insns[j], insn);
7112       emit_insn_before (input_address_reload_insns[j], insn);
7113       emit_insn_before (input_reload_insns[j], insn);
7114     }
7115 
7116   emit_insn_before (other_operand_reload_insns, insn);
7117   emit_insn_before (operand_reload_insns, insn);
7118 
7119   for (j = 0; j < reload_n_operands; j++)
7120     {
7121       rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
7122       x = emit_insn_after (output_address_reload_insns[j], x);
7123       x = emit_insn_after (output_reload_insns[j], x);
7124       emit_insn_after (other_output_reload_insns[j], x);
7125     }
7126 
7127   /* For all the spill regs newly reloaded in this instruction,
7128      record what they were reloaded from, so subsequent instructions
7129      can inherit the reloads.
7130 
7131      Update spill_reg_store for the reloads of this insn.
7132      Copy the elements that were updated in the loop above.  */
7133 
7134   for (j = 0; j < n_reloads; j++)
7135     {
7136       int r = reload_order[j];
7137       int i = reload_spill_index[r];
7138 
7139       /* If this is a non-inherited input reload from a pseudo, we must
7140 	 clear any memory of a previous store to the same pseudo.  Only do
7141 	 something if there will not be an output reload for the pseudo
7142 	 being reloaded.  */
7143       if (rld[r].in_reg != 0
7144 	  && ! (reload_inherited[r] || reload_override_in[r]))
7145 	{
7146 	  rtx reg = rld[r].in_reg;
7147 
7148 	  if (GET_CODE (reg) == SUBREG)
7149 	    reg = SUBREG_REG (reg);
7150 
7151 	  if (GET_CODE (reg) == REG
7152 	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7153 	      && ! reg_has_output_reload[REGNO (reg)])
7154 	    {
7155 	      int nregno = REGNO (reg);
7156 
7157 	      if (reg_last_reload_reg[nregno])
7158 		{
7159 		  int last_regno = REGNO (reg_last_reload_reg[nregno]);
7160 
7161 		  if (reg_reloaded_contents[last_regno] == nregno)
7162 		    spill_reg_store[last_regno] = 0;
7163 		}
7164 	    }
7165 	}
7166 
7167       /* I is nonneg if this reload used a register.
7168 	 If rld[r].reg_rtx is 0, this is an optional reload
7169 	 that we opted to ignore.  */
7170 
7171       if (i >= 0 && rld[r].reg_rtx != 0)
7172 	{
7173 	  int nr = HARD_REGNO_NREGS (i, GET_MODE (rld[r].reg_rtx));
7174 	  int k;
7175 	  int part_reaches_end = 0;
7176 	  int all_reaches_end = 1;
7177 
7178 	  /* For a multi register reload, we need to check if all or part
7179 	     of the value lives to the end.  */
7180 	  for (k = 0; k < nr; k++)
7181 	    {
7182 	      if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7183 					    rld[r].when_needed))
7184 		part_reaches_end = 1;
7185 	      else
7186 		all_reaches_end = 0;
7187 	    }
7188 
7189 	  /* Ignore reloads that don't reach the end of the insn in
7190 	     their entirety.  */
7191 	  if (all_reaches_end)
7192 	    {
7193 	      /* First, clear out memory of what used to be in this spill reg.
7194 		 If consecutive registers are used, clear them all.  */
7195 
7196 	      for (k = 0; k < nr; k++)
7197 		CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7198 
7199 	      /* Maybe the spill reg contains a copy of reload_out.  */
7200 	      if (rld[r].out != 0
7201 		  && (GET_CODE (rld[r].out) == REG
7202 #ifdef AUTO_INC_DEC
7203 		      || ! rld[r].out_reg
7204 #endif
7205 		      || GET_CODE (rld[r].out_reg) == REG))
7206 		{
7207 		  rtx out = (GET_CODE (rld[r].out) == REG
7208 			     ? rld[r].out
7209 			     : rld[r].out_reg
7210 			     ? rld[r].out_reg
7211 /* AUTO_INC */		     : XEXP (rld[r].in_reg, 0));
7212 		  int nregno = REGNO (out);
7213 		  int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7214 			     : HARD_REGNO_NREGS (nregno,
7215 						 GET_MODE (rld[r].reg_rtx)));
7216 
7217 		  spill_reg_store[i] = new_spill_reg_store[i];
7218 		  spill_reg_stored_to[i] = out;
7219 		  reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7220 
7221 		  /* If NREGNO is a hard register, it may occupy more than
7222 		     one register.  If it does, say what is in the
7223 		     rest of the registers assuming that both registers
7224 		     agree on how many words the object takes.  If not,
7225 		     invalidate the subsequent registers.  */
7226 
7227 		  if (nregno < FIRST_PSEUDO_REGISTER)
7228 		    for (k = 1; k < nnr; k++)
7229 		      reg_last_reload_reg[nregno + k]
7230 			= (nr == nnr
7231 			   ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7232 			   : 0);
7233 
7234 		  /* Now do the inverse operation.  */
7235 		  for (k = 0; k < nr; k++)
7236 		    {
7237 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7238 		      reg_reloaded_contents[i + k]
7239 			= (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7240 			   ? nregno
7241 			   : nregno + k);
7242 		      reg_reloaded_insn[i + k] = insn;
7243 		      SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7244 		    }
7245 		}
7246 
7247 	      /* Maybe the spill reg contains a copy of reload_in.  Only do
7248 		 something if there will not be an output reload for
7249 		 the register being reloaded.  */
7250 	      else if (rld[r].out_reg == 0
7251 		       && rld[r].in != 0
7252 		       && ((GET_CODE (rld[r].in) == REG
7253 			    && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7254 			    && ! reg_has_output_reload[REGNO (rld[r].in)])
7255 			   || (GET_CODE (rld[r].in_reg) == REG
7256 			       && ! reg_has_output_reload[REGNO (rld[r].in_reg)]))
7257 		       && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
7258 		{
7259 		  int nregno;
7260 		  int nnr;
7261 
7262 		  if (GET_CODE (rld[r].in) == REG
7263 		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7264 		    nregno = REGNO (rld[r].in);
7265 		  else if (GET_CODE (rld[r].in_reg) == REG)
7266 		    nregno = REGNO (rld[r].in_reg);
7267 		  else
7268 		    nregno = REGNO (XEXP (rld[r].in_reg, 0));
7269 
7270 		  nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7271 			 : HARD_REGNO_NREGS (nregno,
7272 					     GET_MODE (rld[r].reg_rtx)));
7273 
7274 		  reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7275 
7276 		  if (nregno < FIRST_PSEUDO_REGISTER)
7277 		    for (k = 1; k < nnr; k++)
7278 		      reg_last_reload_reg[nregno + k]
7279 			= (nr == nnr
7280 			   ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
7281 			   : 0);
7282 
7283 		  /* Unless we inherited this reload, show we haven't
7284 		     recently done a store.
7285 		     Previous stores of inherited auto_inc expressions
7286 		     also have to be discarded.  */
7287 		  if (! reload_inherited[r]
7288 		      || (rld[r].out && ! rld[r].out_reg))
7289 		    spill_reg_store[i] = 0;
7290 
7291 		  for (k = 0; k < nr; k++)
7292 		    {
7293 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7294 		      reg_reloaded_contents[i + k]
7295 			= (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7296 			   ? nregno
7297 			   : nregno + k);
7298 		      reg_reloaded_insn[i + k] = insn;
7299 		      SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7300 		    }
7301 		}
7302 	    }
7303 
7304 	  /* However, if part of the reload reaches the end, then we must
7305 	     invalidate the old info for the part that survives to the end.  */
7306 	  else if (part_reaches_end)
7307 	    {
7308 	      for (k = 0; k < nr; k++)
7309 		if (reload_reg_reaches_end_p (i + k,
7310 					      rld[r].opnum,
7311 					      rld[r].when_needed))
7312 		  CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7313 	    }
7314 	}
7315 
7316       /* The following if-statement was #if 0'd in 1.34 (or before...).
7317 	 It's reenabled in 1.35 because supposedly nothing else
7318 	 deals with this problem.  */
7319 
7320       /* If a register gets output-reloaded from a non-spill register,
7321 	 that invalidates any previous reloaded copy of it.
7322 	 But forget_old_reloads_1 won't get to see it, because
7323 	 it thinks only about the original insn.  So invalidate it here.  */
7324       if (i < 0 && rld[r].out != 0
7325 	  && (GET_CODE (rld[r].out) == REG
7326 	      || (GET_CODE (rld[r].out) == MEM
7327 		  && GET_CODE (rld[r].out_reg) == REG)))
7328 	{
7329 	  rtx out = (GET_CODE (rld[r].out) == REG
7330 		     ? rld[r].out : rld[r].out_reg);
7331 	  int nregno = REGNO (out);
7332 	  if (nregno >= FIRST_PSEUDO_REGISTER)
7333 	    {
7334 	      rtx src_reg, store_insn = NULL_RTX;
7335 
7336 	      reg_last_reload_reg[nregno] = 0;
7337 
7338 	      /* If we can find a hard register that is stored, record
7339 		 the storing insn so that we may delete this insn with
7340 		 delete_output_reload.  */
7341 	      src_reg = rld[r].reg_rtx;
7342 
7343 	      /* If this is an optional reload, try to find the source reg
7344 		 from an input reload.  */
7345 	      if (! src_reg)
7346 		{
7347 		  rtx set = single_set (insn);
7348 		  if (set && SET_DEST (set) == rld[r].out)
7349 		    {
7350 		      int k;
7351 
7352 		      src_reg = SET_SRC (set);
7353 		      store_insn = insn;
7354 		      for (k = 0; k < n_reloads; k++)
7355 			{
7356 			  if (rld[k].in == src_reg)
7357 			    {
7358 			      src_reg = rld[k].reg_rtx;
7359 			      break;
7360 			    }
7361 			}
7362 		    }
7363 		}
7364 	      else
7365 		store_insn = new_spill_reg_store[REGNO (src_reg)];
7366 	      if (src_reg && GET_CODE (src_reg) == REG
7367 		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7368 		{
7369 		  int src_regno = REGNO (src_reg);
7370 		  int nr = HARD_REGNO_NREGS (src_regno, rld[r].mode);
7371 		  /* Where to find a death note varies with
7372 		     PRESERVE_DEATH_INFO_REGNO_P.  The condition is not
7373 		     necessarily checked exactly in the code that moves
7374 		     notes, so just check both locations.  */
7375 		  rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7376 		  if (! note && store_insn)
7377 		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
7378 		  while (nr-- > 0)
7379 		    {
7380 		      spill_reg_store[src_regno + nr] = store_insn;
7381 		      spill_reg_stored_to[src_regno + nr] = out;
7382 		      reg_reloaded_contents[src_regno + nr] = nregno;
7383 		      reg_reloaded_insn[src_regno + nr] = store_insn;
7384 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7385 		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7386 		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7387 		      if (note)
7388 			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7389 		      else
7390 			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7391 		    }
7392 		  reg_last_reload_reg[nregno] = src_reg;
7393 		}
7394 	    }
7395 	  else
7396 	    {
7397 	      int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (rld[r].out));
7398 
7399 	      while (num_regs-- > 0)
7400 		reg_last_reload_reg[nregno + num_regs] = 0;
7401 	    }
7402 	}
7403     }
7404   IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7405 }
7406 
7407 /* Emit code to perform a reload from IN (which may be a reload register) to
7408    OUT (which may also be a reload register).  IN or OUT is from operand
7409    OPNUM with reload type TYPE.
7410 
7411    Returns first insn emitted.  */
7412 
7413 rtx
7414 gen_reload (out, in, opnum, type)
7415      rtx out;
7416      rtx in;
7417      int opnum;
7418      enum reload_type type;
7419 {
7420   rtx last = get_last_insn ();
7421   rtx tem;
7422 
7423   /* If IN is a paradoxical SUBREG, remove it and try to put the
7424      opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
7425   if (GET_CODE (in) == SUBREG
7426       && (GET_MODE_SIZE (GET_MODE (in))
7427 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7428       && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7429     in = SUBREG_REG (in), out = tem;
7430   else if (GET_CODE (out) == SUBREG
7431 	   && (GET_MODE_SIZE (GET_MODE (out))
7432 	       > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7433 	   && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7434     out = SUBREG_REG (out), in = tem;
7435 
7436   /* How to do this reload can get quite tricky.  Normally, we are being
7437      asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7438      register that didn't get a hard register.  In that case we can just
7439      call emit_move_insn.
7440 
7441      We can also be asked to reload a PLUS that adds a register or a MEM to
7442      another register, constant or MEM.  This can occur during frame pointer
7443      elimination and while reloading addresses.  This case is handled by
7444      trying to emit a single insn to perform the add.  If it is not valid,
7445      we use a two-insn sequence.
7446 
7447      Finally, we could be called to handle an 'o' constraint by putting
7448      an address into a register.  In that case, we first try to do this
7449      with a named pattern of "reload_load_address".  If no such pattern
7450      exists, we just emit a SET insn and hope for the best (it will normally
7451      be valid on machines that use 'o').
7452 
7453      This entire process is made complex because reload will never
7454      process the insns we generate here, so we must ensure that they
7455      fit their constraints, and because parts of IN might be being
7456      reloaded separately and replaced with spill registers.
7457      Because of this, we are, in some sense, just guessing the right approach
7458      here.  The one listed above seems to work.
7459 
7460      ??? At some point, this whole thing needs to be rethought.  */
7461 
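  /* For illustration only, with hypothetical hard registers: a PLUS
     reload arising from frame pointer elimination might look like

	 IN  = (plus:SI (reg:SI sp) (const_int 12))
	 OUT = (reg:SI 3)		(the reload register)

     The code below first tries the single insn

	 (set (reg:SI 3) (plus:SI (reg:SI sp) (const_int 12)))

     and only falls back to the two-insn sequence further down if the
     target does not recognize that insn or its constraints fail.  */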
7462   if (GET_CODE (in) == PLUS
7463       && (GET_CODE (XEXP (in, 0)) == REG
7464 	  || GET_CODE (XEXP (in, 0)) == SUBREG
7465 	  || GET_CODE (XEXP (in, 0)) == MEM)
7466       && (GET_CODE (XEXP (in, 1)) == REG
7467 	  || GET_CODE (XEXP (in, 1)) == SUBREG
7468 	  || CONSTANT_P (XEXP (in, 1))
7469 	  || GET_CODE (XEXP (in, 1)) == MEM))
7470     {
7471       /* We need to compute the sum of a register or a MEM and another
7472 	 register, constant, or MEM, and put it into the reload
7473 	 register.  The best possible way of doing this is if the machine
7474 	 has a three-operand ADD insn that accepts the required operands.
7475 
7476 	 The simplest approach is to try to generate such an insn and see if it
7477 	 is recognized and matches its constraints.  If so, it can be used.
7478 
7479 	 It might be better not to actually emit the insn unless it is valid,
7480 	 but we need to pass the insn as an operand to `recog' and
7481 	 `extract_insn' and it is simpler to emit and then delete the insn if
7482 	 not valid than to dummy things up.  */
7483 
7484       rtx op0, op1, tem, insn;
7485       int code;
7486 
7487       op0 = find_replacement (&XEXP (in, 0));
7488       op1 = find_replacement (&XEXP (in, 1));
7489 
7490       /* Since constraint checking is strict, commutativity won't be
7491 	 checked, so we need to do that here to avoid spurious failure
7492 	 if the add instruction is two-address and the second operand
7493 	 of the add is the same as the reload reg, which is frequently
7494 	 the case.  If the insn would be A = B + A, rearrange it so
7495 	 it will be A = A + B as constrain_operands expects.  */
7496 
7497       if (GET_CODE (XEXP (in, 1)) == REG
7498 	  && REGNO (out) == REGNO (XEXP (in, 1)))
7499 	tem = op0, op0 = op1, op1 = tem;
7500 
7501       if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7502 	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7503 
7504       insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
7505       code = recog_memoized (insn);
7506 
7507       if (code >= 0)
7508 	{
7509 	  extract_insn (insn);
7510 	  /* We want constrain operands to treat this insn strictly in
7511 	     its validity determination, i.e., the way it would after reload
7512 	     has completed.  */
7513 	  if (constrain_operands (1))
7514 	    return insn;
7515 	}
7516 
7517       delete_insns_since (last);
7518 
7519       /* If that failed, we must use a conservative two-insn sequence.
7520 
7521 	 Use a move to copy one operand into the reload register.  Prefer
7522 	 to reload a constant, MEM or pseudo since the move patterns can
7523 	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
7524 	 pseudo and OP1 is not a valid operand for an add instruction, then
7525 	 reload OP1.
7526 
7527 	 After reloading one of the operands into the reload register, add
7528 	 the reload register to the output register.
7529 
7530 	 If there is another way to do this for a specific machine, a
7531 	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7532 	 we emit below.  */
7533 
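      /* As a rough sketch, with OP0/OP1 as they stand after the possible
	 swap just below, the fallback amounts to

	     (set OUT OP0)
	     (set OUT (plus OUT OP1))

	 that is, move one addend into the reload register and then add
	 the other addend to it in place.  */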
7534       code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
7535 
7536       if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7537 	  || (GET_CODE (op1) == REG
7538 	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7539 	  || (code != CODE_FOR_nothing
7540 	      && ! ((*insn_data[code].operand[2].predicate)
7541 		    (op1, insn_data[code].operand[2].mode))))
7542 	tem = op0, op0 = op1, op1 = tem;
7543 
7544       gen_reload (out, op0, opnum, type);
7545 
7546       /* If OP0 and OP1 are the same, we can use OUT for OP1.
7547 	 This fixes a problem on the 32K where the stack pointer cannot
7548 	 be used as an operand of an add insn.  */
7549 
7550       if (rtx_equal_p (op0, op1))
7551 	op1 = out;
7552 
7553       insn = emit_insn (gen_add2_insn (out, op1));
7554 
7555       /* If that failed, copy the address register to the reload register.
7556 	 Then add the constant to the reload register.  */
7557 
7558       code = recog_memoized (insn);
7559 
7560       if (code >= 0)
7561 	{
7562 	  extract_insn (insn);
7563 	  /* We want constrain operands to treat this insn strictly in
7564 	     its validity determination, i.e., the way it would after reload
7565 	     has completed.  */
7566 	  if (constrain_operands (1))
7567 	    {
7568 	      /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
7569 	      REG_NOTES (insn)
7570 		= gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7571 	      return insn;
7572 	    }
7573 	}
7574 
7575       delete_insns_since (last);
7576 
7577       gen_reload (out, op1, opnum, type);
7578       insn = emit_insn (gen_add2_insn (out, op0));
7579       REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7580     }
7581 
7582 #ifdef SECONDARY_MEMORY_NEEDED
7583   /* If we need a memory location to do the move, do it that way.  */
7584   else if ((GET_CODE (in) == REG || GET_CODE (in) == SUBREG)
7585 	   && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
7586 	   && (GET_CODE (out) == REG || GET_CODE (out) == SUBREG)
7587 	   && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
7588 	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
7589 				       REGNO_REG_CLASS (reg_or_subregno (out)),
7590 				       GET_MODE (out)))
7591     {
7592       /* Get the memory to use and rewrite both registers to its mode.  */
7593       rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7594 
7595       if (GET_MODE (loc) != GET_MODE (out))
7596 	out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7597 
7598       if (GET_MODE (loc) != GET_MODE (in))
7599 	in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7600 
7601       gen_reload (loc, in, opnum, type);
7602       gen_reload (out, loc, opnum, type);
7603     }
7604 #endif
7605 
7606   /* If IN is a simple operand, use gen_move_insn.  */
7607   else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7608     emit_insn (gen_move_insn (out, in));
7609 
7610 #ifdef HAVE_reload_load_address
7611   else if (HAVE_reload_load_address)
7612     emit_insn (gen_reload_load_address (out, in));
7613 #endif
7614 
7615   /* Otherwise, just write (set OUT IN) and hope for the best.  */
7616   else
7617     emit_insn (gen_rtx_SET (VOIDmode, out, in));
7618 
7619   /* Return the first insn emitted.
7620      We can not just return get_last_insn, because there may have
7621      been multiple instructions emitted.  Also note that gen_move_insn may
7622      emit more than one insn itself, so we can not assume that there is one
7623      insn emitted per emit_insn_before call.  */
7624 
7625   return last ? NEXT_INSN (last) : get_insns ();
7626 }
7627 
7628 /* Delete a previously made output-reload whose result we now believe
7629    is not needed.  First we double-check.
7630 
7631    INSN is the insn now being processed.
7632    LAST_RELOAD_REG is the hard register number for which we want to delete
7633    the last output reload.
7634    J is the reload-number that originally used REG.  The caller has made
7635    certain that reload J doesn't use REG any longer for input.  */
7636 
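/* For example, with hypothetical numbers: if pseudo 42 was spilled to
   stack slot S and reload register 3 was used for it, the output reload
   is an insn along the lines of

       (set (mem:SI S) (reg:SI 3))

   If pseudo 42 is not referenced again between that store and INSN
   (beyond what inheritance accounts for), the store is dead and may be
   deleted, possibly together with the stack slot itself.  */
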
7637 static void
7638 delete_output_reload (insn, j, last_reload_reg)
7639      rtx insn;
7640      int j;
7641      int last_reload_reg;
7642 {
7643   rtx output_reload_insn = spill_reg_store[last_reload_reg];
7644   rtx reg = spill_reg_stored_to[last_reload_reg];
7645   int k;
7646   int n_occurrences;
7647   int n_inherited = 0;
7648   rtx i1;
7649   rtx substed;
7650 
7651   /* It is possible that this reload has only been used to set another reload
7652      we eliminated earlier and thus deleted this instruction too.  */
7653   if (INSN_DELETED_P (output_reload_insn))
7654     return;
7655 
7656   /* Get the raw pseudo-register referred to.  */
7657 
7658   while (GET_CODE (reg) == SUBREG)
7659     reg = SUBREG_REG (reg);
7660   substed = reg_equiv_memory_loc[REGNO (reg)];
7661 
7662   /* This is unsafe if the operand occurs more often in the current
7663      insn than it is inherited.  */
7664   for (k = n_reloads - 1; k >= 0; k--)
7665     {
7666       rtx reg2 = rld[k].in;
7667       if (! reg2)
7668 	continue;
7669       if (GET_CODE (reg2) == MEM || reload_override_in[k])
7670 	reg2 = rld[k].in_reg;
7671 #ifdef AUTO_INC_DEC
7672       if (rld[k].out && ! rld[k].out_reg)
7673 	reg2 = XEXP (rld[k].in_reg, 0);
7674 #endif
7675       while (GET_CODE (reg2) == SUBREG)
7676 	reg2 = SUBREG_REG (reg2);
7677       if (rtx_equal_p (reg2, reg))
7678 	{
7679 	  if (reload_inherited[k] || reload_override_in[k] || k == j)
7680 	    {
7681 	      n_inherited++;
7682 	      reg2 = rld[k].out_reg;
7683 	      if (! reg2)
7684 		continue;
7685 	      while (GET_CODE (reg2) == SUBREG)
7686 		reg2 = XEXP (reg2, 0);
7687 	      if (rtx_equal_p (reg2, reg))
7688 		n_inherited++;
7689 	    }
7690 	  else
7691 	    return;
7692 	}
7693     }
7694   n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
7695   if (substed)
7696     n_occurrences += count_occurrences (PATTERN (insn),
7697 					eliminate_regs (substed, 0,
7698 							NULL_RTX), 0);
7699   if (n_occurrences > n_inherited)
7700     return;
7701 
7702   /* If the pseudo-reg we are reloading is no longer referenced
7703      anywhere between the store into it and here,
7704      and no jumps or labels intervene, then the value can get
7705      here through the reload reg alone.
7706      Otherwise, give up--return.  */
7707   for (i1 = NEXT_INSN (output_reload_insn);
7708        i1 != insn; i1 = NEXT_INSN (i1))
7709     {
7710       if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7711 	return;
7712       if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7713 	  && reg_mentioned_p (reg, PATTERN (i1)))
7714 	{
7715 	  /* If this is a USE in front of INSN, we only have to check that
7716 	     there are no more references than accounted for by inheritance.  */
7717 	  while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
7718 	    {
7719 	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
7720 	      i1 = NEXT_INSN (i1);
7721 	    }
7722 	  if (n_occurrences <= n_inherited && i1 == insn)
7723 	    break;
7724 	  return;
7725 	}
7726     }
7727 
7728   /* We will be deleting the insn.  Remove the spill reg information.  */
7729   for (k = HARD_REGNO_NREGS (last_reload_reg, GET_MODE (reg)); k-- > 0; )
7730     {
7731       spill_reg_store[last_reload_reg + k] = 0;
7732       spill_reg_stored_to[last_reload_reg + k] = 0;
7733     }
7734 
7735   /* The caller has already checked that REG dies or is set in INSN.
7736      It has also checked that we are optimizing, and thus some
7737      inaccuracies in the debugging information are acceptable.
7738      So we could just delete output_reload_insn.  But in some cases
7739      we can improve the debugging information without sacrificing
7740      optimization - maybe even improving the code: See if the pseudo
7741      reg has been completely replaced with reload regs.  If so, delete
7742      the store insn and forget we had a stack slot for the pseudo.  */
7743   if (rld[j].out != rld[j].in
7744       && REG_N_DEATHS (REGNO (reg)) == 1
7745       && REG_N_SETS (REGNO (reg)) == 1
7746       && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7747       && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7748     {
7749       rtx i2;
7750 
7751       /* We know that it was used only between here and the beginning of
7752 	 the current basic block.  (We also know that the last use before
7753 	 INSN was the output reload we are thinking of deleting, but never
7754 	 mind that.)  Search that range; see if any ref remains.  */
7755       for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7756 	{
7757 	  rtx set = single_set (i2);
7758 
7759 	  /* Uses which just store in the pseudo don't count,
7760 	     since if they are the only uses, they are dead.  */
7761 	  if (set != 0 && SET_DEST (set) == reg)
7762 	    continue;
7763 	  if (GET_CODE (i2) == CODE_LABEL
7764 	      || GET_CODE (i2) == JUMP_INSN)
7765 	    break;
7766 	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7767 	      && reg_mentioned_p (reg, PATTERN (i2)))
7768 	    {
7769 	      /* Some other ref remains; just delete the output reload we
7770 		 know to be dead.  */
7771 	      delete_address_reloads (output_reload_insn, insn);
7772 	      delete_insn (output_reload_insn);
7773 	      return;
7774 	    }
7775 	}
7776 
7777       /* Delete the now-dead stores into this pseudo.  Note that this
7778 	 loop also takes care of deleting output_reload_insn.  */
7779       for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7780 	{
7781 	  rtx set = single_set (i2);
7782 
7783 	  if (set != 0 && SET_DEST (set) == reg)
7784 	    {
7785 	      delete_address_reloads (i2, insn);
7786 	      delete_insn (i2);
7787 	    }
7788 	  if (GET_CODE (i2) == CODE_LABEL
7789 	      || GET_CODE (i2) == JUMP_INSN)
7790 	    break;
7791 	}
7792 
7793       /* For the debugging info, say the pseudo lives in this reload reg.  */
7794       reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
7795       alter_reg (REGNO (reg), -1);
7796     }
7797   else
7798     {
7799       delete_address_reloads (output_reload_insn, insn);
7800       delete_insn (output_reload_insn);
7801     }
7802 }
7803 
7804 /* We are going to delete DEAD_INSN.  Recursively delete loads of
7805    reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
7806    CURRENT_INSN is being reloaded, so we have to check its reloads too.  */
7807 static void
7808 delete_address_reloads (dead_insn, current_insn)
7809      rtx dead_insn, current_insn;
7810 {
7811   rtx set = single_set (dead_insn);
7812   rtx set2, dst, prev, next;
7813   if (set)
7814     {
7815       rtx dst = SET_DEST (set);
7816       if (GET_CODE (dst) == MEM)
7817 	delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
7818     }
7819   /* If we deleted the store from a reloaded post_{in,de}c expression,
7820      we can delete the matching adds.  */
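  /* Sketch of the pattern matched below, where N stands for whatever
     constant the auto-increment used:

	 PREV:      (set DST (plus DST (const_int N)))
	 DEAD_INSN: ... (mem DST) ...
	 NEXT:      (set DST (plus DST (const_int -N)))

     When PREV and NEXT only exist to implement the auto-increment of
     the deleted store, they can be deleted as well.  */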
7821   prev = PREV_INSN (dead_insn);
7822   next = NEXT_INSN (dead_insn);
7823   if (! prev || ! next)
7824     return;
7825   set = single_set (next);
7826   set2 = single_set (prev);
7827   if (! set || ! set2
7828       || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
7829       || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
7830       || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
7831     return;
7832   dst = SET_DEST (set);
7833   if (! rtx_equal_p (dst, SET_DEST (set2))
7834       || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
7835       || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
7836       || (INTVAL (XEXP (SET_SRC (set), 1))
7837 	  != -INTVAL (XEXP (SET_SRC (set2), 1))))
7838     return;
7839   delete_related_insns (prev);
7840   delete_related_insns (next);
7841 }
7842 
7843 /* Subfunction of delete_address_reloads: process registers found in X.  */
7844 static void
7845 delete_address_reloads_1 (dead_insn, x, current_insn)
7846      rtx dead_insn, x, current_insn;
7847 {
7848   rtx prev, set, dst, i2;
7849   int i, j;
7850   enum rtx_code code = GET_CODE (x);
7851 
7852   if (code != REG)
7853     {
7854       const char *fmt = GET_RTX_FORMAT (code);
7855       for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7856 	{
7857 	  if (fmt[i] == 'e')
7858 	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
7859 	  else if (fmt[i] == 'E')
7860 	    {
7861 	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7862 		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
7863 					  current_insn);
7864 	    }
7865 	}
7866       return;
7867     }
7868 
7869   if (spill_reg_order[REGNO (x)] < 0)
7870     return;
7871 
7872   /* Scan backwards for the insn that sets x.  This might be a long way
7873      back due to inheritance.  */
7874   for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
7875     {
7876       code = GET_CODE (prev);
7877       if (code == CODE_LABEL || code == JUMP_INSN)
7878 	return;
7879       if (GET_RTX_CLASS (code) != 'i')
7880 	continue;
7881       if (reg_set_p (x, PATTERN (prev)))
7882 	break;
7883       if (reg_referenced_p (x, PATTERN (prev)))
7884 	return;
7885     }
7886   if (! prev || INSN_UID (prev) < reload_first_uid)
7887     return;
7888   /* Check that PREV only sets the reload register.  */
7889   set = single_set (prev);
7890   if (! set)
7891     return;
7892   dst = SET_DEST (set);
7893   if (GET_CODE (dst) != REG
7894       || ! rtx_equal_p (dst, x))
7895     return;
7896   if (! reg_set_p (dst, PATTERN (dead_insn)))
7897     {
7898       /* Check if DST was used in a later insn -
7899 	 it might have been inherited.  */
7900       for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
7901 	{
7902 	  if (GET_CODE (i2) == CODE_LABEL)
7903 	    break;
7904 	  if (! INSN_P (i2))
7905 	    continue;
7906 	  if (reg_referenced_p (dst, PATTERN (i2)))
7907 	    {
7908 	      /* If there is a reference to the register in the current insn,
7909 		 it might be loaded in a non-inherited reload.  If no other
7910 		 reload uses it, that means the register is set before
7911 		 referenced.  */
7912 	      if (i2 == current_insn)
7913 		{
7914 		  for (j = n_reloads - 1; j >= 0; j--)
7915 		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
7916 			|| reload_override_in[j] == dst)
7917 		      return;
7918 		  for (j = n_reloads - 1; j >= 0; j--)
7919 		    if (rld[j].in && rld[j].reg_rtx == dst)
7920 		      break;
7921 		  if (j >= 0)
7922 		    break;
7923 		}
7924 	      return;
7925 	    }
7926 	  if (GET_CODE (i2) == JUMP_INSN)
7927 	    break;
7928 	  /* If DST is still live at CURRENT_INSN, check if it is used for
7929 	     any reload.  Note that even if CURRENT_INSN sets DST, we still
7930 	     have to check the reloads.  */
7931 	  if (i2 == current_insn)
7932 	    {
7933 	      for (j = n_reloads - 1; j >= 0; j--)
7934 		if ((rld[j].reg_rtx == dst && reload_inherited[j])
7935 		    || reload_override_in[j] == dst)
7936 		  return;
7937 	      /* ??? We can't finish the loop here, because dst might be
7938 		 allocated to a pseudo in this block if no reload in this
7939 		 block needs any of the classes containing DST - see
7940 		 spill_hard_reg.  There is no easy way to tell this, so we
7941 		 have to scan till the end of the basic block.  */
7942 	    }
7943 	  if (reg_set_p (dst, PATTERN (i2)))
7944 	    break;
7945 	}
7946     }
7947   delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
7948   reg_reloaded_contents[REGNO (dst)] = -1;
7949   delete_insn (prev);
7950 }
7951 
7952 /* Output reload-insns to reload VALUE into RELOADREG.
7953    VALUE is an autoincrement or autodecrement RTX whose operand
7954    is a register or memory location;
7955    so reloading involves incrementing that location.
7956    IN is either identical to VALUE, or some cheaper place to reload from.
7957 
7958    INC_AMOUNT is the number to increment or decrement by (always positive).
7959    This cannot be deduced from VALUE.
7960 
7961    Return the instruction that stores into RELOADREG.  */
7962 
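/* As a sketch with a hypothetical register: reloading
   VALUE = (post_inc:SI (reg:SI 4)) with INC_AMOUNT 4 first copies
   (reg:SI 4) into RELOADREG and then tries to emit

       (set (reg:SI 4) (plus:SI (reg:SI 4) (const_int 4)))

   directly; if that insn is not recognized, the increment is instead
   done in RELOADREG and the result stored back, as the code below
   spells out.  */
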
7963 static rtx
7964 inc_for_reload (reloadreg, in, value, inc_amount)
7965      rtx reloadreg;
7966      rtx in, value;
7967      int inc_amount;
7968 {
7969   /* REG or MEM to be copied and incremented.  */
7970   rtx incloc = XEXP (value, 0);
7971   /* Nonzero if increment after copying.  */
7972   int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7973   rtx last;
7974   rtx inc;
7975   rtx add_insn;
7976   int code;
7977   rtx store;
7978   rtx real_in = in == value ? XEXP (in, 0) : in;
7979 
7980   /* No hard register is equivalent to this register after
7981      inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
7982      we could inc/dec that register as well (maybe even using it for
7983      the source), but I'm not sure it's worth worrying about.  */
7984   if (GET_CODE (incloc) == REG)
7985     reg_last_reload_reg[REGNO (incloc)] = 0;
7986 
7987   if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7988     inc_amount = -inc_amount;
7989 
7990   inc = GEN_INT (inc_amount);
7991 
7992   /* If this is post-increment, first copy the location to the reload reg.  */
7993   if (post && real_in != reloadreg)
7994     emit_insn (gen_move_insn (reloadreg, real_in));
7995 
7996   if (in == value)
7997     {
7998       /* See if we can directly increment INCLOC.  Use a method similar to
7999 	 that in gen_reload.  */
8000 
8001       last = get_last_insn ();
8002       add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8003 					 gen_rtx_PLUS (GET_MODE (incloc),
8004 						       incloc, inc)));
8005 
8006       code = recog_memoized (add_insn);
8007       if (code >= 0)
8008 	{
8009 	  extract_insn (add_insn);
8010 	  if (constrain_operands (1))
8011 	    {
8012 	      /* If this is a pre-increment and we have incremented the value
8013 		 where it lives, copy the incremented value to RELOADREG to
8014 		 be used as an address.  */
8015 
8016 	      if (! post)
8017 		emit_insn (gen_move_insn (reloadreg, incloc));
8018 
8019 	      return add_insn;
8020 	    }
8021 	}
8022       delete_insns_since (last);
8023     }
8024 
8025   /* If we couldn't do the increment directly, we must increment in RELOADREG.
8026      The way we do this depends on whether this is pre- or post-increment.
8027      For pre-increment, copy INCLOC to the reload register, increment it
8028      there, then save back.  */
8029 
8030   if (! post)
8031     {
8032       if (in != reloadreg)
8033 	emit_insn (gen_move_insn (reloadreg, real_in));
8034       emit_insn (gen_add2_insn (reloadreg, inc));
8035       store = emit_insn (gen_move_insn (incloc, reloadreg));
8036     }
8037   else
8038     {
8039       /* Postincrement.
8040 	 Because this might be a jump insn or a compare, and because RELOADREG
8041 	 may not be available after the insn in an input reload, we must do
8042 	 the incrementation before the insn being reloaded for.
8043 
8044 	 We have already copied IN to RELOADREG.  Increment the copy in
8045 	 RELOADREG, save that back, then decrement RELOADREG so it has
8046 	 the original value.  */
8047 
8048       emit_insn (gen_add2_insn (reloadreg, inc));
8049       store = emit_insn (gen_move_insn (incloc, reloadreg));
8050       emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
8051     }
8052 
8053   return store;
8054 }
8055 
8056 
8057 /* See whether a single set SET is a noop.  */
8058 static int
8059 reload_cse_noop_set_p (set)
8060      rtx set;
8061 {
8062   rtx dest, src;
8063 
8064   dest = SET_DEST (set);
8065   src = SET_SRC (set);
8066 
8067   if (! rtx_equal_for_cselib_p (dest, src))
8068     return 0;
8069 
8070   if ((GET_CODE (dest) == MEM && MEM_VOLATILE_P (dest))
8071       || (GET_CODE (src) == MEM && MEM_VOLATILE_P (src)))
8072     return 0;
8073 
8074   return 1;
8075 }
8076 
8077 /* Try to simplify INSN.  */
8078 static void
8079 reload_cse_simplify (insn, testreg)
8080      rtx insn;
8081      rtx testreg;
8082 {
8083   rtx body = PATTERN (insn);
8084 
8085   if (GET_CODE (body) == SET)
8086     {
8087       int count = 0;
8088 
8089       /* Simplify even if we may think it is a no-op.
8090          We may think a memory load of a value smaller than WORD_SIZE
8091          is redundant because we haven't taken into account possible
8092          implicit extension.  reload_cse_simplify_set() will bring
8093          this out, so it's safer to simplify before we delete.  */
8094       count += reload_cse_simplify_set (body, insn);
8095 
8096       if (!count && reload_cse_noop_set_p (body))
8097 	{
8098 	  rtx value = SET_DEST (body);
8099 	  if (REG_P (value)
8100 	      && ! REG_FUNCTION_VALUE_P (value))
8101 	    value = 0;
8102 	  delete_insn_and_edges (insn);
8103 	  return;
8104 	}
8105 
8106       if (count > 0)
8107 	apply_change_group ();
8108       else
8109 	reload_cse_simplify_operands (insn, testreg);
8110     }
8111   else if (GET_CODE (body) == PARALLEL)
8112     {
8113       int i;
8114       int count = 0;
8115       rtx value = NULL_RTX;
8116 
8117       /* Registers mentioned in the clobber list for an asm cannot be reused
8118 	 within the body of the asm.  Invalidate those registers now so that
8119 	 we don't try to substitute values for them.  */
8120       if (asm_noperands (body) >= 0)
8121 	{
8122 	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8123 	    {
8124 	      rtx part = XVECEXP (body, 0, i);
8125 	      if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
8126 		cselib_invalidate_rtx (XEXP (part, 0));
8127 	    }
8128 	}
8129 
8130       /* If every action in a PARALLEL is a noop, we can delete
8131 	 the entire PARALLEL.  */
8132       for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8133 	{
8134 	  rtx part = XVECEXP (body, 0, i);
8135 	  if (GET_CODE (part) == SET)
8136 	    {
8137 	      if (! reload_cse_noop_set_p (part))
8138 		break;
8139 	      if (REG_P (SET_DEST (part))
8140 		  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
8141 		{
8142 		  if (value)
8143 		    break;
8144 		  value = SET_DEST (part);
8145 		}
8146 	    }
8147 	  else if (GET_CODE (part) != CLOBBER)
8148 	    break;
8149 	}
8150 
8151       if (i < 0)
8152 	{
8153 	  delete_insn_and_edges (insn);
8154 	  /* We're done with this insn.  */
8155 	  return;
8156 	}
8157 
8158       /* It's not a no-op, but we can try to simplify it.  */
8159       for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8160 	if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8161 	  count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8162 
8163       if (count > 0)
8164 	apply_change_group ();
8165       else
8166 	reload_cse_simplify_operands (insn, testreg);
8167     }
8168 }
8169 
8170 /* Do a very simple CSE pass over the hard registers.
8171 
8172    This function detects no-op moves where we happened to assign two
8173    different pseudo-registers to the same hard register, and then
8174    copied one to the other.  Reload will generate a useless
8175    instruction copying a register to itself.
8176 
8177    This function also detects cases where we load a value from memory
8178    into two different registers, and (if memory is more expensive than
8179    registers) changes it to simply copy the first register into the
8180    second register.
8181 
8182    Another optimization is performed that scans the operands of each
8183    instruction to see whether the value is already available in a
8184    hard register.  It then replaces the operand with the hard register
8185    if possible, much like an optional reload would.  */
8186 
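/* A minimal example of the no-op move case, using a hypothetical hard
   register: if two pseudos were both assigned hard register 3, a copy
   between them reaches this pass as

       (set (reg:SI 3) (reg:SI 3))

   which cselib shows to be a no-op, so the insn is simply deleted.  */
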
8187 static void
8188 reload_cse_regs_1 (first)
8189      rtx first;
8190 {
8191   rtx insn;
8192   rtx testreg = gen_rtx_REG (VOIDmode, -1);
8193 
8194   cselib_init ();
8195   init_alias_analysis ();
8196 
8197   for (insn = first; insn; insn = NEXT_INSN (insn))
8198     {
8199       if (INSN_P (insn))
8200 	reload_cse_simplify (insn, testreg);
8201 
8202       cselib_process_insn (insn);
8203     }
8204 
8205   /* Clean up.  */
8206   end_alias_analysis ();
8207   cselib_finish ();
8208 }
8209 
8210 /* Call cse / combine like post-reload optimization phases.
8211    FIRST is the first instruction.  */
8212 void
8213 reload_cse_regs (first)
8214      rtx first;
8215 {
8216   reload_cse_regs_1 (first);
8217   reload_combine ();
8218   reload_cse_move2add (first);
8219   if (flag_expensive_optimizations)
8220     reload_cse_regs_1 (first);
8221 }
8222 
8223 /* Try to simplify a single SET instruction.  SET is the set pattern.
8224    INSN is the instruction it came from.
8225    This function only handles one case: if we set a register to a value
8226    which is not a register, we try to find that value in some other register
8227    and change the set into a register copy.  */
8228 
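/* For illustration, with hypothetical registers: given

       (set (reg:SI 1) (mem:SI (symbol_ref "x")))

   and cselib knowledge that (reg:SI 2) already holds that value, the
   source may be replaced so the insn becomes a register copy, provided
   the register-register move is no more costly than the memory load.  */
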
8229 static int
8230 reload_cse_simplify_set (set, insn)
8231      rtx set;
8232      rtx insn;
8233 {
8234   int did_change = 0;
8235   int dreg;
8236   rtx src;
8237   enum reg_class dclass;
8238   int old_cost;
8239   cselib_val *val;
8240   struct elt_loc_list *l;
8241 #ifdef LOAD_EXTEND_OP
8242   enum rtx_code extend_op = NIL;
8243 #endif
8244 
8245   dreg = true_regnum (SET_DEST (set));
8246   if (dreg < 0)
8247     return 0;
8248 
8249   src = SET_SRC (set);
8250   if (side_effects_p (src) || true_regnum (src) >= 0)
8251     return 0;
8252 
8253   dclass = REGNO_REG_CLASS (dreg);
8254 
8255 #ifdef LOAD_EXTEND_OP
8256   /* When replacing a memory with a register, we need to honor assumptions
8257      that combine made wrt the contents of sign bits.  We'll do this by
8258      generating an extend instruction instead of a reg->reg copy.  Thus
8259      the destination must be a register that we can widen.  */
8260   if (GET_CODE (src) == MEM
8261       && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
8262       && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != NIL
8263       && GET_CODE (SET_DEST (set)) != REG)
8264     return 0;
8265 #endif
8266 
8267   /* If memory loads are cheaper than register copies, don't change them.  */
8268   if (GET_CODE (src) == MEM)
8269     old_cost = MEMORY_MOVE_COST (GET_MODE (src), dclass, 1);
8270   else if (CONSTANT_P (src))
8271     old_cost = rtx_cost (src, SET);
8272   else if (GET_CODE (src) == REG)
8273     old_cost = REGISTER_MOVE_COST (GET_MODE (src),
8274 				   REGNO_REG_CLASS (REGNO (src)), dclass);
8275   else
8276     /* ???   */
8277     old_cost = rtx_cost (src, SET);
8278 
8279   val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0);
8280   if (! val)
8281     return 0;
8282   for (l = val->locs; l; l = l->next)
8283     {
8284       rtx this_rtx = l->loc;
8285       int this_cost;
8286 
8287       if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
8288 	{
8289 #ifdef LOAD_EXTEND_OP
8290 	  if (extend_op != NIL)
8291 	    {
8292 	      HOST_WIDE_INT this_val;
8293 
8294 	      /* ??? I'm lazy and don't wish to handle CONST_DOUBLE.  Other
8295 		 constants, such as SYMBOL_REF, cannot be extended.  */
8296 	      if (GET_CODE (this_rtx) != CONST_INT)
8297 		continue;
8298 
8299 	      this_val = INTVAL (this_rtx);
8300 	      switch (extend_op)
8301 		{
8302 		case ZERO_EXTEND:
8303 		  this_val &= GET_MODE_MASK (GET_MODE (src));
8304 		  break;
8305 		case SIGN_EXTEND:
8306 		  /* ??? In theory we're already extended.  */
8307 		  if (this_val == trunc_int_for_mode (this_val, GET_MODE (src)))
8308 		    break;
8309 		default:
8310 		  abort ();
8311 		}
8312 	      this_rtx = GEN_INT (this_val);
8313 	    }
8314 #endif
8315 	  this_cost = rtx_cost (this_rtx, SET);
8316 	}
8317       else if (GET_CODE (this_rtx) == REG)
8318 	{
8319 #ifdef LOAD_EXTEND_OP
8320 	  if (extend_op != NIL)
8321 	    {
8322 	      this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
8323 	      this_cost = rtx_cost (this_rtx, SET);
8324 	    }
8325 	  else
8326 #endif
8327 	    this_cost = REGISTER_MOVE_COST (GET_MODE (this_rtx),
8328 					    REGNO_REG_CLASS (REGNO (this_rtx)),
8329 					    dclass);
8330 	}
8331       else
8332 	continue;
8333 
8334       /* If equal costs, prefer registers over anything else.  That
8335 	 tends to lead to smaller instructions on some machines.  */
8336       if (this_cost < old_cost
8337 	  || (this_cost == old_cost
8338 	      && GET_CODE (this_rtx) == REG
8339 	      && GET_CODE (SET_SRC (set)) != REG))
8340 	{
8341 #ifdef LOAD_EXTEND_OP
8342 	  if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
8343 	      && extend_op != NIL
8344 #ifdef CANNOT_CHANGE_MODE_CLASS
8345 	      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
8346 					    word_mode,
8347 					    REGNO_REG_CLASS (REGNO (SET_DEST (set))))
8348 #endif
8349 	      )
8350 	    {
8351 	      rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
8352 	      ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
8353 	      validate_change (insn, &SET_DEST (set), wide_dest, 1);
8354 	    }
8355 #endif
8356 
8357 	  validate_change (insn, &SET_SRC (set), copy_rtx (this_rtx), 1);
8358 	  old_cost = this_cost, did_change = 1;
8359 	}
8360     }
8361 
8362   return did_change;
8363 }
8364 
8365 /* Try to replace operands in INSN with equivalent values that are already
8366    in registers.  This can be viewed as optional reloading.
8367 
8368    For each non-register operand in the insn, see if any hard regs are
8369    known to be equivalent to that operand.  Record the alternatives which
8370    can accept these hard registers.  Among all alternatives, select the
8371    ones which are better or equal to the one currently matching, where
8372    "better" is in terms of '?' and '!' constraints.  Among the remaining
8373    alternatives, select the one which replaces most operands with
8374    hard registers.  */
8375 
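/* Rough illustration of the scoring used below: every '?' in an
   alternative's constraints adds 3 to its reject value and every '!'
   adds 300, so for a hypothetical operand constraint "r,?m" the second
   alternative starts out penalized by 3 relative to the first, and an
   alternative is only substituted into when it is no worse than the one
   currently matching.  */
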
8376 static int
8377 reload_cse_simplify_operands (insn, testreg)
8378      rtx insn;
8379      rtx testreg;
8380 {
8381   int i, j;
8382 
8383   /* For each operand, all registers that are equivalent to it.  */
8384   HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];
8385 
8386   const char *constraints[MAX_RECOG_OPERANDS];
8387 
8388   /* Vector recording how bad an alternative is.  */
8389   int *alternative_reject;
8390   /* Vector recording how many registers can be introduced by choosing
8391      this alternative.  */
8392   int *alternative_nregs;
8393   /* Array of vectors recording, for each operand and each alternative,
8394      which hard register to substitute, or -1 if the operand should be
8395      left as it is.  */
8396   int *op_alt_regno[MAX_RECOG_OPERANDS];
8397   /* Array of alternatives, sorted in order of decreasing desirability.  */
8398   int *alternative_order;
8399 
8400   extract_insn (insn);
8401 
8402   if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
8403     return 0;
8404 
8405   /* Figure out which alternative currently matches.  */
8406   if (! constrain_operands (1))
8407     fatal_insn_not_found (insn);
8408 
8409   alternative_reject = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8410   alternative_nregs = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8411   alternative_order = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8412   memset ((char *) alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
8413   memset ((char *) alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));
8414 
8415   /* For each operand, find out which regs are equivalent.  */
8416   for (i = 0; i < recog_data.n_operands; i++)
8417     {
8418       cselib_val *v;
8419       struct elt_loc_list *l;
8420       rtx op;
8421       enum machine_mode mode;
8422 
8423       CLEAR_HARD_REG_SET (equiv_regs[i]);
8424 
8425       /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
8426 	 right, so avoid the problem here.  Likewise if we have a constant
8427          and the insn pattern doesn't tell us the mode we need.  */
8428       if (GET_CODE (recog_data.operand[i]) == CODE_LABEL
8429 	  || (CONSTANT_P (recog_data.operand[i])
8430 	      && recog_data.operand_mode[i] == VOIDmode))
8431 	continue;
8432 
8433       op = recog_data.operand[i];
8434       mode = GET_MODE (op);
8435 #ifdef LOAD_EXTEND_OP
8436       if (GET_CODE (op) == MEM
8437 	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
8438 	  && LOAD_EXTEND_OP (mode) != NIL)
8439 	{
8440 	  rtx set = single_set (insn);
8441 
8442 	  /* We might have multiple sets, some of which do implicit
8443 	     extension.  Punt on this for now.  */
8444 	  if (! set)
8445 	    continue;
8446 	  /* If the destination is also a MEM or a STRICT_LOW_PART, no
8447 	     extension applies.
8448 	     Also, if there is an explicit extension, we don't have to
8449 	     worry about an implicit one.  */
8450 	  else if (GET_CODE (SET_DEST (set)) == MEM
8451 		   || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
8452 		   || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
8453 		   || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
8454 	    ; /* Continue ordinary processing.  */
8455 	  /* If this is a straight load, make the extension explicit.  */
8456 	  else if (GET_CODE (SET_DEST (set)) == REG
8457 		   && recog_data.n_operands == 2
8458 		   && SET_SRC (set) == op
8459 		   && SET_DEST (set) == recog_data.operand[1-i])
8460 	    {
8461 	      validate_change (insn, recog_data.operand_loc[i],
8462 			       gen_rtx_fmt_e (LOAD_EXTEND_OP (mode),
8463 					      word_mode, op),
8464 			       1);
8465 	      validate_change (insn, recog_data.operand_loc[1-i],
8466 			       gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
8467 			       1);
8468 	      if (!apply_change_group ())
8469 		return 0;
8470 	      return reload_cse_simplify_operands (insn, testreg);
8471 	    }
8472 	  else
8473 	    /* ??? There might be arithmetic operations with memory that are
8474 	       safe to optimize, but is it worth the trouble?  */
8475 	    continue;
8476 	}
8477 #endif /* LOAD_EXTEND_OP */
8478       v = cselib_lookup (op, recog_data.operand_mode[i], 0);
8479       if (! v)
8480 	continue;
8481 
8482       for (l = v->locs; l; l = l->next)
8483 	if (GET_CODE (l->loc) == REG)
8484 	  SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
8485     }
8486 
8487   for (i = 0; i < recog_data.n_operands; i++)
8488     {
8489       enum machine_mode mode;
8490       int regno;
8491       const char *p;
8492 
8493       op_alt_regno[i] = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8494       for (j = 0; j < recog_data.n_alternatives; j++)
8495 	op_alt_regno[i][j] = -1;
8496 
8497       p = constraints[i] = recog_data.constraints[i];
8498       mode = recog_data.operand_mode[i];
8499 
8500       /* Add the reject values for each alternative given by the constraints
8501 	 for this operand.  */
8502       j = 0;
8503       while (*p != '\0')
8504 	{
8505 	  char c = *p++;
8506 	  if (c == ',')
8507 	    j++;
8508 	  else if (c == '?')
8509 	    alternative_reject[j] += 3;
8510 	  else if (c == '!')
8511 	    alternative_reject[j] += 300;
8512 	}
8513 
8514       /* We won't change operands which are already registers.  We
8515 	 also don't want to modify output operands.  */
8516       regno = true_regnum (recog_data.operand[i]);
8517       if (regno >= 0
8518 	  || constraints[i][0] == '='
8519 	  || constraints[i][0] == '+')
8520 	continue;
8521 
8522       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8523 	{
8524 	  int class = (int) NO_REGS;
8525 
8526 	  if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
8527 	    continue;
8528 
8529 	  REGNO (testreg) = regno;
8530 	  PUT_MODE (testreg, mode);
8531 
8532 	  /* We found a register equal to this operand.  Now look for all
8533 	     alternatives that can accept this register and have not been
8534 	     assigned a register they can use yet.  */
8535 	  j = 0;
8536 	  p = constraints[i];
8537 	  for (;;)
8538 	    {
8539 	      char c = *p++;
8540 
8541 	      switch (c)
8542 		{
8543 		case '=':  case '+':  case '?':
8544 		case '#':  case '&':  case '!':
8545 		case '*':  case '%':
8546 		case '0':  case '1':  case '2':  case '3':  case '4':
8547 		case '5':  case '6':  case '7':  case '8':  case '9':
8548 		case 'm':  case '<':  case '>':  case 'V':  case 'o':
8549 		case 'E':  case 'F':  case 'G':  case 'H':
8550 		case 's':  case 'i':  case 'n':
8551 		case 'I':  case 'J':  case 'K':  case 'L':
8552 		case 'M':  case 'N':  case 'O':  case 'P':
8553 		case 'p': case 'X':
8554 		  /* These don't say anything we care about.  */
8555 		  break;
8556 
8557 		case 'g': case 'r':
8558 		  class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
8559 		  break;
8560 
8561 		default:
8562 		  class
8563 		    = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
8564 		  break;
8565 
8566 		case ',': case '\0':
8567 		  /* See if REGNO fits this alternative, and set it up as the
8568 		     replacement register if we don't have one for this
8569 		     alternative yet and the operand being replaced is not
8570 		     a cheap CONST_INT.  */
8571 		  if (op_alt_regno[i][j] == -1
8572 		      && reg_fits_class_p (testreg, class, 0, mode)
8573 		      && (GET_CODE (recog_data.operand[i]) != CONST_INT
8574 			  || (rtx_cost (recog_data.operand[i], SET)
8575 			      > rtx_cost (testreg, SET))))
8576 		    {
8577 		      alternative_nregs[j]++;
8578 		      op_alt_regno[i][j] = regno;
8579 		    }
8580 		  j++;
8581 		  break;
8582 		}
8583 
8584 	      if (c == '\0')
8585 		break;
8586 	    }
8587 	}
8588     }
8589 
8590   /* Record all alternatives which are better or equal to the currently
8591      matching one in the alternative_order array.  */
8592   for (i = j = 0; i < recog_data.n_alternatives; i++)
8593     if (alternative_reject[i] <= alternative_reject[which_alternative])
8594       alternative_order[j++] = i;
8595   recog_data.n_alternatives = j;
8596 
8597   /* Sort it.  Given a small number of alternatives, a dumb algorithm
8598      won't hurt too much.  */
8599   for (i = 0; i < recog_data.n_alternatives - 1; i++)
8600     {
8601       int best = i;
8602       int best_reject = alternative_reject[alternative_order[i]];
8603       int best_nregs = alternative_nregs[alternative_order[i]];
8604       int tmp;
8605 
8606       for (j = i + 1; j < recog_data.n_alternatives; j++)
8607 	{
8608 	  int this_reject = alternative_reject[alternative_order[j]];
8609 	  int this_nregs = alternative_nregs[alternative_order[j]];
8610 
8611 	  if (this_reject < best_reject
8612 	      || (this_reject == best_reject && this_nregs < best_nregs))
8613 	    {
8614 	      best = j;
8615 	      best_reject = this_reject;
8616 	      best_nregs = this_nregs;
8617 	    }
8618 	}
8619 
8620       tmp = alternative_order[best];
8621       alternative_order[best] = alternative_order[i];
8622       alternative_order[i] = tmp;
8623     }
8624 
8625   /* Substitute the operands as determined by op_alt_regno for the best
8626      alternative.  */
8627   j = alternative_order[0];
8628 
8629   for (i = 0; i < recog_data.n_operands; i++)
8630     {
8631       enum machine_mode mode = recog_data.operand_mode[i];
8632       if (op_alt_regno[i][j] == -1)
8633 	continue;
8634 
8635       validate_change (insn, recog_data.operand_loc[i],
8636 		       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
8637     }
8638 
8639   for (i = recog_data.n_dups - 1; i >= 0; i--)
8640     {
8641       int op = recog_data.dup_num[i];
8642       enum machine_mode mode = recog_data.operand_mode[op];
8643 
8644       if (op_alt_regno[op][j] == -1)
8645 	continue;
8646 
8647       validate_change (insn, recog_data.dup_loc[i],
8648 		       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
8649     }
8650 
8651   return apply_change_group ();
8652 }
8653 
8654 /* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
8655    addressing now.
8656    This code might also be useful when reload gave up on reg+reg addressing
8657    because of clashes between the return register and INDEX_REG_CLASS.  */
8658 
8659 /* The maximum number of uses of a register we can keep track of to
8660    replace them with reg+reg addressing.  */
8661 #define RELOAD_COMBINE_MAX_USES 6
8662 
8663 /* INSN is the insn where a register has been used, and USEP points to the
8664    location of the register within the rtl.  */
8665 struct reg_use { rtx insn, *usep; };
8666 
8667 /* If the register is used in some unknown fashion, USE_INDEX is negative.
8668    If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
8669    indicates where it becomes live again.
8670    Otherwise, USE_INDEX is the index of the last encountered use of the
8671    register (which is first among these we have seen since we scan backwards),
8672    OFFSET contains the constant offset that is added to the register in
8673    all encountered uses, and USE_RUID indicates the first encountered, i.e.
8674    last, of these uses.
8675    STORE_RUID is always meaningful if we only want to use a value in a
8676    register in a different place: it denotes the next insn in the insn
8677    stream (i.e. the last encountered) that sets or clobbers the register.  */
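/* For instance, with a hypothetical register number: when the backward
   scan meets a use such as

       ... (mem:SI (plus:SI (reg:SI 4) (const_int 8))) ...

   the location of that use is recorded in reg_state[4].reg_use, the
   (const_int 8) becomes the common OFFSET, and USE_RUID is set from the
   current ruid, as long as register 4 is still being tracked.  */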
8678 static struct
8679   {
8680     struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
8681     int use_index;
8682     rtx offset;
8683     int store_ruid;
8684     int use_ruid;
8685   } reg_state[FIRST_PSEUDO_REGISTER];
8686 
8687 /* Reverse linear uid.  This is increased in reload_combine while scanning
8688    the instructions from last to first.  It is used to set last_label_ruid
8689    and the store_ruid / use_ruid fields in reg_state.  */
8690 static int reload_combine_ruid;
8691 
8692 #define LABEL_LIVE(LABEL) \
8693   (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
8694 
8695 static void
8696 reload_combine ()
8697 {
8698   rtx insn, set;
8699   int first_index_reg = -1;
8700   int last_index_reg = 0;
8701   int i;
8702   basic_block bb;
8703   unsigned int r;
8704   int last_label_ruid;
8705   int min_labelno, n_labels;
8706   HARD_REG_SET ever_live_at_start, *label_live;
8707 
8708   /* If reg+reg can be used in offsetable memory addresses, the main chunk of
8709      reload has already used it where appropriate, so there is no use in
8710      trying to generate it now.  */
8711   if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
8712     return;
8713 
8714   /* To avoid wasting too much time later searching for an index register,
8715      determine the minimum and maximum index register numbers.  */
8716   for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8717     if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
8718       {
8719 	if (first_index_reg == -1)
8720 	  first_index_reg = r;
8721 
8722 	last_index_reg = r;
8723       }
8724 
8725   /* If no index register is available, we can quit now.  */
8726   if (first_index_reg == -1)
8727     return;
8728 
8729   /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
8730      information is a bit fuzzy immediately after reload, but it's
8731      still good enough to determine which registers are live at a jump
8732      destination.  */
8733   min_labelno = get_first_label_num ();
8734   n_labels = max_label_num () - min_labelno;
8735   label_live = (HARD_REG_SET *) xmalloc (n_labels * sizeof (HARD_REG_SET));
8736   CLEAR_HARD_REG_SET (ever_live_at_start);
8737 
8738   FOR_EACH_BB_REVERSE (bb)
8739     {
8740       insn = bb->head;
8741       if (GET_CODE (insn) == CODE_LABEL)
8742 	{
8743 	  HARD_REG_SET live;
8744 
8745 	  REG_SET_TO_HARD_REG_SET (live,
8746 				   bb->global_live_at_start);
8747 	  compute_use_by_pseudos (&live,
8748 				  bb->global_live_at_start);
8749 	  COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
8750 	  IOR_HARD_REG_SET (ever_live_at_start, live);
8751 	}
8752     }
8753 
8754   /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
8755   last_label_ruid = reload_combine_ruid = 0;
8756   for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8757     {
8758       reg_state[r].store_ruid = reload_combine_ruid;
8759       if (fixed_regs[r])
8760 	reg_state[r].use_index = -1;
8761       else
8762 	reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
8763     }
8764 
8765   for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
8766     {
8767       rtx note;
8768 
8769       /* We cannot do our optimization across labels.  Invalidating all the use
8770 	 information we have would be costly, so we just note where the label
8771 	 is and then later disable any optimization that would cross it.  */
8772       if (GET_CODE (insn) == CODE_LABEL)
8773 	last_label_ruid = reload_combine_ruid;
8774       else if (GET_CODE (insn) == BARRIER)
8775 	for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8776 	  if (! fixed_regs[r])
8777 	      reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
8778 
8779       if (! INSN_P (insn))
8780 	continue;
8781 
8782       reload_combine_ruid++;
8783 
8784       /* Look for (set (REGX) (CONST_INT))
8785 	 (set (REGX) (PLUS (REGX) (REGY)))
8786 	 ...
8787 	 ... (MEM (REGX)) ...
8788 	 and convert it to
8789 	 (set (REGZ) (CONST_INT))
8790 	 ...
8791 	 ... (MEM (PLUS (REGZ) (REGY)))... .
8792 
8793 	 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
8794 	 and that we know all uses of REGX before it dies.  */
8795       set = single_set (insn);
8796       if (set != NULL_RTX
8797 	  && GET_CODE (SET_DEST (set)) == REG
8798 	  && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
8799 				GET_MODE (SET_DEST (set)))
8800 	      == 1)
8801 	  && GET_CODE (SET_SRC (set)) == PLUS
8802 	  && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
8803 	  && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
8804 	  && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
8805 	{
8806 	  rtx reg = SET_DEST (set);
8807 	  rtx plus = SET_SRC (set);
8808 	  rtx base = XEXP (plus, 1);
8809 	  rtx prev = prev_nonnote_insn (insn);
8810 	  rtx prev_set = prev ? single_set (prev) : NULL_RTX;
8811 	  unsigned int regno = REGNO (reg);
8812 	  rtx const_reg = NULL_RTX;
8813 	  rtx reg_sum = NULL_RTX;
8814 
8815 	  /* Now, we need an index register.
8816 	     We'll set index_reg to this index register, const_reg to the
8817 	     register that is to be loaded with the constant
8818 	     (denoted as REGZ in the substitution illustration above),
8819 	     and reg_sum to the register-register that we want to use to
8820 	     substitute uses of REG (typically in MEMs) with.
8821 	     First check REG and BASE for being index registers;
8822 	     we can use them even if they are not dead.  */
8823 	  if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
8824 	      || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
8825 				    REGNO (base)))
8826 	    {
8827 	      const_reg = reg;
8828 	      reg_sum = plus;
8829 	    }
8830 	  else
8831 	    {
8832 	      /* Otherwise, look for a free index register.  Since we have
8833 		 checked above that neither REG nor BASE are index registers,
8834 		 if we find anything at all, it will be different from these
8835 		 two registers.  */
8836 	      for (i = first_index_reg; i <= last_index_reg; i++)
8837 		{
8838 		  if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
8839 					 i)
8840 		      && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
8841 		      && reg_state[i].store_ruid <= reg_state[regno].use_ruid
8842 		      && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
8843 		    {
8844 		      rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
8845 
8846 		      const_reg = index_reg;
8847 		      reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
8848 		      break;
8849 		    }
8850 		}
8851 	    }
8852 
8853 	  /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
8854 	     (REGY), i.e. BASE, is not clobbered before the last use we'll
8855 	     create.  */
8856 	  if (prev_set != 0
8857 	      && GET_CODE (SET_SRC (prev_set)) == CONST_INT
8858 	      && rtx_equal_p (SET_DEST (prev_set), reg)
8859 	      && reg_state[regno].use_index >= 0
8860 	      && (reg_state[REGNO (base)].store_ruid
8861 		  <= reg_state[regno].use_ruid)
8862 	      && reg_sum != 0)
8863 	    {
8864 	      int i;
8865 
8866 	      /* Change destination register and, if necessary, the
8867 		 constant value in PREV, the constant loading instruction.  */
8868 	      validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
8869 	      if (reg_state[regno].offset != const0_rtx)
8870 		validate_change (prev,
8871 				 &SET_SRC (prev_set),
8872 				 GEN_INT (INTVAL (SET_SRC (prev_set))
8873 					  + INTVAL (reg_state[regno].offset)),
8874 				 1);
8875 
8876 	      /* Now for every use of REG that we have recorded, replace REG
8877 		 with REG_SUM.  */
8878 	      for (i = reg_state[regno].use_index;
8879 		   i < RELOAD_COMBINE_MAX_USES; i++)
8880 		validate_change (reg_state[regno].reg_use[i].insn,
8881 				 reg_state[regno].reg_use[i].usep,
8882 				 /* Each change must have its own
8883 				    replacement.  */
8884 				 copy_rtx (reg_sum), 1);
8885 
8886 	      if (apply_change_group ())
8887 		{
8888 		  rtx *np;
8889 
8890 		  /* Delete the reg-reg addition.  */
8891 		  delete_insn (insn);
8892 
8893 		  if (reg_state[regno].offset != const0_rtx)
8894 		    /* Previous REG_EQUIV / REG_EQUAL notes for PREV
8895 		       are now invalid.  */
8896 		    for (np = &REG_NOTES (prev); *np;)
8897 		      {
8898 			if (REG_NOTE_KIND (*np) == REG_EQUAL
8899 			    || REG_NOTE_KIND (*np) == REG_EQUIV)
8900 			  *np = XEXP (*np, 1);
8901 			else
8902 			  np = &XEXP (*np, 1);
8903 		      }
8904 
8905 		  reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
8906 		  reg_state[REGNO (const_reg)].store_ruid
8907 		    = reload_combine_ruid;
8908 		  continue;
8909 		}
8910 	    }
8911 	}
8912 
8913       note_stores (PATTERN (insn), reload_combine_note_store, NULL);
8914 
8915       if (GET_CODE (insn) == CALL_INSN)
8916 	{
8917 	  rtx link;
8918 
8919 	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8920 	    if (call_used_regs[r])
8921 	      {
8922 		reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
8923 		reg_state[r].store_ruid = reload_combine_ruid;
8924 	      }
8925 
8926 	  for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
8927 	       link = XEXP (link, 1))
8928 	    {
8929 	      rtx usage_rtx = XEXP (XEXP (link, 0), 0);
8930 	      if (GET_CODE (usage_rtx) == REG)
8931 	        {
8932 		  unsigned int i;
8933 		  unsigned int start_reg = REGNO (usage_rtx);
8934 		  unsigned int num_regs =
8935 			HARD_REGNO_NREGS (start_reg, GET_MODE (usage_rtx));
8936 		  unsigned int end_reg  = start_reg + num_regs - 1;
8937 		  for (i = start_reg; i <= end_reg; i++)
8938 		    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
8939 		      {
8940 		        reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
8941 		        reg_state[i].store_ruid = reload_combine_ruid;
8942 		      }
8943 		    else
8944 		      reg_state[i].use_index = -1;
8945 	         }
8946 	     }
8947 
8948 	}
8949       else if (GET_CODE (insn) == JUMP_INSN
8950 	       && GET_CODE (PATTERN (insn)) != RETURN)
8951 	{
8952 	  /* Non-spill registers might be used at the jump destination in
8953 	     some unknown fashion, so we have to mark the unknown use.  */
8954 	  HARD_REG_SET *live;
8955 
8956 	  if ((condjump_p (insn) || condjump_in_parallel_p (insn))
8957 	      && JUMP_LABEL (insn))
8958 	    live = &LABEL_LIVE (JUMP_LABEL (insn));
8959 	  else
8960 	    live = &ever_live_at_start;
8961 
8962 	  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
8963 	    if (TEST_HARD_REG_BIT (*live, i))
8964 	      reg_state[i].use_index = -1;
8965 	}
8966 
8967       reload_combine_note_use (&PATTERN (insn), insn);
8968       for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
8969 	{
8970 	  if (REG_NOTE_KIND (note) == REG_INC
8971 	      && GET_CODE (XEXP (note, 0)) == REG)
8972 	    {
8973 	      int regno = REGNO (XEXP (note, 0));
8974 
8975 	      reg_state[regno].store_ruid = reload_combine_ruid;
8976 	      reg_state[regno].use_index = -1;
8977 	    }
8978 	}
8979     }
8980 
8981   free (label_live);
8982 }
8983 
8984 /* Check if DST is a register or a subreg of a register; if it is,
8985    update reg_state[regno].store_ruid and reg_state[regno].use_index
8986    accordingly.  Called via note_stores from reload_combine.  */
8987 
8988 static void
8989 reload_combine_note_store (dst, set, data)
8990      rtx dst, set;
8991      void *data ATTRIBUTE_UNUSED;
8992 {
8993   int regno = 0;
8994   int i;
8995   enum machine_mode mode = GET_MODE (dst);
8996 
8997   if (GET_CODE (dst) == SUBREG)
8998     {
8999       regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
9000 				   GET_MODE (SUBREG_REG (dst)),
9001 				   SUBREG_BYTE (dst),
9002 				   GET_MODE (dst));
9003       dst = SUBREG_REG (dst);
9004     }
9005   if (GET_CODE (dst) != REG)
9006     return;
9007   regno += REGNO (dst);
9008 
9009   /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
9010      careful with registers / register parts that are not full words.
9011 
9012      Similarly for ZERO_EXTRACT and SIGN_EXTRACT.  */
9013   if (GET_CODE (set) != SET
9014       || GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
9015       || GET_CODE (SET_DEST (set)) == SIGN_EXTRACT
9016       || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
9017     {
9018       for (i = HARD_REGNO_NREGS (regno, mode) - 1 + regno; i >= regno; i--)
9019 	{
9020 	  reg_state[i].use_index = -1;
9021 	  reg_state[i].store_ruid = reload_combine_ruid;
9022 	}
9023     }
9024   else
9025     {
9026       for (i = HARD_REGNO_NREGS (regno, mode) - 1 + regno; i >= regno; i--)
9027 	{
9028 	  reg_state[i].store_ruid = reload_combine_ruid;
9029 	  reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9030 	}
9031     }
9032 }
9033 
9034 /* XP points to a piece of rtl that has to be checked for any uses of
9035    registers.
9036    *XP is the pattern of INSN, or a part of it.
9037    Called from reload_combine, and recursively by itself.  */
9038 static void
9039 reload_combine_note_use (xp, insn)
9040      rtx *xp, insn;
9041 {
9042   rtx x = *xp;
9043   enum rtx_code code = x->code;
9044   const char *fmt;
9045   int i, j;
9046   rtx offset = const0_rtx; /* For the REG case below.  */
9047 
9048   switch (code)
9049     {
9050     case SET:
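      /* A store into a bare REG is not a use; scan only the source.
	 Any other destination (a MEM, for instance) may contain address
	 uses, so fall through to the generic scan below.  */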
9051       if (GET_CODE (SET_DEST (x)) == REG)
9052 	{
9053 	  reload_combine_note_use (&SET_SRC (x), insn);
9054 	  return;
9055 	}
9056       break;
9057 
9058     case USE:
9059       /* If this is the USE of a return value, we can't change it.  */
9060       if (GET_CODE (XEXP (x, 0)) == REG && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
9061 	{
9062 	  /* Mark the return register as used in an unknown fashion.  */
9063 	  rtx reg = XEXP (x, 0);
9064 	  int regno = REGNO (reg);
9065 	  int nregs = HARD_REGNO_NREGS (regno, GET_MODE (reg));
9066 
9067 	  while (--nregs >= 0)
9068 	    reg_state[regno + nregs].use_index = -1;
9069 	  return;
9070 	}
9071       break;
9072 
9073     case CLOBBER:
9074       if (GET_CODE (SET_DEST (x)) == REG)
9075 	{
9076 	  /* No spurious CLOBBERs of pseudo registers may remain.  */
9077 	  if (REGNO (SET_DEST (x)) >= FIRST_PSEUDO_REGISTER)
9078 	    abort ();
9079 	  return;
9080 	}
9081       break;
9082 
9083     case PLUS:
9084       /* We are interested in (plus (reg) (const_int)) .  */
9085       if (GET_CODE (XEXP (x, 0)) != REG
9086 	  || GET_CODE (XEXP (x, 1)) != CONST_INT)
9087 	break;
9088       offset = XEXP (x, 1);
9089       x = XEXP (x, 0);
9090       /* Fall through.  */
9091     case REG:
9092       {
9093 	int regno = REGNO (x);
9094 	int use_index;
9095 	int nregs;
9096 
9097 	/* No spurious USEs of pseudo registers may remain.  */
9098 	if (regno >= FIRST_PSEUDO_REGISTER)
9099 	  abort ();
9100 
9101 	nregs = HARD_REGNO_NREGS (regno, GET_MODE (x));
9102 
9103 	/* We can't substitute into multi-hard-reg uses.  */
9104 	if (nregs > 1)
9105 	  {
9106 	    while (--nregs >= 0)
9107 	      reg_state[regno + nregs].use_index = -1;
9108 	    return;
9109 	  }
9110 
9111 	/* If this register is already used in some unknown fashion, we
9112 	   can't do anything.
9113 	   If we decrement the index from zero to -1, we can't store more
9114 	   uses, so this register becomes used in an unknown fashion.  */
9115 	use_index = --reg_state[regno].use_index;
9116 	if (use_index < 0)
9117 	  return;
9118 
9119 	if (use_index != RELOAD_COMBINE_MAX_USES - 1)
9120 	  {
9121 	    /* We have found another use for a register that is already
9122 	       used later.  Check if the offsets match; if not, mark the
9123 	       register as used in an unknown fashion.  */
9124 	    if (! rtx_equal_p (offset, reg_state[regno].offset))
9125 	      {
9126 		reg_state[regno].use_index = -1;
9127 		return;
9128 	      }
9129 	  }
9130 	else
9131 	  {
9132 	    /* This is the first use of this register we have seen since we
9133 	       marked it as dead.  */
9134 	    reg_state[regno].offset = offset;
9135 	    reg_state[regno].use_ruid = reload_combine_ruid;
9136 	  }
9137 	reg_state[regno].reg_use[use_index].insn = insn;
9138 	reg_state[regno].reg_use[use_index].usep = xp;
9139 	return;
9140       }
9141 
9142     default:
9143       break;
9144     }
9145 
9146   /* Recursively process the components of X.  */
9147   fmt = GET_RTX_FORMAT (code);
9148   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9149     {
9150       if (fmt[i] == 'e')
9151 	reload_combine_note_use (&XEXP (x, i), insn);
9152       else if (fmt[i] == 'E')
9153 	{
9154 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9155 	    reload_combine_note_use (&XVECEXP (x, i, j), insn);
9156 	}
9157     }
9158 }
9159 
9160 /* See if we can reduce the cost of a constant by replacing a move
9161    with an add.  We track situations in which a register is set to a
9162    constant or to a register plus a constant.  */
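/* For instance (an illustrative sketch only; the register number and
   mode are arbitrary):
       (set (reg:SI 0) (const_int 100))
       ...
       (set (reg:SI 0) (const_int 104))
   may become
       (set (reg:SI 0) (const_int 100))
       ...
       (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 4)))
   when the add is cheaper than loading the second constant.  */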
9163 /* We cannot do our optimization across labels.  Invalidating all the
9164    information about register contents we have would be costly, so we
9165    use move2add_last_label_luid to note where the label is and then
9166    later disable any optimization that would cross it.
9167    reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
9168    reg_set_luid[n] is greater than move2add_last_label_luid.  */
9169 static int reg_set_luid[FIRST_PSEUDO_REGISTER];
9170 
9171 /* If reg_base_reg[n] is negative, register n has been set to
9172    reg_offset[n] in mode reg_mode[n] .
9173    If reg_base_reg[n] is non-negative, register n has been set to the
9174    sum of reg_offset[n] and the value of register reg_base_reg[n]
9175    before reg_set_luid[n], calculated in mode reg_mode[n] .  */
9176 static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
9177 static int reg_base_reg[FIRST_PSEUDO_REGISTER];
9178 static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
9179 
9180 /* move2add_luid is linearly increased while scanning the instructions
9181    from first to last.  It is used to set reg_set_luid in
9182    reload_cse_move2add and move2add_note_store.  */
9183 static int move2add_luid;
9184 
9185 /* move2add_last_label_luid is set whenever a label is found.  Labels
9186    invalidate all previously collected reg_offset data.  */
9187 static int move2add_last_label_luid;
9188 
9189 /* Generate a CONST_INT and force it in the range of MODE.  */
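/* For example, assuming an 8-bit MODE narrower than HOST_WIDE_INT, a
   VALUE of 0x1ff is masked down to 0xff and, since its sign bit is set,
   sign extended to (HOST_WIDE_INT) -1.  */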
9190 
9191 static HOST_WIDE_INT
9192 sext_for_mode (mode, value)
9193      enum machine_mode mode;
9194      HOST_WIDE_INT value;
9195 {
9196   HOST_WIDE_INT cval = value & GET_MODE_MASK (mode);
9197   int width = GET_MODE_BITSIZE (mode);
9198 
9199   /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative number,
9200      sign extend it.  */
9201   if (width > 0 && width < HOST_BITS_PER_WIDE_INT
9202       && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
9203     cval |= (HOST_WIDE_INT) -1 << width;
9204 
9205   return cval;
9206 }
9207 
9208 /* ??? We don't know how zero / sign extension is handled, hence we
9209    can't go from a narrower to a wider mode.  */
9210 #define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
9211   (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
9212    || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
9213        && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (OUTMODE), \
9214 				 GET_MODE_BITSIZE (INMODE))))
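/* For example, on a hypothetical target where truncation from SImode to
   HImode is a no-op, an HImode access may reuse information recorded in
   SImode (OUTMODE narrower than INMODE), but an SImode access may not
   reuse HImode information, since the macro rejects any OUTMODE wider
   than INMODE.  */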
9215 
9216 static void
9217 reload_cse_move2add (first)
9218      rtx first;
9219 {
9220   int i;
9221   rtx insn;
9222 
9223   for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
9224     reg_set_luid[i] = 0;
9225 
9226   move2add_last_label_luid = 0;
9227   move2add_luid = 2;
9228   for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
9229     {
9230       rtx pat, note;
9231 
9232       if (GET_CODE (insn) == CODE_LABEL)
9233 	{
9234 	  move2add_last_label_luid = move2add_luid;
9235 	  /* We're going to increment move2add_luid twice after a
9236 	     label, so that we can use move2add_last_label_luid + 1 as
9237 	     the luid for constants.  */
9238 	  move2add_luid++;
9239 	  continue;
9240 	}
9241       if (! INSN_P (insn))
9242 	continue;
9243       pat = PATTERN (insn);
9244       /* For simplicity, we only perform this optimization on
9245 	 straightforward SETs.  */
9246       if (GET_CODE (pat) == SET
9247 	  && GET_CODE (SET_DEST (pat)) == REG)
9248 	{
9249 	  rtx reg = SET_DEST (pat);
9250 	  int regno = REGNO (reg);
9251 	  rtx src = SET_SRC (pat);
9252 
9253 	  /* Check if we have valid information on the contents of this
9254 	     register in the mode of REG.  */
9255 	  if (reg_set_luid[regno] > move2add_last_label_luid
9256 	      && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg), reg_mode[regno]))
9257 	    {
9258 	      /* Try to transform (set (REGX) (CONST_INT A))
9259 				  ...
9260 				  (set (REGX) (CONST_INT B))
9261 		 to
9262 				  (set (REGX) (CONST_INT A))
9263 				  ...
9264 				  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
9265 
9266 	      if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
9267 		{
9268 		  int success = 0;
9269 		  rtx new_src = GEN_INT (sext_for_mode (GET_MODE (reg),
9270 							INTVAL (src)
9271 							- reg_offset[regno]));
9272 		  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
9273 		     use (set (reg) (reg)) instead.
9274 		     We don't delete this insn, nor do we convert it into a
9275 		     note, to avoid losing register notes or the return
9276 		     value flag.  jump2 already knows how to get rid of
9277 		     no-op moves.  */
9278 		  if (new_src == const0_rtx)
9279 		    success = validate_change (insn, &SET_SRC (pat), reg, 0);
9280 		  else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
9281 			   && have_add2_insn (reg, new_src))
9282 		    success = validate_change (insn, &PATTERN (insn),
9283 					       gen_add2_insn (reg, new_src), 0);
9284 		  reg_set_luid[regno] = move2add_luid;
9285 		  reg_mode[regno] = GET_MODE (reg);
9286 		  reg_offset[regno] = INTVAL (src);
9287 		  continue;
9288 		}
9289 
9290 	      /* Try to transform (set (REGX) (REGY))
9291 				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
9292 				  ...
9293 				  (set (REGX) (REGY))
9294 				  (set (REGX) (PLUS (REGX) (CONST_INT B)))
9295 		 to
9296 				  (set (REGX) (REGY))
9297 				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
9298 				  ...
9299 				  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
9300 	      else if (GET_CODE (src) == REG
9301 		       && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
9302 		       && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
9303 		       && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg),
9304 						 reg_mode[REGNO (src)]))
9305 		{
9306 		  rtx next = next_nonnote_insn (insn);
9307 		  rtx set = NULL_RTX;
9308 		  if (next)
9309 		    set = single_set (next);
9310 		  if (set
9311 		      && SET_DEST (set) == reg
9312 		      && GET_CODE (SET_SRC (set)) == PLUS
9313 		      && XEXP (SET_SRC (set), 0) == reg
9314 		      && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
9315 		    {
9316 		      rtx src3 = XEXP (SET_SRC (set), 1);
9317 		      HOST_WIDE_INT added_offset = INTVAL (src3);
9318 		      HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
9319 		      HOST_WIDE_INT regno_offset = reg_offset[regno];
9320 		      rtx new_src = GEN_INT (sext_for_mode (GET_MODE (reg),
9321 							    added_offset
9322 							    + base_offset
9323 							    - regno_offset));
9324 		      int success = 0;
9325 
9326 		      if (new_src == const0_rtx)
9327 			/* See above why we create (set (reg) (reg)) here.  */
9328 			success
9329 			  = validate_change (next, &SET_SRC (set), reg, 0);
9330 		      else if ((rtx_cost (new_src, PLUS)
9331 				< COSTS_N_INSNS (1) + rtx_cost (src3, SET))
9332 			       && have_add2_insn (reg, new_src))
9333 			success
9334 			  = validate_change (next, &PATTERN (next),
9335 					     gen_add2_insn (reg, new_src), 0);
9336 		      if (success)
9337 			delete_insn (insn);
9338 		      insn = next;
9339 		      reg_mode[regno] = GET_MODE (reg);
9340 		      reg_offset[regno] = sext_for_mode (GET_MODE (reg),
9341 							 added_offset
9342 							 + base_offset);
9343 		      continue;
9344 		    }
9345 		}
9346 	    }
9347 	}
9348 
9349       for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
9350 	{
9351 	  if (REG_NOTE_KIND (note) == REG_INC
9352 	      && GET_CODE (XEXP (note, 0)) == REG)
9353 	    {
9354 	      /* Reset the information about this register.  */
9355 	      int regno = REGNO (XEXP (note, 0));
9356 	      if (regno < FIRST_PSEUDO_REGISTER)
9357 		reg_set_luid[regno] = 0;
9358 	    }
9359 	}
9360       note_stores (PATTERN (insn), move2add_note_store, NULL);
9361       /* If this is a CALL_INSN, all call used registers are stored with
9362 	 unknown values.  */
9363       if (GET_CODE (insn) == CALL_INSN)
9364 	{
9365 	  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
9366 	    {
9367 	      if (call_used_regs[i])
9368 		/* Reset the information about this register.  */
9369 		reg_set_luid[i] = 0;
9370 	    }
9371 	}
9372     }
9373 }
9374 
9375 /* SET is a SET or CLOBBER that sets DST.
9376    Update reg_set_luid, reg_offset and reg_base_reg accordingly.
9377    Called from reload_cse_move2add via note_stores.  */
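/* For example (hypothetical hard register numbers; SImode is assumed to
   occupy a single hard register), when nothing is yet known about
   register 1, processing
       (set (reg:SI 0) (plus:SI (reg:SI 1) (const_int 4)))
   first records reg_base_reg[1] = 1 with reg_offset[1] = 0, and then
   reg_base_reg[0] = 1 with reg_offset[0] = 4.  */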
9378 
9379 static void
9380 move2add_note_store (dst, set, data)
9381      rtx dst, set;
9382      void *data ATTRIBUTE_UNUSED;
9383 {
9384   unsigned int regno = 0;
9385   unsigned int i;
9386   enum machine_mode mode = GET_MODE (dst);
9387 
9388   if (GET_CODE (dst) == SUBREG)
9389     {
9390       regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
9391 				   GET_MODE (SUBREG_REG (dst)),
9392 				   SUBREG_BYTE (dst),
9393 				   GET_MODE (dst));
9394       dst = SUBREG_REG (dst);
9395     }
9396 
9397   /* Some targets do argument pushes without adding REG_INC notes.  */
9398 
9399   if (GET_CODE (dst) == MEM)
9400     {
9401       dst = XEXP (dst, 0);
9402       if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
9403 	  || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
9404 	reg_set_luid[REGNO (XEXP (dst, 0))] = 0;
9405       return;
9406     }
9407   if (GET_CODE (dst) != REG)
9408     return;
9409 
9410   regno += REGNO (dst);
9411 
9412   if (HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET
9413       && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
9414       && GET_CODE (SET_DEST (set)) != SIGN_EXTRACT
9415       && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
9416     {
9417       rtx src = SET_SRC (set);
9418       rtx base_reg;
9419       HOST_WIDE_INT offset;
9420       int base_regno;
9421       /* This may be different from mode, if SET_DEST (set) is a
9422 	 SUBREG.  */
9423       enum machine_mode dst_mode = GET_MODE (dst);
9424 
9425       switch (GET_CODE (src))
9426 	{
9427 	case PLUS:
9428 	  if (GET_CODE (XEXP (src, 0)) == REG)
9429 	    {
9430 	      base_reg = XEXP (src, 0);
9431 
9432 	      if (GET_CODE (XEXP (src, 1)) == CONST_INT)
9433 		offset = INTVAL (XEXP (src, 1));
9434 	      else if (GET_CODE (XEXP (src, 1)) == REG
9435 		       && (reg_set_luid[REGNO (XEXP (src, 1))]
9436 			   > move2add_last_label_luid)
9437 		       && (MODES_OK_FOR_MOVE2ADD
9438 			   (dst_mode, reg_mode[REGNO (XEXP (src, 1))])))
9439 		{
9440 		  if (reg_base_reg[REGNO (XEXP (src, 1))] < 0)
9441 		    offset = reg_offset[REGNO (XEXP (src, 1))];
9442 		  /* Maybe the first register is known to be a
9443 		     constant.  */
9444 		  else if (reg_set_luid[REGNO (base_reg)]
9445 			   > move2add_last_label_luid
9446 			   && (MODES_OK_FOR_MOVE2ADD
9447 			       (dst_mode, reg_mode[REGNO (XEXP (src, 1))]))
9448 			   && reg_base_reg[REGNO (base_reg)] < 0)
9449 		    {
9450 		      offset = reg_offset[REGNO (base_reg)];
9451 		      base_reg = XEXP (src, 1);
9452 		    }
9453 		  else
9454 		    goto invalidate;
9455 		}
9456 	      else
9457 		goto invalidate;
9458 
9459 	      break;
9460 	    }
9461 
9462 	  goto invalidate;
9463 
9464 	case REG:
9465 	  base_reg = src;
9466 	  offset = 0;
9467 	  break;
9468 
9469 	case CONST_INT:
9470 	  /* Start tracking the register as a constant.  */
9471 	  reg_base_reg[regno] = -1;
9472 	  reg_offset[regno] = INTVAL (SET_SRC (set));
9473 	  /* We assign the same luid to all registers set to constants.  */
9474 	  reg_set_luid[regno] = move2add_last_label_luid + 1;
9475 	  reg_mode[regno] = mode;
9476 	  return;
9477 
9478 	default:
9479 	invalidate:
9480 	  /* Invalidate the contents of the register.  */
9481 	  reg_set_luid[regno] = 0;
9482 	  return;
9483 	}
9484 
9485       base_regno = REGNO (base_reg);
9486       /* If information about the base register is not valid, set it
9487 	 up as a new base register, pretending its value is known
9488 	 starting from the current insn.  */
9489       if (reg_set_luid[base_regno] <= move2add_last_label_luid)
9490 	{
9491 	  reg_base_reg[base_regno] = base_regno;
9492 	  reg_offset[base_regno] = 0;
9493 	  reg_set_luid[base_regno] = move2add_luid;
9494 	  reg_mode[base_regno] = mode;
9495 	}
9496       else if (! MODES_OK_FOR_MOVE2ADD (dst_mode,
9497 					reg_mode[base_regno]))
9498 	goto invalidate;
9499 
9500       reg_mode[regno] = mode;
9501 
9502       /* Copy base information from our base register.  */
9503       reg_set_luid[regno] = reg_set_luid[base_regno];
9504       reg_base_reg[regno] = reg_base_reg[base_regno];
9505 
9506       /* Compute the sum of the offsets or constants.  */
9507       reg_offset[regno] = sext_for_mode (dst_mode,
9508 					 offset
9509 					 + reg_offset[base_regno]);
9510     }
9511   else
9512     {
9513       unsigned int endregno = regno + HARD_REGNO_NREGS (regno, mode);
9514 
9515       for (i = regno; i < endregno; i++)
9516 	/* Reset the information about this register.  */
9517 	reg_set_luid[i] = 0;
9518     }
9519 }
9520 
9521 #ifdef AUTO_INC_DEC
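/* Scan X, which is the pattern of INSN or a part of it, for MEMs whose
   address uses an autoincrement, and add the corresponding REG_INC notes
   to INSN.  */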
9522 static void
9523 add_auto_inc_notes (insn, x)
9524      rtx insn;
9525      rtx x;
9526 {
9527   enum rtx_code code = GET_CODE (x);
9528   const char *fmt;
9529   int i, j;
9530 
9531   if (code == MEM && auto_inc_p (XEXP (x, 0)))
9532     {
9533       REG_NOTES (insn)
9534 	= gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
9535       return;
9536     }
9537 
9538   /* Scan all the operand sub-expressions.  */
9539   fmt = GET_RTX_FORMAT (code);
9540   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9541     {
9542       if (fmt[i] == 'e')
9543 	add_auto_inc_notes (insn, XEXP (x, i));
9544       else if (fmt[i] == 'E')
9545 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9546 	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
9547     }
9548 }
9549 #endif
9550 
9551 /* Copy EH notes from an insn to its reloads.  */
9552 static void
9553 copy_eh_notes (insn, x)
9554      rtx insn;
9555      rtx x;
9556 {
9557   rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
9558   if (eh_note)
9559     {
9560       for (; x != 0; x = NEXT_INSN (x))
9561 	{
9562 	  if (may_trap_p (PATTERN (x)))
9563 	    REG_NOTES (x)
9564 	      = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
9565 				   REG_NOTES (x));
9566 	}
9567     }
9568 }
9569 
9570 /* The reload pass emits some instructions after an abnormal call that
9571    ends a basic block, although they really belong on the outgoing edge.
9572    Look for abnormal call edges, search backward for the offending call,
9573    and repair the damage.
9574 
9575    Instructions that throw exceptions internally are handled the same way.  */
9576 void
9577 fixup_abnormal_edges ()
9578 {
9579   bool inserted = false;
9580   basic_block bb;
9581 
9582   FOR_EACH_BB (bb)
9583     {
9584       edge e;
9585 
9586       /* Look for cases we are interested in - calls or instructions causing
9587          exceptions.  */
9588       for (e = bb->succ; e; e = e->succ_next)
9589 	{
9590 	  if (e->flags & EDGE_ABNORMAL_CALL)
9591 	    break;
9592 	  if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
9593 	      == (EDGE_ABNORMAL | EDGE_EH))
9594 	    break;
9595 	}
9596       if (e && GET_CODE (bb->end) != CALL_INSN && !can_throw_internal (bb->end))
9597 	{
9598 	  rtx insn = bb->end, stop = NEXT_INSN (bb->end);
9599 	  rtx next;
9600 	  for (e = bb->succ; e; e = e->succ_next)
9601 	    if (e->flags & EDGE_FALLTHRU)
9602 	      break;
9603 	  /* Get past the new insns generated.  Allow notes, as the insns may
9604 	     already have been deleted.  */
9605 	  while ((GET_CODE (insn) == INSN || GET_CODE (insn) == NOTE)
9606 		 && !can_throw_internal (insn)
9607 		 && insn != bb->head)
9608 	    insn = PREV_INSN (insn);
9609 	  if (GET_CODE (insn) != CALL_INSN && !can_throw_internal (insn))
9610 	    abort ();
9611 	  bb->end = insn;
9612 	  inserted = true;
9613 	  insn = NEXT_INSN (insn);
9614 	  while (insn && insn != stop)
9615 	    {
9616 	      next = NEXT_INSN (insn);
9617 	      if (INSN_P (insn))
9618 		{
9619 	          delete_insn (insn);
9620 
9621 		  /* Sometimes there's still the return value USE.
9622 		     If it's placed after a trapping call (i.e. that
9623 		     call is the last insn anyway), we have no fallthru
9624 		     edge.  Simply delete this use and don't try to insert
9625 		     on the non-existent edge.  */
9626 		  if (GET_CODE (PATTERN (insn)) != USE)
9627 		    {
9628 		      /* We're not deleting it, we're moving it.  */
9629 		      INSN_DELETED_P (insn) = 0;
9630 		      PREV_INSN (insn) = NULL_RTX;
9631 		      NEXT_INSN (insn) = NULL_RTX;
9632 
9633 		      insert_insn_on_edge (insn, e);
9634 		    }
9635 		}
9636 	      insn = next;
9637 	    }
9638 	}
9639     }
9640   if (inserted)
9641     commit_edge_insertions ();
9642 }
9643