xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/lra-constraints.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* Code for RTL transformations to satisfy insn constraints.
2    Copyright (C) 2010-2015 Free Software Foundation, Inc.
3    Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify it under
8    the terms of the GNU General Public License as published by the Free
9    Software Foundation; either version 3, or (at your option) any later
10    version.
11 
12    GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13    WARRANTY; without even the implied warranty of MERCHANTABILITY or
14    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15    for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 
22 /* This file contains code for 3 passes: constraint pass,
23    inheritance/split pass, and pass for undoing failed inheritance and
24    split.
25 
26    The major goal of constraint pass is to transform RTL to satisfy
27    insn and address constraints by:
28      o choosing insn alternatives;
29      o generating *reload insns* (or reloads in brief) and *reload
30        pseudos* which will get necessary hard registers later;
31      o substituting pseudos with equivalent values and removing the
32        instructions that initialized those pseudos.
33 
34    The constraint pass has biggest and most complicated code in LRA.
35    There are a lot of important details like:
36      o reuse of input reload pseudos to simplify reload pseudo
37        allocations;
38      o some heuristics to choose insn alternative to improve the
39        inheritance;
40      o early clobbers etc.
41 
42    The pass is mimicking former reload pass in alternative choosing
43    because the reload pass is oriented to current machine description
44    model.  It might be changed if the machine description model is
45    changed.
46 
47    There is special code for preventing all LRA and this pass cycling
48    in case of bugs.
49 
50    On the first iteration of the pass we process every instruction and
51    choose an alternative for each one.  On subsequent iterations we try
52    to avoid reprocessing instructions if we can be sure that the old
53    choice is still valid.
54 
55    The inheritance/split pass is to transform code to achieve
56    inheritance and live range splitting.  It is done on backward
57    traversal of EBBs.
58 
59    The inheritance optimization goal is to reuse values in hard
60    registers. There is analogous optimization in old reload pass.  The
61    inheritance is achieved by following transformation:
62 
63        reload_p1 <- p	     reload_p1 <- p
64        ...		     new_p <- reload_p1
65        ...		=>   ...
66        reload_p2 <- p	     reload_p2 <- new_p
67 
68    where p is spilled and not changed between the insns.  Reload_p1 is
69    also called *original pseudo* and new_p is called *inheritance
70    pseudo*.
71 
72    The subsequent assignment pass will try to assign the same (or
73    another if it is not possible) hard register to new_p as to
74    reload_p1 or reload_p2.
75 
76    If the assignment pass fails to assign a hard register to new_p,
77    this file will undo the inheritance and restore the original code.
78    This is because implementing the above sequence with a spilled
79    new_p would make the code much worse.  The inheritance is done in
80    EBB scope.  The above is just a simplified example to get an idea
81    of the inheritance as the inheritance is also done for non-reload
82    insns.
83 
84    Splitting (transformation) is also done in EBB scope on the same
85    pass as the inheritance:
86 
87        r <- ... or ... <- r		 r <- ... or ... <- r
88        ...				 s <- r (new insn -- save)
89        ...			  =>
90        ...				 r <- s (new insn -- restore)
91        ... <- r				 ... <- r
92 
93     The *split pseudo* s is assigned to the hard register of the
94     original pseudo or hard register r.
95 
96     Splitting is done:
97       o In EBBs with high register pressure for global pseudos (living
98 	in at least 2 BBs) and assigned to hard registers when there
99 	are more than one reload needing the hard registers;
100       o for pseudos needing save/restore code around calls.
101 
102     If the split pseudo still has the same hard register as the
103     original pseudo after the subsequent assignment pass or the
104     original pseudo was split, the opposite transformation is done on
105     the same pass for undoing inheritance.  */
106 
107 #undef REG_OK_STRICT
108 
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "hard-reg-set.h"
114 #include "rtl.h"
115 #include "tm_p.h"
116 #include "regs.h"
117 #include "insn-config.h"
118 #include "insn-codes.h"
119 #include "recog.h"
120 #include "output.h"
121 #include "addresses.h"
122 #include "target.h"
123 #include "hashtab.h"
124 #include "hash-set.h"
125 #include "vec.h"
126 #include "machmode.h"
127 #include "input.h"
128 #include "function.h"
129 #include "symtab.h"
130 #include "flags.h"
131 #include "statistics.h"
132 #include "double-int.h"
133 #include "real.h"
134 #include "fixed-value.h"
135 #include "alias.h"
136 #include "wide-int.h"
137 #include "inchash.h"
138 #include "tree.h"
139 #include "expmed.h"
140 #include "dojump.h"
141 #include "explow.h"
142 #include "calls.h"
143 #include "emit-rtl.h"
144 #include "varasm.h"
145 #include "stmt.h"
146 #include "expr.h"
147 #include "predict.h"
148 #include "dominance.h"
149 #include "cfg.h"
150 #include "cfgrtl.h"
151 #include "basic-block.h"
152 #include "except.h"
153 #include "optabs.h"
154 #include "df.h"
155 #include "ira.h"
156 #include "rtl-error.h"
157 #include "params.h"
158 #include "lra-int.h"
159 
/* Value of LRA_CURR_RELOAD_NUM at the beginning of the BB of the
   current insn.  Remember that LRA_CURR_RELOAD_NUM is the number of
   emitted reload insns.  */
static int bb_reload_num;

/* The current insn being processed and its single set (NULL if it has
   none), plus related data: its basic block, its insn data, its insn
   static data, and the mode of each of its operands.  */
static rtx_insn *curr_insn;
static rtx curr_insn_set;
static basic_block curr_bb;
static lra_insn_recog_data_t curr_id;
static struct lra_static_insn_data *curr_static_id;
static machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
/* Mode of the register substituted by its equivalence with VOIDmode
   (e.g. constant) and whose subreg is given operand of the current
   insn.  VOIDmode in all other cases.  */
static machine_mode original_subreg_reg_mode[MAX_RECOG_OPERANDS];



/* Start numbers for new registers and insns at the start of the
   current constraints pass.  Regnos/uids at or above these values
   identify pseudos and insns created by this pass.  */
static int new_regno_start;
static int new_insn_uid_start;
185 
186 /* If LOC is nonnull, strip any outer subreg from it.  */
187 static inline rtx *
188 strip_subreg (rtx *loc)
189 {
190   return loc && GET_CODE (*loc) == SUBREG ? &SUBREG_REG (*loc) : loc;
191 }
192 
193 /* Return hard regno of REGNO or if it is was not assigned to a hard
194    register, use a hard register from its allocno class.  */
195 static int
196 get_try_hard_regno (int regno)
197 {
198   int hard_regno;
199   enum reg_class rclass;
200 
201   if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
202     hard_regno = lra_get_regno_hard_regno (regno);
203   if (hard_regno >= 0)
204     return hard_regno;
205   rclass = lra_get_allocno_class (regno);
206   if (rclass == NO_REGS)
207     return -1;
208   return ira_class_hard_regs[rclass][0];
209 }
210 
/* Return the final hard regno (plus OFFSET) which HARD_REGNO will
   have after elimination.  We do this for matching constraints
   because the final hard regno could have a different class.
   A negative HARD_REGNO is passed through unchanged.  */
static int
get_final_hard_regno (int hard_regno, int offset)
{
  if (hard_regno < 0)
    return hard_regno;
  return lra_get_elimination_hard_regno (hard_regno) + offset;
}
222 
223 /* Return hard regno of X after removing subreg and making
224    elimination.  If X is not a register or subreg of register, return
225    -1.  For pseudo use its assignment.  */
226 static int
227 get_hard_regno (rtx x)
228 {
229   rtx reg;
230   int offset, hard_regno;
231 
232   reg = x;
233   if (GET_CODE (x) == SUBREG)
234     reg = SUBREG_REG (x);
235   if (! REG_P (reg))
236     return -1;
237   if ((hard_regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
238     hard_regno = lra_get_regno_hard_regno (hard_regno);
239   if (hard_regno < 0)
240     return -1;
241   offset = 0;
242   if (GET_CODE (x) == SUBREG)
243     offset += subreg_regno_offset (hard_regno, GET_MODE (reg),
244 				   SUBREG_BYTE (x),  GET_MODE (x));
245   return get_final_hard_regno (hard_regno, offset);
246 }
247 
248 /* If REGNO is a hard register or has been allocated a hard register,
249    return the class of that register.  If REGNO is a reload pseudo
250    created by the current constraints pass, return its allocno class.
251    Return NO_REGS otherwise.  */
252 static enum reg_class
253 get_reg_class (int regno)
254 {
255   int hard_regno;
256 
257   if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
258     hard_regno = lra_get_regno_hard_regno (regno);
259   if (hard_regno >= 0)
260     {
261       hard_regno = get_final_hard_regno (hard_regno, 0);
262       return REGNO_REG_CLASS (hard_regno);
263     }
264   if (regno >= new_regno_start)
265     return lra_get_allocno_class (regno);
266   return NO_REGS;
267 }
268 
/* Return true if REG satisfies (or will satisfy) reg class constraint
   CL.  Use elimination first if REG is a hard register.  If REG is a
   reload pseudo created by this constraints pass, assume that it will
   be allocated a hard register from its allocno class, but allow that
   class to be narrowed to CL if it is currently a superset of CL.

   If NEW_CLASS is nonnull, set *NEW_CLASS to the new allocno class of
   REGNO (reg), or NO_REGS if no change in its class was needed.  */
static bool
in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class)
{
  enum reg_class rclass, common_class;
  machine_mode reg_mode;
  int class_size, hard_regno, nregs, i, j;
  int regno = REGNO (reg);

  if (new_class != NULL)
    *new_class = NO_REGS;
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* Hard register: test class membership after elimination.  */
      rtx final_reg = reg;
      rtx *final_loc = &final_reg;

      lra_eliminate_reg_if_possible (final_loc);
      return TEST_HARD_REG_BIT (reg_class_contents[cl], REGNO (*final_loc));
    }
  /* REG is a pseudo from here on.  */
  reg_mode = GET_MODE (reg);
  rclass = get_reg_class (regno);
  if (regno < new_regno_start
      /* Do not allow the constraints for reload instructions to
	 influence the classes of new pseudos.  These reloads are
	 typically moves that have many alternatives, and restricting
	 reload pseudos for one alternative may lead to situations
	 where other reload pseudos are no longer allocatable.  */
      || (INSN_UID (curr_insn) >= new_insn_uid_start
	  && curr_insn_set != NULL
	  && ((OBJECT_P (SET_SRC (curr_insn_set))
	       && ! CONSTANT_P (SET_SRC (curr_insn_set)))
	      || (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
		  && OBJECT_P (SUBREG_REG (SET_SRC (curr_insn_set)))
		  && ! CONSTANT_P (SUBREG_REG (SET_SRC (curr_insn_set)))))))
    /* When we don't know what class will be used finally for reload
       pseudos, we use ALL_REGS.  */
    return ((regno >= new_regno_start && rclass == ALL_REGS)
	    || (rclass != NO_REGS && ira_class_subset_p[rclass][cl]
		&& ! hard_reg_set_subset_p (reg_class_contents[cl],
					    lra_no_alloc_regs)));
  else
    {
      common_class = ira_reg_class_subset[rclass][cl];
      if (new_class != NULL)
	*new_class = common_class;
      if (hard_reg_set_subset_p (reg_class_contents[common_class],
				 lra_no_alloc_regs))
	return false;
      /* Check that there are enough allocatable regs.  A value that
	 needs several hard registers fits only if every one of them
	 is allocatable and belongs to COMMON_CLASS.  */
      class_size = ira_class_hard_regs_num[common_class];
      for (i = 0; i < class_size; i++)
	{
	  hard_regno = ira_class_hard_regs[common_class][i];
	  nregs = hard_regno_nregs[hard_regno][reg_mode];
	  if (nregs == 1)
	    return true;
	  for (j = 0; j < nregs; j++)
	    if (TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno + j)
		|| ! TEST_HARD_REG_BIT (reg_class_contents[common_class],
					hard_regno + j))
	      break;
	  if (j >= nregs)
	    return true;
	}
      return false;
    }
}
343 
344 /* Return true if REGNO satisfies a memory constraint.	*/
345 static bool
346 in_mem_p (int regno)
347 {
348   return get_reg_class (regno) == NO_REGS;
349 }
350 
/* Return 1 if ADDR is a valid memory address for mode MODE in address
   space AS, and check that each pseudo has the proper kind of hard
   reg.	 */
static int
valid_address_p (machine_mode mode ATTRIBUTE_UNUSED,
		 rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* The legacy target macro only supports the generic address
     space.  */
  lra_assert (ADDR_SPACE_GENERIC_P (as));
  /* GO_IF_LEGITIMATE_ADDRESS jumps to the given label on success;
     falling through means the address is not legitimate.  */
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  /* Non-strict check: pass 0 for the strictness argument
     (REG_OK_STRICT is #undef'd at the top of this file).  */
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
369 
namespace {
  /* Temporarily eliminates registers in an address (for the lifetime of
     the object): the constructor applies the eliminations, the
     destructor restores the original registers.  */
  class address_eliminator {
  public:
    address_eliminator (struct address_info *ad);
    ~address_eliminator ();

  private:
    struct address_info *m_ad;	/* The address being processed.  */
    rtx *m_base_loc;		/* Base term location, subreg stripped.  */
    rtx m_base_reg;		/* Saved original *m_base_loc.  */
    rtx *m_index_loc;		/* Index term location, subreg stripped.  */
    rtx m_index_reg;		/* Saved original *m_index_loc.  */
  };
}
386 
/* Apply register elimination to the base and index terms of AD,
   saving the original register values so the destructor can restore
   them.  */
address_eliminator::address_eliminator (struct address_info *ad)
  : m_ad (ad),
    m_base_loc (strip_subreg (ad->base_term)),
    m_base_reg (NULL_RTX),
    m_index_loc (strip_subreg (ad->index_term)),
    m_index_reg (NULL_RTX)
{
  if (m_base_loc != NULL)
    {
      m_base_reg = *m_base_loc;
      lra_eliminate_reg_if_possible (m_base_loc);
      /* Keep the second occurrence of the base term (if any) in sync
	 with the first.  */
      if (m_ad->base_term2 != NULL)
	*m_ad->base_term2 = *m_ad->base_term;
    }
  if (m_index_loc != NULL)
    {
      m_index_reg = *m_index_loc;
      lra_eliminate_reg_if_possible (m_index_loc);
    }
}
407 
/* Restore the base and index registers saved by the constructor, if
   elimination replaced them.  */
address_eliminator::~address_eliminator ()
{
  if (m_base_loc && *m_base_loc != m_base_reg)
    {
      *m_base_loc = m_base_reg;
      /* Keep the duplicated base term consistent, as in the
	 constructor.  */
      if (m_ad->base_term2 != NULL)
	*m_ad->base_term2 = *m_ad->base_term;
    }
  if (m_index_loc && *m_index_loc != m_index_reg)
    *m_index_loc = m_index_reg;
}
419 
420 /* Return true if the eliminated form of AD is a legitimate target address.  */
421 static bool
422 valid_address_p (struct address_info *ad)
423 {
424   address_eliminator eliminator (ad);
425   return valid_address_p (ad->mode, *ad->outer, ad->as);
426 }
427 
428 /* Return true if the eliminated form of memory reference OP satisfies
429    extra memory constraint CONSTRAINT.  */
430 static bool
431 satisfies_memory_constraint_p (rtx op, enum constraint_num constraint)
432 {
433   struct address_info ad;
434 
435   decompose_mem_address (&ad, op);
436   address_eliminator eliminator (&ad);
437   return constraint_satisfied_p (op, constraint);
438 }
439 
440 /* Return true if the eliminated form of address AD satisfies extra
441    address constraint CONSTRAINT.  */
442 static bool
443 satisfies_address_constraint_p (struct address_info *ad,
444 				enum constraint_num constraint)
445 {
446   address_eliminator eliminator (ad);
447   return constraint_satisfied_p (*ad->outer, constraint);
448 }
449 
450 /* Return true if the eliminated form of address OP satisfies extra
451    address constraint CONSTRAINT.  */
452 static bool
453 satisfies_address_constraint_p (rtx op, enum constraint_num constraint)
454 {
455   struct address_info ad;
456 
457   decompose_lea_address (&ad, &op);
458   return satisfies_address_constraint_p (&ad, constraint);
459 }
460 
461 /* Initiate equivalences for LRA.  As we keep original equivalences
462    before any elimination, we need to make copies otherwise any change
463    in insns might change the equivalences.  */
464 void
465 lra_init_equiv (void)
466 {
467   ira_expand_reg_equiv ();
468   for (int i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
469     {
470       rtx res;
471 
472       if ((res = ira_reg_equiv[i].memory) != NULL_RTX)
473 	ira_reg_equiv[i].memory = copy_rtx (res);
474       if ((res = ira_reg_equiv[i].invariant) != NULL_RTX)
475 	ira_reg_equiv[i].invariant = copy_rtx (res);
476     }
477 }
478 
479 static rtx loc_equivalence_callback (rtx, const_rtx, void *);
480 
481 /* Update equivalence for REGNO.  We need to this as the equivalence
482    might contain other pseudos which are changed by their
483    equivalences.  */
484 static void
485 update_equiv (int regno)
486 {
487   rtx x;
488 
489   if ((x = ira_reg_equiv[regno].memory) != NULL_RTX)
490     ira_reg_equiv[regno].memory
491       = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
492 				 NULL_RTX);
493   if ((x = ira_reg_equiv[regno].invariant) != NULL_RTX)
494     ira_reg_equiv[regno].invariant
495       = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
496 				 NULL_RTX);
497 }
498 
499 /* If we have decided to substitute X with another value, return that
500    value, otherwise return X.  */
501 static rtx
502 get_equiv (rtx x)
503 {
504   int regno;
505   rtx res;
506 
507   if (! REG_P (x) || (regno = REGNO (x)) < FIRST_PSEUDO_REGISTER
508       || ! ira_reg_equiv[regno].defined_p
509       || ! ira_reg_equiv[regno].profitable_p
510       || lra_get_regno_hard_regno (regno) >= 0)
511     return x;
512   if ((res = ira_reg_equiv[regno].memory) != NULL_RTX)
513     {
514       if (targetm.cannot_substitute_mem_equiv_p (res))
515 	return x;
516       return res;
517     }
518   if ((res = ira_reg_equiv[regno].constant) != NULL_RTX)
519     return res;
520   if ((res = ira_reg_equiv[regno].invariant) != NULL_RTX)
521     return res;
522   gcc_unreachable ();
523 }
524 
525 /* If we have decided to substitute X with the equivalent value,
526    return that value after elimination for INSN, otherwise return
527    X.  */
528 static rtx
529 get_equiv_with_elimination (rtx x, rtx_insn *insn)
530 {
531   rtx res = get_equiv (x);
532 
533   if (x == res || CONSTANT_P (res))
534     return res;
535   return lra_eliminate_regs_1 (insn, res, GET_MODE (res),
536 			       false, false, 0, true);
537 }
538 
539 /* Set up curr_operand_mode.  */
540 static void
541 init_curr_operand_mode (void)
542 {
543   int nop = curr_static_id->n_operands;
544   for (int i = 0; i < nop; i++)
545     {
546       machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
547       if (mode == VOIDmode)
548 	{
549 	  /* The .md mode for address operands is the mode of the
550 	     addressed value rather than the mode of the address itself.  */
551 	  if (curr_id->icode >= 0 && curr_static_id->operand[i].is_address)
552 	    mode = Pmode;
553 	  else
554 	    mode = curr_static_id->operand[i].mode;
555 	}
556       curr_operand_mode[i] = mode;
557     }
558 }
559 
560 
561 
/* The page contains code to reuse input reloads.  */

/* Structure describing one input reload of the current insn.  */
struct input_reload
{
  /* The value being reloaded (the original operand).  */
  rtx input;
  /* The reload pseudo used for it.  */
  rtx reg;
};

/* The number of elements in the following array.  */
static int curr_insn_input_reloads_num;
/* Array containing info about input reloads.  It is used to find the
   same input reload and reuse the reload pseudo in this case.	*/
static struct input_reload curr_insn_input_reloads[LRA_MAX_INSN_RELOADS];
578 
579 /* Initiate data concerning reuse of input reloads for the current
580    insn.  */
581 static void
582 init_curr_insn_input_reloads (void)
583 {
584   curr_insn_input_reloads_num = 0;
585 }
586 
/* Create a new pseudo using MODE, RCLASS, ORIGINAL or reuse already
   created input reload pseudo (only if TYPE is not OP_OUT).  Don't
   reuse pseudo if IN_SUBREG_P is true and the reused pseudo should be
   wrapped up in SUBREG.  The result pseudo is returned through
   RESULT_REG.  Return TRUE if we created a new pseudo, FALSE if we
   reused the already created input reload pseudo.  Use TITLE to
   describe new registers for debug purposes.  */
static bool
get_reload_reg (enum op_type type, machine_mode mode, rtx original,
		enum reg_class rclass, bool in_subreg_p,
		const char *title, rtx *result_reg)
{
  int i, regno;
  enum reg_class new_class;

  /* Output reloads always get a fresh pseudo.  */
  if (type == OP_OUT)
    {
      *result_reg
	= lra_create_new_reg_with_unique_value (mode, original, rclass, title);
      return true;
    }
  /* Prevent reuse value of expression with side effects,
     e.g. volatile memory.  */
  if (! side_effects_p (original))
    for (i = 0; i < curr_insn_input_reloads_num; i++)
      if (rtx_equal_p (curr_insn_input_reloads[i].input, original)
	  && in_class_p (curr_insn_input_reloads[i].reg, rclass, &new_class))
	{
	  rtx reg = curr_insn_input_reloads[i].reg;
	  regno = REGNO (reg);
	  /* If input is equal to original and both are VOIDmode,
	     GET_MODE (reg) might be still different from mode.
	     Ensure we don't return *result_reg with wrong mode.  */
	  if (GET_MODE (reg) != mode)
	    {
	      if (in_subreg_p)
		continue;
	      if (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (mode))
		continue;
	      reg = lowpart_subreg (mode, reg, GET_MODE (reg));
	      if (reg == NULL_RTX || GET_CODE (reg) != SUBREG)
		continue;
	    }
	  *result_reg = reg;
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "	 Reuse r%d for reload ", regno);
	      dump_value_slim (lra_dump_file, original, 1);
	    }
	  /* Narrow the reused pseudo's allocno class if the match
	     required it.  */
	  if (new_class != lra_get_allocno_class (regno))
	    lra_change_class (regno, new_class, ", change to", false);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "\n");
	  return false;
	}
  /* No reusable reload was found: create a new pseudo and record it
     for later reuse.  */
  *result_reg = lra_create_new_reg (mode, original, rclass, title);
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = original;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = *result_reg;
  return true;
}
648 
649 
650 
651 /* The page contains code to extract memory address parts.  */
652 
653 /* Wrapper around REGNO_OK_FOR_INDEX_P, to allow pseudos.  */
654 static inline bool
655 ok_for_index_p_nonstrict (rtx reg)
656 {
657   unsigned regno = REGNO (reg);
658 
659   return regno >= FIRST_PSEUDO_REGISTER || REGNO_OK_FOR_INDEX_P (regno);
660 }
661 
662 /* A version of regno_ok_for_base_p for use here, when all pseudos
663    should count as OK.	Arguments as for regno_ok_for_base_p.  */
664 static inline bool
665 ok_for_base_p_nonstrict (rtx reg, machine_mode mode, addr_space_t as,
666 			 enum rtx_code outer_code, enum rtx_code index_code)
667 {
668   unsigned regno = REGNO (reg);
669 
670   if (regno >= FIRST_PSEUDO_REGISTER)
671     return true;
672   return ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
673 }
674 
675 
676 
677 /* The page contains major code to choose the current insn alternative
678    and generate reloads for it.	 */
679 
680 /* Return the offset from REGNO of the least significant register
681    in (reg:MODE REGNO).
682 
683    This function is used to tell whether two registers satisfy
684    a matching constraint.  (reg:MODE1 REGNO1) matches (reg:MODE2 REGNO2) if:
685 
686          REGNO1 + lra_constraint_offset (REGNO1, MODE1)
687 	 == REGNO2 + lra_constraint_offset (REGNO2, MODE2)  */
688 int
689 lra_constraint_offset (int regno, machine_mode mode)
690 {
691   lra_assert (regno < FIRST_PSEUDO_REGISTER);
692   if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (mode) > UNITS_PER_WORD
693       && SCALAR_INT_MODE_P (mode))
694     return hard_regno_nregs[regno][mode] - 1;
695   return 0;
696 }
697 
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   auto-increment and auto-decrement.  This is specifically intended for
   process_alt_operands to use in determining whether two operands
   match.  X is the operand whose number is the lower of the two.

   It is supposed that X is the output operand and Y is the input
   operand.  Y_HARD_REGNO is the final hard regno of register Y or
   register in subreg Y as we know it now.  Otherwise, it is a
   negative value.  */
static bool
operands_match_p (rtx x, rtx y, int y_hard_regno)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return true;
  /* Registers (possibly wrapped in subregs): compare by final hard
     regno, adjusted by the matching-constraint offset.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y)))))
    {
      int j;

      i = get_hard_regno (x);
      if (i < 0)
	goto slow;

      if ((j = y_hard_regno) < 0)
	goto slow;

      i += lra_constraint_offset (i, GET_MODE (x));
      j += lra_constraint_offset (j, GET_MODE (y));

      return i == j;
    }

  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two post-increments are invalid
     because the assembler insn would increment only once.  On the
     other hand, a post-increment matches ordinary indexing if the
     post-increment is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y, y_hard_regno);

  /* Two pre-increments are invalid because the assembler insn would
     increment only once.  On the other hand, a pre-increment matches
     ordinary indexing if the pre-increment is the input operand.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0), -1);

 slow:
  /* A REG matches a SUBREG whose inner register is that same REG.  */
  if (code == REG && GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y))
      && x == SUBREG_REG (y))
    return true;
  if (GET_CODE (y) == REG && code == SUBREG && REG_P (SUBREG_REG (x))
      && SUBREG_REG (x) == y)
    return true;

  /* Now we have disposed of all the cases in which different rtx
     codes can match.  */
  if (code != GET_CODE (y))
    return false;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return false;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants were already handled by the X == Y test.  */
      return false;

    case LABEL_REF:
      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements fail
     to match, return false for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return false;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return false;
	  break;

	case 'e':
	  /* Recurse into sub-expressions.  */
	  val = operands_match_p (XEXP (x, i), XEXP (y, i), -1);
	  if (val == 0)
	    return false;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return false;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j), -1);
	      if (val == 0)
		return false;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's, except for
	     within LABEL_REFs and SYMBOL_REFs.	 */
	default:
	  gcc_unreachable ();
	}
    }
  return true;
}
830 
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && !targetm.cannot_force_const_mem (MODE, X))

/* True if C is a non-empty register class that has too few registers
   to be safely used as a reload target class: either a
   single-register class, or a class the target reports as likely
   spilled.  */
#define SMALL_REGISTER_CLASS_P(C)		\
  (ira_class_hard_regs_num [(C)] == 1		\
   || (ira_class_hard_regs_num [(C)] >= 1	\
       && targetm.class_likely_spilled_p (C)))
845 
846 /* If REG is a reload pseudo, try to make its class satisfying CL.  */
847 static void
848 narrow_reload_pseudo_class (rtx reg, enum reg_class cl)
849 {
850   enum reg_class rclass;
851 
852   /* Do not make more accurate class from reloads generated.  They are
853      mostly moves with a lot of constraints.  Making more accurate
854      class may results in very narrow class and impossibility of find
855      registers for several reloads of one insn.	 */
856   if (INSN_UID (curr_insn) >= new_insn_uid_start)
857     return;
858   if (GET_CODE (reg) == SUBREG)
859     reg = SUBREG_REG (reg);
860   if (! REG_P (reg) || (int) REGNO (reg) < new_regno_start)
861     return;
862   if (in_class_p (reg, cl, &rclass) && rclass != cl)
863     lra_change_class (REGNO (reg), rclass, "      Change to", true);
864 }
865 
/* Generate reloads for matching OUT and INS (array of input operand
   numbers with end marker -1) with reg class GOAL_CLASS.  Add input
   and output reloads correspondingly to the lists *BEFORE and *AFTER.
   OUT might be negative.  In this case we generate input reloads for
   matched input operands INS.  */
static void
match_reload (signed char out, signed char *ins, enum reg_class goal_class,
	      rtx_insn **before, rtx_insn **after)
{
  int i, in;
  rtx new_in_reg, new_out_reg, reg, clobber;
  machine_mode inmode, outmode;
  rtx in_rtx = *curr_id->operand_loc[ins[0]];
  rtx out_rtx = out < 0 ? in_rtx : *curr_id->operand_loc[out];

  inmode = curr_operand_mode[ins[0]];
  outmode = out < 0 ? inmode : curr_operand_mode[out];
  /* Input reloads are accumulated on the *BEFORE sequence.  */
  push_to_sequence (*before);
  if (inmode != outmode)
    {
      /* The matched operands have different modes: create one reload
	 pseudo in the wider mode and access the narrower side through
	 a subreg of it.  */
      if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
	{
	  reg = new_in_reg
	    = lra_create_new_reg_with_unique_value (inmode, in_rtx,
						    goal_class, "");
	  if (SCALAR_INT_MODE_P (inmode))
	    new_out_reg = gen_lowpart_SUBREG (outmode, reg);
	  else
	    new_out_reg = gen_rtx_SUBREG (outmode, reg, 0);
	  LRA_SUBREG_P (new_out_reg) = 1;
	  /* If the input reg is dying here, we can use the same hard
	     register for REG and IN_RTX.  We do it only for original
	     pseudos as reload pseudos can die although original
	     pseudos still live where reload pseudos die.  */
	  if (REG_P (in_rtx) && (int) REGNO (in_rtx) < lra_new_regno_start
	      && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx)))
	    lra_assign_reg_val (REGNO (in_rtx), REGNO (reg));
	}
      else
	{
	  reg = new_out_reg
	    = lra_create_new_reg_with_unique_value (outmode, out_rtx,
						    goal_class, "");
	  if (SCALAR_INT_MODE_P (outmode))
	    new_in_reg = gen_lowpart_SUBREG (inmode, reg);
	  else
	    new_in_reg = gen_rtx_SUBREG (inmode, reg, 0);
	  /* NEW_IN_REG is non-paradoxical subreg.  We don't want
	     NEW_OUT_REG living above.  We add clobber clause for
	     this.  This is just a temporary clobber.  We can remove
	     it at the end of LRA work.  */
	  clobber = emit_clobber (new_out_reg);
	  LRA_TEMP_CLOBBER_P (PATTERN (clobber)) = 1;
	  LRA_SUBREG_P (new_in_reg) = 1;
	  if (GET_CODE (in_rtx) == SUBREG)
	    {
	      rtx subreg_reg = SUBREG_REG (in_rtx);

	      /* If SUBREG_REG is dying here and sub-registers IN_RTX
		 and NEW_IN_REG are similar, we can use the same hard
		 register for REG and SUBREG_REG.  */
	      if (REG_P (subreg_reg)
		  && (int) REGNO (subreg_reg) < lra_new_regno_start
		  && GET_MODE (subreg_reg) == outmode
		  && SUBREG_BYTE (in_rtx) == SUBREG_BYTE (new_in_reg)
		  && find_regno_note (curr_insn, REG_DEAD, REGNO (subreg_reg)))
		lra_assign_reg_val (REGNO (subreg_reg), REGNO (reg));
	    }
	}
    }
  else
    {
      /* Pseudos have values -- see comments for lra_reg_info.
	 Different pseudos with the same value do not conflict even if
	 they live in the same place.  When we create a pseudo we
	 assign value of original pseudo (if any) from which we
	 created the new pseudo.  If we create the pseudo from the
	 input pseudo, the new pseudo will not conflict with the input
	 pseudo which is wrong when the input pseudo lives after the
	 insn and as the new pseudo value is changed by the insn
	 output.  Therefore we create the new pseudo from the output.

	 We cannot reuse the current output register because we might
	 have a situation like "a <- a op b", where the constraints
	 force the second input operand ("b") to match the output
	 operand ("a").  "b" must then be copied into a new register
	 so that it doesn't clobber the current value of "a".  */

      new_in_reg = new_out_reg
	= lra_create_new_reg_with_unique_value (outmode, out_rtx,
						goal_class, "");
    }
  /* In operand can be got from transformations before processing insn
     constraints.  One example of such transformations is subreg
     reloading (see function simplify_operand_subreg).  The new
     pseudos created by the transformations might have inaccurate
     class (ALL_REGS) and we should make their classes more
     accurate.  */
  narrow_reload_pseudo_class (in_rtx, goal_class);
  lra_emit_move (copy_rtx (new_in_reg), in_rtx);
  *before = get_insns ();
  end_sequence ();
  /* Substitute the new input reload pseudo for every matched input
     operand listed in INS.  */
  for (i = 0; (in = ins[i]) >= 0; i++)
    {
      lra_assert
	(GET_MODE (*curr_id->operand_loc[in]) == VOIDmode
	 || GET_MODE (new_in_reg) == GET_MODE (*curr_id->operand_loc[in]));
      *curr_id->operand_loc[in] = new_in_reg;
    }
  lra_update_dups (curr_id, ins);
  if (out < 0)
    return;
  /* See a comment for the input operand above.  */
  narrow_reload_pseudo_class (out_rtx, goal_class);
  /* Emit the output reload copy only if the output value is actually
     used after the insn.  */
  if (find_reg_note (curr_insn, REG_UNUSED, out_rtx) == NULL_RTX)
    {
      start_sequence ();
      lra_emit_move (out_rtx, copy_rtx (new_out_reg));
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  *curr_id->operand_loc[out] = new_out_reg;
  lra_update_dup (curr_id, out);
}
991 
992 /* Return register class which is union of all reg classes in insn
993    constraint alternative string starting with P.  */
994 static enum reg_class
995 reg_class_from_constraints (const char *p)
996 {
997   int c, len;
998   enum reg_class op_class = NO_REGS;
999 
1000   do
1001     switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
1002       {
1003       case '#':
1004       case ',':
1005 	return op_class;
1006 
1007       case 'g':
1008 	op_class = reg_class_subunion[op_class][GENERAL_REGS];
1009 	break;
1010 
1011       default:
1012 	enum constraint_num cn = lookup_constraint (p);
1013 	enum reg_class cl = reg_class_for_constraint (cn);
1014 	if (cl == NO_REGS)
1015 	  {
1016 	    if (insn_extra_address_constraint (cn))
1017 	      op_class
1018 		= (reg_class_subunion
1019 		   [op_class][base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
1020 					      ADDRESS, SCRATCH)]);
1021 	    break;
1022 	  }
1023 
1024 	op_class = reg_class_subunion[op_class][cl];
1025  	break;
1026       }
1027   while ((p += len), c);
1028   return op_class;
1029 }
1030 
1031 /* If OP is a register, return the class of the register as per
1032    get_reg_class, otherwise return NO_REGS.  */
1033 static inline enum reg_class
1034 get_op_class (rtx op)
1035 {
1036   return REG_P (op) ? get_reg_class (REGNO (op)) : NO_REGS;
1037 }
1038 
1039 /* Return generated insn mem_pseudo:=val if TO_P or val:=mem_pseudo
1040    otherwise.  If modes of MEM_PSEUDO and VAL are different, use
1041    SUBREG for VAL to make them equal.  */
1042 static rtx_insn *
1043 emit_spill_move (bool to_p, rtx mem_pseudo, rtx val)
1044 {
1045   if (GET_MODE (mem_pseudo) != GET_MODE (val))
1046     {
1047       /* Usually size of mem_pseudo is greater than val size but in
1048 	 rare cases it can be less as it can be defined by target
1049 	 dependent macro HARD_REGNO_CALLER_SAVE_MODE.  */
1050       if (! MEM_P (val))
1051 	{
1052 	  val = gen_rtx_SUBREG (GET_MODE (mem_pseudo),
1053 				GET_CODE (val) == SUBREG ? SUBREG_REG (val) : val,
1054 				0);
1055 	  LRA_SUBREG_P (val) = 1;
1056 	}
1057       else
1058 	{
1059 	  mem_pseudo = gen_lowpart_SUBREG (GET_MODE (val), mem_pseudo);
1060 	  LRA_SUBREG_P (mem_pseudo) = 1;
1061 	}
1062     }
1063   return as_a <rtx_insn *> (to_p
1064 			    ? gen_move_insn (mem_pseudo, val)
1065 			    : gen_move_insn (val, mem_pseudo));
1066 }
1067 
/* Process a special case insn (register move), return true if we
   don't need to process it anymore.  INSN should be a single set
   insn.  Set up that RTL was changed through CHANGE_P and macro
   SECONDARY_MEMORY_NEEDED says to use secondary memory through
   SEC_MEM_P.  */
static bool
check_and_process_move (bool *change_p, bool *sec_mem_p ATTRIBUTE_UNUSED)
{
  int sregno, dregno;
  rtx dest, src, dreg, sreg, new_reg, scratch_reg;
  rtx_insn *before;
  enum reg_class dclass, sclass, secondary_class;
  secondary_reload_info sri;

  lra_assert (curr_insn_set != NULL_RTX);
  dreg = dest = SET_DEST (curr_insn_set);
  sreg = src = SET_SRC (curr_insn_set);
  /* Look through subregs to get at the underlying reg or mem.  */
  if (GET_CODE (dest) == SUBREG)
    dreg = SUBREG_REG (dest);
  if (GET_CODE (src) == SUBREG)
    sreg = SUBREG_REG (src);
  if (! (REG_P (dreg) || MEM_P (dreg)) || ! (REG_P (sreg) || MEM_P (sreg)))
    return false;
  sclass = dclass = NO_REGS;
  if (REG_P (dreg))
    dclass = get_reg_class (REGNO (dreg));
  if (dclass == ALL_REGS)
    /* ALL_REGS is used for new pseudos created by transformations
       like reload of SUBREG_REG (see function
       simplify_operand_subreg).  We don't know their class yet.  We
       should figure out the class from processing the insn
       constraints not in this fast path function.  Even if ALL_REGS
       were a right class for the pseudo, secondary_... hooks usually
       are not defined for ALL_REGS.  */
    return false;
  if (REG_P (sreg))
    sclass = get_reg_class (REGNO (sreg));
  if (sclass == ALL_REGS)
    /* See comments above.  */
    return false;
  if (sclass == NO_REGS && dclass == NO_REGS)
    return false;
#ifdef SECONDARY_MEMORY_NEEDED
  if (SECONDARY_MEMORY_NEEDED (sclass, dclass, GET_MODE (src))
#ifdef SECONDARY_MEMORY_NEEDED_MODE
      && ((sclass != NO_REGS && dclass != NO_REGS)
	  || GET_MODE (src) != SECONDARY_MEMORY_NEEDED_MODE (GET_MODE (src)))
#endif
      )
    {
      /* Tell the caller the move must go through secondary memory;
	 the insn itself is left for the normal constraint pass.  */
      *sec_mem_p = true;
      return false;
    }
#endif
  if (! REG_P (dreg) || ! REG_P (sreg))
    return false;
  sri.prev_sri = NULL;
  sri.icode = CODE_FOR_nothing;
  sri.extra_cost = 0;
  secondary_class = NO_REGS;
  /* Set up hard register for a reload pseudo for hook
     secondary_reload because some targets just ignore unassigned
     pseudos in the hook.  */
  if (dclass != NO_REGS && lra_get_regno_hard_regno (REGNO (dreg)) < 0)
    {
      dregno = REGNO (dreg);
      reg_renumber[dregno] = ira_class_hard_regs[dclass][0];
    }
  else
    dregno = -1;
  if (sclass != NO_REGS && lra_get_regno_hard_regno (REGNO (sreg)) < 0)
    {
      sregno = REGNO (sreg);
      reg_renumber[sregno] = ira_class_hard_regs[sclass][0];
    }
  else
    sregno = -1;
  /* Ask the target whether loading DEST from class SCLASS needs a
     secondary reload.  */
  if (sclass != NO_REGS)
    secondary_class
      = (enum reg_class) targetm.secondary_reload (false, dest,
						   (reg_class_t) sclass,
						   GET_MODE (src), &sri);
  if (sclass == NO_REGS
      || ((secondary_class != NO_REGS || sri.icode != CODE_FOR_nothing)
	  && dclass != NO_REGS))
    {
      enum reg_class old_sclass = secondary_class;
      secondary_reload_info old_sri = sri;

      sri.prev_sri = NULL;
      sri.icode = CODE_FOR_nothing;
      sri.extra_cost = 0;
      /* Ask the mirror question: storing SRC into class DCLASS.  */
      secondary_class
	= (enum reg_class) targetm.secondary_reload (true, src,
						     (reg_class_t) dclass,
						     GET_MODE (src), &sri);
      /* Check the target hook consistency.  */
      lra_assert
	((secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	 || (old_sclass == NO_REGS && old_sri.icode == CODE_FOR_nothing)
	 || (secondary_class == old_sclass && sri.icode == old_sri.icode));
    }
  /* Undo the temporary hard register assignments made above.  */
  if (sregno >= 0)
    reg_renumber [sregno] = -1;
  if (dregno >= 0)
    reg_renumber [dregno] = -1;
  if (secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
    return false;
  *change_p = true;
  new_reg = NULL_RTX;
  if (secondary_class != NO_REGS)
    new_reg = lra_create_new_reg_with_unique_value (GET_MODE (src), NULL_RTX,
						    secondary_class,
						    "secondary");
  /* Emit the secondary reload move (and possible scratch) before the
     current insn.  */
  start_sequence ();
  if (sri.icode == CODE_FOR_nothing)
    lra_emit_move (new_reg, src);
  else
    {
      enum reg_class scratch_class;

      scratch_class = (reg_class_from_constraints
		       (insn_data[sri.icode].operand[2].constraint));
      scratch_reg = (lra_create_new_reg_with_unique_value
		     (insn_data[sri.icode].operand[2].mode, NULL_RTX,
		      scratch_class, "scratch"));
      emit_insn (GEN_FCN (sri.icode) (new_reg != NULL_RTX ? new_reg : dest,
				      src, scratch_reg));
    }
  before = get_insns ();
  end_sequence ();
  lra_process_new_insns (curr_insn, before, NULL, "Inserting the move");
  if (new_reg != NULL_RTX)
    SET_SRC (curr_insn_set) = new_reg;
  else
    {
      /* The secondary-reload pattern itself performed the whole move;
	 the original insn is now redundant.  */
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, "Deleting move %u\n", INSN_UID (curr_insn));
	  dump_insn_slim (lra_dump_file, curr_insn);
	}
      lra_set_insn_deleted (curr_insn);
      return true;
    }
  return false;
}
1214 
/* The following data describe the result of process_alt_operands.
   The data are used in curr_insn_transform to generate reloads.  */

/* The chosen reg classes which should be used for the corresponding
   operands.  */
static enum reg_class goal_alt[MAX_RECOG_OPERANDS];
/* True if the operand should be the same as another operand and that
   other operand does not need a reload.  */
static bool goal_alt_match_win[MAX_RECOG_OPERANDS];
/* True if the operand does not need a reload.	*/
static bool goal_alt_win[MAX_RECOG_OPERANDS];
/* True if the operand can be offsetable memory.  */
static bool goal_alt_offmemok[MAX_RECOG_OPERANDS];
/* The number of an operand to which given operand can be matched to.  */
static int goal_alt_matches[MAX_RECOG_OPERANDS];
/* The number of elements in the following array.  */
static int goal_alt_dont_inherit_ops_num;
/* Numbers of operands whose reload pseudos should not be inherited.  */
static int goal_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
/* True if the insn commutative operands should be swapped.  */
static bool goal_alt_swapped;
/* The chosen insn alternative.	 */
static int goal_alt_number;

/* The following variables are used to choose the best insn
   alternative.	 They reflect final characteristics of the best
   alternative.	 */

/* Number of necessary reloads and overall cost reflecting the
   previous value and other unpleasantness of the best alternative.  */
static int best_losers, best_overall;
/* Overall number of hard registers used for reloads.  For example, on
   some targets we need 2 general registers to reload DFmode and only
   one floating point register.	 */
static int best_reload_nregs;
/* Overall number reflecting distances of previous reloading the same
   value.  The distances are counted from the current BB start.  It is
   used to improve inheritance chances.  */
static int best_reload_sum;

/* True if the current insn should have no correspondingly input or
   output reloads.  */
static bool no_input_reloads_p, no_output_reloads_p;

/* True if we swapped the commutative operands in the current
   insn.  */
static int curr_swapped;
1262 
/* If CHECK_ONLY_P is false, arrange for address element *LOC to be a
   register of class CL.  Add any input reloads to list BEFORE.  AFTER
   is nonnull if *LOC is an automodified value; handle that case by
   adding the required output reloads to list AFTER.  Return true if
   the RTL was changed.

   If CHECK_ONLY_P is true, check that the *LOC is a correct address
   register.  Return false if the address register is correct.  */
static bool
process_addr_reg (rtx *loc, bool check_only_p, rtx_insn **before, rtx_insn **after,
		  enum reg_class cl)
{
  int regno;
  enum reg_class rclass, new_class;
  rtx reg;
  rtx new_reg;
  machine_mode mode;
  bool subreg_p, before_p = false;

  /* Strip a subreg wrapper and work on the inner expression.  */
  subreg_p = GET_CODE (*loc) == SUBREG;
  if (subreg_p)
    loc = &SUBREG_REG (*loc);
  reg = *loc;
  mode = GET_MODE (reg);
  if (! REG_P (reg))
    {
      if (check_only_p)
	return true;
      /* Always reload memory in an address even if the target supports
	 such addresses.  */
      new_reg = lra_create_new_reg_with_unique_value (mode, reg, cl, "address");
      before_p = true;
    }
  else
    {
      regno = REGNO (reg);
      rclass = get_reg_class (regno);
      /* Substitute an equivalent value for the pseudo if one exists.  */
      if (! check_only_p
	  && (*loc = get_equiv_with_elimination (reg, curr_insn)) != reg)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "Changing pseudo %d in address of insn %u on equiv ",
		       REGNO (reg), INSN_UID (curr_insn));
	      dump_value_slim (lra_dump_file, *loc, 1);
	      fprintf (lra_dump_file, "\n");
	    }
	  *loc = copy_rtx (*loc);
	}
      if (*loc != reg || ! in_class_p (reg, cl, &new_class))
	{
	  if (check_only_p)
	    return true;
	  reg = *loc;
	  /* A reload register is needed; get_reload_reg reports via its
	     return value whether an input copy must be emitted.  */
	  if (get_reload_reg (after == NULL ? OP_IN : OP_INOUT,
			      mode, reg, cl, subreg_p, "address", &new_reg))
	    before_p = true;
	}
      else if (new_class != NO_REGS && rclass != new_class)
	{
	  /* No reload needed -- narrowing the pseudo's class is
	     enough.  */
	  if (check_only_p)
	    return true;
	  lra_change_class (regno, new_class, "	   Change to", true);
	  return false;
	}
      else
	return false;
    }
  if (before_p)
    {
      push_to_sequence (*before);
      lra_emit_move (new_reg, reg);
      *before = get_insns ();
      end_sequence ();
    }
  *loc = new_reg;
  if (after != NULL)
    {
      /* For an automodified address, copy the updated value back to
	 the original register after the insn.  */
      start_sequence ();
      lra_emit_move (before_p ? copy_rtx (reg) : reg, new_reg);
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  return true;
}
1350 
/* Insert move insns in simplify_operand_subreg.  BEFORE returns
   the insn to be inserted before curr insn.  AFTER returns the
   insn to be inserted after curr insn.  ORIGREG and NEWREG
   are the original reg and new reg for reload.  A NULL BEFORE or
   AFTER suppresses the corresponding copy.  */
static void
insert_move_for_subreg (rtx_insn **before, rtx_insn **after, rtx origreg,
			rtx newreg)
{
  if (before)
    {
      /* Prepend NEWREG := ORIGREG to the existing *BEFORE list.  */
      push_to_sequence (*before);
      lra_emit_move (newreg, origreg);
      *before = get_insns ();
      end_sequence ();
    }
  if (after)
    {
      /* Emit ORIGREG := NEWREG in front of the existing *AFTER list.  */
      start_sequence ();
      lra_emit_move (origreg, newreg);
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
}
1375 
1376 static int valid_address_p (machine_mode mode, rtx addr, addr_space_t as);
1377 
/* Make reloads for subreg in operand NOP with internal subreg mode
   REG_MODE, add new reloads for further processing.  Return true if
   any change was done.  */
static bool
simplify_operand_subreg (int nop, machine_mode reg_mode)
{
  int hard_regno;
  rtx_insn *before, *after;
  machine_mode mode, innermode;
  rtx reg, new_reg;
  rtx operand = *curr_id->operand_loc[nop];
  enum reg_class regclass;
  enum op_type type;

  before = after = NULL;

  if (GET_CODE (operand) != SUBREG)
    return false;

  mode = GET_MODE (operand);
  reg = SUBREG_REG (operand);
  innermode = GET_MODE (reg);
  type = curr_static_id->operand[nop].type;
  /* If we change address for paradoxical subreg of memory, the
     address might violate the necessary alignment or the access might
     be slow.  So take this into consideration.  We should not worry
     about access beyond allocated memory for paradoxical memory
     subregs as we don't substitute such equiv memory (see processing
     equivalences in function lra_constraints) and because for spilled
     pseudos we allocate stack memory enough for the biggest
     corresponding paradoxical subreg.  */
  if (MEM_P (reg)
      && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (reg))
	  || MEM_ALIGN (reg) >= GET_MODE_ALIGNMENT (mode)))
    {
      rtx subst, old = *curr_id->operand_loc[nop];

      /* Try folding the subreg directly into the memory reference.  */
      alter_subreg (curr_id->operand_loc[nop], false);
      subst = *curr_id->operand_loc[nop];
      lra_assert (MEM_P (subst));
      if (! valid_address_p (innermode, XEXP (reg, 0),
			     MEM_ADDR_SPACE (reg))
	  || valid_address_p (GET_MODE (subst), XEXP (subst, 0),
			      MEM_ADDR_SPACE (subst)))
	return true;
      /* If the address was valid and became invalid, prefer to reload
	 the memory.  Typical case is when the index scale should
	 correspond the memory.  */
      *curr_id->operand_loc[nop] = old;
    }
  else if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
    {
      /* A subreg of a hard register can always be simplified to a
	 plain hard register reference.  */
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  else if (CONSTANT_P (reg))
    {
      /* Try to simplify subreg of constant.  It is usually result of
	 equivalence substitution.  */
      if (innermode == VOIDmode
	  && (innermode = original_subreg_reg_mode[nop]) == VOIDmode)
	innermode = curr_static_id->operand[nop].mode;
      if ((new_reg = simplify_subreg (mode, reg, innermode,
				      SUBREG_BYTE (operand))) != NULL_RTX)
	{
	  *curr_id->operand_loc[nop] = new_reg;
	  return true;
	}
    }
  /* Put constant into memory when we have mixed modes.  It generates
     a better code in most cases as it does not need a secondary
     reload memory.  It also prevents LRA looping when LRA is using
     secondary reload memory again and again.  */
  if (CONSTANT_P (reg) && CONST_POOL_OK_P (reg_mode, reg)
      && SCALAR_INT_MODE_P (reg_mode) != SCALAR_INT_MODE_P (mode))
    {
      SUBREG_REG (operand) = force_const_mem (reg_mode, reg);
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  /* Force a reload of the SUBREG_REG if this is a constant or PLUS or
     if there may be a problem accessing OPERAND in the outer
     mode.  */
  if ((REG_P (reg)
       && REGNO (reg) >= FIRST_PSEUDO_REGISTER
       && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
       /* Don't reload paradoxical subregs because we could be looping
	  having repeatedly final regno out of hard regs range.  */
       && (hard_regno_nregs[hard_regno][innermode]
	   >= hard_regno_nregs[hard_regno][mode])
       && simplify_subreg_regno (hard_regno, innermode,
				 SUBREG_BYTE (operand), mode) < 0
       /* Don't reload subreg for matching reload.  It is actually
	  valid subreg in LRA.  */
       && ! LRA_SUBREG_P (operand))
      || CONSTANT_P (reg) || GET_CODE (reg) == PLUS || MEM_P (reg))
    {
      enum reg_class rclass;

      if (REG_P (reg))
	/* There is a big probability that we will get the same class
	   for the new pseudo and we will get the same insn which
	   means infinite looping.  So spill the new pseudo.  */
	rclass = NO_REGS;
      else
	/* The class will be defined later in curr_insn_transform.  */
	rclass
	  = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, reg_mode, reg,
			  rclass, TRUE, "subreg reg", &new_reg))
	{
	  bool insert_before, insert_after;
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT
			   || GET_MODE_SIZE (innermode) > GET_MODE_SIZE (mode));
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, new_reg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
			     "Inserting subreg reload");
      return true;
    }
  /* Force a reload for a paradoxical subreg. For paradoxical subreg,
     IRA allocates hardreg to the inner pseudo reg according to its mode
     instead of the outermode, so the size of the hardreg may not be enough
     to contain the outermode operand, in that case we may need to insert
     reload for the reg. For the following two types of paradoxical subreg,
     we need to insert reload:
     1. If the op_type is OP_IN, and the hardreg could not be paired with
        other hardreg to contain the outermode operand
        (checked by in_hard_reg_set_p), we need to insert the reload.
     2. If the op_type is OP_OUT or OP_INOUT.

     Here is a paradoxical subreg example showing how the reload is generated:

     (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
        (subreg:TI (reg:DI 107 [ __comp ]) 0)) {*movti_internal_rex64}

     In IRA, reg107 is allocated to a DImode hardreg. We use x86-64 as example
     here, if reg107 is assigned to hardreg R15, because R15 is the last
     hardreg, compiler cannot find another hardreg to pair with R15 to
     contain TImode data. So we insert a TImode reload reg180 for it.
     After reload is inserted:

     (insn 283 0 0 (set (subreg:DI (reg:TI 180 [orig:107 __comp ] [107]) 0)
        (reg:DI 107 [ __comp ])) -1
     (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
        (subreg:TI (reg:TI 180 [orig:107 __comp ] [107]) 0)) {*movti_internal_rex64}

     Two reload hard registers will be allocated to reg180 to save TImode data
     in LRA_assign.  */
  else if (REG_P (reg)
	   && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	   && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
	   && (hard_regno_nregs[hard_regno][innermode]
	       < hard_regno_nregs[hard_regno][mode])
	   && (regclass = lra_get_allocno_class (REGNO (reg)))
	   && (type != OP_IN
	       || !in_hard_reg_set_p (reg_class_contents[regclass],
				      mode, hard_regno)))
    {
      /* The class will be defined later in curr_insn_transform.  */
      enum reg_class rclass
	= (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
                          rclass, TRUE, "paradoxical subreg", &new_reg))
        {
	  rtx subreg;
	  bool insert_before, insert_after;

	  /* The reload pseudo is created in the wider outer mode; the
	     copies to/from the original reg go through a subreg of it
	     in the inner mode.  */
	  PUT_MODE (new_reg, mode);
          subreg = simplify_gen_subreg (innermode, new_reg, mode, 0);
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT);
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, subreg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
                             "Inserting paradoxical subreg reload");
      return true;
    }
  return false;
}
1571 
1572 /* Return TRUE if X refers for a hard register from SET.  */
1573 static bool
1574 uses_hard_regs_p (rtx x, HARD_REG_SET set)
1575 {
1576   int i, j, x_hard_regno;
1577   machine_mode mode;
1578   const char *fmt;
1579   enum rtx_code code;
1580 
1581   if (x == NULL_RTX)
1582     return false;
1583   code = GET_CODE (x);
1584   mode = GET_MODE (x);
1585   if (code == SUBREG)
1586     {
1587       x = SUBREG_REG (x);
1588       code = GET_CODE (x);
1589       if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (mode))
1590 	mode = GET_MODE (x);
1591     }
1592 
1593   if (REG_P (x))
1594     {
1595       x_hard_regno = get_hard_regno (x);
1596       return (x_hard_regno >= 0
1597 	      && overlaps_hard_reg_set_p (set, mode, x_hard_regno));
1598     }
1599   if (MEM_P (x))
1600     {
1601       struct address_info ad;
1602 
1603       decompose_mem_address (&ad, x);
1604       if (ad.base_term != NULL && uses_hard_regs_p (*ad.base_term, set))
1605 	return true;
1606       if (ad.index_term != NULL && uses_hard_regs_p (*ad.index_term, set))
1607 	return true;
1608     }
1609   fmt = GET_RTX_FORMAT (code);
1610   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1611     {
1612       if (fmt[i] == 'e')
1613 	{
1614 	  if (uses_hard_regs_p (XEXP (x, i), set))
1615 	    return true;
1616 	}
1617       else if (fmt[i] == 'E')
1618 	{
1619 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1620 	    if (uses_hard_regs_p (XVECEXP (x, i, j), set))
1621 	      return true;
1622 	}
1623     }
1624   return false;
1625 }
1626 
1627 /* Return true if OP is a spilled pseudo. */
1628 static inline bool
1629 spilled_pseudo_p (rtx op)
1630 {
1631   return (REG_P (op)
1632 	  && REGNO (op) >= FIRST_PSEUDO_REGISTER && in_mem_p (REGNO (op)));
1633 }
1634 
1635 /* Return true if X is a general constant.  */
1636 static inline bool
1637 general_constant_p (rtx x)
1638 {
1639   return CONSTANT_P (x) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (x));
1640 }
1641 
1642 static bool
1643 reg_in_class_p (rtx reg, enum reg_class cl)
1644 {
1645   if (cl == NO_REGS)
1646     return get_reg_class (REGNO (reg)) == NO_REGS;
1647   return in_class_p (reg, cl, NULL);
1648 }
1649 
1650 /* Return true if SET of RCLASS contains no hard regs which can be
1651    used in MODE.  */
1652 static bool
1653 prohibited_class_reg_set_mode_p (enum reg_class rclass,
1654 				 HARD_REG_SET &set,
1655 				 enum machine_mode mode)
1656 {
1657   HARD_REG_SET temp;
1658 
1659   // ??? Is this assert right
1660   // lra_assert (hard_reg_set_subset_p (set, reg_class_contents[rclass]));
1661   COPY_HARD_REG_SET (temp, set);
1662   AND_COMPL_HARD_REG_SET (temp, lra_no_alloc_regs);
1663   return (hard_reg_set_subset_p
1664 	  (temp, ira_prohibited_class_mode_regs[rclass][mode]));
1665 }
1666 
1667 /* Major function to choose the current insn alternative and what
1668    operands should be reloaded and how.	 If ONLY_ALTERNATIVE is not
1669    negative we should consider only this alternative.  Return false if
1670    we can not choose the alternative or find how to reload the
1671    operands.  */
1672 static bool
1673 process_alt_operands (int only_alternative)
1674 {
1675   bool ok_p = false;
1676   int nop, overall, nalt;
1677   int n_alternatives = curr_static_id->n_alternatives;
1678   int n_operands = curr_static_id->n_operands;
1679   /* LOSERS counts the operands that don't fit this alternative and
1680      would require loading.  */
1681   int losers;
1682   /* REJECT is a count of how undesirable this alternative says it is
1683      if any reloading is required.  If the alternative matches exactly
1684      then REJECT is ignored, but otherwise it gets this much counted
1685      against it in addition to the reloading needed.  */
1686   int reject;
1687   int op_reject;
1688   /* The number of elements in the following array.  */
1689   int early_clobbered_regs_num;
1690   /* Numbers of operands which are early clobber registers.  */
1691   int early_clobbered_nops[MAX_RECOG_OPERANDS];
1692   enum reg_class curr_alt[MAX_RECOG_OPERANDS];
1693   HARD_REG_SET curr_alt_set[MAX_RECOG_OPERANDS];
1694   bool curr_alt_match_win[MAX_RECOG_OPERANDS];
1695   bool curr_alt_win[MAX_RECOG_OPERANDS];
1696   bool curr_alt_offmemok[MAX_RECOG_OPERANDS];
1697   int curr_alt_matches[MAX_RECOG_OPERANDS];
1698   /* The number of elements in the following array.  */
1699   int curr_alt_dont_inherit_ops_num;
1700   /* Numbers of operands whose reload pseudos should not be inherited.	*/
1701   int curr_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
1702   rtx op;
1703   /* The register when the operand is a subreg of register, otherwise the
1704      operand itself.  */
1705   rtx no_subreg_reg_operand[MAX_RECOG_OPERANDS];
1706   /* The register if the operand is a register or subreg of register,
1707      otherwise NULL.  */
1708   rtx operand_reg[MAX_RECOG_OPERANDS];
1709   int hard_regno[MAX_RECOG_OPERANDS];
1710   machine_mode biggest_mode[MAX_RECOG_OPERANDS];
1711   int reload_nregs, reload_sum;
1712   bool costly_p;
1713   enum reg_class cl;
1714 
1715   /* Calculate some data common for all alternatives to speed up the
1716      function.	*/
1717   for (nop = 0; nop < n_operands; nop++)
1718     {
1719       rtx reg;
1720 
1721       op = no_subreg_reg_operand[nop] = *curr_id->operand_loc[nop];
1722       /* The real hard regno of the operand after the allocation.  */
1723       hard_regno[nop] = get_hard_regno (op);
1724 
1725       operand_reg[nop] = reg = op;
1726       biggest_mode[nop] = GET_MODE (op);
1727       if (GET_CODE (op) == SUBREG)
1728 	{
1729 	  operand_reg[nop] = reg = SUBREG_REG (op);
1730 	  if (GET_MODE_SIZE (biggest_mode[nop])
1731 	      < GET_MODE_SIZE (GET_MODE (reg)))
1732 	    biggest_mode[nop] = GET_MODE (reg);
1733 	}
1734       if (! REG_P (reg))
1735 	operand_reg[nop] = NULL_RTX;
1736       else if (REGNO (reg) >= FIRST_PSEUDO_REGISTER
1737 	       || ((int) REGNO (reg)
1738 		   == lra_get_elimination_hard_regno (REGNO (reg))))
1739 	no_subreg_reg_operand[nop] = reg;
1740       else
1741 	operand_reg[nop] = no_subreg_reg_operand[nop]
1742 	  /* Just use natural mode for elimination result.  It should
1743 	     be enough for extra constraints hooks.  */
1744 	  = regno_reg_rtx[hard_regno[nop]];
1745     }
1746 
1747   /* The constraints are made of several alternatives.	Each operand's
1748      constraint looks like foo,bar,... with commas separating the
1749      alternatives.  The first alternatives for all operands go
1750      together, the second alternatives go together, etc.
1751 
1752      First loop over alternatives.  */
1753   alternative_mask preferred = curr_id->preferred_alternatives;
1754   if (only_alternative >= 0)
1755     preferred &= ALTERNATIVE_BIT (only_alternative);
1756 
1757   for (nalt = 0; nalt < n_alternatives; nalt++)
1758     {
1759       /* Loop over operands for one constraint alternative.  */
1760       if (!TEST_BIT (preferred, nalt))
1761 	continue;
1762 
1763       overall = losers = reject = reload_nregs = reload_sum = 0;
1764       for (nop = 0; nop < n_operands; nop++)
1765 	{
1766 	  int inc = (curr_static_id
1767 		     ->operand_alternative[nalt * n_operands + nop].reject);
1768 	  if (lra_dump_file != NULL && inc != 0)
1769 	    fprintf (lra_dump_file,
1770 		     "            Staticly defined alt reject+=%d\n", inc);
1771 	  reject += inc;
1772 	}
1773       early_clobbered_regs_num = 0;
1774 
1775       for (nop = 0; nop < n_operands; nop++)
1776 	{
1777 	  const char *p;
1778 	  char *end;
1779 	  int len, c, m, i, opalt_num, this_alternative_matches;
1780 	  bool win, did_match, offmemok, early_clobber_p;
1781 	  /* false => this operand can be reloaded somehow for this
1782 	     alternative.  */
1783 	  bool badop;
1784 	  /* true => this operand can be reloaded if the alternative
1785 	     allows regs.  */
1786 	  bool winreg;
1787 	  /* True if a constant forced into memory would be OK for
1788 	     this operand.  */
1789 	  bool constmemok;
1790 	  enum reg_class this_alternative, this_costly_alternative;
1791 	  HARD_REG_SET this_alternative_set, this_costly_alternative_set;
1792 	  bool this_alternative_match_win, this_alternative_win;
1793 	  bool this_alternative_offmemok;
1794 	  bool scratch_p;
1795 	  machine_mode mode;
1796 	  enum constraint_num cn;
1797 
1798 	  opalt_num = nalt * n_operands + nop;
1799 	  if (curr_static_id->operand_alternative[opalt_num].anything_ok)
1800 	    {
1801 	      /* Fast track for no constraints at all.	*/
1802 	      curr_alt[nop] = NO_REGS;
1803 	      CLEAR_HARD_REG_SET (curr_alt_set[nop]);
1804 	      curr_alt_win[nop] = true;
1805 	      curr_alt_match_win[nop] = false;
1806 	      curr_alt_offmemok[nop] = false;
1807 	      curr_alt_matches[nop] = -1;
1808 	      continue;
1809 	    }
1810 
1811 	  op = no_subreg_reg_operand[nop];
1812 	  mode = curr_operand_mode[nop];
1813 
1814 	  win = did_match = winreg = offmemok = constmemok = false;
1815 	  badop = true;
1816 
1817 	  early_clobber_p = false;
1818 	  p = curr_static_id->operand_alternative[opalt_num].constraint;
1819 
1820 	  this_costly_alternative = this_alternative = NO_REGS;
1821 	  /* We update set of possible hard regs besides its class
1822 	     because reg class might be inaccurate.  For example,
1823 	     union of LO_REGS (l), HI_REGS(h), and STACK_REG(k) in ARM
1824 	     is translated in HI_REGS because classes are merged by
1825 	     pairs and there is no accurate intermediate class.	 */
1826 	  CLEAR_HARD_REG_SET (this_alternative_set);
1827 	  CLEAR_HARD_REG_SET (this_costly_alternative_set);
1828 	  this_alternative_win = false;
1829 	  this_alternative_match_win = false;
1830 	  this_alternative_offmemok = false;
1831 	  this_alternative_matches = -1;
1832 
1833 	  /* An empty constraint should be excluded by the fast
1834 	     track.  */
1835 	  lra_assert (*p != 0 && *p != ',');
1836 
1837 	  op_reject = 0;
1838 	  /* Scan this alternative's specs for this operand; set WIN
1839 	     if the operand fits any letter in this alternative.
1840 	     Otherwise, clear BADOP if this operand could fit some
1841 	     letter after reloads, or set WINREG if this operand could
1842 	     fit after reloads provided the constraint allows some
1843 	     registers.	 */
1844 	  costly_p = false;
1845 	  do
1846 	    {
1847 	      switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
1848 		{
1849 		case '\0':
1850 		  len = 0;
1851 		  break;
1852 		case ',':
1853 		  c = '\0';
1854 		  break;
1855 
1856 		case '&':
1857 		  early_clobber_p = true;
1858 		  break;
1859 
1860 		case '$':
1861 		  op_reject += LRA_MAX_REJECT;
1862 		  break;
1863 		case '^':
1864 		  op_reject += LRA_LOSER_COST_FACTOR;
1865 		  break;
1866 
1867 		case '#':
1868 		  /* Ignore rest of this alternative.  */
1869 		  c = '\0';
1870 		  break;
1871 
1872 		case '0':  case '1':  case '2':	 case '3':  case '4':
1873 		case '5':  case '6':  case '7':	 case '8':  case '9':
1874 		  {
1875 		    int m_hregno;
1876 		    bool match_p;
1877 
1878 		    m = strtoul (p, &end, 10);
1879 		    p = end;
1880 		    len = 0;
1881 		    lra_assert (nop > m);
1882 
1883 		    this_alternative_matches = m;
1884 		    m_hregno = get_hard_regno (*curr_id->operand_loc[m]);
1885 		    /* We are supposed to match a previous operand.
1886 		       If we do, we win if that one did.  If we do
1887 		       not, count both of the operands as losers.
1888 		       (This is too conservative, since most of the
1889 		       time only a single reload insn will be needed
1890 		       to make the two operands win.  As a result,
1891 		       this alternative may be rejected when it is
1892 		       actually desirable.)  */
1893 		    match_p = false;
1894 		    if (operands_match_p (*curr_id->operand_loc[nop],
1895 					  *curr_id->operand_loc[m], m_hregno))
1896 		      {
1897 			/* We should reject matching of an early
1898 			   clobber operand if the matching operand is
1899 			   not dying in the insn.  */
1900 			if (! curr_static_id->operand[m].early_clobber
1901 			    || operand_reg[nop] == NULL_RTX
1902 			    || (find_regno_note (curr_insn, REG_DEAD,
1903 						 REGNO (op))
1904 				|| REGNO (op) == REGNO (operand_reg[m])))
1905 			  match_p = true;
1906 		      }
1907 		    if (match_p)
1908 		      {
1909 			/* If we are matching a non-offsettable
1910 			   address where an offsettable address was
1911 			   expected, then we must reject this
1912 			   combination, because we can't reload
1913 			   it.	*/
1914 			if (curr_alt_offmemok[m]
1915 			    && MEM_P (*curr_id->operand_loc[m])
1916 			    && curr_alt[m] == NO_REGS && ! curr_alt_win[m])
1917 			  continue;
1918 		      }
1919 		    else
1920 		      {
1921 			/* Operands don't match.  Both operands must
1922 			   allow a reload register, otherwise we
1923 			   cannot make them match.  */
1924 			if (curr_alt[m] == NO_REGS)
1925 			  break;
1926 			/* Retroactively mark the operand we had to
1927 			   match as a loser, if it wasn't already and
1928 			   it wasn't matched to a register constraint
1929 			   (e.g it might be matched by memory). */
1930 			if (curr_alt_win[m]
1931 			    && (operand_reg[m] == NULL_RTX
1932 				|| hard_regno[m] < 0))
1933 			  {
1934 			    losers++;
1935 			    reload_nregs
1936 			      += (ira_reg_class_max_nregs[curr_alt[m]]
1937 				  [GET_MODE (*curr_id->operand_loc[m])]);
1938 			  }
1939 
1940 			/* Prefer matching earlyclobber alternative as
1941 			   it results in less hard regs required for
1942 			   the insn than a non-matching earlyclobber
1943 			   alternative.  */
1944 			if (curr_static_id->operand[m].early_clobber)
1945 			  {
1946 			    if (lra_dump_file != NULL)
1947 			      fprintf
1948 				(lra_dump_file,
1949 				 "            %d Matching earlyclobber alt:"
1950 				 " reject--\n",
1951 				 nop);
1952 			    reject--;
1953 			  }
1954 			/* Otherwise we prefer no matching
1955 			   alternatives because it gives more freedom
1956 			   in RA.  */
1957 			else if (operand_reg[nop] == NULL_RTX
1958 				 || (find_regno_note (curr_insn, REG_DEAD,
1959 						      REGNO (operand_reg[nop]))
1960 				     == NULL_RTX))
1961 			  {
1962 			    if (lra_dump_file != NULL)
1963 			      fprintf
1964 				(lra_dump_file,
1965 				 "            %d Matching alt: reject+=2\n",
1966 				 nop);
1967 			    reject += 2;
1968 			  }
1969 		      }
1970 		    /* If we have to reload this operand and some
1971 		       previous operand also had to match the same
1972 		       thing as this operand, we don't know how to do
1973 		       that.  */
1974 		    if (!match_p || !curr_alt_win[m])
1975 		      {
1976 			for (i = 0; i < nop; i++)
1977 			  if (curr_alt_matches[i] == m)
1978 			    break;
1979 			if (i < nop)
1980 			  break;
1981 		      }
1982 		    else
1983 		      did_match = true;
1984 
1985 		    /* This can be fixed with reloads if the operand
1986 		       we are supposed to match can be fixed with
1987 		       reloads. */
1988 		    badop = false;
1989 		    this_alternative = curr_alt[m];
1990 		    COPY_HARD_REG_SET (this_alternative_set, curr_alt_set[m]);
1991 		    winreg = this_alternative != NO_REGS;
1992 		    break;
1993 		  }
1994 
1995 		case 'g':
1996 		  if (MEM_P (op)
1997 		      || general_constant_p (op)
1998 		      || spilled_pseudo_p (op))
1999 		    win = true;
2000 		  cl = GENERAL_REGS;
2001 		  goto reg;
2002 
2003 		default:
2004 		  cn = lookup_constraint (p);
2005 		  switch (get_constraint_type (cn))
2006 		    {
2007 		    case CT_REGISTER:
2008 		      cl = reg_class_for_constraint (cn);
2009 		      if (cl != NO_REGS)
2010 			goto reg;
2011 		      break;
2012 
2013 		    case CT_CONST_INT:
2014 		      if (CONST_INT_P (op)
2015 			  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2016 			win = true;
2017 		      break;
2018 
2019 		    case CT_MEMORY:
2020 		      if (MEM_P (op)
2021 			  && satisfies_memory_constraint_p (op, cn))
2022 			win = true;
2023 		      else if (spilled_pseudo_p (op))
2024 			win = true;
2025 
2026 		      /* If we didn't already win, we can reload constants
2027 			 via force_const_mem or put the pseudo value into
2028 			 memory, or make other memory by reloading the
2029 			 address like for 'o'.  */
2030 		      if (CONST_POOL_OK_P (mode, op)
2031 			  || MEM_P (op) || REG_P (op))
2032 			badop = false;
2033 		      constmemok = true;
2034 		      offmemok = true;
2035 		      break;
2036 
2037 		    case CT_ADDRESS:
2038 		      /* If we didn't already win, we can reload the address
2039 			 into a base register.  */
2040 		      if (satisfies_address_constraint_p (op, cn))
2041 			win = true;
2042 		      cl = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2043 					   ADDRESS, SCRATCH);
2044 		      badop = false;
2045 		      goto reg;
2046 
2047 		    case CT_FIXED_FORM:
2048 		      if (constraint_satisfied_p (op, cn))
2049 			win = true;
2050 		      break;
2051 		    }
2052 		  break;
2053 
2054 		reg:
2055 		  this_alternative = reg_class_subunion[this_alternative][cl];
2056 		  IOR_HARD_REG_SET (this_alternative_set,
2057 				    reg_class_contents[cl]);
2058 		  if (costly_p)
2059 		    {
2060 		      this_costly_alternative
2061 			= reg_class_subunion[this_costly_alternative][cl];
2062 		      IOR_HARD_REG_SET (this_costly_alternative_set,
2063 					reg_class_contents[cl]);
2064 		    }
2065 		  if (mode == BLKmode)
2066 		    break;
2067 		  winreg = true;
2068 		  if (REG_P (op))
2069 		    {
2070 		      if (hard_regno[nop] >= 0
2071 			  && in_hard_reg_set_p (this_alternative_set,
2072 						mode, hard_regno[nop]))
2073 			win = true;
2074 		      else if (hard_regno[nop] < 0
2075 			       && in_class_p (op, this_alternative, NULL))
2076 			win = true;
2077 		    }
2078 		  break;
2079 		}
2080 	      if (c != ' ' && c != '\t')
2081 		costly_p = c == '*';
2082 	    }
2083 	  while ((p += len), c);
2084 
2085 	  scratch_p = (operand_reg[nop] != NULL_RTX
2086 		       && lra_former_scratch_p (REGNO (operand_reg[nop])));
2087 	  /* Record which operands fit this alternative.  */
2088 	  if (win)
2089 	    {
2090 	      this_alternative_win = true;
2091 	      if (operand_reg[nop] != NULL_RTX)
2092 		{
2093 		  if (hard_regno[nop] >= 0)
2094 		    {
2095 		      if (in_hard_reg_set_p (this_costly_alternative_set,
2096 					     mode, hard_regno[nop]))
2097 			{
2098 			  if (lra_dump_file != NULL)
2099 			    fprintf (lra_dump_file,
2100 				     "            %d Costly set: reject++\n",
2101 				     nop);
2102 			  reject++;
2103 			}
2104 		    }
2105 		  else
2106 		    {
2107 		      /* Prefer won reg to spilled pseudo under other
2108 			 equal conditions for possible inheritance.  */
2109 		      if (! scratch_p)
2110 			{
2111 			  if (lra_dump_file != NULL)
2112 			    fprintf
2113 			      (lra_dump_file,
2114 			       "            %d Non pseudo reload: reject++\n",
2115 			       nop);
2116 			  reject++;
2117 			}
2118 		      if (in_class_p (operand_reg[nop],
2119 				      this_costly_alternative, NULL))
2120 			{
2121 			  if (lra_dump_file != NULL)
2122 			    fprintf
2123 			      (lra_dump_file,
2124 			       "            %d Non pseudo costly reload:"
2125 			       " reject++\n",
2126 			       nop);
2127 			  reject++;
2128 			}
2129 		    }
2130 		  /* We simulate the behaviour of old reload here.
2131 		     Although scratches need hard registers and it
2132 		     might result in spilling other pseudos, no reload
2133 		     insns are generated for the scratches.  So it
2134 		     might cost something but probably less than old
2135 		     reload pass believes.  */
2136 		  if (scratch_p)
2137 		    {
2138 		      if (lra_dump_file != NULL)
2139 			fprintf (lra_dump_file,
2140 				 "            %d Scratch win: reject+=2\n",
2141 				 nop);
2142 		      reject += 2;
2143 		    }
2144 		}
2145 	    }
2146 	  else if (did_match)
2147 	    this_alternative_match_win = true;
2148 	  else
2149 	    {
2150 	      int const_to_mem = 0;
2151 	      bool no_regs_p;
2152 
2153 	      reject += op_reject;
2154 	      /* Never do output reload of stack pointer.  It makes
2155 		 impossible to do elimination when SP is changed in
2156 		 RTL.  */
2157 	      if (op == stack_pointer_rtx && ! frame_pointer_needed
2158 		  && curr_static_id->operand[nop].type != OP_IN)
2159 		goto fail;
2160 
2161 	      /* If this alternative asks for a specific reg class, see if there
2162 		 is at least one allocatable register in that class.  */
2163 	      no_regs_p
2164 		= (this_alternative == NO_REGS
2165 		   || (hard_reg_set_subset_p
2166 		       (reg_class_contents[this_alternative],
2167 			lra_no_alloc_regs)));
2168 
2169 	      /* For asms, verify that the class for this alternative is possible
2170 		 for the mode that is specified.  */
2171 	      if (!no_regs_p && INSN_CODE (curr_insn) < 0)
2172 		{
2173 		  int i;
2174 		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2175 		    if (HARD_REGNO_MODE_OK (i, mode)
2176 			&& in_hard_reg_set_p (reg_class_contents[this_alternative],
2177 					      mode, i))
2178 		      break;
2179 		  if (i == FIRST_PSEUDO_REGISTER)
2180 		    winreg = false;
2181 		}
2182 
2183 	      /* If this operand accepts a register, and if the
2184 		 register class has at least one allocatable register,
2185 		 then this operand can be reloaded.  */
2186 	      if (winreg && !no_regs_p)
2187 		badop = false;
2188 
2189 	      if (badop)
2190 		{
2191 		  if (lra_dump_file != NULL)
2192 		    fprintf (lra_dump_file,
2193 			     "            alt=%d: Bad operand -- refuse\n",
2194 			     nalt);
2195 		  goto fail;
2196 		}
2197 
2198 	      /* If a not yet assigned pseudo has a class which is a subset of
2199 		 required reg class, it is a less costly alternative
2200 		 as the pseudo still can get a hard reg of necessary
2201 		 class.  */
2202 	      if (! no_regs_p && REG_P (op) && hard_regno[nop] < 0
2203 		  && (cl = get_reg_class (REGNO (op))) != NO_REGS
2204 		  && ira_class_subset_p[this_alternative][cl])
2205 		{
2206 		  if (lra_dump_file != NULL)
2207 		    fprintf
2208 		      (lra_dump_file,
2209 		       "            %d Super set class reg: reject-=3\n", nop);
2210 		  reject -= 3;
2211 		}
2212 
2213 	      this_alternative_offmemok = offmemok;
2214 	      if (this_costly_alternative != NO_REGS)
2215 		{
2216 		  if (lra_dump_file != NULL)
2217 		    fprintf (lra_dump_file,
2218 			     "            %d Costly loser: reject++\n", nop);
2219 		  reject++;
2220 		}
2221 	      /* If the operand is dying, has a matching constraint,
2222 		 and satisfies constraints of the matched operand
2223 		 which failed to satisfy the own constraints, most probably
2224 		 the reload for this operand will be gone.  */
2225 	      if (this_alternative_matches >= 0
2226 		  && !curr_alt_win[this_alternative_matches]
2227 		  && REG_P (op)
2228 		  && find_regno_note (curr_insn, REG_DEAD, REGNO (op))
2229 		  && (hard_regno[nop] >= 0
2230 		      ? in_hard_reg_set_p (this_alternative_set,
2231 					   mode, hard_regno[nop])
2232 		      : in_class_p (op, this_alternative, NULL)))
2233 		{
2234 		  if (lra_dump_file != NULL)
2235 		    fprintf
2236 		      (lra_dump_file,
2237 		       "            %d Dying matched operand reload: reject++\n",
2238 		       nop);
2239 		  reject++;
2240 		}
2241 	      else
2242 		{
2243 		  /* Strict_low_part requires to reload the register
2244 		     not the sub-register.  In this case we should
2245 		     check that a final reload hard reg can hold the
2246 		     value mode.  */
2247 		  if (curr_static_id->operand[nop].strict_low
2248 		      && REG_P (op)
2249 		      && hard_regno[nop] < 0
2250 		      && GET_CODE (*curr_id->operand_loc[nop]) == SUBREG
2251 		      && ira_class_hard_regs_num[this_alternative] > 0
2252 		      && ! HARD_REGNO_MODE_OK (ira_class_hard_regs
2253 					       [this_alternative][0],
2254 					       GET_MODE
2255 					       (*curr_id->operand_loc[nop])))
2256 		    {
2257 		      if (lra_dump_file != NULL)
2258 			fprintf
2259 			  (lra_dump_file,
2260 			   "            alt=%d: Strict low subreg reload -- refuse\n",
2261 			   nalt);
2262 		      goto fail;
2263 		    }
2264 		  losers++;
2265 		}
2266 	      if (operand_reg[nop] != NULL_RTX
2267 		  /* Output operands and matched input operands are
2268 		     not inherited.  The following conditions do not
2269 		     exactly describe the previous statement but they
2270 		     are pretty close.  */
2271 		  && curr_static_id->operand[nop].type != OP_OUT
2272 		  && (this_alternative_matches < 0
2273 		      || curr_static_id->operand[nop].type != OP_IN))
2274 		{
2275 		  int last_reload = (lra_reg_info[ORIGINAL_REGNO
2276 						  (operand_reg[nop])]
2277 				     .last_reload);
2278 
2279 		  /* The value of reload_sum has sense only if we
2280 		     process insns in their order.  It happens only on
2281 		     the first constraints sub-pass when we do most of
2282 		     reload work.  */
2283 		  if (lra_constraint_iter == 1 && last_reload > bb_reload_num)
2284 		    reload_sum += last_reload - bb_reload_num;
2285 		}
2286 	      /* If this is a constant that is reloaded into the
2287 		 desired class by copying it to memory first, count
2288 		 that as another reload.  This is consistent with
2289 		 other code and is required to avoid choosing another
2290 		 alternative when the constant is moved into memory.
2291 		 Note that the test here is precisely the same as in
2292 		 the code below that calls force_const_mem.  */
2293 	      if (CONST_POOL_OK_P (mode, op)
2294 		  && ((targetm.preferred_reload_class
2295 		       (op, this_alternative) == NO_REGS)
2296 		      || no_input_reloads_p))
2297 		{
2298 		  const_to_mem = 1;
2299 		  if (! no_regs_p)
2300 		    losers++;
2301 		}
2302 
2303 	      /* Alternative loses if it requires a type of reload not
2304 		 permitted for this insn.  We can always reload
2305 		 objects with a REG_UNUSED note.  */
2306 	      if ((curr_static_id->operand[nop].type != OP_IN
2307 		   && no_output_reloads_p
2308 		   && ! find_reg_note (curr_insn, REG_UNUSED, op))
2309 		  || (curr_static_id->operand[nop].type != OP_OUT
2310 		      && no_input_reloads_p && ! const_to_mem)
2311 		  || (this_alternative_matches >= 0
2312 		      && (no_input_reloads_p
2313 			  || (no_output_reloads_p
2314 			      && (curr_static_id->operand
2315 				  [this_alternative_matches].type != OP_IN)
2316 			      && ! find_reg_note (curr_insn, REG_UNUSED,
2317 						  no_subreg_reg_operand
2318 						  [this_alternative_matches])))))
2319 		{
2320 		  if (lra_dump_file != NULL)
2321 		    fprintf
2322 		      (lra_dump_file,
2323 		       "            alt=%d: No input/otput reload -- refuse\n",
2324 		       nalt);
2325 		  goto fail;
2326 		}
2327 
2328 	      /* Alternative loses if it required class pseudo can not
2329 	      /* Alternative loses if its required class pseudo can not
2330 		 described by insn definitions with mode iterators.  */
2331 	      if (GET_MODE (*curr_id->operand_loc[nop]) != VOIDmode
2332 		  && ! hard_reg_set_empty_p (this_alternative_set)
2333 		  /* It is common practice for constraints to use a
2334 		     class which does not have actually enough regs to
2335 		     hold the value (e.g. x86 AREG for mode requiring
2336 		     more than one general reg).  Therefore we have 2
2337 		     conditions to check that the reload pseudo can
2338 		     not hold the mode value.  */
2339 		  && ! HARD_REGNO_MODE_OK (ira_class_hard_regs
2340 					   [this_alternative][0],
2341 					   GET_MODE (*curr_id->operand_loc[nop]))
2342 		  /* The above condition is not enough as the first
2343 		     reg in ira_class_hard_regs may not be aligned for
2344 		     multi-word mode values.  */
2345 		  && (prohibited_class_reg_set_mode_p
2346 		      (this_alternative, this_alternative_set,
2347 		       GET_MODE (*curr_id->operand_loc[nop]))))
2348 		{
2349 		  if (lra_dump_file != NULL)
2350 		    fprintf (lra_dump_file,
2351 			     "            alt=%d: reload pseudo for op %d "
2352 			     " can not hold the mode value -- refuse\n",
2353 			     nalt, nop);
2354 		  goto fail;
2355 		}
2356 
2357 	      /* Check strong discouragement of reload of non-constant
2358 		 into class THIS_ALTERNATIVE.  */
2359 	      if (! CONSTANT_P (op) && ! no_regs_p
2360 		  && (targetm.preferred_reload_class
2361 		      (op, this_alternative) == NO_REGS
2362 		      || (curr_static_id->operand[nop].type == OP_OUT
2363 			  && (targetm.preferred_output_reload_class
2364 			      (op, this_alternative) == NO_REGS))))
2365 		{
2366 		  if (lra_dump_file != NULL)
2367 		    fprintf (lra_dump_file,
2368 			     "            %d Non-prefered reload: reject+=%d\n",
2369 			     nop, LRA_MAX_REJECT);
2370 		  reject += LRA_MAX_REJECT;
2371 		}
2372 
2373 	      if (! (MEM_P (op) && offmemok)
2374 		  && ! (const_to_mem && constmemok))
2375 		{
2376 		  /* We prefer to reload pseudos over reloading other
2377 		     things, since such reloads may be able to be
2378 		     eliminated later.  So bump REJECT in other cases.
2379 		     Don't do this in the case where we are forcing a
2380 		     constant into memory and it will then win since
2381 		     we don't want to have a different alternative
2382 		     match then.  */
2383 		  if (! (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2384 		    {
2385 		      if (lra_dump_file != NULL)
2386 			fprintf
2387 			  (lra_dump_file,
2388 			   "            %d Non-pseudo reload: reject+=2\n",
2389 			   nop);
2390 		      reject += 2;
2391 		    }
2392 
2393 		  if (! no_regs_p)
2394 		    reload_nregs
2395 		      += ira_reg_class_max_nregs[this_alternative][mode];
2396 
2397 		  if (SMALL_REGISTER_CLASS_P (this_alternative))
2398 		    {
2399 		      if (lra_dump_file != NULL)
2400 			fprintf
2401 			  (lra_dump_file,
2402 			   "            %d Small class reload: reject+=%d\n",
2403 			   nop, LRA_LOSER_COST_FACTOR / 2);
2404 		      reject += LRA_LOSER_COST_FACTOR / 2;
2405 		    }
2406 		}
2407 
2408 	      /* We are trying to spill pseudo into memory.  It is
2409 		 usually more costly than moving to a hard register
2410 		 although it might take the same number of
2411 		 reloads.  */
2412 	      if (no_regs_p && REG_P (op) && hard_regno[nop] >= 0)
2413 		{
2414 		  if (lra_dump_file != NULL)
2415 		    fprintf
2416 		      (lra_dump_file,
2417 		       "            %d Spill pseudo into memory: reject+=3\n",
2418 		       nop);
2419 		  reject += 3;
2420 		  if (VECTOR_MODE_P (mode))
2421 		    {
2422 		      /* Spilling vectors into memory is usually more
2423 			 costly as they contain big values.  */
2424 		      if (lra_dump_file != NULL)
2425 			fprintf
2426 			  (lra_dump_file,
2427 			   "            %d Spill vector pseudo: reject+=2\n",
2428 			   nop);
2429 		      reject += 2;
2430 		    }
2431 		}
2432 
2433 #ifdef SECONDARY_MEMORY_NEEDED
2434 	      /* If reload requires moving value through secondary
2435 		 memory, it will need one more insn at least.  */
2436 	      if (this_alternative != NO_REGS
2437 		  && REG_P (op) && (cl = get_reg_class (REGNO (op))) != NO_REGS
2438 		  && ((curr_static_id->operand[nop].type != OP_OUT
2439 		       && SECONDARY_MEMORY_NEEDED (cl, this_alternative,
2440 						   GET_MODE (op)))
2441 		      || (curr_static_id->operand[nop].type != OP_IN
2442 			  && SECONDARY_MEMORY_NEEDED (this_alternative, cl,
2443 						      GET_MODE (op)))))
2444 		losers++;
2445 #endif
2446 	      /* Input reloads can be inherited more often than output
2447 		 reloads can be removed, so penalize output
2448 		 reloads.  */
2449 	      if (!REG_P (op) || curr_static_id->operand[nop].type != OP_IN)
2450 		{
2451 		  if (lra_dump_file != NULL)
2452 		    fprintf
2453 		      (lra_dump_file,
2454 		       "            %d Non input pseudo reload: reject++\n",
2455 		       nop);
2456 		  reject++;
2457 		}
2458 	    }
2459 
2460 	  if (early_clobber_p && ! scratch_p)
2461 	    {
2462 	      if (lra_dump_file != NULL)
2463 		fprintf (lra_dump_file,
2464 			 "            %d Early clobber: reject++\n", nop);
2465 	      reject++;
2466 	    }
2467 	  /* ??? We check early clobbers after processing all operands
2468 	     (see loop below) and there we update the costs more.
2469 	     Should we update the cost (perhaps approximately) here
2470 	     because of early clobber register reloads, or is it a rare
2471 	     or unimportant enough case that it is not worth doing?  */
2472 	  overall = losers * LRA_LOSER_COST_FACTOR + reject;
2473 	  if ((best_losers == 0 || losers != 0) && best_overall < overall)
2474             {
2475               if (lra_dump_file != NULL)
2476 		fprintf (lra_dump_file,
2477 			 "            alt=%d,overall=%d,losers=%d -- refuse\n",
2478 			 nalt, overall, losers);
2479               goto fail;
2480             }
2481 
2482 	  curr_alt[nop] = this_alternative;
2483 	  COPY_HARD_REG_SET (curr_alt_set[nop], this_alternative_set);
2484 	  curr_alt_win[nop] = this_alternative_win;
2485 	  curr_alt_match_win[nop] = this_alternative_match_win;
2486 	  curr_alt_offmemok[nop] = this_alternative_offmemok;
2487 	  curr_alt_matches[nop] = this_alternative_matches;
2488 
2489 	  if (this_alternative_matches >= 0
2490 	      && !did_match && !this_alternative_win)
2491 	    curr_alt_win[this_alternative_matches] = false;
2492 
2493 	  if (early_clobber_p && operand_reg[nop] != NULL_RTX)
2494 	    early_clobbered_nops[early_clobbered_regs_num++] = nop;
2495 	}
2496       if (curr_insn_set != NULL_RTX && n_operands == 2
2497 	  /* Prevent processing non-move insns.  */
2498 	  && (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
2499 	      || SET_SRC (curr_insn_set) == no_subreg_reg_operand[1])
2500 	  && ((! curr_alt_win[0] && ! curr_alt_win[1]
2501 	       && REG_P (no_subreg_reg_operand[0])
2502 	       && REG_P (no_subreg_reg_operand[1])
2503 	       && (reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
2504 		   || reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0])))
2505 	      || (! curr_alt_win[0] && curr_alt_win[1]
2506 		  && REG_P (no_subreg_reg_operand[1])
2507 		  && reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0]))
2508 	      || (curr_alt_win[0] && ! curr_alt_win[1]
2509 		  && REG_P (no_subreg_reg_operand[0])
2510 		  && reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
2511 		  && (! CONST_POOL_OK_P (curr_operand_mode[1],
2512 					 no_subreg_reg_operand[1])
2513 		      || (targetm.preferred_reload_class
2514 			  (no_subreg_reg_operand[1],
2515 			   (enum reg_class) curr_alt[1]) != NO_REGS))
2516 		  /* If it is a result of recent elimination in move
2517 		     insn we can transform it into an add still by
2518 		     using this alternative.  */
2519 		  && GET_CODE (no_subreg_reg_operand[1]) != PLUS)))
2520 	{
2521 	  /* We have a move insn and a new reload insn will be similar
2522 	     to the current insn.  We should avoid such situation as it
2523 	     results in LRA cycling.  */
2524 	  overall += LRA_MAX_REJECT;
2525 	}
2526       ok_p = true;
2527       curr_alt_dont_inherit_ops_num = 0;
2528       for (nop = 0; nop < early_clobbered_regs_num; nop++)
2529 	{
2530 	  int i, j, clobbered_hard_regno, first_conflict_j, last_conflict_j;
2531 	  HARD_REG_SET temp_set;
2532 
2533 	  i = early_clobbered_nops[nop];
2534 	  if ((! curr_alt_win[i] && ! curr_alt_match_win[i])
2535 	      || hard_regno[i] < 0)
2536 	    continue;
2537 	  lra_assert (operand_reg[i] != NULL_RTX);
2538 	  clobbered_hard_regno = hard_regno[i];
2539 	  CLEAR_HARD_REG_SET (temp_set);
2540 	  add_to_hard_reg_set (&temp_set, biggest_mode[i], clobbered_hard_regno);
2541 	  first_conflict_j = last_conflict_j = -1;
2542 	  for (j = 0; j < n_operands; j++)
2543 	    if (j == i
2544 		/* We don't want to process the insides of match_operator and
2545 		   match_parallel because otherwise we would process
2546 		   their operands once again generating a wrong
2547 		   code.  */
2548 		|| curr_static_id->operand[j].is_operator)
2549 	      continue;
2550 	    else if ((curr_alt_matches[j] == i && curr_alt_match_win[j])
2551 		     || (curr_alt_matches[i] == j && curr_alt_match_win[i]))
2552 	      continue;
2553 	    /* If we don't reload j-th operand, check conflicts.  */
2554 	    else if ((curr_alt_win[j] || curr_alt_match_win[j])
2555 		     && uses_hard_regs_p (*curr_id->operand_loc[j], temp_set))
2556 	      {
2557 		if (first_conflict_j < 0)
2558 		  first_conflict_j = j;
2559 		last_conflict_j = j;
2560 	      }
2561 	  if (last_conflict_j < 0)
2562 	    continue;
2563 	  /* If earlyclobber operand conflicts with another
2564 	     non-matching operand which is actually the same register
2565 	     as the earlyclobber operand, it is better to reload the
2566 	     another operand as an operand matching the earlyclobber
2567 	     operand can be also the same.  */
2568 	  if (first_conflict_j == last_conflict_j
2569 	      && operand_reg[last_conflict_j]
2570 	      != NULL_RTX && ! curr_alt_match_win[last_conflict_j]
2571 	      && REGNO (operand_reg[i]) == REGNO (operand_reg[last_conflict_j]))
2572 	    {
2573 	      curr_alt_win[last_conflict_j] = false;
2574 	      curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++]
2575 		= last_conflict_j;
2576 	      losers++;
2577 	      /* Early clobber was already reflected in REJECT. */
2578 	      lra_assert (reject > 0);
2579 	      if (lra_dump_file != NULL)
2580 		fprintf
2581 		  (lra_dump_file,
2582 		   "            %d Conflict early clobber reload: reject--\n",
2583 		   i);
2584 	      reject--;
2585 	      overall += LRA_LOSER_COST_FACTOR - 1;
2586 	    }
2587 	  else
2588 	    {
2589 	      /* We need to reload early clobbered register and the
2590 		 matched registers.  */
2591 	      for (j = 0; j < n_operands; j++)
2592 		if (curr_alt_matches[j] == i)
2593 		  {
2594 		    curr_alt_match_win[j] = false;
2595 		    losers++;
2596 		    overall += LRA_LOSER_COST_FACTOR;
2597 		  }
2598 	      if (! curr_alt_match_win[i])
2599 		curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++] = i;
2600 	      else
2601 		{
2602 		  /* Remember pseudos used for match reloads are never
2603 		     inherited.  */
2604 		  lra_assert (curr_alt_matches[i] >= 0);
2605 		  curr_alt_win[curr_alt_matches[i]] = false;
2606 		}
2607 	      curr_alt_win[i] = curr_alt_match_win[i] = false;
2608 	      losers++;
2609 	      /* Early clobber was already reflected in REJECT. */
2610 	      lra_assert (reject > 0);
2611 	      if (lra_dump_file != NULL)
2612 		fprintf
2613 		  (lra_dump_file,
2614 		   "            %d Matched conflict early clobber reloads:"
2615 		   "reject--\n",
2616 		   i);
2617 	      reject--;
2618 	      overall += LRA_LOSER_COST_FACTOR - 1;
2619 	    }
2620 	}
2621       if (lra_dump_file != NULL)
2622 	fprintf (lra_dump_file, "          alt=%d,overall=%d,losers=%d,rld_nregs=%d\n",
2623 		 nalt, overall, losers, reload_nregs);
2624 
2625       /* If this alternative can be made to work by reloading, and it
2626 	 needs less reloading than the others checked so far, record
2627 	 it as the chosen goal for reloading.  */
2628       if ((best_losers != 0 && losers == 0)
2629 	  || (((best_losers == 0 && losers == 0)
2630 	       || (best_losers != 0 && losers != 0))
2631 	      && (best_overall > overall
2632 		  || (best_overall == overall
2633 		      /* If the cost of the reloads is the same,
2634 			 prefer alternative which requires minimal
2635 			 number of reload regs.  */
2636 		      && (reload_nregs < best_reload_nregs
2637 			  || (reload_nregs == best_reload_nregs
2638 			      && (best_reload_sum < reload_sum
2639 				  || (best_reload_sum == reload_sum
2640 				      && nalt < goal_alt_number))))))))
2641 	{
2642 	  for (nop = 0; nop < n_operands; nop++)
2643 	    {
2644 	      goal_alt_win[nop] = curr_alt_win[nop];
2645 	      goal_alt_match_win[nop] = curr_alt_match_win[nop];
2646 	      goal_alt_matches[nop] = curr_alt_matches[nop];
2647 	      goal_alt[nop] = curr_alt[nop];
2648 	      goal_alt_offmemok[nop] = curr_alt_offmemok[nop];
2649 	    }
2650 	  goal_alt_dont_inherit_ops_num = curr_alt_dont_inherit_ops_num;
2651 	  for (nop = 0; nop < curr_alt_dont_inherit_ops_num; nop++)
2652 	    goal_alt_dont_inherit_ops[nop] = curr_alt_dont_inherit_ops[nop];
2653 	  goal_alt_swapped = curr_swapped;
2654 	  best_overall = overall;
2655 	  best_losers = losers;
2656 	  best_reload_nregs = reload_nregs;
2657 	  best_reload_sum = reload_sum;
2658 	  goal_alt_number = nalt;
2659 	}
2660       if (losers == 0)
2661 	/* Everything is satisfied.  Do not process alternatives
2662 	   anymore.  */
2663 	break;
2664     fail:
2665       ;
2666     }
2667   return ok_p;
2668 }
2669 
2670 /* Make reload base reg from address AD.  */
2671 static rtx
2672 base_to_reg (struct address_info *ad)
2673 {
2674   enum reg_class cl;
2675   int code = -1;
2676   rtx new_inner = NULL_RTX;
2677   rtx new_reg = NULL_RTX;
2678   rtx_insn *insn;
2679   rtx_insn *last_insn = get_last_insn();
2680 
2681   lra_assert (ad->base == ad->base_term && ad->disp == ad->disp_term);
2682   cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
2683                        get_index_code (ad));
2684   new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX,
2685                                 cl, "base");
2686   new_inner = simplify_gen_binary (PLUS, GET_MODE (new_reg), new_reg,
2687                                    ad->disp_term == NULL
2688                                    ? gen_int_mode (0, ad->mode)
2689                                    : *ad->disp_term);
2690   if (!valid_address_p (ad->mode, new_inner, ad->as))
2691     return NULL_RTX;
2692   insn = emit_insn (gen_rtx_SET (ad->mode, new_reg, *ad->base_term));
2693   code = recog_memoized (insn);
2694   if (code < 0)
2695     {
2696       delete_insns_since (last_insn);
2697       return NULL_RTX;
2698     }
2699 
2700   return new_inner;
2701 }
2702 
2703 /* Make reload base reg + disp from address AD.  Return the new pseudo.  */
2704 static rtx
2705 base_plus_disp_to_reg (struct address_info *ad)
2706 {
2707   enum reg_class cl;
2708   rtx new_reg;
2709 
2710   lra_assert (ad->base == ad->base_term && ad->disp == ad->disp_term);
2711   cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
2712 		       get_index_code (ad));
2713   new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX,
2714 				cl, "base + disp");
2715   lra_emit_add (new_reg, *ad->base_term, *ad->disp_term);
2716   return new_reg;
2717 }
2718 
2719 /* Make reload of index part of address AD.  Return the new
2720    pseudo.  */
2721 static rtx
2722 index_part_to_reg (struct address_info *ad)
2723 {
2724   rtx new_reg;
2725 
2726   new_reg = lra_create_new_reg (GET_MODE (*ad->index), NULL_RTX,
2727 				INDEX_REG_CLASS, "index term");
2728   expand_mult (GET_MODE (*ad->index), *ad->index_term,
2729 	       GEN_INT (get_index_scale (ad)), new_reg, 1);
2730   return new_reg;
2731 }
2732 
2733 /* Return true if we can add a displacement to address AD, even if that
2734    makes the address invalid.  The fix-up code requires any new address
2735    to be the sum of the BASE_TERM, INDEX and DISP_TERM fields.  */
2736 static bool
2737 can_add_disp_p (struct address_info *ad)
2738 {
2739   return (!ad->autoinc_p
2740 	  && ad->segment == NULL
2741 	  && ad->base == ad->base_term
2742 	  && ad->disp == ad->disp_term);
2743 }
2744 
/* Make equiv substitution in address AD.  Return true if a substitution
   was made.  Replaces the base and/or index registers by their
   equivalences (after elimination); reg + const equivalences are
   handled by folding the constant into the displacement.  */
static bool
equiv_address_substitution (struct address_info *ad)
{
  rtx base_reg, new_base_reg, index_reg, new_index_reg, *base_term, *index_term;
  HOST_WIDE_INT disp, scale;
  bool change_p;

  /* Find the base and index registers (looking through any wrapping
     subreg) and their equivalences after elimination.  */
  base_term = strip_subreg (ad->base_term);
  if (base_term == NULL)
    base_reg = new_base_reg = NULL_RTX;
  else
    {
      base_reg = *base_term;
      new_base_reg = get_equiv_with_elimination (base_reg, curr_insn);
    }
  index_term = strip_subreg (ad->index_term);
  if (index_term == NULL)
    index_reg = new_index_reg = NULL_RTX;
  else
    {
      index_reg = *index_term;
      new_index_reg = get_equiv_with_elimination (index_reg, curr_insn);
    }
  /* Nothing to substitute.  */
  if (base_reg == new_base_reg && index_reg == new_index_reg)
    return false;
  /* DISP accumulates constants picked up from reg + const
     equivalences; it is folded into the address at the end.  */
  disp = 0;
  change_p = false;
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, "Changing address in insn %d ",
	       INSN_UID (curr_insn));
      dump_value_slim (lra_dump_file, *ad->outer, 1);
    }
  if (base_reg != new_base_reg)
    {
      if (REG_P (new_base_reg))
	{
	  /* Simple register equivalence: substitute in place.  */
	  *base_term = new_base_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_base_reg) == PLUS
	       && REG_P (XEXP (new_base_reg, 0))
	       && CONST_INT_P (XEXP (new_base_reg, 1))
	       && can_add_disp_p (ad))
	{
	  /* Base is equivalent to reg + const: use the reg as the new
	     base and remember the constant for the displacement.  */
	  disp += INTVAL (XEXP (new_base_reg, 1));
	  *base_term = XEXP (new_base_reg, 0);
	  change_p = true;
	}
      /* Keep the duplicate base term (e.g. in {PRE,POST}_MODIFY) in
	 sync.  */
      if (ad->base_term2 != NULL)
	*ad->base_term2 = *ad->base_term;
    }
  if (index_reg != new_index_reg)
    {
      if (REG_P (new_index_reg))
	{
	  *index_term = new_index_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_index_reg) == PLUS
	       && REG_P (XEXP (new_index_reg, 0))
	       && CONST_INT_P (XEXP (new_index_reg, 1))
	       && can_add_disp_p (ad)
	       && (scale = get_index_scale (ad)))
	{
	  /* The constant moved out of the index is multiplied by the
	     index scale before entering the displacement.  */
	  disp += INTVAL (XEXP (new_index_reg, 1)) * scale;
	  *index_term = XEXP (new_index_reg, 0);
	  change_p = true;
	}
    }
  /* Fold the accumulated constant into the displacement (or into the
     whole inner address if there is no displacement term).  */
  if (disp != 0)
    {
      if (ad->disp != NULL)
	*ad->disp = plus_constant (GET_MODE (*ad->inner), *ad->disp, disp);
      else
	{
	  *ad->inner = plus_constant (GET_MODE (*ad->inner), *ad->inner, disp);
	  update_address (ad);
	}
      change_p = true;
    }
  if (lra_dump_file != NULL)
    {
      if (! change_p)
	fprintf (lra_dump_file, " -- no change\n");
      else
	{
	  fprintf (lra_dump_file, " on equiv ");
	  dump_value_slim (lra_dump_file, *ad->outer, 1);
	  fprintf (lra_dump_file, "\n");
	}
    }
  return change_p;
}
2841 
/* Major function to make reloads for an address in operand NOP or
   check its correctness (If CHECK_ONLY_P is true). The supported
   cases are:

   1) an address that existed before LRA started, at which point it
   must have been valid.  These addresses are subject to elimination
   and may have become invalid due to the elimination offset being out
   of range.

   2) an address created by forcing a constant to memory
   (force_const_to_mem).  The initial form of these addresses might
   not be valid, and it is this function's job to make them valid.

   3) a frame address formed from a register and a (possibly zero)
   constant offset.  As above, these addresses might not be valid and
   this function must make them so.

   Add reloads to the lists *BEFORE and *AFTER.  We might need to add
   reloads to *AFTER because of inc/dec, {pre, post} modify in the
   address.  Return true for any RTL change.

   The function is a helper function which does not produce all
   transformations (when CHECK_ONLY_P is false) which can be
   necessary.  It does just basic steps.  To do all necessary
   transformations use function process_address.  */
static bool
process_address_1 (int nop, bool check_only_p,
		   rtx_insn **before, rtx_insn **after)
{
  struct address_info ad;
  rtx new_reg;
  rtx op = *curr_id->operand_loc[nop];
  const char *constraint = curr_static_id->operand[nop].constraint;
  enum constraint_num cn = lookup_constraint (constraint);
  bool change_p = false;

  /* Decompose the operand into address parts: either a bare address
     (address constraint) or the address inside a MEM, possibly under
     a SUBREG.  Anything else has no address to process.  */
  if (insn_extra_address_constraint (cn))
    decompose_lea_address (&ad, curr_id->operand_loc[nop]);
  else if (MEM_P (op))
    decompose_mem_address (&ad, op);
  else if (GET_CODE (op) == SUBREG
	   && MEM_P (SUBREG_REG (op)))
    decompose_mem_address (&ad, SUBREG_REG (op));
  else
    return false;
  /* If INDEX_REG_CLASS is assigned to base_term already and isn't to
     index_term, swap them so to avoid assigning INDEX_REG_CLASS to both
     when INDEX_REG_CLASS is a single register class.  */
  if (ad.base_term != NULL
      && ad.index_term != NULL
      && ira_class_hard_regs_num[INDEX_REG_CLASS] == 1
      && REG_P (*ad.base_term)
      && REG_P (*ad.index_term)
      && in_class_p (*ad.base_term, INDEX_REG_CLASS, NULL)
      && ! in_class_p (*ad.index_term, INDEX_REG_CLASS, NULL))
    {
      std::swap (ad.base, ad.index);
      std::swap (ad.base_term, ad.index_term);
    }
  if (! check_only_p)
    change_p = equiv_address_substitution (&ad);
  /* Reload the base register if needed.  For an auto-inc address whose
     base does not die in this insn, the updated value must also be
     stored back, hence the reload may go to *AFTER.  */
  if (ad.base_term != NULL
      && (process_addr_reg
	  (ad.base_term, check_only_p, before,
	   (ad.autoinc_p
	    && !(REG_P (*ad.base_term)
		 && find_regno_note (curr_insn, REG_DEAD,
				     REGNO (*ad.base_term)) != NULL_RTX)
	    ? after : NULL),
	   base_reg_class (ad.mode, ad.as, ad.base_outer_code,
			   get_index_code (&ad)))))
    {
      change_p = true;
      if (ad.base_term2 != NULL)
	*ad.base_term2 = *ad.base_term;
    }
  if (ad.index_term != NULL
      && process_addr_reg (ad.index_term, check_only_p,
			   before, NULL, INDEX_REG_CLASS))
    change_p = true;

  /* Target hooks sometimes don't treat extra-constraint addresses as
     legitimate address_operands, so handle them specially.  */
  if (insn_extra_address_constraint (cn)
      && satisfies_address_constraint_p (&ad, cn))
    return change_p;

  if (check_only_p)
    return change_p;

  /* There are four cases where the shape of *AD.INNER may now be invalid:

     1) the original address was valid, but either elimination or
     equiv_address_substitution was applied and that made
     the address invalid.

     2) the address is an invalid symbolic address created by
     force_const_to_mem.

     3) the address is a frame address with an invalid offset.

     4) the address is a frame address with an invalid base.

     All these cases involve a non-autoinc address, so there is no
     point revalidating other types.  */
  if (ad.autoinc_p || valid_address_p (&ad))
    return change_p;

  /* Any index existed before LRA started, so we can assume that the
     presence and shape of the index is valid.  */
  push_to_sequence (*before);
  lra_assert (ad.disp == ad.disp_term);
  if (ad.base == NULL)
    {
      if (ad.index == NULL)
	{
	  int code = -1;
	  enum reg_class cl = base_reg_class (ad.mode, ad.as,
					      SCRATCH, SCRATCH);
	  rtx addr = *ad.inner;

	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "addr");
#ifdef HAVE_lo_sum
	  {
	    rtx_insn *insn;
	    rtx_insn *last = get_last_insn ();

	    /* addr => lo_sum (new_base, addr), case (2) above.  */
	    insn = emit_insn (gen_rtx_SET
			      (VOIDmode, new_reg,
			       gen_rtx_HIGH (Pmode, copy_rtx (addr))));
	    code = recog_memoized (insn);
	    if (code >= 0)
	      {
		*ad.inner = gen_rtx_LO_SUM (Pmode, new_reg, addr);
		if (! valid_address_p (ad.mode, *ad.outer, ad.as))
		  {
		    /* Try to put lo_sum into register.  */
		    insn = emit_insn (gen_rtx_SET
				      (VOIDmode, new_reg,
				       gen_rtx_LO_SUM (Pmode, new_reg, addr)));
		    code = recog_memoized (insn);
		    if (code >= 0)
		      {
			*ad.inner = new_reg;
			if (! valid_address_p (ad.mode, *ad.outer, ad.as))
			  {
			    /* Neither form worked; restore the
			       original address and fall back.  */
			    *ad.inner = addr;
			    code = -1;
			  }
		      }

		  }
	      }
	    if (code < 0)
	      delete_insns_since (last);
	  }
#endif
	  if (code < 0)
	    {
	      /* addr => new_base, case (2) above.  */
	      lra_emit_move (new_reg, addr);
	      *ad.inner = new_reg;
	    }
	}
      else
	{
	  /* index * scale + disp => new base + index * scale,
	     case (1) above.  */
	  enum reg_class cl = base_reg_class (ad.mode, ad.as, PLUS,
					      GET_CODE (*ad.index));

	  lra_assert (INDEX_REG_CLASS != NO_REGS);
	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "disp");
	  lra_emit_move (new_reg, *ad.disp);
	  *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
					   new_reg, *ad.index);
	}
    }
  else if (ad.index == NULL)
    {
      int regno;
      enum reg_class cl;
      rtx set;
      rtx_insn *insns, *last_insn;
      /* Try to reload base into register only if the base is invalid
         for the address but with valid offset, case (4) above.  */
      start_sequence ();
      new_reg = base_to_reg (&ad);

      /* base + disp => new base, cases (1) and (3) above.  */
      /* Another option would be to reload the displacement into an
	 index register.  However, postreload has code to optimize
	 address reloads that have the same base and different
	 displacements, so reloading into an index register would
	 not necessarily be a win.  */
      if (new_reg == NULL_RTX)
        new_reg = base_plus_disp_to_reg (&ad);
      insns = get_insns ();
      last_insn = get_last_insn ();
      /* If we generated at least two insns, try last insn source as
	 an address.  If we succeed, we generate one less insn.  */
      if (last_insn != insns && (set = single_set (last_insn)) != NULL_RTX
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && REG_P (XEXP (SET_SRC (set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (set), 1)))
	{
	  *ad.inner = SET_SRC (set);
	  if (valid_address_p (ad.mode, *ad.outer, ad.as))
	    {
	      *ad.base_term = XEXP (SET_SRC (set), 0);
	      *ad.disp_term = XEXP (SET_SRC (set), 1);
	      cl = base_reg_class (ad.mode, ad.as, ad.base_outer_code,
				   get_index_code (&ad));
	      regno = REGNO (*ad.base_term);
	      if (regno >= FIRST_PSEUDO_REGISTER
		  && cl != lra_get_allocno_class (regno))
		lra_change_class (regno, cl, "      Change to", true);
	      new_reg = SET_SRC (set);
	      delete_insns_since (PREV_INSN (last_insn));
	    }
	}
      /* Try if target can split displacement into legitimate new disp
	 and offset.  If it's the case, we replace the last insn with
	 insns for base + offset => new_reg and set new_reg + new disp
	 to *ad.inner.  */
      last_insn = get_last_insn ();
      if ((set = single_set (last_insn)) != NULL_RTX
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && REG_P (XEXP (SET_SRC (set), 0))
	  && REGNO (XEXP (SET_SRC (set), 0)) < FIRST_PSEUDO_REGISTER
	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
	{
	  rtx addend, disp = XEXP (SET_SRC (set), 1);
	  if (targetm.legitimize_address_displacement (&disp, &addend,
						       ad.mode))
	    {
	      rtx_insn *new_insns;
	      start_sequence ();
	      lra_emit_add (new_reg, XEXP (SET_SRC (set), 0), addend);
	      new_insns = get_insns ();
	      end_sequence ();
	      new_reg = gen_rtx_PLUS (Pmode, new_reg, disp);
	      delete_insns_since (PREV_INSN (last_insn));
	      add_insn (new_insns);
	      insns = get_insns ();
	    }
	}
      end_sequence ();
      emit_insn (insns);
      *ad.inner = new_reg;
    }
  else if (ad.disp_term != NULL)
    {
      /* base + scale * index + disp => new base + scale * index,
	 case (1) above.  */
      new_reg = base_plus_disp_to_reg (&ad);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       new_reg, *ad.index);
    }
  else if (get_index_scale (&ad) == 1)
    {
      /* The last transformation to one reg will be made in
	 curr_insn_transform function.  */
      end_sequence ();
      return false;
    }
  else
    {
      /* base + scale * index => base + new_reg,
	 case (1) above.
      Index part of address may become invalid.  For example, we
      changed pseudo on the equivalent memory and a subreg of the
      pseudo onto the memory of different mode for which the scale is
      prohibited.  */
      new_reg = index_part_to_reg (&ad);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       *ad.base_term, new_reg);
    }
  *before = get_insns ();
  end_sequence ();
  return true;
}
3125 
3126 /* If CHECK_ONLY_P is false, do address reloads until it is necessary.
3127    Use process_address_1 as a helper function.  Return true for any
3128    RTL changes.
3129 
3130    If CHECK_ONLY_P is true, just check address correctness.  Return
3131    false if the address correct.  */
3132 static bool
3133 process_address (int nop, bool check_only_p,
3134 		 rtx_insn **before, rtx_insn **after)
3135 {
3136   bool res = false;
3137 
3138   while (process_address_1 (nop, check_only_p, before, after))
3139     {
3140       if (check_only_p)
3141 	return true;
3142       res = true;
3143     }
3144   return res;
3145 }
3146 
/* Emit insns to reload VALUE into a new register.  VALUE is an
   auto-increment or auto-decrement RTX whose operand is a register or
   memory location; so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload
   value being incremented/decremented from.

   INC_AMOUNT is the number to increment or decrement by (always
   positive and ignored for POST_MODIFY/PRE_MODIFY).

   NEW_RCLASS is the register class of any new pseudo created.

   Return pseudo containing the result.	 */
static rtx
emit_inc (enum reg_class new_rclass, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;
  rtx_insn *add_insn;
  int code;
  rtx real_in = in == value ? incloc : in;
  rtx result;
  /* True if we add INC, false if we subtract it.  */
  bool plus_p = true;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* For {PRE,POST}_MODIFY the increment amount is the second
	 operand of the inner PLUS/MINUS, whose first operand must be
	 the modified location itself.  */
      lra_assert (GET_CODE (XEXP (value, 1)) == PLUS
		  || GET_CODE (XEXP (value, 1)) == MINUS);
      lra_assert (rtx_equal_p (XEXP (XEXP (value, 1), 0), XEXP (value, 0)));
      plus_p = GET_CODE (XEXP (value, 1)) == PLUS;
      inc = XEXP (XEXP (value, 1), 1);
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = GEN_INT (inc_amount);
    }

  /* For a pre-increment of a register we can work on the register
     itself; otherwise we need a fresh pseudo for the result.  */
  if (! post && REG_P (incloc))
    result = incloc;
  else
    result = lra_create_new_reg (GET_MODE (value), value, new_rclass,
				 "INC/DEC result");

  if (real_in != result)
    {
      /* First copy the location to the result register.  */
      lra_assert (REG_P (result));
      emit_insn (gen_move_insn (result, real_in));
    }

  /* We suppose that there are insns to add/sub with the constant
     increment permitted in {PRE,POST}_{DEC,INC,MODIFY}.  At least the
     old reload worked with this assumption.  If the assumption
     becomes wrong, we should use approach in function
     base_plus_disp_to_reg.  */
  if (in == value)
    {
      /* See if we can directly increment INCLOC.  */
      last = get_last_insn ();
      add_insn = emit_insn (plus_p
			    ? gen_add2_insn (incloc, inc)
			    : gen_sub2_insn (incloc, inc));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  if (! post && result != incloc)
	    emit_insn (gen_move_insn (result, incloc));
	  return result;
	}
      /* Direct increment was not recognized; remove it and fall back
	 to incrementing in RESULT below.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RESULT.
     The way we do this depends on whether this is pre- or
     post-increment.  For pre-increment, copy INCLOC to the reload
     register, increment it there, then save back.  */
  if (! post)
    {
      if (real_in != result)
	emit_insn (gen_move_insn (result, real_in));
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      if (result != incloc)
	emit_insn (gen_move_insn (incloc, result));
    }
  else
    {
      /* Post-increment.

	 Because this might be a jump insn or a compare, and because
	 RESULT may not be available after the insn in an input
	 reload, we must do the incrementing before the insn being
	 reloaded for.

	 We have already copied IN to RESULT.  Increment the copy in
	 RESULT, save that back, then decrement RESULT so it has
	 the original value.  */
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      emit_insn (gen_move_insn (incloc, result));
      /* Restore non-modified value for the result.  We prefer this
	 way because it does not require an additional hard
	 register.  */
      if (plus_p)
	{
	  if (CONST_INT_P (inc))
	    emit_insn (gen_add2_insn (result,
				      gen_int_mode (-INTVAL (inc),
						    GET_MODE (result))));
	  else
	    emit_insn (gen_sub2_insn (result, inc));
	}
      else
	emit_insn (gen_add2_insn (result, inc));
    }
  return result;
}
3274 
3275 /* Return true if the current move insn does not need processing as we
3276    already know that it satisfies its constraints.  */
3277 static bool
3278 simple_move_p (void)
3279 {
3280   rtx dest, src;
3281   enum reg_class dclass, sclass;
3282 
3283   lra_assert (curr_insn_set != NULL_RTX);
3284   dest = SET_DEST (curr_insn_set);
3285   src = SET_SRC (curr_insn_set);
3286 
3287   /* If the instruction has multiple sets we need to process it even if it
3288      is single_set.  This can happen if one or more of the SETs are dead.
3289      See PR73650.  */
3290   if (multiple_sets (curr_insn))
3291     return false;
3292 
3293   return ((dclass = get_op_class (dest)) != NO_REGS
3294 	  && (sclass = get_op_class (src)) != NO_REGS
3295 	  /* The backend guarantees that register moves of cost 2
3296 	     never need reloads.  */
3297 	  && targetm.register_move_cost (GET_MODE (src), sclass, dclass) == 2);
3298  }
3299 
3300 /* Swap operands NOP and NOP + 1. */
3301 static inline void
3302 swap_operands (int nop)
3303 {
3304   machine_mode mode = curr_operand_mode[nop];
3305   curr_operand_mode[nop] = curr_operand_mode[nop + 1];
3306   curr_operand_mode[nop + 1] = mode;
3307   mode = original_subreg_reg_mode[nop];
3308   original_subreg_reg_mode[nop] = original_subreg_reg_mode[nop + 1];
3309   original_subreg_reg_mode[nop + 1] = mode;
3310   rtx x = *curr_id->operand_loc[nop];
3311   *curr_id->operand_loc[nop] = *curr_id->operand_loc[nop + 1];
3312   *curr_id->operand_loc[nop + 1] = x;
3313   /* Swap the duplicates too.  */
3314   lra_update_dup (curr_id, nop);
3315   lra_update_dup (curr_id, nop + 1);
3316 }
3317 
3318 /* Main entry point of the constraint code: search the body of the
3319    current insn to choose the best alternative.  It is mimicking insn
3320    alternative cost calculation model of former reload pass.  That is
3321    because machine descriptions were written to use this model.  This
3322    model can be changed in future.  Make commutative operand exchange
3323    if it is chosen.
3324 
3325    if CHECK_ONLY_P is false, do RTL changes to satisfy the
3326    constraints.  Return true if any change happened during function
3327    call.
3328 
3329    If CHECK_ONLY_P is true then don't do any transformation.  Just
3330    check that the insn satisfies all constraints.  If the insn does
3331    not satisfy any constraint, return true.  */
3332 static bool
3333 curr_insn_transform (bool check_only_p)
3334 {
3335   int i, j, k;
3336   int n_operands;
3337   int n_alternatives;
3338   int commutative;
3339   signed char goal_alt_matched[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
3340   signed char match_inputs[MAX_RECOG_OPERANDS + 1];
3341   rtx_insn *before, *after;
3342   bool alt_p = false;
3343   /* Flag that the insn has been changed through a transformation.  */
3344   bool change_p;
3345   bool sec_mem_p;
3346 #ifdef SECONDARY_MEMORY_NEEDED
3347   bool use_sec_mem_p;
3348 #endif
3349   int max_regno_before;
3350   int reused_alternative_num;
3351 
3352   curr_insn_set = single_set (curr_insn);
3353   if (curr_insn_set != NULL_RTX && simple_move_p ())
3354     return false;
3355 
3356   no_input_reloads_p = no_output_reloads_p = false;
3357   goal_alt_number = -1;
3358   change_p = sec_mem_p = false;
3359   /* JUMP_INSNs and CALL_INSNs are not allowed to have any output
3360      reloads; neither are insns that SET cc0.  Insns that use CC0 are
3361      not allowed to have any input reloads.  */
3362   if (JUMP_P (curr_insn) || CALL_P (curr_insn))
3363     no_output_reloads_p = true;
3364 
3365 #ifdef HAVE_cc0
3366   if (reg_referenced_p (cc0_rtx, PATTERN (curr_insn)))
3367     no_input_reloads_p = true;
3368   if (reg_set_p (cc0_rtx, PATTERN (curr_insn)))
3369     no_output_reloads_p = true;
3370 #endif
3371 
3372   n_operands = curr_static_id->n_operands;
3373   n_alternatives = curr_static_id->n_alternatives;
3374 
3375   /* Just return "no reloads" if insn has no operands with
3376      constraints.  */
3377   if (n_operands == 0 || n_alternatives == 0)
3378     return false;
3379 
3380   max_regno_before = max_reg_num ();
3381 
3382   for (i = 0; i < n_operands; i++)
3383     {
3384       goal_alt_matched[i][0] = -1;
3385       goal_alt_matches[i] = -1;
3386     }
3387 
3388   commutative = curr_static_id->commutative;
3389 
3390   /* Now see what we need for pseudos that didn't get hard regs or got
3391      the wrong kind of hard reg.  For this, we must consider all the
3392      operands together against the register constraints.  */
3393 
3394   best_losers = best_overall = INT_MAX;
3395   best_reload_sum = 0;
3396 
3397   curr_swapped = false;
3398   goal_alt_swapped = false;
3399 
3400   if (! check_only_p)
3401     /* Make equivalence substitution and memory subreg elimination
3402        before address processing because an address legitimacy can
3403        depend on memory mode.  */
3404     for (i = 0; i < n_operands; i++)
3405       {
3406 	rtx op = *curr_id->operand_loc[i];
3407 	rtx subst, old = op;
3408 	bool op_change_p = false;
3409 
3410 	if (GET_CODE (old) == SUBREG)
3411 	  old = SUBREG_REG (old);
3412 	subst = get_equiv_with_elimination (old, curr_insn);
3413 	original_subreg_reg_mode[i] = VOIDmode;
3414 	if (subst != old)
3415 	  {
3416 	    subst = copy_rtx (subst);
3417 	    lra_assert (REG_P (old));
3418 	    if (GET_CODE (op) != SUBREG)
3419 	      *curr_id->operand_loc[i] = subst;
3420 	    else
3421 	      {
3422 		SUBREG_REG (op) = subst;
3423 		if (GET_MODE (subst) == VOIDmode)
3424 		  original_subreg_reg_mode[i] = GET_MODE (old);
3425 	      }
3426 	    if (lra_dump_file != NULL)
3427 	      {
3428 		fprintf (lra_dump_file,
3429 			 "Changing pseudo %d in operand %i of insn %u on equiv ",
3430 			 REGNO (old), i, INSN_UID (curr_insn));
3431 		dump_value_slim (lra_dump_file, subst, 1);
3432 		fprintf (lra_dump_file, "\n");
3433 	      }
3434 	    op_change_p = change_p = true;
3435 	  }
3436 	if (simplify_operand_subreg (i, GET_MODE (old)) || op_change_p)
3437 	  {
3438 	    change_p = true;
3439 	    lra_update_dup (curr_id, i);
3440 	  }
3441       }
3442 
3443   /* Reload address registers and displacements.  We do it before
3444      finding an alternative because of memory constraints.  */
3445   before = after = NULL;
3446   for (i = 0; i < n_operands; i++)
3447     if (! curr_static_id->operand[i].is_operator
3448 	&& process_address (i, check_only_p, &before, &after))
3449       {
3450 	if (check_only_p)
3451 	  return true;
3452 	change_p = true;
3453 	lra_update_dup (curr_id, i);
3454       }
3455 
3456   if (change_p)
3457     /* If we've changed the instruction then any alternative that
3458        we chose previously may no longer be valid.  */
3459     lra_set_used_insn_alternative (curr_insn, -1);
3460 
3461   if (! check_only_p && curr_insn_set != NULL_RTX
3462       && check_and_process_move (&change_p, &sec_mem_p))
3463     return change_p;
3464 
3465  try_swapped:
3466 
3467   reused_alternative_num = check_only_p ? -1 : curr_id->used_insn_alternative;
3468   if (lra_dump_file != NULL && reused_alternative_num >= 0)
3469     fprintf (lra_dump_file, "Reusing alternative %d for insn #%u\n",
3470 	     reused_alternative_num, INSN_UID (curr_insn));
3471 
3472   if (process_alt_operands (reused_alternative_num))
3473     alt_p = true;
3474 
3475   if (check_only_p)
3476     return ! alt_p || best_losers != 0;
3477 
3478   /* If insn is commutative (it's safe to exchange a certain pair of
3479      operands) then we need to try each alternative twice, the second
3480      time matching those two operands as if we had exchanged them.  To
3481      do this, really exchange them in operands.
3482 
3483      If we have just tried the alternatives the second time, return
3484      operands to normal and drop through.  */
3485 
3486   if (reused_alternative_num < 0 && commutative >= 0)
3487     {
3488       curr_swapped = !curr_swapped;
3489       if (curr_swapped)
3490 	{
3491 	  swap_operands (commutative);
3492 	  goto try_swapped;
3493 	}
3494       else
3495 	swap_operands (commutative);
3496     }
3497 
3498   if (! alt_p && ! sec_mem_p)
3499     {
3500       /* No alternative works with reloads??  */
3501       if (INSN_CODE (curr_insn) >= 0)
3502 	fatal_insn ("unable to generate reloads for:", curr_insn);
3503       error_for_asm (curr_insn,
3504 		     "inconsistent operand constraints in an %<asm%>");
3505       /* Avoid further trouble with this insn.	*/
3506       PATTERN (curr_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3507       lra_invalidate_insn_data (curr_insn);
3508       return true;
3509     }
3510 
3511   /* If the best alternative is with operands 1 and 2 swapped, swap
3512      them.  Update the operand numbers of any reloads already
3513      pushed.  */
3514 
3515   if (goal_alt_swapped)
3516     {
3517       if (lra_dump_file != NULL)
3518 	fprintf (lra_dump_file, "  Commutative operand exchange in insn %u\n",
3519 		 INSN_UID (curr_insn));
3520 
3521       /* Swap the duplicates too.  */
3522       swap_operands (commutative);
3523       change_p = true;
3524     }
3525 
3526 #ifdef SECONDARY_MEMORY_NEEDED
3527   /* Some target macros SECONDARY_MEMORY_NEEDED (e.g. x86) are defined
3528      too conservatively.  So we use the secondary memory only if there
3529      is no any alternative without reloads.  */
3530   use_sec_mem_p = false;
3531   if (! alt_p)
3532     use_sec_mem_p = true;
3533   else if (sec_mem_p)
3534     {
3535       for (i = 0; i < n_operands; i++)
3536 	if (! goal_alt_win[i] && ! goal_alt_match_win[i])
3537 	  break;
3538       use_sec_mem_p = i < n_operands;
3539     }
3540 
3541   if (use_sec_mem_p)
3542     {
3543       rtx new_reg, src, dest, rld;
3544       machine_mode sec_mode, rld_mode;
3545 
3546       lra_assert (sec_mem_p);
3547       lra_assert (curr_static_id->operand[0].type == OP_OUT
3548 		  && curr_static_id->operand[1].type == OP_IN);
3549       dest = *curr_id->operand_loc[0];
3550       src = *curr_id->operand_loc[1];
3551       rld = (GET_MODE_SIZE (GET_MODE (dest)) <= GET_MODE_SIZE (GET_MODE (src))
3552 	     ? dest : src);
3553       rld_mode = GET_MODE (rld);
3554 #ifdef SECONDARY_MEMORY_NEEDED_MODE
3555       sec_mode = SECONDARY_MEMORY_NEEDED_MODE (rld_mode);
3556 #else
3557       sec_mode = rld_mode;
3558 #endif
3559       new_reg = lra_create_new_reg (sec_mode, NULL_RTX,
3560 				    NO_REGS, "secondary");
3561       /* If the mode is changed, it should be wider.  */
3562       lra_assert (GET_MODE_SIZE (sec_mode) >= GET_MODE_SIZE (rld_mode));
3563       if (sec_mode != rld_mode)
3564         {
3565 	  /* If the target says specifically to use another mode for
3566 	     secondary memory moves we can not reuse the original
3567 	     insn.  */
3568 	  after = emit_spill_move (false, new_reg, dest);
3569 	  lra_process_new_insns (curr_insn, NULL, after,
3570 				 "Inserting the sec. move");
3571 	  /* We may have non null BEFORE here (e.g. after address
3572 	     processing.  */
3573 	  push_to_sequence (before);
3574 	  before = emit_spill_move (true, new_reg, src);
3575 	  emit_insn (before);
3576 	  before = get_insns ();
3577 	  end_sequence ();
3578 	  lra_process_new_insns (curr_insn, before, NULL, "Changing on");
3579 	  lra_set_insn_deleted (curr_insn);
3580 	}
3581       else if (dest == rld)
3582         {
3583 	  *curr_id->operand_loc[0] = new_reg;
3584 	  after = emit_spill_move (false, new_reg, dest);
3585 	  lra_process_new_insns (curr_insn, NULL, after,
3586 				 "Inserting the sec. move");
3587 	}
3588       else
3589 	{
3590 	  *curr_id->operand_loc[1] = new_reg;
3591 	  /* See comments above.  */
3592 	  push_to_sequence (before);
3593 	  before = emit_spill_move (true, new_reg, src);
3594 	  emit_insn (before);
3595 	  before = get_insns ();
3596 	  end_sequence ();
3597 	  lra_process_new_insns (curr_insn, before, NULL,
3598 				 "Inserting the sec. move");
3599 	}
3600       lra_update_insn_regno_info (curr_insn);
3601       return true;
3602     }
3603 #endif
3604 
3605   lra_assert (goal_alt_number >= 0);
3606   lra_set_used_insn_alternative (curr_insn, goal_alt_number);
3607 
3608   if (lra_dump_file != NULL)
3609     {
3610       const char *p;
3611 
3612       fprintf (lra_dump_file, "	 Choosing alt %d in insn %u:",
3613 	       goal_alt_number, INSN_UID (curr_insn));
3614       for (i = 0; i < n_operands; i++)
3615 	{
3616 	  p = (curr_static_id->operand_alternative
3617 	       [goal_alt_number * n_operands + i].constraint);
3618 	  if (*p == '\0')
3619 	    continue;
3620 	  fprintf (lra_dump_file, "  (%d) ", i);
3621 	  for (; *p != '\0' && *p != ',' && *p != '#'; p++)
3622 	    fputc (*p, lra_dump_file);
3623 	}
3624       if (INSN_CODE (curr_insn) >= 0
3625           && (p = get_insn_name (INSN_CODE (curr_insn))) != NULL)
3626         fprintf (lra_dump_file, " {%s}", p);
3627       if (curr_id->sp_offset != 0)
3628         fprintf (lra_dump_file, " (sp_off=%" HOST_WIDE_INT_PRINT "d)",
3629 		 curr_id->sp_offset);
3630        fprintf (lra_dump_file, "\n");
3631     }
3632 
3633   /* Right now, for any pair of operands I and J that are required to
3634      match, with J < I, goal_alt_matches[I] is J.  Add I to
3635      goal_alt_matched[J].  */
3636 
3637   for (i = 0; i < n_operands; i++)
3638     if ((j = goal_alt_matches[i]) >= 0)
3639       {
3640 	for (k = 0; goal_alt_matched[j][k] >= 0; k++)
3641 	  ;
3642 	/* We allow matching one output operand and several input
3643 	   operands.  */
3644 	lra_assert (k == 0
3645 		    || (curr_static_id->operand[j].type == OP_OUT
3646 			&& curr_static_id->operand[i].type == OP_IN
3647 			&& (curr_static_id->operand
3648 			    [goal_alt_matched[j][0]].type == OP_IN)));
3649 	goal_alt_matched[j][k] = i;
3650 	goal_alt_matched[j][k + 1] = -1;
3651       }
3652 
3653   for (i = 0; i < n_operands; i++)
3654     goal_alt_win[i] |= goal_alt_match_win[i];
3655 
3656   /* Any constants that aren't allowed and can't be reloaded into
3657      registers are here changed into memory references.	 */
3658   for (i = 0; i < n_operands; i++)
3659     if (goal_alt_win[i])
3660       {
3661 	int regno;
3662 	enum reg_class new_class;
3663 	rtx reg = *curr_id->operand_loc[i];
3664 
3665 	if (GET_CODE (reg) == SUBREG)
3666 	  reg = SUBREG_REG (reg);
3667 
3668 	if (REG_P (reg) && (regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
3669 	  {
3670 	    bool ok_p = in_class_p (reg, goal_alt[i], &new_class);
3671 
3672 	    if (new_class != NO_REGS && get_reg_class (regno) != new_class)
3673 	      {
3674 		lra_assert (ok_p);
3675 		lra_change_class (regno, new_class, "      Change to", true);
3676 	      }
3677 	  }
3678       }
3679     else
3680       {
3681 	const char *constraint;
3682 	char c;
3683 	rtx op = *curr_id->operand_loc[i];
3684 	rtx subreg = NULL_RTX;
3685 	machine_mode mode = curr_operand_mode[i];
3686 
3687 	if (GET_CODE (op) == SUBREG)
3688 	  {
3689 	    subreg = op;
3690 	    op = SUBREG_REG (op);
3691 	    mode = GET_MODE (op);
3692 	  }
3693 
3694 	if (CONST_POOL_OK_P (mode, op)
3695 	    && ((targetm.preferred_reload_class
3696 		 (op, (enum reg_class) goal_alt[i]) == NO_REGS)
3697 		|| no_input_reloads_p))
3698 	  {
3699 	    rtx tem = force_const_mem (mode, op);
3700 
3701 	    change_p = true;
3702 	    if (subreg != NULL_RTX)
3703 	      tem = gen_rtx_SUBREG (mode, tem, SUBREG_BYTE (subreg));
3704 
3705 	    *curr_id->operand_loc[i] = tem;
3706 	    lra_update_dup (curr_id, i);
3707 	    process_address (i, false, &before, &after);
3708 
3709 	    /* If the alternative accepts constant pool refs directly
3710 	       there will be no reload needed at all.  */
3711 	    if (subreg != NULL_RTX)
3712 	      continue;
3713 	    /* Skip alternatives before the one requested.  */
3714 	    constraint = (curr_static_id->operand_alternative
3715 			  [goal_alt_number * n_operands + i].constraint);
3716 	    for (;
3717 		 (c = *constraint) && c != ',' && c != '#';
3718 		 constraint += CONSTRAINT_LEN (c, constraint))
3719 	      {
3720 		enum constraint_num cn = lookup_constraint (constraint);
3721 		if (insn_extra_memory_constraint (cn)
3722 		    && satisfies_memory_constraint_p (tem, cn))
3723 		  break;
3724 	      }
3725 	    if (c == '\0' || c == ',' || c == '#')
3726 	      continue;
3727 
3728 	    goal_alt_win[i] = true;
3729 	  }
3730       }
3731 
3732   for (i = 0; i < n_operands; i++)
3733     {
3734       int regno;
3735       bool optional_p = false;
3736       rtx old, new_reg;
3737       rtx op = *curr_id->operand_loc[i];
3738 
3739       if (goal_alt_win[i])
3740 	{
3741 	  if (goal_alt[i] == NO_REGS
3742 	      && REG_P (op)
3743 	      /* When we assign NO_REGS it means that we will not
3744 		 assign a hard register to the scratch pseudo by
3745 		 assigment pass and the scratch pseudo will be
3746 		 spilled.  Spilled scratch pseudos are transformed
3747 		 back to scratches at the LRA end.  */
3748 	      && lra_former_scratch_operand_p (curr_insn, i)
3749 	      && lra_former_scratch_p (REGNO (op)))
3750 	    {
3751 	      int regno = REGNO (op);
3752 	      lra_change_class (regno, NO_REGS, "      Change to", true);
3753 	      if (lra_get_regno_hard_regno (regno) >= 0)
3754 		/* We don't have to mark all insn affected by the
3755 		   spilled pseudo as there is only one such insn, the
3756 		   current one.  */
3757 		reg_renumber[regno] = -1;
3758 	      lra_assert (bitmap_single_bit_set_p
3759 			  (&lra_reg_info[REGNO (op)].insn_bitmap));
3760 	    }
3761 	  /* We can do an optional reload.  If the pseudo got a hard
3762 	     reg, we might improve the code through inheritance.  If
3763 	     it does not get a hard register we coalesce memory/memory
3764 	     moves later.  Ignore move insns to avoid cycling.  */
3765 	  if (! lra_simple_p
3766 	      && lra_undo_inheritance_iter < LRA_MAX_INHERITANCE_PASSES
3767 	      && goal_alt[i] != NO_REGS && REG_P (op)
3768 	      && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
3769 	      && regno < new_regno_start
3770 	      && ! lra_former_scratch_p (regno)
3771 	      && reg_renumber[regno] < 0
3772 	      /* Check that the optional reload pseudo will be able to
3773 		 hold given mode value.  */
3774 	      && ! (prohibited_class_reg_set_mode_p
3775 		    (goal_alt[i], reg_class_contents[goal_alt[i]],
3776 		     PSEUDO_REGNO_MODE (regno)))
3777 	      && (curr_insn_set == NULL_RTX
3778 		  || !((REG_P (SET_SRC (curr_insn_set))
3779 			|| MEM_P (SET_SRC (curr_insn_set))
3780 			|| GET_CODE (SET_SRC (curr_insn_set)) == SUBREG)
3781 		       && (REG_P (SET_DEST (curr_insn_set))
3782 			   || MEM_P (SET_DEST (curr_insn_set))
3783 			   || GET_CODE (SET_DEST (curr_insn_set)) == SUBREG))))
3784 	    optional_p = true;
3785 	  else
3786 	    continue;
3787 	}
3788 
3789       /* Operands that match previous ones have already been handled.  */
3790       if (goal_alt_matches[i] >= 0)
3791 	continue;
3792 
3793       /* We should not have an operand with a non-offsettable address
3794 	 appearing where an offsettable address will do.  It also may
3795 	 be a case when the address should be special in other words
3796 	 not a general one (e.g. it needs no index reg).  */
3797       if (goal_alt_matched[i][0] == -1 && goal_alt_offmemok[i] && MEM_P (op))
3798 	{
3799 	  enum reg_class rclass;
3800 	  rtx *loc = &XEXP (op, 0);
3801 	  enum rtx_code code = GET_CODE (*loc);
3802 
3803 	  push_to_sequence (before);
3804 	  rclass = base_reg_class (GET_MODE (op), MEM_ADDR_SPACE (op),
3805 				   MEM, SCRATCH);
3806 	  if (GET_RTX_CLASS (code) == RTX_AUTOINC)
3807 	    new_reg = emit_inc (rclass, *loc, *loc,
3808 				/* This value does not matter for MODIFY.  */
3809 				GET_MODE_SIZE (GET_MODE (op)));
3810 	  else if (get_reload_reg (OP_IN, Pmode, *loc, rclass, FALSE,
3811 				   "offsetable address", &new_reg))
3812 	    lra_emit_move (new_reg, *loc);
3813 	  before = get_insns ();
3814 	  end_sequence ();
3815 	  *loc = new_reg;
3816 	  lra_update_dup (curr_id, i);
3817 	}
3818       else if (goal_alt_matched[i][0] == -1)
3819 	{
3820 	  machine_mode mode;
3821 	  rtx reg, *loc;
3822 	  int hard_regno, byte;
3823 	  enum op_type type = curr_static_id->operand[i].type;
3824 
3825 	  loc = curr_id->operand_loc[i];
3826 	  mode = curr_operand_mode[i];
3827 	  if (GET_CODE (*loc) == SUBREG)
3828 	    {
3829 	      reg = SUBREG_REG (*loc);
3830 	      byte = SUBREG_BYTE (*loc);
3831 	      if (REG_P (reg)
3832 		  /* Strict_low_part requires reload the register not
3833 		     the sub-register.	*/
3834 		  && (curr_static_id->operand[i].strict_low
3835 		      || (GET_MODE_SIZE (mode)
3836 			  <= GET_MODE_SIZE (GET_MODE (reg))
3837 			  && (hard_regno
3838 			      = get_try_hard_regno (REGNO (reg))) >= 0
3839 			  && (simplify_subreg_regno
3840 			      (hard_regno,
3841 			       GET_MODE (reg), byte, mode) < 0)
3842 			  && (goal_alt[i] == NO_REGS
3843 			      || (simplify_subreg_regno
3844 				  (ira_class_hard_regs[goal_alt[i]][0],
3845 				   GET_MODE (reg), byte, mode) >= 0)))))
3846 		{
3847 		  if (type == OP_OUT)
3848 		    type = OP_INOUT;
3849 		  loc = &SUBREG_REG (*loc);
3850 		  mode = GET_MODE (*loc);
3851 		}
3852 	    }
3853 	  old = *loc;
3854 	  if (get_reload_reg (type, mode, old, goal_alt[i],
3855 			      loc != curr_id->operand_loc[i], "", &new_reg)
3856 	      && type != OP_OUT)
3857 	    {
3858 	      push_to_sequence (before);
3859 	      lra_emit_move (new_reg, old);
3860 	      before = get_insns ();
3861 	      end_sequence ();
3862 	    }
3863 	  *loc = new_reg;
3864 	  if (type != OP_IN
3865 	      && find_reg_note (curr_insn, REG_UNUSED, old) == NULL_RTX)
3866 	    {
3867 	      start_sequence ();
3868 	      lra_emit_move (type == OP_INOUT ? copy_rtx (old) : old, new_reg);
3869 	      emit_insn (after);
3870 	      after = get_insns ();
3871 	      end_sequence ();
3872 	      *loc = new_reg;
3873 	    }
3874 	  for (j = 0; j < goal_alt_dont_inherit_ops_num; j++)
3875 	    if (goal_alt_dont_inherit_ops[j] == i)
3876 	      {
3877 		lra_set_regno_unique_value (REGNO (new_reg));
3878 		break;
3879 	      }
3880 	  lra_update_dup (curr_id, i);
3881 	}
3882       else if (curr_static_id->operand[i].type == OP_IN
3883 	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
3884 		   == OP_OUT))
3885 	{
3886 	  /* generate reloads for input and matched outputs.  */
3887 	  match_inputs[0] = i;
3888 	  match_inputs[1] = -1;
3889 	  match_reload (goal_alt_matched[i][0], match_inputs,
3890 			goal_alt[i], &before, &after);
3891 	}
3892       else if (curr_static_id->operand[i].type == OP_OUT
3893 	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
3894 		   == OP_IN))
3895 	/* Generate reloads for output and matched inputs.  */
3896 	match_reload (i, goal_alt_matched[i], goal_alt[i], &before, &after);
3897       else if (curr_static_id->operand[i].type == OP_IN
3898 	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
3899 		   == OP_IN))
3900 	{
3901 	  /* Generate reloads for matched inputs.  */
3902 	  match_inputs[0] = i;
3903 	  for (j = 0; (k = goal_alt_matched[i][j]) >= 0; j++)
3904 	    match_inputs[j + 1] = k;
3905 	  match_inputs[j + 1] = -1;
3906 	  match_reload (-1, match_inputs, goal_alt[i], &before, &after);
3907 	}
3908       else
3909 	/* We must generate code in any case when function
3910 	   process_alt_operands decides that it is possible.  */
3911 	gcc_unreachable ();
3912       if (optional_p)
3913 	{
3914 	  lra_assert (REG_P (op));
3915 	  regno = REGNO (op);
3916 	  op = *curr_id->operand_loc[i]; /* Substitution.  */
3917 	  if (GET_CODE (op) == SUBREG)
3918 	    op = SUBREG_REG (op);
3919 	  gcc_assert (REG_P (op) && (int) REGNO (op) >= new_regno_start);
3920 	  bitmap_set_bit (&lra_optional_reload_pseudos, REGNO (op));
3921 	  lra_reg_info[REGNO (op)].restore_regno = regno;
3922 	  if (lra_dump_file != NULL)
3923 	    fprintf (lra_dump_file,
3924 		     "      Making reload reg %d for reg %d optional\n",
3925 		     REGNO (op), regno);
3926 	}
3927     }
3928   if (before != NULL_RTX || after != NULL_RTX
3929       || max_regno_before != max_reg_num ())
3930     change_p = true;
3931   if (change_p)
3932     {
3933       lra_update_operator_dups (curr_id);
3934       /* Something changes -- process the insn.	 */
3935       lra_update_insn_regno_info (curr_insn);
3936     }
3937   lra_process_new_insns (curr_insn, before, after, "Inserting insn reload");
3938   return change_p;
3939 }
3940 
3941 /* Return true if INSN satisfies all constraints.  In other words, no
3942    reload insns are needed.  */
3943 bool
3944 lra_constrain_insn (rtx_insn *insn)
3945 {
3946   int saved_new_regno_start = new_regno_start;
3947   int saved_new_insn_uid_start = new_insn_uid_start;
3948   bool change_p;
3949 
3950   curr_insn = insn;
3951   curr_id = lra_get_insn_recog_data (curr_insn);
3952   curr_static_id = curr_id->insn_static_data;
3953   new_insn_uid_start = get_max_uid ();
3954   new_regno_start = max_reg_num ();
3955   change_p = curr_insn_transform (true);
3956   new_regno_start = saved_new_regno_start;
3957   new_insn_uid_start = saved_new_insn_uid_start;
3958   return ! change_p;
3959 }
3960 
3961 /* Return true if X is in LIST.	 */
3962 static bool
3963 in_list_p (rtx x, rtx list)
3964 {
3965   for (; list != NULL_RTX; list = XEXP (list, 1))
3966     if (XEXP (list, 0) == x)
3967       return true;
3968   return false;
3969 }
3970 
3971 /* Return true if X contains an allocatable hard register (if
3972    HARD_REG_P) or a (spilled if SPILLED_P) pseudo.  */
3973 static bool
3974 contains_reg_p (rtx x, bool hard_reg_p, bool spilled_p)
3975 {
3976   int i, j;
3977   const char *fmt;
3978   enum rtx_code code;
3979 
3980   code = GET_CODE (x);
3981   if (REG_P (x))
3982     {
3983       int regno = REGNO (x);
3984       HARD_REG_SET alloc_regs;
3985 
3986       if (hard_reg_p)
3987 	{
3988 	  if (regno >= FIRST_PSEUDO_REGISTER)
3989 	    regno = lra_get_regno_hard_regno (regno);
3990 	  if (regno < 0)
3991 	    return false;
3992 	  COMPL_HARD_REG_SET (alloc_regs, lra_no_alloc_regs);
3993 	  return overlaps_hard_reg_set_p (alloc_regs, GET_MODE (x), regno);
3994 	}
3995       else
3996 	{
3997 	  if (regno < FIRST_PSEUDO_REGISTER)
3998 	    return false;
3999 	  if (! spilled_p)
4000 	    return true;
4001 	  return lra_get_regno_hard_regno (regno) < 0;
4002 	}
4003     }
4004   fmt = GET_RTX_FORMAT (code);
4005   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4006     {
4007       if (fmt[i] == 'e')
4008 	{
4009 	  if (contains_reg_p (XEXP (x, i), hard_reg_p, spilled_p))
4010 	    return true;
4011 	}
4012       else if (fmt[i] == 'E')
4013 	{
4014 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4015 	    if (contains_reg_p (XVECEXP (x, i, j), hard_reg_p, spilled_p))
4016 	      return true;
4017 	}
4018     }
4019   return false;
4020 }
4021 
4022 /* Return true if X contains a symbol reg.  */
4023 static bool
4024 contains_symbol_ref_p (rtx x)
4025 {
4026   int i, j;
4027   const char *fmt;
4028   enum rtx_code code;
4029 
4030   code = GET_CODE (x);
4031   if (code == SYMBOL_REF)
4032     return true;
4033   fmt = GET_RTX_FORMAT (code);
4034   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4035     {
4036       if (fmt[i] == 'e')
4037 	{
4038 	  if (contains_symbol_ref_p (XEXP (x, i)))
4039 	    return true;
4040 	}
4041       else if (fmt[i] == 'E')
4042 	{
4043 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4044 	    if (contains_symbol_ref_p (XVECEXP (x, i, j)))
4045 	      return true;
4046 	}
4047     }
4048   return false;
4049 }
4050 
4051 /* Process all regs in location *LOC and change them on equivalent
4052    substitution.  Return true if any change was done.  */
4053 static bool
4054 loc_equivalence_change_p (rtx *loc)
4055 {
4056   rtx subst, reg, x = *loc;
4057   bool result = false;
4058   enum rtx_code code = GET_CODE (x);
4059   const char *fmt;
4060   int i, j;
4061 
4062   if (code == SUBREG)
4063     {
4064       reg = SUBREG_REG (x);
4065       if ((subst = get_equiv_with_elimination (reg, curr_insn)) != reg
4066 	  && GET_MODE (subst) == VOIDmode)
4067 	{
4068 	  /* We cannot reload debug location.  Simplify subreg here
4069 	     while we know the inner mode.  */
4070 	  *loc = simplify_gen_subreg (GET_MODE (x), subst,
4071 				      GET_MODE (reg), SUBREG_BYTE (x));
4072 	  return true;
4073 	}
4074     }
4075   if (code == REG && (subst = get_equiv_with_elimination (x, curr_insn)) != x)
4076     {
4077       *loc = subst;
4078       return true;
4079     }
4080 
4081   /* Scan all the operand sub-expressions.  */
4082   fmt = GET_RTX_FORMAT (code);
4083   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4084     {
4085       if (fmt[i] == 'e')
4086 	result = loc_equivalence_change_p (&XEXP (x, i)) || result;
4087       else if (fmt[i] == 'E')
4088 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4089 	  result
4090 	    = loc_equivalence_change_p (&XVECEXP (x, i, j)) || result;
4091     }
4092   return result;
4093 }
4094 
4095 /* Similar to loc_equivalence_change_p, but for use as
4096    simplify_replace_fn_rtx callback.  DATA is insn for which the
4097    elimination is done.  If it null we don't do the elimination.  */
4098 static rtx
4099 loc_equivalence_callback (rtx loc, const_rtx, void *data)
4100 {
4101   if (!REG_P (loc))
4102     return NULL_RTX;
4103 
4104   rtx subst = (data == NULL
4105 	       ? get_equiv (loc) : get_equiv_with_elimination (loc, (rtx_insn *) data));
4106   if (subst != loc)
4107     return subst;
4108 
4109   return NULL_RTX;
4110 }
4111 
/* Maximum number of generated reload insns per insn.  It is a guard
   preventing this pass from cycling in a buggy case.  */
#define MAX_RELOAD_INSNS_NUMBER LRA_MAX_INSN_RELOADS

/* The current iteration number of this LRA constraint pass.  */
int lra_constraint_iter;

/* True if we substituted an equiv which needs checking register
   allocation correctness because the equivalent value contains
   allocatable hard registers, or when we restore a multi-register
   pseudo.  */
bool lra_risky_transformations_p;
4124 
4125 /* Return true if REGNO is referenced in more than one block.  */
4126 static bool
4127 multi_block_pseudo_p (int regno)
4128 {
4129   basic_block bb = NULL;
4130   unsigned int uid;
4131   bitmap_iterator bi;
4132 
4133   if (regno < FIRST_PSEUDO_REGISTER)
4134     return false;
4135 
4136     EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
4137       if (bb == NULL)
4138 	bb = BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn);
4139       else if (BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn) != bb)
4140 	return true;
4141     return false;
4142 }
4143 
4144 /* Return true if LIST contains a deleted insn.  */
4145 static bool
4146 contains_deleted_insn_p (rtx_insn_list *list)
4147 {
4148   for (; list != NULL_RTX; list = list->next ())
4149     if (NOTE_P (list->insn ())
4150 	&& NOTE_KIND (list->insn ()) == NOTE_INSN_DELETED)
4151       return true;
4152   return false;
4153 }
4154 
4155 /* Return true if X contains a pseudo dying in INSN.  */
4156 static bool
4157 dead_pseudo_p (rtx x, rtx insn)
4158 {
4159   int i, j;
4160   const char *fmt;
4161   enum rtx_code code;
4162 
4163   if (REG_P (x))
4164     return (insn != NULL_RTX
4165 	    && find_regno_note (insn, REG_DEAD, REGNO (x)) != NULL_RTX);
4166   code = GET_CODE (x);
4167   fmt = GET_RTX_FORMAT (code);
4168   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4169     {
4170       if (fmt[i] == 'e')
4171 	{
4172 	  if (dead_pseudo_p (XEXP (x, i), insn))
4173 	    return true;
4174 	}
4175       else if (fmt[i] == 'E')
4176 	{
4177 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4178 	    if (dead_pseudo_p (XVECEXP (x, i, j), insn))
4179 	      return true;
4180 	}
4181     }
4182   return false;
4183 }
4184 
4185 /* Return true if INSN contains a dying pseudo in INSN right hand
4186    side.  */
4187 static bool
4188 insn_rhs_dead_pseudo_p (rtx_insn *insn)
4189 {
4190   rtx set = single_set (insn);
4191 
4192   gcc_assert (set != NULL);
4193   return dead_pseudo_p (SET_SRC (set), insn);
4194 }
4195 
4196 /* Return true if any init insn of REGNO contains a dying pseudo in
4197    insn right hand side.  */
4198 static bool
4199 init_insn_rhs_dead_pseudo_p (int regno)
4200 {
4201   rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
4202 
4203   if (insns == NULL)
4204     return false;
4205   for (; insns != NULL_RTX; insns = insns->next ())
4206     if (insn_rhs_dead_pseudo_p (insns->insn ()))
4207       return true;
4208   return false;
4209 }
4210 
4211 /* Return TRUE if REGNO has a reverse equivalence.  The equivalence is
4212    reverse only if we have one init insn with given REGNO as a
4213    source.  */
4214 static bool
4215 reverse_equiv_p (int regno)
4216 {
4217   rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
4218   rtx set;
4219 
4220   if (insns == NULL)
4221     return false;
4222   if (! INSN_P (insns->insn ())
4223       || insns->next () != NULL)
4224     return false;
4225   if ((set = single_set (insns->insn ())) == NULL_RTX)
4226     return false;
4227   return REG_P (SET_SRC (set)) && (int) REGNO (SET_SRC (set)) == regno;
4228 }
4229 
4230 /* Return TRUE if REGNO was reloaded in an equivalence init insn.  We
4231    call this function only for non-reverse equivalence.  */
4232 static bool
4233 contains_reloaded_insn_p (int regno)
4234 {
4235   rtx set;
4236   rtx_insn_list *list = ira_reg_equiv[regno].init_insns;
4237 
4238   for (; list != NULL; list = list->next ())
4239     if ((set = single_set (list->insn ())) == NULL_RTX
4240 	|| ! REG_P (SET_DEST (set))
4241 	|| (int) REGNO (SET_DEST (set)) != regno)
4242       return true;
4243   return false;
4244 }
4245 
4246 /* Entry function of LRA constraint pass.  Return true if the
4247    constraint pass did change the code.	 */
4248 bool
4249 lra_constraints (bool first_p)
4250 {
4251   bool changed_p;
4252   int i, hard_regno, new_insns_num;
4253   unsigned int min_len, new_min_len, uid;
4254   rtx set, x, reg, dest_reg;
4255   basic_block last_bb;
4256   bitmap_head equiv_insn_bitmap;
4257   bitmap_iterator bi;
4258 
4259   lra_constraint_iter++;
4260   if (lra_dump_file != NULL)
4261     fprintf (lra_dump_file, "\n********** Local #%d: **********\n\n",
4262 	     lra_constraint_iter);
4263   changed_p = false;
4264   if (pic_offset_table_rtx
4265       && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
4266     lra_risky_transformations_p = true;
4267   else
4268     lra_risky_transformations_p = false;
4269   new_insn_uid_start = get_max_uid ();
4270   new_regno_start = first_p ? lra_constraint_new_regno_start : max_reg_num ();
4271   /* Mark used hard regs for target stack size calulations.  */
4272   for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
4273     if (lra_reg_info[i].nrefs != 0
4274 	&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
4275       {
4276 	int j, nregs;
4277 
4278 	nregs = hard_regno_nregs[hard_regno][lra_reg_info[i].biggest_mode];
4279 	for (j = 0; j < nregs; j++)
4280 	  df_set_regs_ever_live (hard_regno + j, true);
4281       }
4282   /* Do elimination before the equivalence processing as we can spill
4283      some pseudos during elimination.  */
4284   lra_eliminate (false, first_p);
4285   bitmap_initialize (&equiv_insn_bitmap, &reg_obstack);
4286   for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
4287     if (lra_reg_info[i].nrefs != 0)
4288       {
4289 	ira_reg_equiv[i].profitable_p = true;
4290 	reg = regno_reg_rtx[i];
4291 	if (lra_get_regno_hard_regno (i) < 0 && (x = get_equiv (reg)) != reg)
4292 	  {
4293 	    bool pseudo_p = contains_reg_p (x, false, false);
4294 
4295 	    /* After RTL transformation, we can not guarantee that
4296 	       pseudo in the substitution was not reloaded which might
4297 	       make equivalence invalid.  For example, in reverse
4298 	       equiv of p0
4299 
4300 	       p0 <- ...
4301 	       ...
4302 	       equiv_mem <- p0
4303 
4304 	       the memory address register was reloaded before the 2nd
4305 	       insn.  */
4306 	    if ((! first_p && pseudo_p)
4307 		/* We don't use DF for compilation speed sake.  So it
4308 		   is problematic to update live info when we use an
4309 		   equivalence containing pseudos in more than one
4310 		   BB.  */
4311 		|| (pseudo_p && multi_block_pseudo_p (i))
4312 		/* If an init insn was deleted for some reason, cancel
4313 		   the equiv.  We could update the equiv insns after
4314 		   transformations including an equiv insn deletion
4315 		   but it is not worthy as such cases are extremely
4316 		   rare.  */
4317 		|| contains_deleted_insn_p (ira_reg_equiv[i].init_insns)
4318 		/* If it is not a reverse equivalence, we check that a
4319 		   pseudo in rhs of the init insn is not dying in the
4320 		   insn.  Otherwise, the live info at the beginning of
4321 		   the corresponding BB might be wrong after we
4322 		   removed the insn.  When the equiv can be a
4323 		   constant, the right hand side of the init insn can
4324 		   be a pseudo.  */
4325 		|| (! reverse_equiv_p (i)
4326 		    && (init_insn_rhs_dead_pseudo_p (i)
4327 			/* If we reloaded the pseudo in an equivalence
4328 			   init insn, we can not remove the equiv init
4329 			   insns and the init insns might write into
4330 			   const memory in this case.  */
4331 			|| contains_reloaded_insn_p (i)))
4332 		/* Prevent access beyond equivalent memory for
4333 		   paradoxical subregs.  */
4334 		|| (MEM_P (x)
4335 		    && (GET_MODE_SIZE (lra_reg_info[i].biggest_mode)
4336 			> GET_MODE_SIZE (GET_MODE (x))))
4337 		|| (pic_offset_table_rtx
4338 		    && ((CONST_POOL_OK_P (PSEUDO_REGNO_MODE (i), x)
4339 			 && (targetm.preferred_reload_class
4340 			     (x, lra_get_allocno_class (i)) == NO_REGS))
4341 			|| contains_symbol_ref_p (x))))
4342 	      ira_reg_equiv[i].defined_p = false;
4343 	    if (contains_reg_p (x, false, true))
4344 	      ira_reg_equiv[i].profitable_p = false;
4345 	    if (get_equiv (reg) != reg)
4346 	      bitmap_ior_into (&equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
4347 	  }
4348       }
4349   for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
4350     update_equiv (i);
4351   /* We should add all insns containing pseudos which should be
4352      substituted by their equivalences.  */
4353   EXECUTE_IF_SET_IN_BITMAP (&equiv_insn_bitmap, 0, uid, bi)
4354     lra_push_insn_by_uid (uid);
4355   min_len = lra_insn_stack_length ();
4356   new_insns_num = 0;
4357   last_bb = NULL;
4358   changed_p = false;
4359   while ((new_min_len = lra_insn_stack_length ()) != 0)
4360     {
4361       curr_insn = lra_pop_insn ();
4362       --new_min_len;
4363       curr_bb = BLOCK_FOR_INSN (curr_insn);
4364       if (curr_bb != last_bb)
4365 	{
4366 	  last_bb = curr_bb;
4367 	  bb_reload_num = lra_curr_reload_num;
4368 	}
4369       if (min_len > new_min_len)
4370 	{
4371 	  min_len = new_min_len;
4372 	  new_insns_num = 0;
4373 	}
4374       if (new_insns_num > MAX_RELOAD_INSNS_NUMBER)
4375 	internal_error
4376 	  ("Max. number of generated reload insns per insn is achieved (%d)\n",
4377 	   MAX_RELOAD_INSNS_NUMBER);
4378       new_insns_num++;
4379       if (DEBUG_INSN_P (curr_insn))
4380 	{
4381 	  /* We need to check equivalence in debug insn and change
4382 	     pseudo to the equivalent value if necessary.  */
4383 	  curr_id = lra_get_insn_recog_data (curr_insn);
4384 	  if (bitmap_bit_p (&equiv_insn_bitmap, INSN_UID (curr_insn)))
4385 	    {
4386 	      rtx old = *curr_id->operand_loc[0];
4387 	      *curr_id->operand_loc[0]
4388 		= simplify_replace_fn_rtx (old, NULL_RTX,
4389 					   loc_equivalence_callback, curr_insn);
4390 	      if (old != *curr_id->operand_loc[0])
4391 		{
4392 		  lra_update_insn_regno_info (curr_insn);
4393 		  changed_p = true;
4394 		}
4395 	    }
4396 	}
4397       else if (INSN_P (curr_insn))
4398 	{
4399 	  if ((set = single_set (curr_insn)) != NULL_RTX)
4400 	    {
4401 	      dest_reg = SET_DEST (set);
4402 	      /* The equivalence pseudo could be set up as SUBREG in a
4403 		 case when it is a call restore insn in a mode
4404 		 different from the pseudo mode.  */
4405 	      if (GET_CODE (dest_reg) == SUBREG)
4406 		dest_reg = SUBREG_REG (dest_reg);
4407 	      if ((REG_P (dest_reg)
4408 		   && (x = get_equiv (dest_reg)) != dest_reg
4409 		   /* Remove insns which set up a pseudo whose value
4410 		      can not be changed.  Such insns might be not in
4411 		      init_insns because we don't update equiv data
4412 		      during insn transformations.
4413 
4414 		      As an example, let suppose that a pseudo got
4415 		      hard register and on the 1st pass was not
4416 		      changed to equivalent constant.  We generate an
4417 		      additional insn setting up the pseudo because of
4418 		      secondary memory movement.  Then the pseudo is
4419 		      spilled and we use the equiv constant.  In this
4420 		      case we should remove the additional insn and
4421 		      this insn is not init_insns list.  */
4422 		   && (! MEM_P (x) || MEM_READONLY_P (x)
4423 		       /* Check that this is actually an insn setting
4424 			  up the equivalence.  */
4425 		       || in_list_p (curr_insn,
4426 				     ira_reg_equiv
4427 				     [REGNO (dest_reg)].init_insns)))
4428 		  || (((x = get_equiv (SET_SRC (set))) != SET_SRC (set))
4429 		      && in_list_p (curr_insn,
4430 				    ira_reg_equiv
4431 				    [REGNO (SET_SRC (set))].init_insns)))
4432 		{
4433 		  /* This is equiv init insn of pseudo which did not get a
4434 		     hard register -- remove the insn.	*/
4435 		  if (lra_dump_file != NULL)
4436 		    {
4437 		      fprintf (lra_dump_file,
4438 			       "      Removing equiv init insn %i (freq=%d)\n",
4439 			       INSN_UID (curr_insn),
4440 			       REG_FREQ_FROM_BB (BLOCK_FOR_INSN (curr_insn)));
4441 		      dump_insn_slim (lra_dump_file, curr_insn);
4442 		    }
4443 		  if (contains_reg_p (x, true, false))
4444 		    lra_risky_transformations_p = true;
4445 		  lra_set_insn_deleted (curr_insn);
4446 		  continue;
4447 		}
4448 	    }
4449 	  curr_id = lra_get_insn_recog_data (curr_insn);
4450 	  curr_static_id = curr_id->insn_static_data;
4451 	  init_curr_insn_input_reloads ();
4452 	  init_curr_operand_mode ();
4453 	  if (curr_insn_transform (false))
4454 	    changed_p = true;
4455 	  /* Check non-transformed insns too for equiv change as USE
4456 	     or CLOBBER don't need reloads but can contain pseudos
4457 	     being changed on their equivalences.  */
4458 	  else if (bitmap_bit_p (&equiv_insn_bitmap, INSN_UID (curr_insn))
4459 		   && loc_equivalence_change_p (&PATTERN (curr_insn)))
4460 	    {
4461 	      lra_update_insn_regno_info (curr_insn);
4462 	      changed_p = true;
4463 	    }
4464 	}
4465     }
4466   bitmap_clear (&equiv_insn_bitmap);
4467   /* If we used a new hard regno, changed_p should be true because the
4468      hard reg is assigned to a new pseudo.  */
4469 #ifdef ENABLE_CHECKING
4470   if (! changed_p)
4471     {
4472       for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
4473 	if (lra_reg_info[i].nrefs != 0
4474 	    && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
4475 	  {
4476 	    int j, nregs = hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (i)];
4477 
4478 	    for (j = 0; j < nregs; j++)
4479 	      lra_assert (df_regs_ever_live_p (hard_regno + j));
4480 	  }
4481     }
4482 #endif
4483   return changed_p;
4484 }
4485 
/* Initiate the LRA constraint pass.  It is done once per
   function.  */
void
lra_constraints_init (void)
{
  /* Currently nothing to set up; kept as a hook called once per
     function, symmetric with lra_constraints_finish.  */
}
4492 
/* Finalize the LRA constraint pass.  It is done once per
   function.  */
void
lra_constraints_finish (void)
{
  /* Currently nothing to tear down; kept as a hook called once per
     function, symmetric with lra_constraints_init.  */
}
4499 
4500 
4501 
/* This page contains code to do inheritance/split
   transformations.  */

/* Number of reloads passed so far in current EBB.  */
static int reloads_num;

/* Number of calls passed so far in current EBB.  */
static int calls_num;

/* Current reload pseudo check for validity of elements in
   USAGE_INSNS.  An element of USAGE_INSNS is considered valid only
   when its CHECK field equals this value.  */
static int curr_usage_insns_check;
4514 
/* Info about last usage of registers in EBB to do inheritance/split
   transformation.  Inheritance transformation is done from a spilled
   pseudo and split transformations from a hard register or a pseudo
   assigned to a hard register.	 */
struct usage_insns
{
  /* If the value is equal to CURR_USAGE_INSNS_CHECK, then the member
     value INSNS is valid.  The insns is chain of optional debug insns
     and a finishing non-debug insn using the corresponding reg.  The
     value is also used to mark the registers which are set up in the
     current insn.  The negated insn uid is used for this.  */
  int check;
  /* Value of global reloads_num at the last insn in INSNS.  */
  int reloads_num;
  /* Value of global calls_num at the last insn in INSNS.  */
  int calls_num;
  /* It can be true only for splitting.	 And it means that the restore
     insn should be put after insn given by the following member.  */
  bool after_p;
  /* Next insns in the current EBB which use the original reg and the
     original reg value is not changed between the current insn and
     the next insns.  In other words, e.g. for inheritance, if we need
     to use the original reg value again in the next insns we can try
     to use the value in a hard register from a reload insn of the
     current insn.  */
  rtx insns;
};
4542 
/* Map: regno -> corresponding pseudo usage insns.  An element is
   valid only when its CHECK field equals CURR_USAGE_INSNS_CHECK.  */
static struct usage_insns *usage_insns;
4545 
4546 static void
4547 setup_next_usage_insn (int regno, rtx insn, int reloads_num, bool after_p)
4548 {
4549   usage_insns[regno].check = curr_usage_insns_check;
4550   usage_insns[regno].insns = insn;
4551   usage_insns[regno].reloads_num = reloads_num;
4552   usage_insns[regno].calls_num = calls_num;
4553   usage_insns[regno].after_p = after_p;
4554 }
4555 
4556 /* The function is used to form list REGNO usages which consists of
4557    optional debug insns finished by a non-debug insn using REGNO.
4558    RELOADS_NUM is current number of reload insns processed so far.  */
4559 static void
4560 add_next_usage_insn (int regno, rtx insn, int reloads_num)
4561 {
4562   rtx next_usage_insns;
4563 
4564   if (usage_insns[regno].check == curr_usage_insns_check
4565       && (next_usage_insns = usage_insns[regno].insns) != NULL_RTX
4566       && DEBUG_INSN_P (insn))
4567     {
4568       /* Check that we did not add the debug insn yet.	*/
4569       if (next_usage_insns != insn
4570 	  && (GET_CODE (next_usage_insns) != INSN_LIST
4571 	      || XEXP (next_usage_insns, 0) != insn))
4572 	usage_insns[regno].insns = gen_rtx_INSN_LIST (VOIDmode, insn,
4573 						      next_usage_insns);
4574     }
4575   else if (NONDEBUG_INSN_P (insn))
4576     setup_next_usage_insn (regno, insn, reloads_num, false);
4577   else
4578     usage_insns[regno].check = 0;
4579 }
4580 
4581 /* Return first non-debug insn in list USAGE_INSNS.  */
4582 static rtx_insn *
4583 skip_usage_debug_insns (rtx usage_insns)
4584 {
4585   rtx insn;
4586 
4587   /* Skip debug insns.  */
4588   for (insn = usage_insns;
4589        insn != NULL_RTX && GET_CODE (insn) == INSN_LIST;
4590        insn = XEXP (insn, 1))
4591     ;
4592   return safe_as_a <rtx_insn *> (insn);
4593 }
4594 
4595 /* Return true if we need secondary memory moves for insn in
4596    USAGE_INSNS after inserting inherited pseudo of class INHER_CL
4597    into the insn.  */
4598 static bool
4599 check_secondary_memory_needed_p (enum reg_class inher_cl ATTRIBUTE_UNUSED,
4600 				 rtx usage_insns ATTRIBUTE_UNUSED)
4601 {
4602 #ifndef SECONDARY_MEMORY_NEEDED
4603   return false;
4604 #else
4605   rtx_insn *insn;
4606   rtx set, dest;
4607   enum reg_class cl;
4608 
4609   if (inher_cl == ALL_REGS
4610       || (insn = skip_usage_debug_insns (usage_insns)) == NULL_RTX)
4611     return false;
4612   lra_assert (INSN_P (insn));
4613   if ((set = single_set (insn)) == NULL_RTX || ! REG_P (SET_DEST (set)))
4614     return false;
4615   dest = SET_DEST (set);
4616   if (! REG_P (dest))
4617     return false;
4618   lra_assert (inher_cl != NO_REGS);
4619   cl = get_reg_class (REGNO (dest));
4620   return (cl != NO_REGS && cl != ALL_REGS
4621 	  && SECONDARY_MEMORY_NEEDED (inher_cl, cl, GET_MODE (dest)));
4622 #endif
4623 }
4624 
/* Registers involved in inheritance/split in the current EBB
   (inheritance/split pseudos and original registers).  Live info is
   recalculated only for these registers after the
   transformations.  */
static bitmap_head check_only_regs;
4628 
4629 /* Do inheritance transformations for insn INSN, which defines (if
4630    DEF_P) or uses ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which
4631    instruction in the EBB next uses ORIGINAL_REGNO; it has the same
4632    form as the "insns" field of usage_insns.  Return true if we
4633    succeed in such transformation.
4634 
4635    The transformations look like:
4636 
4637      p <- ...		  i <- ...
4638      ...		  p <- i    (new insn)
4639      ...	     =>
4640      <- ... p ...	  <- ... i ...
4641    or
4642      ...		  i <- p    (new insn)
4643      <- ... p ...	  <- ... i ...
4644      ...	     =>
4645      <- ... p ...	  <- ... i ...
4646    where p is a spilled original pseudo and i is a new inheritance pseudo.
4647 
4648 
4649    The inheritance pseudo has the smallest class of two classes CL and
4650    class of ORIGINAL REGNO.  */
static bool
inherit_reload_reg (bool def_p, int original_regno,
		    enum reg_class cl, rtx_insn *insn, rtx next_usage_insns)
{
  /* Inheritance adds new register moves, so it is a speed
     optimization only.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  enum reg_class rclass = lra_get_allocno_class (original_regno);
  rtx original_reg = regno_reg_rtx[original_regno];
  rtx new_reg, usage_insn;
  rtx_insn *new_insns;

  lra_assert (! usage_insns[original_regno].after_p);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
  /* The inheritance pseudo must serve both CL and the class of the
     original pseudo, so disjoint classes make it impossible.  */
  if (! ira_reg_classes_intersect_p[cl][rclass])
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for %d "
		   "because of disjoint classes %s and %s\n",
		   original_regno, reg_class_names[cl],
		   reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  if ((ira_class_subset_p[cl][rclass] && cl != rclass)
      /* We don't use a subset of two classes because it can be
	 NO_REGS.  This transformation is still profitable in most
	 cases even if the classes are not intersected as register
	 move is probably cheaper than a memory load.  */
      || ira_class_hard_regs_num[cl] < ira_class_hard_regs_num[rclass])
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "    Use smallest class of %s and %s\n",
		 reg_class_names[cl], reg_class_names[rclass]);

      rclass = cl;
    }
  if (check_secondary_memory_needed_p (rclass, next_usage_insns))
    {
      /* Reject inheritance resulting in secondary memory moves.
	 Otherwise, there is a danger in LRA cycling.  Also such
	 transformation will be unprofitable.  */
      if (lra_dump_file != NULL)
	{
	  rtx_insn *insn = skip_usage_debug_insns (next_usage_insns);
	  rtx set = single_set (insn);

	  lra_assert (set != NULL_RTX);

	  rtx dest = SET_DEST (set);

	  lra_assert (REG_P (dest));
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for insn %d(%s)<-%d(%s) "
		   "as secondary mem is needed\n",
		   REGNO (dest), reg_class_names[get_reg_class (REGNO (dest))],
		   original_regno, reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
				rclass, "inheritance");
  start_sequence ();
  if (def_p)
    lra_emit_move (original_reg, new_reg);
  else
    lra_emit_move (new_reg, original_reg);
  new_insns = get_insns ();
  end_sequence ();
  /* The copy between the original pseudo and the inheritance pseudo
     must be a single insn, otherwise the transformation is not
     profitable.  */
  if (NEXT_INSN (new_insns) != NULL_RTX)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance %d->%d "
		   "as it results in 2 or more insns:\n",
		   original_regno, REGNO (new_reg));
	  dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  lra_substitute_pseudo_within_insn (insn, original_regno, new_reg, false);
  lra_update_insn_regno_info (insn);
  if (! def_p)
    /* We now have a new usage insn for original regno.  */
    setup_next_usage_insn (original_regno, new_insns, reloads_num, false);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "    Original reg change %d->%d (bb%d):\n",
	     original_regno, REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
  lra_reg_info[REGNO (new_reg)].restore_regno = original_regno;
  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
  bitmap_set_bit (&check_only_regs, original_regno);
  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
  if (def_p)
    lra_process_new_insns (insn, NULL, new_insns,
			   "Add original<-inheritance");
  else
    lra_process_new_insns (insn, new_insns, NULL,
			   "Add inheritance<-original");
  /* Substitute the inheritance pseudo into all recorded next usage
     insns: optional debug insns (INSN_LIST elements) finished by a
     non-debug insn.  */
  while (next_usage_insns != NULL_RTX)
    {
      if (GET_CODE (next_usage_insns) != INSN_LIST)
	{
	  usage_insn = next_usage_insns;
	  lra_assert (NONDEBUG_INSN_P (usage_insn));
	  next_usage_insns = NULL;
	}
      else
	{
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	}
      lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false);
      lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Inheritance reuse change %d->%d (bb%d):\n",
		   original_regno, REGNO (new_reg),
		   BLOCK_FOR_INSN (usage_insn)->index);
	  dump_insn_slim (lra_dump_file, usage_insn);
	}
    }
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
  return true;
}
4790 
4791 /* Return true if we need a caller save/restore for pseudo REGNO which
4792    was assigned to a hard register.  */
4793 static inline bool
4794 need_for_call_save_p (int regno)
4795 {
4796   lra_assert (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0);
4797   return (usage_insns[regno].calls_num < calls_num
4798 	  && (overlaps_hard_reg_set_p
4799 	      ((flag_ipa_ra &&
4800 		! hard_reg_set_empty_p (lra_reg_info[regno].actual_call_used_reg_set))
4801 	       ? lra_reg_info[regno].actual_call_used_reg_set
4802 	       : call_used_reg_set,
4803 	       PSEUDO_REGNO_MODE (regno), reg_renumber[regno])
4804 	      || HARD_REGNO_CALL_PART_CLOBBERED (reg_renumber[regno],
4805 						 PSEUDO_REGNO_MODE (regno))));
4806 }
4807 
/* Global registers occurring in the current EBB.  Consulted by
   need_for_split_p when deciding whether a pseudo is worth
   splitting.  */
static bitmap_head ebb_global_regs;
4810 
4811 /* Return true if we need a split for hard register REGNO or pseudo
4812    REGNO which was assigned to a hard register.
4813    POTENTIAL_RELOAD_HARD_REGS contains hard registers which might be
4814    used for reloads since the EBB end.	It is an approximation of the
4815    used hard registers in the split range.  The exact value would
4816    require expensive calculations.  If we were aggressive with
4817    splitting because of the approximation, the split pseudo will save
4818    the same hard register assignment and will be removed in the undo
4819    pass.  We still need the approximation because too aggressive
4820    splitting would result in too inaccurate cost calculation in the
4821    assignment pass because of too many generated moves which will be
4822    probably removed in the undo pass.  */
static inline bool
need_for_split_p (HARD_REG_SET potential_reload_hard_regs, int regno)
{
  /* REGNO may be a hard register directly or a pseudo assigned to
     one; work with the underlying hard register.  */
  int hard_regno = regno < FIRST_PSEUDO_REGISTER ? regno : reg_renumber[regno];

  lra_assert (hard_regno >= 0);
  return ((TEST_HARD_REG_BIT (potential_reload_hard_regs, hard_regno)
	   /* Don't split eliminable hard registers, otherwise we can
	      split hard registers like hard frame pointer, which
	      lives on BB start/end according to DF-infrastructure,
	      when there is a pseudo assigned to the register and
	      living in the same BB.  */
	   && (regno >= FIRST_PSEUDO_REGISTER
	       || ! TEST_HARD_REG_BIT (eliminable_regset, hard_regno))
	   && ! TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno)
	   /* Don't split call clobbered hard regs living through
	      calls, otherwise we might have a check problem in the
	      assign sub-pass as in the most cases (exception is a
	      situation when lra_risky_transformations_p value is
	      true) the assign pass assumes that all pseudos living
	      through calls are assigned to call saved hard regs.  */
	   && (regno >= FIRST_PSEUDO_REGISTER
	       || ! TEST_HARD_REG_BIT (call_used_reg_set, regno)
	       || usage_insns[regno].calls_num == calls_num)
	   /* We need at least 2 reloads to make pseudo splitting
	      profitable.  We should provide hard regno splitting in
	      any case to solve 1st insn scheduling problem when
	      moving hard register definition up might result in
	      impossibility to find hard register for reload pseudo of
	      small register class.  */
	   && (usage_insns[regno].reloads_num
	       + (regno < FIRST_PSEUDO_REGISTER ? 0 : 3) < reloads_num)
	   && (regno < FIRST_PSEUDO_REGISTER
	       /* For short living pseudos, spilling + inheritance can
		  be considered a substitution for splitting.
		  Therefore we do not splitting for local pseudos.  It
		  decreases also aggressiveness of splitting.  The
		  minimal number of references is chosen taking into
		  account that for 2 references splitting has no sense
		  as we can just spill the pseudo.  */
	       || (regno >= FIRST_PSEUDO_REGISTER
		   && lra_reg_info[regno].nrefs > 3
		   && bitmap_bit_p (&ebb_global_regs, regno))))
	  /* Split also when a caller save/restore is needed for the
	     pseudo.  */
	  || (regno >= FIRST_PSEUDO_REGISTER && need_for_call_save_p (regno)));
}
4868 
4869 /* Return class for the split pseudo created from original pseudo with
4870    ALLOCNO_CLASS and MODE which got a hard register HARD_REGNO.	 We
4871    choose subclass of ALLOCNO_CLASS which contains HARD_REGNO and
4872    results in no secondary memory movements.  */
4873 static enum reg_class
4874 choose_split_class (enum reg_class allocno_class,
4875 		    int hard_regno ATTRIBUTE_UNUSED,
4876 		    machine_mode mode ATTRIBUTE_UNUSED)
4877 {
4878 #ifndef SECONDARY_MEMORY_NEEDED
4879   return allocno_class;
4880 #else
4881   int i;
4882   enum reg_class cl, best_cl = NO_REGS;
4883   enum reg_class hard_reg_class ATTRIBUTE_UNUSED
4884     = REGNO_REG_CLASS (hard_regno);
4885 
4886   if (! SECONDARY_MEMORY_NEEDED (allocno_class, allocno_class, mode)
4887       && TEST_HARD_REG_BIT (reg_class_contents[allocno_class], hard_regno))
4888     return allocno_class;
4889   for (i = 0;
4890        (cl = reg_class_subclasses[allocno_class][i]) != LIM_REG_CLASSES;
4891        i++)
4892     if (! SECONDARY_MEMORY_NEEDED (cl, hard_reg_class, mode)
4893 	&& ! SECONDARY_MEMORY_NEEDED (hard_reg_class, cl, mode)
4894 	&& TEST_HARD_REG_BIT (reg_class_contents[cl], hard_regno)
4895 	&& (best_cl == NO_REGS
4896 	    || ira_class_hard_regs_num[best_cl] < ira_class_hard_regs_num[cl]))
4897       best_cl = cl;
4898   return best_cl;
4899 #endif
4900 }
4901 
4902 /* Do split transformations for insn INSN, which defines or uses
4903    ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which instruction in
4904    the EBB next uses ORIGINAL_REGNO; it has the same form as the
4905    "insns" field of usage_insns.
4906 
4907    The transformations look like:
4908 
4909      p <- ...		  p <- ...
4910      ...		  s <- p    (new insn -- save)
4911      ...	     =>
4912      ...		  p <- s    (new insn -- restore)
4913      <- ... p ...	  <- ... p ...
4914    or
4915      <- ... p ...	  <- ... p ...
4916      ...		  s <- p    (new insn -- save)
4917      ...	     =>
4918      ...		  p <- s    (new insn -- restore)
4919      <- ... p ...	  <- ... p ...
4920 
4921    where p is an original pseudo got a hard register or a hard
4922    register and s is a new split pseudo.  The save is put before INSN
4923    if BEFORE_P is true.	 Return true if we succeed in such
4924    transformation.  */
static bool
split_reg (bool before_p, int original_regno, rtx_insn *insn,
	   rtx next_usage_insns)
{
  enum reg_class rclass;
  rtx original_reg;
  int hard_regno, nregs;
  rtx new_reg, usage_insn;
  rtx_insn *restore, *save;
  bool after_p;
  bool call_save_p;

  if (original_regno < FIRST_PSEUDO_REGISTER)
    {
      /* Splitting a hard register directly: never a call save.  */
      rclass = ira_allocno_class_translate[REGNO_REG_CLASS (original_regno)];
      hard_regno = original_regno;
      call_save_p = false;
      nregs = 1;
    }
  else
    {
      hard_regno = reg_renumber[original_regno];
      nregs = hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (original_regno)];
      rclass = lra_get_allocno_class (original_regno);
      original_reg = regno_reg_rtx[original_regno];
      call_save_p = need_for_call_save_p (original_regno);
    }
  /* NOTE(review): ORIGINAL_REG is also assigned in the else branch
     above; this unconditional assignment makes that one redundant.  */
  original_reg = regno_reg_rtx[original_regno];
  lra_assert (hard_regno >= 0);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  ((((((((((((((((((((((((((((((((((((((((((((((((\n");
  if (call_save_p)
    {
      /* For a call save, pick the mode the target prefers for
	 saving/restoring this hard register around calls.  */
      machine_mode mode = GET_MODE (original_reg);

      mode = HARD_REGNO_CALLER_SAVE_MODE (hard_regno,
					  hard_regno_nregs[hard_regno][mode],
					  mode);
      new_reg = lra_create_new_reg (mode, NULL_RTX, NO_REGS, "save");
    }
  else
    {
      rclass = choose_split_class (rclass, hard_regno,
				   GET_MODE (original_reg));
      if (rclass == NO_REGS)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Rejecting split of %d(%s): "
		       "no good reg class for %d(%s)\n",
		       original_regno,
		       reg_class_names[lra_get_allocno_class (original_regno)],
		       hard_regno,
		       reg_class_names[REGNO_REG_CLASS (hard_regno)]);
	      fprintf
		(lra_dump_file,
		 "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
	    }
	  return false;
	}
      new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
				    rclass, "split");
      /* The split pseudo keeps the original hard register
	 assignment.  */
      reg_renumber[REGNO (new_reg)] = hard_regno;
    }
  /* Generate the save (new <- original) move; it must be a single
     insn unless this is a call save.  */
  save = emit_spill_move (true, new_reg, original_reg);
  if (NEXT_INSN (save) != NULL_RTX && !call_save_p)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf
	    (lra_dump_file,
	     "	  Rejecting split %d->%d resulting in > 2 save insns:\n",
	     original_regno, REGNO (new_reg));
	  dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	))))))))))))))))))))))))))))))))))))))))))))))))\n");
	}
      return false;
    }
  /* Likewise the restore (original <- new) move.  */
  restore = emit_spill_move (false, new_reg, original_reg);
  if (NEXT_INSN (restore) != NULL_RTX && !call_save_p)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "	Rejecting split %d->%d "
		   "resulting in > 2 restore insns:\n",
		   original_regno, REGNO (new_reg));
	  dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	))))))))))))))))))))))))))))))))))))))))))))))))\n");
	}
      return false;
    }
  after_p = usage_insns[original_regno].after_p;
  lra_reg_info[REGNO (new_reg)].restore_regno = original_regno;
  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
  bitmap_set_bit (&check_only_regs, original_regno);
  bitmap_set_bit (&lra_split_regs, REGNO (new_reg));
  /* Substitute NEW_REG into the recorded debug usage insns and stop
     at the finishing non-debug insn (or note).  */
  for (;;)
    {
      if (GET_CODE (next_usage_insns) != INSN_LIST)
	{
	  usage_insn = next_usage_insns;
	  break;
	}
      usage_insn = XEXP (next_usage_insns, 0);
      lra_assert (DEBUG_INSN_P (usage_insn));
      next_usage_insns = XEXP (next_usage_insns, 1);
      lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false);
      lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, "    Split reuse change %d->%d:\n",
		   original_regno, REGNO (new_reg));
	  dump_insn_slim (lra_dump_file, usage_insn);
	}
    }
  lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
  lra_assert (usage_insn != insn || (after_p && before_p));
  lra_process_new_insns (as_a <rtx_insn *> (usage_insn),
			 after_p ? NULL : restore,
			 after_p ? restore : NULL,
			 call_save_p
			 ?  "Add reg<-save" : "Add reg<-split");
  lra_process_new_insns (insn, before_p ? save : NULL,
			 before_p ? NULL : save,
			 call_save_p
			 ?  "Add save<-reg" : "Add split<-reg");
  if (nregs > 1)
    /* If we are trying to split multi-register.  We should check
       conflicts on the next assignment sub-pass.  IRA can allocate on
       sub-register levels, LRA do this on pseudos level right now and
       this discrepancy may create allocation conflicts after
       splitting.  */
    lra_risky_transformations_p = true;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  ))))))))))))))))))))))))))))))))))))))))))))))))\n");
  return true;
}
5068 
5069 /* Recognize that we need a split transformation for insn INSN, which
5070    defines or uses REGNO in its insn biggest MODE (we use it only if
5071    REGNO is a hard register).  POTENTIAL_RELOAD_HARD_REGS contains
5072    hard registers which might be used for reloads since the EBB end.
5073    Put the save before INSN if BEFORE_P is true.  MAX_UID is maximla
5074    uid before starting INSN processing.  Return true if we succeed in
5075    such transformation.  */
5076 static bool
5077 split_if_necessary (int regno, machine_mode mode,
5078 		    HARD_REG_SET potential_reload_hard_regs,
5079 		    bool before_p, rtx_insn *insn, int max_uid)
5080 {
5081   bool res = false;
5082   int i, nregs = 1;
5083   rtx next_usage_insns;
5084 
5085   if (regno < FIRST_PSEUDO_REGISTER)
5086     nregs = hard_regno_nregs[regno][mode];
5087   for (i = 0; i < nregs; i++)
5088     if (usage_insns[regno + i].check == curr_usage_insns_check
5089 	&& (next_usage_insns = usage_insns[regno + i].insns) != NULL_RTX
5090 	/* To avoid processing the register twice or more.  */
5091 	&& ((GET_CODE (next_usage_insns) != INSN_LIST
5092 	     && INSN_UID (next_usage_insns) < max_uid)
5093 	    || (GET_CODE (next_usage_insns) == INSN_LIST
5094 		&& (INSN_UID (XEXP (next_usage_insns, 0)) < max_uid)))
5095 	&& need_for_split_p (potential_reload_hard_regs, regno + i)
5096 	&& split_reg (before_p, regno + i, insn, next_usage_insns))
5097     res = true;
5098   return res;
5099 }
5100 
/* Subset of check_only_regs living at the current program point in
   the current EBB (maintained during the backward scan in
   update_ebb_live_info).  */
static bitmap_head live_regs;
5104 
/* Update live info in EBB given by its HEAD and TAIL insns after
   inheritance/split transformation.  The function removes dead moves
   too.  Only registers in CHECK_ONLY_REGS are tracked; the df live-in
   and live-out sets of the EBB's blocks are corrected for them.  */
static void
update_ebb_live_info (rtx_insn *head, rtx_insn *tail)
{
  unsigned int j;
  int i, regno;
  bool live_p;
  rtx_insn *prev_insn;
  rtx set;
  bool remove_p;
  basic_block last_bb, prev_bb, curr_bb;
  bitmap_iterator bi;
  struct lra_insn_reg *reg;
  edge e;
  edge_iterator ei;

  last_bb = BLOCK_FOR_INSN (tail);
  prev_bb = NULL;
  /* Scan the EBB backwards so that LIVE_REGS always describes
     liveness at the current program point.  */
  for (curr_insn = tail;
       curr_insn != PREV_INSN (head);
       curr_insn = prev_insn)
    {
      prev_insn = PREV_INSN (curr_insn);
      /* We need to process empty blocks too.  They contain
	 NOTE_INSN_BASIC_BLOCK referring for the basic block.  */
      if (NOTE_P (curr_insn) && NOTE_KIND (curr_insn) != NOTE_INSN_BASIC_BLOCK)
	continue;
      curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (curr_bb != prev_bb)
	{
	  /* We crossed a basic-block boundary (moving backwards):
	     flush the liveness collected so far into the df sets of
	     the neighbouring blocks.  */
	  if (prev_bb != NULL)
	    {
	      /* Update df_get_live_in (prev_bb):  */
	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
		if (bitmap_bit_p (&live_regs, j))
		  bitmap_set_bit (df_get_live_in (prev_bb), j);
		else
		  bitmap_clear_bit (df_get_live_in (prev_bb), j);
	    }
	  if (curr_bb != last_bb)
	    {
	      /* Update df_get_live_out (curr_bb):  */
	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
		{
		  live_p = bitmap_bit_p (&live_regs, j);
		  if (! live_p)
		    /* The reg may still be live at the start of a
		       successor outside the fall-through path.  */
		    FOR_EACH_EDGE (e, ei, curr_bb->succs)
		      if (bitmap_bit_p (df_get_live_in (e->dest), j))
			{
			  live_p = true;
			  break;
			}
		  if (live_p)
		    bitmap_set_bit (df_get_live_out (curr_bb), j);
		  else
		    bitmap_clear_bit (df_get_live_out (curr_bb), j);
		}
	    }
	  prev_bb = curr_bb;
	  /* Seed liveness for the new block from its live-out set,
	     restricted to the tracked registers.  */
	  bitmap_and (&live_regs, &check_only_regs, df_get_live_out (curr_bb));
	}
      if (! NONDEBUG_INSN_P (curr_insn))
	continue;
      curr_id = lra_get_insn_recog_data (curr_insn);
      curr_static_id = curr_id->insn_static_data;
      remove_p = false;
      /* A single-set insn writing a tracked pseudo that is dead at
	 this point is a removable dead store.  */
      if ((set = single_set (curr_insn)) != NULL_RTX
	  && REG_P (SET_DEST (set))
	  && (regno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
	  && SET_DEST (set) != pic_offset_table_rtx
	  && bitmap_bit_p (&check_only_regs, regno)
	  && ! bitmap_bit_p (&live_regs, regno))
	remove_p = true;
      /* See which defined values die here.  */
      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	if (reg->type == OP_OUT && ! reg->subreg_p)
	  bitmap_clear_bit (&live_regs, reg->regno);
      for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type == OP_OUT && ! reg->subreg_p)
	  bitmap_clear_bit (&live_regs, reg->regno);
      if (curr_id->arg_hard_regs != NULL)
	/* Make clobbered argument hard registers die.  Clobbered regs
	   are encoded in arg_hard_regs with a FIRST_PSEUDO_REGISTER
	   offset.  */
	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    bitmap_clear_bit (&live_regs, regno - FIRST_PSEUDO_REGISTER);
      /* Mark each used value as live.  */
      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_OUT
	    && bitmap_bit_p (&check_only_regs, reg->regno))
	  bitmap_set_bit (&live_regs, reg->regno);
      for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_OUT
	    && bitmap_bit_p (&check_only_regs, reg->regno))
	  bitmap_set_bit (&live_regs, reg->regno);
      if (curr_id->arg_hard_regs != NULL)
	/* Make used argument hard registers live.  */
	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	  if (regno < FIRST_PSEUDO_REGISTER
	      && bitmap_bit_p (&check_only_regs, regno))
	    bitmap_set_bit (&live_regs, regno);
      /* It is quite important to remove dead move insns because it
	 means removing dead store.  We don't need to process them for
	 constraints.  */
      if (remove_p)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "	    Removing dead insn:\n ");
	      dump_insn_slim (lra_dump_file, curr_insn);
	    }
	  lra_set_insn_deleted (curr_insn);
	}
    }
}
5221 
/* The structure describes info to do an inheritance for the current
   insn.  We need to collect such info first before doing the
   transformations because the transformations change the insn
   internal representation.  */
struct to_inherit
{
  /* Original regno.  */
  int regno;
  /* Subsequent insns which can inherit original reg value.  */
  rtx insns;
};

/* Array containing all info for doing inheritance from the current
   insn.  At most one entry per regno (see add_to_inherit).  */
static struct to_inherit to_inherit[LRA_MAX_INSN_RELOADS];

/* Number of elements currently used in the previous array.  */
static int to_inherit_num;
5240 
5241 /* Add inheritance info REGNO and INSNS. Their meaning is described in
5242    structure to_inherit.  */
5243 static void
5244 add_to_inherit (int regno, rtx insns)
5245 {
5246   int i;
5247 
5248   for (i = 0; i < to_inherit_num; i++)
5249     if (to_inherit[i].regno == regno)
5250       return;
5251   lra_assert (to_inherit_num < LRA_MAX_INSN_RELOADS);
5252   to_inherit[to_inherit_num].regno = regno;
5253   to_inherit[to_inherit_num++].insns = insns;
5254 }
5255 
5256 /* Return the last non-debug insn in basic block BB, or the block begin
5257    note if none.  */
5258 static rtx_insn *
5259 get_last_insertion_point (basic_block bb)
5260 {
5261   rtx_insn *insn;
5262 
5263   FOR_BB_INSNS_REVERSE (bb, insn)
5264     if (NONDEBUG_INSN_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
5265       return insn;
5266   gcc_unreachable ();
5267 }
5268 
5269 /* Set up RES by registers living on edges FROM except the edge (FROM,
5270    TO) or by registers set up in a jump insn in BB FROM.  */
5271 static void
5272 get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
5273 {
5274   rtx_insn *last;
5275   struct lra_insn_reg *reg;
5276   edge e;
5277   edge_iterator ei;
5278 
5279   lra_assert (to != NULL);
5280   bitmap_clear (res);
5281   FOR_EACH_EDGE (e, ei, from->succs)
5282     if (e->dest != to)
5283       bitmap_ior_into (res, df_get_live_in (e->dest));
5284   last = get_last_insertion_point (from);
5285   if (! JUMP_P (last))
5286     return;
5287   curr_id = lra_get_insn_recog_data (last);
5288   for (reg = curr_id->regs; reg != NULL; reg = reg->next)
5289     if (reg->type != OP_IN)
5290       bitmap_set_bit (res, reg->regno);
5291 }
5292 
/* Used as a temporary result of some bitmap calculations.  */
static bitmap_head temp_bitmap;

/* We split for reloads of small class of hard regs.  The following
   defines how many hard regs the class should have to be qualified as
   small.  The code is mostly oriented to x86/x86-64 architecture
   where some insns need to use only specific register or pair of
   registers and these register can live in RTL explicitly, e.g. for
   parameter passing.  */
static const int max_small_class_regs_num = 2;
5303 
/* Do inheritance/split transformations in EBB starting with HEAD and
   finishing on TAIL.  We process EBB insns in the reverse order.
   Return true if we did any inheritance/split transformation in the
   EBB.

   We should avoid excessive splitting which results in worse code
   because of inaccurate cost calculations for spilling new split
   pseudos in such case.  To achieve this we do splitting only if
   register pressure is high in given basic block and there are reload
   pseudos requiring hard registers.  We could do more register
   pressure calculations at any given program point to avoid necessary
   splitting even more but it is too expensive and the current approach
   works well enough.  */
static bool
inherit_in_ebb (rtx_insn *head, rtx_insn *tail)
{
  int i, src_regno, dst_regno, nregs;
  bool change_p, succ_p, update_reloads_num_p;
  rtx_insn *prev_insn, *last_insn;
  rtx next_usage_insns, set;
  enum reg_class cl;
  struct lra_insn_reg *reg;
  basic_block last_processed_bb, curr_bb = NULL;
  HARD_REG_SET potential_reload_hard_regs, live_hard_regs;
  bitmap to_process;
  unsigned int j;
  bitmap_iterator bi;
  bool head_p, after_p;

  change_p = false;
  curr_usage_insns_check++;
  reloads_num = calls_num = 0;
  bitmap_clear (&check_only_regs);
  last_processed_bb = NULL;
  CLEAR_HARD_REG_SET (potential_reload_hard_regs);
  /* Start from hard regs which are never available for allocation.  */
  COPY_HARD_REG_SET (live_hard_regs, eliminable_regset);
  IOR_HARD_REG_SET (live_hard_regs, lra_no_alloc_regs);
  /* We don't process new insns generated in the loop.	*/
  for (curr_insn = tail; curr_insn != PREV_INSN (head); curr_insn = prev_insn)
    {
      prev_insn = PREV_INSN (curr_insn);
      if (BLOCK_FOR_INSN (curr_insn) != NULL)
	curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (last_processed_bb != curr_bb)
	{
	  /* We are at the end of BB.  Add qualified living
	     pseudos for potential splitting.  */
	  to_process = df_get_live_out (curr_bb);
	  if (last_processed_bb != NULL)
	    {
	      /* We are somewhere in the middle of EBB.	 */
	      get_live_on_other_edges (curr_bb, last_processed_bb,
				       &temp_bitmap);
	      to_process = &temp_bitmap;
	    }
	  last_processed_bb = curr_bb;
	  last_insn = get_last_insertion_point (curr_bb);
	  /* A split restore can go after the last insn unless that
	     insn is a jump or a call that never returns.  */
	  after_p = (! JUMP_P (last_insn)
		     && (! CALL_P (last_insn)
			 || (find_reg_note (last_insn,
					   REG_NORETURN, NULL_RTX) == NULL_RTX
			     && ! SIBLING_CALL_P (last_insn))));
	  CLEAR_HARD_REG_SET (potential_reload_hard_regs);
	  EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
	    {
	      if ((int) j >= lra_constraint_new_regno_start)
		break;
	      if (j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
		{
		  if (j < FIRST_PSEUDO_REGISTER)
		    SET_HARD_REG_BIT (live_hard_regs, j);
		  else
		    add_to_hard_reg_set (&live_hard_regs,
					 PSEUDO_REGNO_MODE (j),
					 reg_renumber[j]);
		  setup_next_usage_insn (j, last_insn, reloads_num, after_p);
		}
	    }
	}
      /* Recognize a simple register-to-register move.  */
      src_regno = dst_regno = -1;
      if (NONDEBUG_INSN_P (curr_insn)
	  && (set = single_set (curr_insn)) != NULL_RTX
	  && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
	{
	  src_regno = REGNO (SET_SRC (set));
	  dst_regno = REGNO (SET_DEST (set));
	}
      update_reloads_num_p = true;
      if (src_regno < lra_constraint_new_regno_start
	  && src_regno >= FIRST_PSEUDO_REGISTER
	  && reg_renumber[src_regno] < 0
	  && dst_regno >= lra_constraint_new_regno_start
	  && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS)
	{
	  /* 'reload_pseudo <- original_pseudo'.  */
	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
	    reloads_num++;
	  update_reloads_num_p = false;
	  succ_p = false;
	  if (usage_insns[src_regno].check == curr_usage_insns_check
	      && (next_usage_insns = usage_insns[src_regno].insns) != NULL_RTX)
	    succ_p = inherit_reload_reg (false, src_regno, cl,
					 curr_insn, next_usage_insns);
	  if (succ_p)
	    change_p = true;
	  else
	    setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
	    IOR_HARD_REG_SET (potential_reload_hard_regs,
			      reg_class_contents[cl]);
	}
      else if (src_regno >= lra_constraint_new_regno_start
	       && dst_regno < lra_constraint_new_regno_start
	       && dst_regno >= FIRST_PSEUDO_REGISTER
	       && reg_renumber[dst_regno] < 0
	       && (cl = lra_get_allocno_class (src_regno)) != NO_REGS
	       && usage_insns[dst_regno].check == curr_usage_insns_check
	       && (next_usage_insns
		   = usage_insns[dst_regno].insns) != NULL_RTX)
	{
	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
	    reloads_num++;
	  update_reloads_num_p = false;
	  /* 'original_pseudo <- reload_pseudo'.  */
	  if (! JUMP_P (curr_insn)
	      && inherit_reload_reg (true, dst_regno, cl,
				     curr_insn, next_usage_insns))
	    change_p = true;
	  /* Invalidate.  */
	  usage_insns[dst_regno].check = 0;
	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
	    IOR_HARD_REG_SET (potential_reload_hard_regs,
			      reg_class_contents[cl]);
	}
      else if (INSN_P (curr_insn))
	{
	  int iter;
	  int max_uid = get_max_uid ();

	  curr_id = lra_get_insn_recog_data (curr_insn);
	  curr_static_id = curr_id->insn_static_data;
	  to_inherit_num = 0;
	  /* Process insn definitions.	*/
	  for (iter = 0; iter < 2; iter++)
	    for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
		 reg != NULL;
		 reg = reg->next)
	      if (reg->type != OP_IN
		  && (dst_regno = reg->regno) < lra_constraint_new_regno_start)
		{
		  if (dst_regno >= FIRST_PSEUDO_REGISTER && reg->type == OP_OUT
		      && reg_renumber[dst_regno] < 0 && ! reg->subreg_p
		      && usage_insns[dst_regno].check == curr_usage_insns_check
		      && (next_usage_insns
			  = usage_insns[dst_regno].insns) != NULL_RTX)
		    {
		      struct lra_insn_reg *r;

		      for (r = curr_id->regs; r != NULL; r = r->next)
			if (r->type != OP_OUT && r->regno == dst_regno)
			  break;
		      /* Don't do inheritance if the pseudo is also
			 used in the insn.  */
		      if (r == NULL)
			/* We can not do inheritance right now
			   because the current insn reg info (chain
			   regs) can change after that.  */
			add_to_inherit (dst_regno, next_usage_insns);
		    }
		  /* We can not process one reg twice here because of
		     usage_insns invalidation.  */
		  if ((dst_regno < FIRST_PSEUDO_REGISTER
		       || reg_renumber[dst_regno] >= 0)
		      && ! reg->subreg_p && reg->type != OP_IN)
		    {
		      HARD_REG_SET s;

		      if (split_if_necessary (dst_regno, reg->biggest_mode,
					      potential_reload_hard_regs,
					      false, curr_insn, max_uid))
			change_p = true;
		      /* The defined hard regs stop being live above
			 this insn.  */
		      CLEAR_HARD_REG_SET (s);
		      if (dst_regno < FIRST_PSEUDO_REGISTER)
			add_to_hard_reg_set (&s, reg->biggest_mode, dst_regno);
		      else
			add_to_hard_reg_set (&s, PSEUDO_REGNO_MODE (dst_regno),
					     reg_renumber[dst_regno]);
		      AND_COMPL_HARD_REG_SET (live_hard_regs, s);
		    }
		  /* We should invalidate potential inheritance or
		     splitting for the current insn usages to the next
		     usage insns (see code below) as the output pseudo
		     prevents this.  */
		  if ((dst_regno >= FIRST_PSEUDO_REGISTER
		       && reg_renumber[dst_regno] < 0)
		      || (reg->type == OP_OUT && ! reg->subreg_p
			  && (dst_regno < FIRST_PSEUDO_REGISTER
			      || reg_renumber[dst_regno] >= 0)))
		    {
		      /* Invalidate and mark definitions.  */
		      if (dst_regno >= FIRST_PSEUDO_REGISTER)
			usage_insns[dst_regno].check = -(int) INSN_UID (curr_insn);
		      else
			{
			  nregs = hard_regno_nregs[dst_regno][reg->biggest_mode];
			  for (i = 0; i < nregs; i++)
			    usage_insns[dst_regno + i].check
			      = -(int) INSN_UID (curr_insn);
			}
		    }
		}
	  /* Process clobbered call regs.  */
	  if (curr_id->arg_hard_regs != NULL)
	    for (i = 0; (dst_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	      if (dst_regno >= FIRST_PSEUDO_REGISTER)
		usage_insns[dst_regno - FIRST_PSEUDO_REGISTER].check
		  = -(int) INSN_UID (curr_insn);
	  if (! JUMP_P (curr_insn))
	    for (i = 0; i < to_inherit_num; i++)
	      if (inherit_reload_reg (true, to_inherit[i].regno,
				      ALL_REGS, curr_insn,
				      to_inherit[i].insns))
	      change_p = true;
	  if (CALL_P (curr_insn))
	    {
	      rtx cheap, pat, dest;
	      rtx_insn *restore;
	      int regno, hard_regno;

	      calls_num++;
	      if ((cheap = find_reg_note (curr_insn,
					  REG_RETURNED, NULL_RTX)) != NULL_RTX
		  && ((cheap = XEXP (cheap, 0)), true)
		  && (regno = REGNO (cheap)) >= FIRST_PSEUDO_REGISTER
		  && (hard_regno = reg_renumber[regno]) >= 0
		  /* If there are pending saves/restores, the
		     optimization is not worth.	 */
		  && usage_insns[regno].calls_num == calls_num - 1
		  && TEST_HARD_REG_BIT (call_used_reg_set, hard_regno))
		{
		  /* Restore the pseudo from the call result as
		     REG_RETURNED note says that the pseudo value is
		     in the call result and the pseudo is an argument
		     of the call.  */
		  pat = PATTERN (curr_insn);
		  if (GET_CODE (pat) == PARALLEL)
		    pat = XVECEXP (pat, 0, 0);
		  dest = SET_DEST (pat);
		  /* For multiple return values dest is PARALLEL.
		     Currently we handle only single return value case.  */
		  if (REG_P (dest))
		    {
		      start_sequence ();
		      emit_move_insn (cheap, copy_rtx (dest));
		      restore = get_insns ();
		      end_sequence ();
		      lra_process_new_insns (curr_insn, NULL, restore,
					     "Inserting call parameter restore");
		      /* We don't need to save/restore of the pseudo from
			 this call.	 */
		      usage_insns[regno].calls_num = calls_num;
		      bitmap_set_bit (&check_only_regs, regno);
		    }
		}
	    }
	  to_inherit_num = 0;
	  /* Process insn usages.  */
	  for (iter = 0; iter < 2; iter++)
	    for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
		 reg != NULL;
		 reg = reg->next)
	      if ((reg->type != OP_OUT
		   || (reg->type == OP_OUT && reg->subreg_p))
		  && (src_regno = reg->regno) < lra_constraint_new_regno_start)
		{
		  if (src_regno >= FIRST_PSEUDO_REGISTER
		      && reg_renumber[src_regno] < 0 && reg->type == OP_IN)
		    {
		      if (usage_insns[src_regno].check == curr_usage_insns_check
			  && (next_usage_insns
			      = usage_insns[src_regno].insns) != NULL_RTX
			  && NONDEBUG_INSN_P (curr_insn))
			add_to_inherit (src_regno, next_usage_insns);
		      else if (usage_insns[src_regno].check
			       != -(int) INSN_UID (curr_insn))
			/* Add usages but only if the reg is not set up
			   in the same insn.  */
			add_next_usage_insn (src_regno, curr_insn, reloads_num);
		    }
		  else if (src_regno < FIRST_PSEUDO_REGISTER
			   || reg_renumber[src_regno] >= 0)
		    {
		      bool before_p;
		      rtx use_insn = curr_insn;

		      before_p = (JUMP_P (curr_insn)
				  || (CALL_P (curr_insn) && reg->type == OP_IN));
		      if (NONDEBUG_INSN_P (curr_insn)
			  && (! JUMP_P (curr_insn) || reg->type == OP_IN)
			  && split_if_necessary (src_regno, reg->biggest_mode,
						 potential_reload_hard_regs,
						 before_p, curr_insn, max_uid))
			{
			  if (reg->subreg_p)
			    lra_risky_transformations_p = true;
			  change_p = true;
			  /* Invalidate. */
			  usage_insns[src_regno].check = 0;
			  if (before_p)
			    use_insn = PREV_INSN (curr_insn);
			}
		      if (NONDEBUG_INSN_P (curr_insn))
			{
			  if (src_regno < FIRST_PSEUDO_REGISTER)
			    add_to_hard_reg_set (&live_hard_regs,
						 reg->biggest_mode, src_regno);
			  else
			    add_to_hard_reg_set (&live_hard_regs,
						 PSEUDO_REGNO_MODE (src_regno),
						 reg_renumber[src_regno]);
			}
		      add_next_usage_insn (src_regno, use_insn, reloads_num);
		    }
		}
	  /* Process used call regs.  */
	  if (curr_id->arg_hard_regs != NULL)
	    for (i = 0; (src_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	      if (src_regno < FIRST_PSEUDO_REGISTER)
		{
	           SET_HARD_REG_BIT (live_hard_regs, src_regno);
	           add_next_usage_insn (src_regno, curr_insn, reloads_num);
		}
	  for (i = 0; i < to_inherit_num; i++)
	    {
	      src_regno = to_inherit[i].regno;
	      if (inherit_reload_reg (false, src_regno, ALL_REGS,
				      curr_insn, to_inherit[i].insns))
		change_p = true;
	      else
		setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
	    }
	}
      if (update_reloads_num_p
	  && NONDEBUG_INSN_P (curr_insn)
          && (set = single_set (curr_insn)) != NULL_RTX)
	{
	  int regno = -1;
	  /* Account insns touching a reload pseudo which were not
	     handled by the move cases above.  */
	  if ((REG_P (SET_DEST (set))
	       && (regno = REGNO (SET_DEST (set))) >= lra_constraint_new_regno_start
	       && reg_renumber[regno] < 0
	       && (cl = lra_get_allocno_class (regno)) != NO_REGS)
	      || (REG_P (SET_SRC (set))
	          && (regno = REGNO (SET_SRC (set))) >= lra_constraint_new_regno_start
	          && reg_renumber[regno] < 0
	          && (cl = lra_get_allocno_class (regno)) != NO_REGS))
	    {
	      if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
		reloads_num++;
	      if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
		IOR_HARD_REG_SET (potential_reload_hard_regs,
	                          reg_class_contents[cl]);
	    }
	}
      /* We reached the start of the current basic block.  */
      if (prev_insn == NULL_RTX || prev_insn == PREV_INSN (head)
	  || BLOCK_FOR_INSN (prev_insn) != curr_bb)
	{
	  /* We reached the beginning of the current block -- do
	     rest of spliting in the current BB.  */
	  to_process = df_get_live_in (curr_bb);
	  if (BLOCK_FOR_INSN (head) != curr_bb)
	    {
	      /* We are somewhere in the middle of EBB.	 */
	      get_live_on_other_edges (EDGE_PRED (curr_bb, 0)->src,
				       curr_bb, &temp_bitmap);
	      to_process = &temp_bitmap;
	    }
	  head_p = true;
	  EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
	    {
	      if ((int) j >= lra_constraint_new_regno_start)
		break;
	      if (((int) j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
		  && usage_insns[j].check == curr_usage_insns_check
		  && (next_usage_insns = usage_insns[j].insns) != NULL_RTX)
		{
		  if (need_for_split_p (potential_reload_hard_regs, j))
		    {
		      if (lra_dump_file != NULL && head_p)
			{
			  fprintf (lra_dump_file,
				   "  ----------------------------------\n");
			  head_p = false;
			}
		      if (split_reg (false, j, bb_note (curr_bb),
				     next_usage_insns))
			change_p = true;
		    }
		  usage_insns[j].check = 0;
		}
	    }
	}
    }
  return change_p;
}
5709 
/* This value affects EBB forming.  If probability of edge from EBB to
   a BB is less than the following value, we don't add the BB to the
   EBB (see the comparison in lra_inheritance).  */
#define EBB_PROBABILITY_CUTOFF \
  ((REG_BR_PROB_BASE * LRA_INHERITANCE_EBB_PROBABILITY_CUTOFF) / 100)

/* Current number of inheritance/split iteration.  */
int lra_inheritance_iter;
5718 
/* Entry function for inheritance/split pass.  Forms EBBs from
   fall-through chains of basic blocks and runs inherit_in_ebb and
   update_ebb_live_info on each of them.  Does nothing after
   LRA_MAX_INHERITANCE_PASSES iterations.  */
void
lra_inheritance (void)
{
  int i;
  basic_block bb, start_bb;
  edge e;

  lra_inheritance_iter++;
  if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
    return;
  timevar_push (TV_LRA_INHERITANCE);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "\n********** Inheritance #%d: **********\n\n",
	     lra_inheritance_iter);
  curr_usage_insns_check = 0;
  usage_insns = XNEWVEC (struct usage_insns, lra_constraint_new_regno_start);
  for (i = 0; i < lra_constraint_new_regno_start; i++)
    usage_insns[i].check = 0;
  bitmap_initialize (&check_only_regs, &reg_obstack);
  bitmap_initialize (&live_regs, &reg_obstack);
  bitmap_initialize (&temp_bitmap, &reg_obstack);
  bitmap_initialize (&ebb_global_regs, &reg_obstack);
  FOR_EACH_BB_FN (bb, cfun)
    {
      start_bb = bb;
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "EBB");
      /* Form a EBB starting with BB.  */
      bitmap_clear (&ebb_global_regs);
      bitmap_ior_into (&ebb_global_regs, df_get_live_in (bb));
      for (;;)
	{
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, " %d", bb->index);
	  /* The EBB grows only along likely fall-through edges to
	     blocks that are not jump targets.  */
	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	      || LABEL_P (BB_HEAD (bb->next_bb)))
	    break;
	  e = find_fallthru_edge (bb->succs);
	  if (! e)
	    break;
	  if (e->probability < EBB_PROBABILITY_CUTOFF)
	    break;
	  bb = bb->next_bb;
	}
      bitmap_ior_into (&ebb_global_regs, df_get_live_out (bb));
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "\n");
      if (inherit_in_ebb (BB_HEAD (start_bb), BB_END (bb)))
	/* Remember that the EBB head and tail can change in
	   inherit_in_ebb.  */
	update_ebb_live_info (BB_HEAD (start_bb), BB_END (bb));
    }
  bitmap_clear (&ebb_global_regs);
  bitmap_clear (&temp_bitmap);
  bitmap_clear (&live_regs);
  bitmap_clear (&check_only_regs);
  free (usage_insns);

  timevar_pop (TV_LRA_INHERITANCE);
}
5780 
5781 
5782 
/* This page contains code to undo failed inheritance/split
   transformations.  */

/* Current number of the iteration undoing inheritance/split.  */
int lra_undo_inheritance_iter;
5788 
5789 /* Fix BB live info LIVE after removing pseudos created on pass doing
5790    inheritance/split which are REMOVED_PSEUDOS.	 */
5791 static void
5792 fix_bb_live_info (bitmap live, bitmap removed_pseudos)
5793 {
5794   unsigned int regno;
5795   bitmap_iterator bi;
5796 
5797   EXECUTE_IF_SET_IN_BITMAP (removed_pseudos, 0, regno, bi)
5798     if (bitmap_clear_bit (live, regno))
5799       bitmap_set_bit (live, lra_reg_info[regno].restore_regno);
5800 }
5801 
5802 /* Return regno of the (subreg of) REG. Otherwise, return a negative
5803    number.  */
5804 static int
5805 get_regno (rtx reg)
5806 {
5807   if (GET_CODE (reg) == SUBREG)
5808     reg = SUBREG_REG (reg);
5809   if (REG_P (reg))
5810     return REGNO (reg);
5811   return -1;
5812 }
5813 
5814 /* Delete a move INSN with destination reg DREGNO and a previous
5815    clobber insn with the same regno.  The inheritance/split code can
5816    generate moves with preceding clobber and when we delete such moves
5817    we should delete the clobber insn too to keep the correct life
5818    info.  */
5819 static void
5820 delete_move_and_clobber (rtx_insn *insn, int dregno)
5821 {
5822   rtx_insn *prev_insn = PREV_INSN (insn);
5823 
5824   lra_set_insn_deleted (insn);
5825   lra_assert (dregno > 0);
5826   if (prev_insn != NULL && NONDEBUG_INSN_P (prev_insn)
5827       && GET_CODE (PATTERN (prev_insn)) == CLOBBER
5828       && dregno == get_regno (XEXP (PATTERN (prev_insn), 0)))
5829     lra_set_insn_deleted (prev_insn);
5830 }
5831 
5832 /* Remove inheritance/split pseudos which are in REMOVE_PSEUDOS and
5833    return true if we did any change.  The undo transformations for
5834    inheritance looks like
5835       i <- i2
5836       p <- i	  =>   p <- i2
5837    or removing
5838       p <- i, i <- p, and i <- i3
5839    where p is original pseudo from which inheritance pseudo i was
5840    created, i and i3 are removed inheritance pseudos, i2 is another
5841    not removed inheritance pseudo.  All split pseudos or other
5842    occurrences of removed inheritance pseudos are changed on the
5843    corresponding original pseudos.
5844 
5845    The function also schedules insns changed and created during
5846    inheritance/split pass for processing by the subsequent constraint
5847    pass.  */
5848 static bool
5849 remove_inheritance_pseudos (bitmap remove_pseudos)
5850 {
5851   basic_block bb;
5852   int regno, sregno, prev_sregno, dregno, restore_regno;
5853   rtx set, prev_set;
5854   rtx_insn *prev_insn;
5855   bool change_p, done_p;
5856 
5857   change_p = ! bitmap_empty_p (remove_pseudos);
5858   /* We can not finish the function right away if CHANGE_P is true
5859      because we need to marks insns affected by previous
5860      inheritance/split pass for processing by the subsequent
5861      constraint pass.  */
5862   FOR_EACH_BB_FN (bb, cfun)
5863     {
5864       fix_bb_live_info (df_get_live_in (bb), remove_pseudos);
5865       fix_bb_live_info (df_get_live_out (bb), remove_pseudos);
5866       FOR_BB_INSNS_REVERSE (bb, curr_insn)
5867 	{
5868 	  if (! INSN_P (curr_insn))
5869 	    continue;
5870 	  done_p = false;
5871 	  sregno = dregno = -1;
5872 	  if (change_p && NONDEBUG_INSN_P (curr_insn)
5873 	      && (set = single_set (curr_insn)) != NULL_RTX)
5874 	    {
5875 	      dregno = get_regno (SET_DEST (set));
5876 	      sregno = get_regno (SET_SRC (set));
5877 	    }
5878 
5879 	  if (sregno >= 0 && dregno >= 0)
5880 	    {
5881 	      if ((bitmap_bit_p (remove_pseudos, sregno)
5882 		   && (lra_reg_info[sregno].restore_regno == dregno
5883 		       || (bitmap_bit_p (remove_pseudos, dregno)
5884 			   && (lra_reg_info[sregno].restore_regno
5885 			       == lra_reg_info[dregno].restore_regno))))
5886 		  || (bitmap_bit_p (remove_pseudos, dregno)
5887 		      && lra_reg_info[dregno].restore_regno == sregno))
5888 		/* One of the following cases:
5889 		     original <- removed inheritance pseudo
5890 		     removed inherit pseudo <- another removed inherit pseudo
5891 		     removed inherit pseudo <- original pseudo
5892 		   Or
5893 		     removed_split_pseudo <- original_reg
5894 		     original_reg <- removed_split_pseudo */
5895 		{
5896 		  if (lra_dump_file != NULL)
5897 		    {
5898 		      fprintf (lra_dump_file, "	   Removing %s:\n",
5899 			       bitmap_bit_p (&lra_split_regs, sregno)
5900 			       || bitmap_bit_p (&lra_split_regs, dregno)
5901 			       ? "split" : "inheritance");
5902 		      dump_insn_slim (lra_dump_file, curr_insn);
5903 		    }
5904 		  delete_move_and_clobber (curr_insn, dregno);
5905 		  done_p = true;
5906 		}
5907 	      else if (bitmap_bit_p (remove_pseudos, sregno)
5908 		       && bitmap_bit_p (&lra_inheritance_pseudos, sregno))
5909 		{
5910 		  /* Search the following pattern:
5911 		       inherit_or_split_pseudo1 <- inherit_or_split_pseudo2
5912 		       original_pseudo <- inherit_or_split_pseudo1
5913 		    where the 2nd insn is the current insn and
5914 		    inherit_or_split_pseudo2 is not removed.  If it is found,
5915 		    change the current insn onto:
5916 		       original_pseudo <- inherit_or_split_pseudo2.  */
5917 		  for (prev_insn = PREV_INSN (curr_insn);
5918 		       prev_insn != NULL_RTX && ! NONDEBUG_INSN_P (prev_insn);
5919 		       prev_insn = PREV_INSN (prev_insn))
5920 		    ;
5921 		  if (prev_insn != NULL_RTX && BLOCK_FOR_INSN (prev_insn) == bb
5922 		      && (prev_set = single_set (prev_insn)) != NULL_RTX
5923 		      /* There should be no subregs in insn we are
5924 			 searching because only the original reg might
5925 			 be in subreg when we changed the mode of
5926 			 load/store for splitting.  */
5927 		      && REG_P (SET_DEST (prev_set))
5928 		      && REG_P (SET_SRC (prev_set))
5929 		      && (int) REGNO (SET_DEST (prev_set)) == sregno
5930 		      && ((prev_sregno = REGNO (SET_SRC (prev_set)))
5931 			  >= FIRST_PSEUDO_REGISTER)
5932 		      /* As we consider chain of inheritance or
5933 			 splitting described in above comment we should
5934 			 check that sregno and prev_sregno were
5935 			 inheritance/split pseudos created from the
5936 			 same original regno.  */
5937 		      && (lra_reg_info[sregno].restore_regno
5938 			  == lra_reg_info[prev_sregno].restore_regno)
5939 		      && ! bitmap_bit_p (remove_pseudos, prev_sregno))
5940 		    {
5941 		      lra_assert (GET_MODE (SET_SRC (prev_set))
5942 				  == GET_MODE (regno_reg_rtx[sregno]));
5943 		      if (GET_CODE (SET_SRC (set)) == SUBREG)
5944 			SUBREG_REG (SET_SRC (set)) = SET_SRC (prev_set);
5945 		      else
5946 			SET_SRC (set) = SET_SRC (prev_set);
5947 		      /* As we are finishing with processing the insn
5948 			 here, check the destination too as it might
5949 			 inheritance pseudo for another pseudo.  */
5950 		      if (bitmap_bit_p (remove_pseudos, dregno)
5951 			  && bitmap_bit_p (&lra_inheritance_pseudos, dregno)
5952 			  && (restore_regno
5953 			      = lra_reg_info[dregno].restore_regno) >= 0)
5954 			{
5955 			  if (GET_CODE (SET_DEST (set)) == SUBREG)
5956 			    SUBREG_REG (SET_DEST (set))
5957 			      = regno_reg_rtx[restore_regno];
5958 			  else
5959 			    SET_DEST (set) = regno_reg_rtx[restore_regno];
5960 			}
5961 		      lra_push_insn_and_update_insn_regno_info (curr_insn);
5962 		      lra_set_used_insn_alternative_by_uid
5963 			(INSN_UID (curr_insn), -1);
5964 		      done_p = true;
5965 		      if (lra_dump_file != NULL)
5966 			{
5967 			  fprintf (lra_dump_file, "    Change reload insn:\n");
5968 			  dump_insn_slim (lra_dump_file, curr_insn);
5969 			}
5970 		    }
5971 		}
5972 	    }
5973 	  if (! done_p)
5974 	    {
5975 	      struct lra_insn_reg *reg;
5976 	      bool restored_regs_p = false;
5977 	      bool kept_regs_p = false;
5978 
5979 	      curr_id = lra_get_insn_recog_data (curr_insn);
5980 	      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
5981 		{
5982 		  regno = reg->regno;
5983 		  restore_regno = lra_reg_info[regno].restore_regno;
5984 		  if (restore_regno >= 0)
5985 		    {
5986 		      if (change_p && bitmap_bit_p (remove_pseudos, regno))
5987 			{
5988 			  lra_substitute_pseudo_within_insn
5989 			    (curr_insn, regno, regno_reg_rtx[restore_regno],
5990 			     false);
5991 			  restored_regs_p = true;
5992 			}
5993 		      else
5994 			kept_regs_p = true;
5995 		    }
5996 		}
5997 	      if (NONDEBUG_INSN_P (curr_insn) && kept_regs_p)
5998 		{
5999 		  /* The instruction has changed since the previous
6000 		     constraints pass.  */
6001 		  lra_push_insn_and_update_insn_regno_info (curr_insn);
6002 		  lra_set_used_insn_alternative_by_uid
6003 		    (INSN_UID (curr_insn), -1);
6004 		}
6005 	      else if (restored_regs_p)
6006 		/* The instruction has been restored to the form that
6007 		   it had during the previous constraints pass.  */
6008 		lra_update_insn_regno_info (curr_insn);
6009 	      if (restored_regs_p && lra_dump_file != NULL)
6010 		{
6011 		  fprintf (lra_dump_file, "   Insn after restoring regs:\n");
6012 		  dump_insn_slim (lra_dump_file, curr_insn);
6013 		}
6014 	    }
6015 	}
6016     }
6017   return change_p;
6018 }
6019 
/* If an optional reload pseudo failed to get a hard register or was
   not inherited, it is better to remove the optional reload.  We do
   this transformation after undoing inheritance to make it easier to
   figure out whether an optional reload needs to be removed.  Return
   true if we made any change.  */
static bool
undo_optional_reloads (void)
{
  bool change_p, keep_p;
  unsigned int regno, uid;
  bitmap_iterator bi, bi2;
  rtx_insn *insn;
  rtx set, src, dest;
  /* Optional reload pseudos chosen for removal, and a scratch copy of
     one pseudo's insn bitmap (see below).  */
  bitmap_head removed_optional_reload_pseudos, insn_bitmap;

  /* Start by assuming every optional reload pseudo will be removed;
     bits are cleared below for the ones we decide to keep.  */
  bitmap_initialize (&removed_optional_reload_pseudos, &reg_obstack);
  bitmap_copy (&removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    {
      keep_p = false;
      /* Keep optional reloads from previous subpasses.  */
      if (lra_reg_info[regno].restore_regno < 0
	  /* If the original pseudo changed its allocation, just
	     removing the optional pseudo is dangerous as the original
	     pseudo will have longer live range.  */
	  || reg_renumber[lra_reg_info[regno].restore_regno] >= 0)
	keep_p = true;
      else if (reg_renumber[regno] >= 0)
	/* The optional reload pseudo got a hard register: keep it only
	   if it was actually inherited.  Scan all insns referring to
	   the pseudo, looking for an inheritance copy into it.  */
	EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi2)
	  {
	    insn = lra_insn_recog_data[uid]->insn;
	    if ((set = single_set (insn)) == NULL_RTX)
	      continue;
	    src = SET_SRC (set);
	    dest = SET_DEST (set);
	    if (! REG_P (src) || ! REG_P (dest))
	      continue;
	    if (REGNO (dest) == regno
		/* Ignore the insn for the optional reload itself,
		   i.e. the copy from the original pseudo.  */
		&& lra_reg_info[regno].restore_regno != (int) REGNO (src)
		/* Check only inheritance on last inheritance pass.  */
		&& (int) REGNO (src) >= new_regno_start
		/* Check that the optional reload was inherited.  */
		&& bitmap_bit_p (&lra_inheritance_pseudos, REGNO (src)))
	      {
		keep_p = true;
		break;
	      }
	  }
      if (keep_p)
	{
	  bitmap_clear_bit (&removed_optional_reload_pseudos, regno);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
	}
    }
  /* Any remaining bit means an RTL change will be made below.  */
  change_p = ! bitmap_empty_p (&removed_optional_reload_pseudos);
  bitmap_initialize (&insn_bitmap, &reg_obstack);
  EXECUTE_IF_SET_IN_BITMAP (&removed_optional_reload_pseudos, 0, regno, bi)
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
      /* Iterate over a copy: the pseudo's own insn bitmap may be
	 modified while we delete and rewrite the insns below.  */
      bitmap_copy (&insn_bitmap, &lra_reg_info[regno].insn_bitmap);
      EXECUTE_IF_SET_IN_BITMAP (&insn_bitmap, 0, uid, bi2)
	{
	  insn = lra_insn_recog_data[uid]->insn;
	  if ((set = single_set (insn)) != NULL_RTX)
	    {
	      src = SET_SRC (set);
	      dest = SET_DEST (set);
	      /* A plain copy between the optional reload pseudo and
		 its original pseudo (in either direction) becomes a
		 no-op after the substitution below, so delete it.  */
	      if (REG_P (src) && REG_P (dest)
		  && ((REGNO (src) == regno
		       && (lra_reg_info[regno].restore_regno
			   == (int) REGNO (dest)))
		      || (REGNO (dest) == regno
			  && (lra_reg_info[regno].restore_regno
			      == (int) REGNO (src)))))
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "  Deleting move %u\n",
			       INSN_UID (insn));
		      dump_insn_slim (lra_dump_file, insn);
		    }
		  delete_move_and_clobber (insn, REGNO (dest));
		  continue;
		}
	      /* We should not worry about generating memory-memory
		 moves here as if the corresponding inheritance did
		 not work (inheritance pseudo did not get a hard reg),
		 we remove the inheritance pseudo and the optional
		 reload.  */
	    }
	  /* Replace the optional reload pseudo with the original
	     pseudo in any other insn mentioning it.  */
	  lra_substitute_pseudo_within_insn
	    (insn, regno, regno_reg_rtx[lra_reg_info[regno].restore_regno],
	     false);
	  lra_update_insn_regno_info (insn);
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "  Restoring original insn:\n");
	      dump_insn_slim (lra_dump_file, insn);
	    }
	}
    }
  /* Clear restore_regnos.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    lra_reg_info[regno].restore_regno = -1;
  bitmap_clear (&insn_bitmap);
  bitmap_clear (&removed_optional_reload_pseudos);
  return change_p;
}
6132 
6133 /* Entry function for undoing inheritance/split transformation.	 Return true
6134    if we did any RTL change in this pass.  */
6135 bool
6136 lra_undo_inheritance (void)
6137 {
6138   unsigned int regno;
6139   int restore_regno, hard_regno;
6140   int n_all_inherit, n_inherit, n_all_split, n_split;
6141   bitmap_head remove_pseudos;
6142   bitmap_iterator bi;
6143   bool change_p;
6144 
6145   lra_undo_inheritance_iter++;
6146   if (lra_undo_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
6147     return false;
6148   if (lra_dump_file != NULL)
6149     fprintf (lra_dump_file,
6150 	     "\n********** Undoing inheritance #%d: **********\n\n",
6151 	     lra_undo_inheritance_iter);
6152   bitmap_initialize (&remove_pseudos, &reg_obstack);
6153   n_inherit = n_all_inherit = 0;
6154   EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
6155     if (lra_reg_info[regno].restore_regno >= 0)
6156       {
6157 	n_all_inherit++;
6158 	if (reg_renumber[regno] < 0
6159 	    /* If the original pseudo changed its allocation, just
6160 	       removing inheritance is dangerous as for changing
6161 	       allocation we used shorter live-ranges.  */
6162 	    && reg_renumber[lra_reg_info[regno].restore_regno] < 0)
6163 	  bitmap_set_bit (&remove_pseudos, regno);
6164 	else
6165 	  n_inherit++;
6166       }
6167   if (lra_dump_file != NULL && n_all_inherit != 0)
6168     fprintf (lra_dump_file, "Inherit %d out of %d (%.2f%%)\n",
6169 	     n_inherit, n_all_inherit,
6170 	     (double) n_inherit / n_all_inherit * 100);
6171   n_split = n_all_split = 0;
6172   EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
6173     if ((restore_regno = lra_reg_info[regno].restore_regno) >= 0)
6174       {
6175 	n_all_split++;
6176 	hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
6177 		      ? reg_renumber[restore_regno] : restore_regno);
6178 	if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
6179 	  bitmap_set_bit (&remove_pseudos, regno);
6180 	else
6181 	  {
6182 	    n_split++;
6183 	    if (lra_dump_file != NULL)
6184 	      fprintf (lra_dump_file, "	     Keep split r%d (orig=r%d)\n",
6185 		       regno, restore_regno);
6186 	  }
6187       }
6188   if (lra_dump_file != NULL && n_all_split != 0)
6189     fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
6190 	     n_split, n_all_split,
6191 	     (double) n_split / n_all_split * 100);
6192   change_p = remove_inheritance_pseudos (&remove_pseudos);
6193   bitmap_clear (&remove_pseudos);
6194   /* Clear restore_regnos.  */
6195   EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
6196     lra_reg_info[regno].restore_regno = -1;
6197   EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
6198     lra_reg_info[regno].restore_regno = -1;
6199   change_p = undo_optional_reloads () || change_p;
6200   return change_p;
6201 }
6202