xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/lra-constraints.c (revision 8feb0f0b7eaff0608f8350bbfa3098827b4bb91b)
1 /* Code for RTL transformations to satisfy insn constraints.
2    Copyright (C) 2010-2020 Free Software Foundation, Inc.
3    Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify it under
8    the terms of the GNU General Public License as published by the Free
9    Software Foundation; either version 3, or (at your option) any later
10    version.
11 
12    GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13    WARRANTY; without even the implied warranty of MERCHANTABILITY or
14    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15    for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 
22 /* This file contains code for 3 passes: constraint pass,
23    inheritance/split pass, and pass for undoing failed inheritance and
24    split.
25 
26    The major goal of constraint pass is to transform RTL to satisfy
27    insn and address constraints by:
28      o choosing insn alternatives;
29      o generating *reload insns* (or reloads in brief) and *reload
30        pseudos* which will get necessary hard registers later;
31      o substituting pseudos with equivalent values and removing the
32        instructions that initialized those pseudos.
33 
34    The constraint pass has the biggest and most complicated code in LRA.
35    There are a lot of important details like:
36      o reuse of input reload pseudos to simplify reload pseudo
37        allocations;
38      o some heuristics to choose insn alternative to improve the
39        inheritance;
40      o early clobbers etc.
41 
42    The pass is mimicking former reload pass in alternative choosing
43    because the reload pass is oriented to current machine description
44    model.  It might be changed if the machine description model is
45    changed.
46 
47    There is special code for preventing all LRA and this pass cycling
48    in case of bugs.
49 
50    On the first iteration of the pass we process every instruction and
51    choose an alternative for each one.  On subsequent iterations we try
52    to avoid reprocessing instructions if we can be sure that the old
53    choice is still valid.
54 
55    The inheritance/split pass is to transform code to achieve
56    inheritance and live range splitting.  It is done on backward
57    traversal of EBBs.
58 
59    The inheritance optimization goal is to reuse values in hard
60    registers. There is analogous optimization in old reload pass.  The
61    inheritance is achieved by following transformation:
62 
63        reload_p1 <- p	     reload_p1 <- p
64        ...		     new_p <- reload_p1
65        ...		=>   ...
66        reload_p2 <- p	     reload_p2 <- new_p
67 
68    where p is spilled and not changed between the insns.  Reload_p1 is
69    also called *original pseudo* and new_p is called *inheritance
70    pseudo*.
71 
72    The subsequent assignment pass will try to assign the same (or
73    another if it is not possible) hard register to new_p as to
74    reload_p1 or reload_p2.
75 
76    If the assignment pass fails to assign a hard register to new_p,
77    this file will undo the inheritance and restore the original code.
78    This is because implementing the above sequence with a spilled
79    new_p would make the code much worse.  The inheritance is done in
80    EBB scope.  The above is just a simplified example to get an idea
81    of the inheritance as the inheritance is also done for non-reload
82    insns.
83 
84    Splitting (transformation) is also done in EBB scope on the same
85    pass as the inheritance:
86 
87        r <- ... or ... <- r		 r <- ... or ... <- r
88        ...				 s <- r (new insn -- save)
89        ...			  =>
90        ...				 r <- s (new insn -- restore)
91        ... <- r				 ... <- r
92 
93     The *split pseudo* s is assigned to the hard register of the
94     original pseudo or hard register r.
95 
96     Splitting is done:
97       o In EBBs with high register pressure for global pseudos (living
98 	in at least 2 BBs) and assigned to hard registers when there
99 	is more than one reload needing the hard registers;
100       o for pseudos needing save/restore code around calls.
101 
102     If the split pseudo still has the same hard register as the
103     original pseudo after the subsequent assignment pass or the
104     original pseudo was split, the opposite transformation is done on
105     the same pass for undoing inheritance.  */
106 
107 #undef REG_OK_STRICT
108 
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "backend.h"
113 #include "target.h"
114 #include "rtl.h"
115 #include "tree.h"
116 #include "predict.h"
117 #include "df.h"
118 #include "memmodel.h"
119 #include "tm_p.h"
120 #include "expmed.h"
121 #include "optabs.h"
122 #include "regs.h"
123 #include "ira.h"
124 #include "recog.h"
125 #include "output.h"
126 #include "addresses.h"
127 #include "expr.h"
128 #include "cfgrtl.h"
129 #include "rtl-error.h"
130 #include "lra.h"
131 #include "lra-int.h"
132 #include "print-rtl.h"
133 #include "function-abi.h"
134 
/* Value of LRA_CURR_RELOAD_NUM at the beginning of BB of the current
   insn.  Remember that LRA_CURR_RELOAD_NUM is the number of emitted
   reload insns.  */
static int bb_reload_num;

/* The current insn being processed and corresponding its single set
   (NULL otherwise), its data (basic block, the insn data, the insn
   static data, and the mode of each operand).  */
static rtx_insn *curr_insn;
static rtx curr_insn_set;
static basic_block curr_bb;
static lra_insn_recog_data_t curr_id;
static struct lra_static_insn_data *curr_static_id;
static machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
/* Mode of the register substituted by its equivalence with VOIDmode
   (e.g. constant) and whose subreg is given operand of the current
   insn.  VOIDmode in all other cases.  */
static machine_mode original_subreg_reg_mode[MAX_RECOG_OPERANDS];



/* Start numbers for new registers and insns at the current constraints
   pass start.  Pseudos with REGNO >= new_regno_start and insns with
   UID >= new_insn_uid_start were created by this pass (reload pseudos
   and reload insns respectively).  */
static int new_regno_start;
static int new_insn_uid_start;
160 
161 /* If LOC is nonnull, strip any outer subreg from it.  */
162 static inline rtx *
strip_subreg(rtx * loc)163 strip_subreg (rtx *loc)
164 {
165   return loc && GET_CODE (*loc) == SUBREG ? &SUBREG_REG (*loc) : loc;
166 }
167 
168 /* Return hard regno of REGNO or if it is was not assigned to a hard
169    register, use a hard register from its allocno class.  */
170 static int
get_try_hard_regno(int regno)171 get_try_hard_regno (int regno)
172 {
173   int hard_regno;
174   enum reg_class rclass;
175 
176   if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
177     hard_regno = lra_get_regno_hard_regno (regno);
178   if (hard_regno >= 0)
179     return hard_regno;
180   rclass = lra_get_allocno_class (regno);
181   if (rclass == NO_REGS)
182     return -1;
183   return ira_class_hard_regs[rclass][0];
184 }
185 
/* Return the hard regno of X after removing its subreg.  If X is not
   a register or a subreg of a register, return -1.  If X is a pseudo,
   use its assignment.  If FINAL_P return the final hard regno which will
   be after elimination.  */
static int
get_hard_regno (rtx x, bool final_p)
{
  rtx reg;
  int hard_regno;

  reg = x;
  if (SUBREG_P (x))
    reg = SUBREG_REG (x);
  if (! REG_P (reg))
    return -1;
  /* Map a pseudo to its assigned hard register (may be negative if
     the pseudo is unassigned).  */
  if (! HARD_REGISTER_NUM_P (hard_regno = REGNO (reg)))
    hard_regno = lra_get_regno_hard_regno (hard_regno);
  if (hard_regno < 0)
    return -1;
  if (final_p)
    hard_regno = lra_get_elimination_hard_regno (hard_regno);
  /* A subreg may name an inner part of a multi-register value;
     adjust to the hard register actually referenced.  */
  if (SUBREG_P (x))
    hard_regno += subreg_regno_offset (hard_regno, GET_MODE (reg),
				       SUBREG_BYTE (x),  GET_MODE (x));
  return hard_regno;
}
212 
213 /* If REGNO is a hard register or has been allocated a hard register,
214    return the class of that register.  If REGNO is a reload pseudo
215    created by the current constraints pass, return its allocno class.
216    Return NO_REGS otherwise.  */
217 static enum reg_class
get_reg_class(int regno)218 get_reg_class (int regno)
219 {
220   int hard_regno;
221 
222   if (! HARD_REGISTER_NUM_P (hard_regno = regno))
223     hard_regno = lra_get_regno_hard_regno (regno);
224   if (hard_regno >= 0)
225     {
226       hard_regno = lra_get_elimination_hard_regno (hard_regno);
227       return REGNO_REG_CLASS (hard_regno);
228     }
229   if (regno >= new_regno_start)
230     return lra_get_allocno_class (regno);
231   return NO_REGS;
232 }
233 
/* Return true if REG satisfies (or will satisfy) reg class constraint
   CL.  Use elimination first if REG is a hard register.  If REG is a
   reload pseudo created by this constraints pass, assume that it will
   be allocated a hard register from its allocno class, but allow that
   class to be narrowed to CL if it is currently a superset of CL and
   if either:

   - ALLOW_ALL_RELOAD_CLASS_CHANGES_P is true or
   - the instruction we're processing is not a reload move.

   If NEW_CLASS is nonnull, set *NEW_CLASS to the new allocno class of
   REGNO (reg), or NO_REGS if no change in its class was needed.  */
static bool
in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class,
	    bool allow_all_reload_class_changes_p = false)
{
  enum reg_class rclass, common_class;
  machine_mode reg_mode;
  rtx src;
  int class_size, hard_regno, nregs, i, j;
  int regno = REGNO (reg);

  if (new_class != NULL)
    *new_class = NO_REGS;
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* Hard register: test class membership after elimination.  */
      rtx final_reg = reg;
      rtx *final_loc = &final_reg;

      lra_eliminate_reg_if_possible (final_loc);
      return TEST_HARD_REG_BIT (reg_class_contents[cl], REGNO (*final_loc));
    }
  reg_mode = GET_MODE (reg);
  rclass = get_reg_class (regno);
  src = curr_insn_set != NULL ? SET_SRC (curr_insn_set) : NULL;
  if (regno < new_regno_start
      /* Do not allow the constraints for reload instructions to
	 influence the classes of new pseudos.  These reloads are
	 typically moves that have many alternatives, and restricting
	 reload pseudos for one alternative may lead to situations
	 where other reload pseudos are no longer allocatable.  */
      || (!allow_all_reload_class_changes_p
	  && INSN_UID (curr_insn) >= new_insn_uid_start
	  && src != NULL
	  && ((REG_P (src) || MEM_P (src))
	      || (GET_CODE (src) == SUBREG
		  && (REG_P (SUBREG_REG (src)) || MEM_P (SUBREG_REG (src)))))))
    /* When we don't know what class will be used finally for reload
       pseudos, we use ALL_REGS.  */
    return ((regno >= new_regno_start && rclass == ALL_REGS)
	    || (rclass != NO_REGS && ira_class_subset_p[rclass][cl]
		&& ! hard_reg_set_subset_p (reg_class_contents[cl],
					    lra_no_alloc_regs)));
  else
    {
      /* Narrow the class to the intersection of RCLASS and CL.  */
      common_class = ira_reg_class_subset[rclass][cl];
      if (new_class != NULL)
	*new_class = common_class;
      if (hard_reg_set_subset_p (reg_class_contents[common_class],
				 lra_no_alloc_regs))
	return false;
      /* Check that there are enough allocatable regs.  */
      class_size = ira_class_hard_regs_num[common_class];
      for (i = 0; i < class_size; i++)
	{
	  hard_regno = ira_class_hard_regs[common_class][i];
	  nregs = hard_regno_nregs (hard_regno, reg_mode);
	  if (nregs == 1)
	    return true;
	  /* A multi-register value needs NREGS consecutive registers,
	     all allocatable and all in COMMON_CLASS.  */
	  for (j = 0; j < nregs; j++)
	    if (TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno + j)
		|| ! TEST_HARD_REG_BIT (reg_class_contents[common_class],
					hard_regno + j))
	      break;
	  if (j >= nregs)
	    return true;
	}
      return false;
    }
}
314 
/* Return true if REGNO satisfies a memory constraint.  A pseudo with
   no register class (NO_REGS) will live in memory.  */
static bool
in_mem_p (int regno)
{
  return get_reg_class (regno) == NO_REGS;
}
321 
/* Return 1 if ADDR is a valid memory address for mode MODE in address
   space AS, and check that each pseudo has the proper kind of hard
   reg.	 */
static int
valid_address_p (machine_mode mode ATTRIBUTE_UNUSED,
		 rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Targets using the old-style macro only support the generic
     address space.  */
  lra_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
340 
namespace {
  /* Temporarily eliminates registers in an address (for the lifetime of
     the object).  The constructor applies register elimination to the
     base and index terms of the address; the destructor restores the
     original registers.  */
  class address_eliminator {
  public:
    address_eliminator (struct address_info *ad);
    ~address_eliminator ();

  private:
    /* The decomposed address being temporarily modified.  */
    struct address_info *m_ad;
    /* Location of the base register term (after stripping any subreg)
       and its original value, saved for restoration.  */
    rtx *m_base_loc;
    rtx m_base_reg;
    /* Likewise for the index register term.  */
    rtx *m_index_loc;
    rtx m_index_reg;
  };
}
357 
/* Apply register elimination to the base and index terms of AD,
   remembering the original registers so the destructor can restore
   them.  */
address_eliminator::address_eliminator (struct address_info *ad)
  : m_ad (ad),
    m_base_loc (strip_subreg (ad->base_term)),
    m_base_reg (NULL_RTX),
    m_index_loc (strip_subreg (ad->index_term)),
    m_index_reg (NULL_RTX)
{
  if (m_base_loc != NULL)
    {
      m_base_reg = *m_base_loc;
      /* If we have non-legitimate address which is decomposed not in
	 the way we expected, don't do elimination here.  In such case
	 the address will be reloaded and elimination will be done in
	 reload insn finally.  */
      if (REG_P (m_base_reg))
	lra_eliminate_reg_if_possible (m_base_loc);
      /* Keep the duplicate base term in sync with the primary one.  */
      if (m_ad->base_term2 != NULL)
	*m_ad->base_term2 = *m_ad->base_term;
    }
  if (m_index_loc != NULL)
    {
      m_index_reg = *m_index_loc;
      if (REG_P (m_index_reg))
	lra_eliminate_reg_if_possible (m_index_loc);
    }
}
384 
/* Restore the base and index registers saved by the constructor,
   undoing any elimination that was applied.  */
address_eliminator::~address_eliminator ()
{
  if (m_base_loc && *m_base_loc != m_base_reg)
    {
      *m_base_loc = m_base_reg;
      /* Keep the duplicate base term in sync with the primary one.  */
      if (m_ad->base_term2 != NULL)
	*m_ad->base_term2 = *m_ad->base_term;
    }
  if (m_index_loc && *m_index_loc != m_index_reg)
    *m_index_loc = m_index_reg;
}
396 
/* Return true if the eliminated form of AD is a legitimate target address.
   If OP is a MEM, AD is the address within OP, otherwise OP should be
   ignored.  CONSTRAINT is one constraint that the operand may need
   to meet.  */
static bool
valid_address_p (rtx op, struct address_info *ad,
		 enum constraint_num constraint)
{
  /* Registers in AD are eliminated for the duration of this call.  */
  address_eliminator eliminator (ad);

  /* Allow a memory OP if it matches CONSTRAINT, even if CONSTRAINT is more
     forgiving than "m".  */
  if (MEM_P (op)
      && (insn_extra_memory_constraint (constraint)
	  || insn_extra_special_memory_constraint (constraint))
      && constraint_satisfied_p (op, constraint))
    return true;

  return valid_address_p (ad->mode, *ad->outer, ad->as);
}
417 
418 /* Return true if the eliminated form of memory reference OP satisfies
419    extra (special) memory constraint CONSTRAINT.  */
420 static bool
satisfies_memory_constraint_p(rtx op,enum constraint_num constraint)421 satisfies_memory_constraint_p (rtx op, enum constraint_num constraint)
422 {
423   struct address_info ad;
424 
425   decompose_mem_address (&ad, op);
426   address_eliminator eliminator (&ad);
427   return constraint_satisfied_p (op, constraint);
428 }
429 
430 /* Return true if the eliminated form of address AD satisfies extra
431    address constraint CONSTRAINT.  */
432 static bool
satisfies_address_constraint_p(struct address_info * ad,enum constraint_num constraint)433 satisfies_address_constraint_p (struct address_info *ad,
434 				enum constraint_num constraint)
435 {
436   address_eliminator eliminator (ad);
437   return constraint_satisfied_p (*ad->outer, constraint);
438 }
439 
440 /* Return true if the eliminated form of address OP satisfies extra
441    address constraint CONSTRAINT.  */
442 static bool
satisfies_address_constraint_p(rtx op,enum constraint_num constraint)443 satisfies_address_constraint_p (rtx op, enum constraint_num constraint)
444 {
445   struct address_info ad;
446 
447   decompose_lea_address (&ad, &op);
448   return satisfies_address_constraint_p (&ad, constraint);
449 }
450 
451 /* Initiate equivalences for LRA.  As we keep original equivalences
452    before any elimination, we need to make copies otherwise any change
453    in insns might change the equivalences.  */
454 void
lra_init_equiv(void)455 lra_init_equiv (void)
456 {
457   ira_expand_reg_equiv ();
458   for (int i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
459     {
460       rtx res;
461 
462       if ((res = ira_reg_equiv[i].memory) != NULL_RTX)
463 	ira_reg_equiv[i].memory = copy_rtx (res);
464       if ((res = ira_reg_equiv[i].invariant) != NULL_RTX)
465 	ira_reg_equiv[i].invariant = copy_rtx (res);
466     }
467 }
468 
469 static rtx loc_equivalence_callback (rtx, const_rtx, void *);
470 
471 /* Update equivalence for REGNO.  We need to this as the equivalence
472    might contain other pseudos which are changed by their
473    equivalences.  */
474 static void
update_equiv(int regno)475 update_equiv (int regno)
476 {
477   rtx x;
478 
479   if ((x = ira_reg_equiv[regno].memory) != NULL_RTX)
480     ira_reg_equiv[regno].memory
481       = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
482 				 NULL_RTX);
483   if ((x = ira_reg_equiv[regno].invariant) != NULL_RTX)
484     ira_reg_equiv[regno].invariant
485       = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
486 				 NULL_RTX);
487 }
488 
/* If we have decided to substitute X with another value, return that
   value, otherwise return X.  */
static rtx
get_equiv (rtx x)
{
  int regno;
  rtx res;

  /* Substitution applies only to pseudos without a hard register that
     have a defined, profitable equivalence.  */
  if (! REG_P (x) || (regno = REGNO (x)) < FIRST_PSEUDO_REGISTER
      || ! ira_reg_equiv[regno].defined_p
      || ! ira_reg_equiv[regno].profitable_p
      || lra_get_regno_hard_regno (regno) >= 0)
    return x;
  if ((res = ira_reg_equiv[regno].memory) != NULL_RTX)
    {
      /* The target can veto substitution of a memory equivalence.  */
      if (targetm.cannot_substitute_mem_equiv_p (res))
	return x;
      return res;
    }
  if ((res = ira_reg_equiv[regno].constant) != NULL_RTX)
    return res;
  if ((res = ira_reg_equiv[regno].invariant) != NULL_RTX)
    return res;
  /* defined_p guarantees one of the fields above is set.  */
  gcc_unreachable ();
}
514 
515 /* If we have decided to substitute X with the equivalent value,
516    return that value after elimination for INSN, otherwise return
517    X.  */
518 static rtx
get_equiv_with_elimination(rtx x,rtx_insn * insn)519 get_equiv_with_elimination (rtx x, rtx_insn *insn)
520 {
521   rtx res = get_equiv (x);
522 
523   if (x == res || CONSTANT_P (res))
524     return res;
525   return lra_eliminate_regs_1 (insn, res, GET_MODE (res),
526 			       false, false, 0, true);
527 }
528 
529 /* Set up curr_operand_mode.  */
530 static void
init_curr_operand_mode(void)531 init_curr_operand_mode (void)
532 {
533   int nop = curr_static_id->n_operands;
534   for (int i = 0; i < nop; i++)
535     {
536       machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
537       if (mode == VOIDmode)
538 	{
539 	  /* The .md mode for address operands is the mode of the
540 	     addressed value rather than the mode of the address itself.  */
541 	  if (curr_id->icode >= 0 && curr_static_id->operand[i].is_address)
542 	    mode = Pmode;
543 	  else
544 	    mode = curr_static_id->operand[i].mode;
545 	}
546       curr_operand_mode[i] = mode;
547     }
548 }
549 
550 
551 
/* The page contains code to reuse input reloads.  */

/* Structure describes input reload of the current insns.  */
struct input_reload
{
  /* True for input reload of matched operands.  */
  bool match_p;
  /* Reloaded value.  */
  rtx input;
  /* Reload pseudo used.  */
  rtx reg;
};

/* The number of elements in the following array.  */
static int curr_insn_input_reloads_num;
/* Array containing info about input reloads.  It is used to find the
   same input reload and reuse the reload pseudo in this case.	*/
static struct input_reload curr_insn_input_reloads[LRA_MAX_INSN_RELOADS];

/* Initiate data concerning reuse of input reloads for the current
   insn.  Called before reloads are generated for a new insn so that
   reuse never crosses insn boundaries.  */
static void
init_curr_insn_input_reloads (void)
{
  curr_insn_input_reloads_num = 0;
}
578 
/* Create a new pseudo using MODE, RCLASS, ORIGINAL or reuse an existing
   reload pseudo.  Don't reuse an existing reload pseudo if IN_SUBREG_P
   is true and the reused pseudo should be wrapped up in a SUBREG.
   The result pseudo is returned through RESULT_REG.  Return TRUE if we
   created a new pseudo, FALSE if we reused an existing reload pseudo.
   Use TITLE to describe new registers for debug purposes.  */
static bool
get_reload_reg (enum op_type type, machine_mode mode, rtx original,
		enum reg_class rclass, bool in_subreg_p,
		const char *title, rtx *result_reg)
{
  int i, regno;
  enum reg_class new_class;
  bool unique_p = false;

  if (type == OP_OUT)
    {
      /* Output reload registers tend to start out with a conservative
	 choice of register class.  Usually this is ALL_REGS, although
	 a target might narrow it (for performance reasons) through
	 targetm.preferred_reload_class.  It's therefore quite common
	 for a reload instruction to require a more restrictive class
	 than the class that was originally assigned to the reload register.

	 In these situations, it's more efficient to refine the choice
	 of register class rather than create a second reload register.
	 This also helps to avoid cycling for registers that are only
	 used by reload instructions.  */
      rtx src = curr_insn_set != NULL ? SET_SRC (curr_insn_set) : NULL;
      if (REG_P (original)
	  && (int) REGNO (original) >= new_regno_start
	  && INSN_UID (curr_insn) >= new_insn_uid_start
	  && in_class_p (original, rclass, &new_class, true)
	  && src != NULL
	  && ((REG_P (src) || MEM_P (src))
	      || (GET_CODE (src) == SUBREG
		  && (REG_P (SUBREG_REG (src)) || MEM_P (SUBREG_REG (src))))))
	{
	  /* Reuse ORIGINAL itself, narrowing its class if needed.  */
	  unsigned int regno = REGNO (original);
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "	 Reuse r%d for output ", regno);
	      dump_value_slim (lra_dump_file, original, 1);
	    }
	  if (new_class != lra_get_allocno_class (regno))
	    lra_change_class (regno, new_class, ", change to", false);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "\n");
	  *result_reg = original;
	  return false;
	}
      *result_reg
	= lra_create_new_reg_with_unique_value (mode, original, rclass, title);
      return true;
    }
  /* Prevent reuse value of expression with side effects,
     e.g. volatile memory.  */
  if (! side_effects_p (original))
    for (i = 0; i < curr_insn_input_reloads_num; i++)
      {
	if (! curr_insn_input_reloads[i].match_p
	    && rtx_equal_p (curr_insn_input_reloads[i].input, original)
	    && in_class_p (curr_insn_input_reloads[i].reg, rclass, &new_class))
	  {
	    rtx reg = curr_insn_input_reloads[i].reg;
	    regno = REGNO (reg);
	    /* If input is equal to original and both are VOIDmode,
	       GET_MODE (reg) might be still different from mode.
	       Ensure we don't return *result_reg with wrong mode.  */
	    if (GET_MODE (reg) != mode)
	      {
		if (in_subreg_p)
		  continue;
		if (maybe_lt (GET_MODE_SIZE (GET_MODE (reg)),
			      GET_MODE_SIZE (mode)))
		  continue;
		reg = lowpart_subreg (mode, reg, GET_MODE (reg));
		if (reg == NULL_RTX || GET_CODE (reg) != SUBREG)
		  continue;
	      }
	    *result_reg = reg;
	    if (lra_dump_file != NULL)
	      {
		fprintf (lra_dump_file, "	 Reuse r%d for reload ", regno);
		dump_value_slim (lra_dump_file, original, 1);
	      }
	    if (new_class != lra_get_allocno_class (regno))
	      lra_change_class (regno, new_class, ", change to", false);
	    if (lra_dump_file != NULL)
	      fprintf (lra_dump_file, "\n");
	    return false;
	  }
	/* If we have an input reload with a different mode, make sure it
	   will get a different hard reg.  */
	else if (REG_P (original)
		 && REG_P (curr_insn_input_reloads[i].input)
		 && REGNO (original) == REGNO (curr_insn_input_reloads[i].input)
		 && (GET_MODE (original)
		     != GET_MODE (curr_insn_input_reloads[i].input)))
	  unique_p = true;
      }
  /* No reusable reload: create a fresh reload pseudo and record it for
     possible reuse by later input reloads of this insn.  */
  *result_reg = (unique_p
		 ? lra_create_new_reg_with_unique_value
		 : lra_create_new_reg) (mode, original, rclass, title);
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = original;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = false;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = *result_reg;
  return true;
}
689 
690 
691 /* The page contains major code to choose the current insn alternative
692    and generate reloads for it.	 */
693 
694 /* Return the offset from REGNO of the least significant register
695    in (reg:MODE REGNO).
696 
697    This function is used to tell whether two registers satisfy
698    a matching constraint.  (reg:MODE1 REGNO1) matches (reg:MODE2 REGNO2) if:
699 
700          REGNO1 + lra_constraint_offset (REGNO1, MODE1)
701 	 == REGNO2 + lra_constraint_offset (REGNO2, MODE2)  */
702 int
lra_constraint_offset(int regno,machine_mode mode)703 lra_constraint_offset (int regno, machine_mode mode)
704 {
705   lra_assert (regno < FIRST_PSEUDO_REGISTER);
706 
707   scalar_int_mode int_mode;
708   if (WORDS_BIG_ENDIAN
709       && is_a <scalar_int_mode> (mode, &int_mode)
710       && GET_MODE_SIZE (int_mode) > UNITS_PER_WORD)
711     return hard_regno_nregs (regno, mode) - 1;
712   return 0;
713 }
714 
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   auto-increment and auto-decrement.  This is specifically intended for
   process_alt_operands to use in determining whether two operands
   match.  X is the operand whose number is the lower of the two.

   It is supposed that X is the output operand and Y is the input
   operand.  Y_HARD_REGNO is the final hard regno of register Y or
   register in subreg Y as we know it now.  Otherwise, it is a
   negative value.  */
static bool
operands_match_p (rtx x, rtx y, int y_hard_regno)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return true;
  /* Fast path: two (possibly subreg'ed) registers match when they
     resolve to the same hard register, after adjusting each for the
     offset of its least significant word.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y)))))
    {
      int j;

      i = get_hard_regno (x, false);
      if (i < 0)
	goto slow;

      if ((j = y_hard_regno) < 0)
	goto slow;

      i += lra_constraint_offset (i, GET_MODE (x));
      j += lra_constraint_offset (j, GET_MODE (y));

      return i == j;
    }

  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two post-increments are invalid
     because the assembler insn would increment only once.  On the
     other hand, a post-increment matches ordinary indexing if the
     post-increment is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y, y_hard_regno);

  /* Two pre-increments are invalid because the assembler insn would
     increment only once.  On the other hand, a pre-increment matches
     ordinary indexing if the pre-increment is the input operand.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0), -1);

 slow:
  /* Slow path: structural comparison of the two rtxes.  */

  if (code == REG && REG_P (y))
    return REGNO (x) == REGNO (y);

  /* A REG matches a SUBREG whose inner register is that same REG.  */
  if (code == REG && GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y))
      && x == SUBREG_REG (y))
    return true;
  if (GET_CODE (y) == REG && code == SUBREG && REG_P (SUBREG_REG (x))
      && SUBREG_REG (x) == y)
    return true;

  /* Now we have disposed of all the cases in which different rtx
     codes can match.  */
  if (code != GET_CODE (y))
    return false;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return false;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants are shared; pointer inequality (x != y above)
	 means they differ.  */
      return false;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      break;

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements fail
     to match, return false for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return false;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return false;
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return false;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i), -1);
	  if (val == 0)
	    return false;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return false;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j), -1);
	      if (val == 0)
		return false;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's, except for
	     within LABEL_REFs and SYMBOL_REFs.	 */
	default:
	  gcc_unreachable ();
	}
    }
  return true;
}
860 
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  The
   mode size must be a compile-time constant so the pool entry can be
   laid out, and the target must not veto the placement.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && GET_MODE_SIZE (MODE).is_constant ()	\
   && !targetm.cannot_force_const_mem (MODE, X))
869 
/* True if C is a non-empty register class that has too few registers
   to be safely used as a reload target class: either it contains a
   single hard register, or the target says pseudos in it are likely
   to be spilled.  */
#define SMALL_REGISTER_CLASS_P(C)		\
  (ira_class_hard_regs_num [(C)] == 1		\
   || (ira_class_hard_regs_num [(C)] >= 1	\
       && targetm.class_likely_spilled_p (C)))
876 
877 /* If REG is a reload pseudo, try to make its class satisfying CL.  */
878 static void
narrow_reload_pseudo_class(rtx reg,enum reg_class cl)879 narrow_reload_pseudo_class (rtx reg, enum reg_class cl)
880 {
881   enum reg_class rclass;
882 
883   /* Do not make more accurate class from reloads generated.  They are
884      mostly moves with a lot of constraints.  Making more accurate
885      class may results in very narrow class and impossibility of find
886      registers for several reloads of one insn.	 */
887   if (INSN_UID (curr_insn) >= new_insn_uid_start)
888     return;
889   if (GET_CODE (reg) == SUBREG)
890     reg = SUBREG_REG (reg);
891   if (! REG_P (reg) || (int) REGNO (reg) < new_regno_start)
892     return;
893   if (in_class_p (reg, cl, &rclass) && rclass != cl)
894     lra_change_class (REGNO (reg), rclass, "      Change to", true);
895 }
896 
897 /* Searches X for any reference to a reg with the same value as REGNO,
898    returning the rtx of the reference found if any.  Otherwise,
899    returns NULL_RTX.  */
900 static rtx
regno_val_use_in(unsigned int regno,rtx x)901 regno_val_use_in (unsigned int regno, rtx x)
902 {
903   const char *fmt;
904   int i, j;
905   rtx tem;
906 
907   if (REG_P (x) && lra_reg_info[REGNO (x)].val == lra_reg_info[regno].val)
908     return x;
909 
910   fmt = GET_RTX_FORMAT (GET_CODE (x));
911   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
912     {
913       if (fmt[i] == 'e')
914 	{
915 	  if ((tem = regno_val_use_in (regno, XEXP (x, i))))
916 	    return tem;
917 	}
918       else if (fmt[i] == 'E')
919 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
920 	  if ((tem = regno_val_use_in (regno , XVECEXP (x, i, j))))
921 	    return tem;
922     }
923 
924   return NULL_RTX;
925 }
926 
927 /* Return true if all current insn non-output operands except INS (it
928    has a negaitve end marker) do not use pseudos with the same value
929    as REGNO.  */
930 static bool
check_conflict_input_operands(int regno,signed char * ins)931 check_conflict_input_operands (int regno, signed char *ins)
932 {
933   int in;
934   int n_operands = curr_static_id->n_operands;
935 
936   for (int nop = 0; nop < n_operands; nop++)
937     if (! curr_static_id->operand[nop].is_operator
938 	&& curr_static_id->operand[nop].type != OP_OUT)
939       {
940 	for (int i = 0; (in = ins[i]) >= 0; i++)
941 	  if (in == nop)
942 	    break;
943 	if (in < 0
944 	    && regno_val_use_in (regno, *curr_id->operand_loc[nop]) != NULL_RTX)
945 	  return false;
946       }
947   return true;
948 }
949 
/* Generate reloads for matching OUT and INS (array of input operand
   numbers with end marker -1) with reg class GOAL_CLASS, considering
   output operands OUTS (similar array to INS) needing to be in different
   registers.  Add input and output reloads correspondingly to the lists
   *BEFORE and *AFTER.  OUT might be negative.  In this case we generate
   input reloads for matched input operands INS.  EARLY_CLOBBER_P is a flag
   that the output operand is early clobbered for chosen alternative.  */
static void
match_reload (signed char out, signed char *ins, signed char *outs,
	      enum reg_class goal_class, rtx_insn **before,
	      rtx_insn **after, bool early_clobber_p)
{
  bool out_conflict;
  int i, in;
  rtx new_in_reg, new_out_reg, reg;
  machine_mode inmode, outmode;
  rtx in_rtx = *curr_id->operand_loc[ins[0]];
  rtx out_rtx = out < 0 ? in_rtx : *curr_id->operand_loc[out];

  inmode = curr_operand_mode[ins[0]];
  outmode = out < 0 ? inmode : curr_operand_mode[out];
  push_to_sequence (*before);
  if (inmode != outmode)
    {
      /* process_alt_operands has already checked that the mode sizes
	 are ordered.  */
      if (partial_subreg_p (outmode, inmode))
	{
	  /* The input mode is wider: make the reload pseudo in INMODE
	     and access the output through a lowpart subreg of it.  */
	  reg = new_in_reg
	    = lra_create_new_reg_with_unique_value (inmode, in_rtx,
						    goal_class, "");
	  new_out_reg = gen_lowpart_SUBREG (outmode, reg);
	  LRA_SUBREG_P (new_out_reg) = 1;
	  /* If the input reg is dying here, we can use the same hard
	     register for REG and IN_RTX.  We do it only for original
	     pseudos as reload pseudos can die although original
	     pseudos still live where reload pseudos dies.  */
	  if (REG_P (in_rtx) && (int) REGNO (in_rtx) < lra_new_regno_start
	      && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	      && (!early_clobber_p
		  || check_conflict_input_operands(REGNO (in_rtx), ins)))
	    lra_assign_reg_val (REGNO (in_rtx), REGNO (reg));
	}
      else
	{
	  /* The output mode is wider: make the reload pseudo in
	     OUTMODE and access the input through a lowpart subreg.  */
	  reg = new_out_reg
	    = lra_create_new_reg_with_unique_value (outmode, out_rtx,
						    goal_class, "");
	  new_in_reg = gen_lowpart_SUBREG (inmode, reg);
	  /* NEW_IN_REG is non-paradoxical subreg.  We don't want
	     NEW_OUT_REG living above.  We add clobber clause for
	     this.  This is just a temporary clobber.  We can remove
	     it at the end of LRA work.  */
	  rtx_insn *clobber = emit_clobber (new_out_reg);
	  LRA_TEMP_CLOBBER_P (PATTERN (clobber)) = 1;
	  LRA_SUBREG_P (new_in_reg) = 1;
	  if (GET_CODE (in_rtx) == SUBREG)
	    {
	      rtx subreg_reg = SUBREG_REG (in_rtx);

	      /* If SUBREG_REG is dying here and sub-registers IN_RTX
		 and NEW_IN_REG are similar, we can use the same hard
		 register for REG and SUBREG_REG.  */
	      if (REG_P (subreg_reg)
		  && (int) REGNO (subreg_reg) < lra_new_regno_start
		  && GET_MODE (subreg_reg) == outmode
		  && known_eq (SUBREG_BYTE (in_rtx), SUBREG_BYTE (new_in_reg))
		  && find_regno_note (curr_insn, REG_DEAD, REGNO (subreg_reg))
		  && (! early_clobber_p
		      || check_conflict_input_operands (REGNO (subreg_reg),
							ins)))
		lra_assign_reg_val (REGNO (subreg_reg), REGNO (reg));
	    }
	}
    }
  else
    {
      /* Pseudos have values -- see comments for lra_reg_info.
	 Different pseudos with the same value do not conflict even if
	 they live in the same place.  When we create a pseudo we
	 assign value of original pseudo (if any) from which we
	 created the new pseudo.  If we create the pseudo from the
	 input pseudo, the new pseudo will have no conflict with the
	 input pseudo which is wrong when the input pseudo lives after
	 the insn and as the new pseudo value is changed by the insn
	 output.  Therefore we create the new pseudo from the output
	 except the case when we have single matched dying input
	 pseudo.

	 We cannot reuse the current output register because we might
	 have a situation like "a <- a op b", where the constraints
	 force the second input operand ("b") to match the output
	 operand ("a").  "b" must then be copied into a new register
	 so that it doesn't clobber the current value of "a".

	 We cannot use the same value if the output pseudo is
	 early clobbered or the input pseudo is mentioned in the
	 output, e.g. as an address part in memory, because
	 output reload will actually extend the pseudo liveness.
	 We don't care about eliminable hard regs here as we are
	 interesting only in pseudos.  */

      /* Matching input's register value is the same as one of the other
	 output operand.  Output operands in a parallel insn must be in
	 different registers.  */
      out_conflict = false;
      if (REG_P (in_rtx))
	{
	  for (i = 0; outs[i] >= 0; i++)
	    {
	      rtx other_out_rtx = *curr_id->operand_loc[outs[i]];
	      if (outs[i] != out && REG_P (other_out_rtx)
		  && (regno_val_use_in (REGNO (in_rtx), other_out_rtx)
		      != NULL_RTX))
		{
		  out_conflict = true;
		  break;
		}
	    }
	}

      /* Reuse the input pseudo's value only in the safe case
	 described above: a single matched original input dying here,
	 no early clobber, not mentioned in the output, and no
	 conflict with another output operand.  Otherwise create a
	 fresh value from the output.  */
      new_in_reg = new_out_reg
	= (! early_clobber_p && ins[1] < 0 && REG_P (in_rtx)
	   && (int) REGNO (in_rtx) < lra_new_regno_start
	   && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	   && (! early_clobber_p
	       || check_conflict_input_operands (REGNO (in_rtx), ins))
	   && (out < 0
	       || regno_val_use_in (REGNO (in_rtx), out_rtx) == NULL_RTX)
	   && !out_conflict
	   ? lra_create_new_reg (inmode, in_rtx, goal_class, "")
	   : lra_create_new_reg_with_unique_value (outmode, out_rtx,
						   goal_class, ""));
    }
  /* In operand can be got from transformations before processing insn
     constraints.  One example of such transformations is subreg
     reloading (see function simplify_operand_subreg).  The new
     pseudos created by the transformations might have inaccurate
     class (ALL_REGS) and we should make their classes more
     accurate.  */
  narrow_reload_pseudo_class (in_rtx, goal_class);
  lra_emit_move (copy_rtx (new_in_reg), in_rtx);
  *before = get_insns ();
  end_sequence ();
  /* Add the new pseudo to consider values of subsequent input reload
     pseudos.  */
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = in_rtx;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = true;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = new_in_reg;
  /* Substitute the reload pseudo into each matched input operand,
     picking NEW_IN_REG or NEW_OUT_REG according to the operand's
     mode.  */
  for (i = 0; (in = ins[i]) >= 0; i++)
    if (GET_MODE (*curr_id->operand_loc[in]) == VOIDmode
	|| GET_MODE (new_in_reg) == GET_MODE (*curr_id->operand_loc[in]))
      *curr_id->operand_loc[in] = new_in_reg;
    else
      {
	lra_assert
	  (GET_MODE (new_out_reg) == GET_MODE (*curr_id->operand_loc[in]));
	*curr_id->operand_loc[in] = new_out_reg;
      }
  lra_update_dups (curr_id, ins);
  if (out < 0)
    return;
  /* See a comment for the input operand above.  */
  narrow_reload_pseudo_class (out_rtx, goal_class);
  /* Emit the output reload after the insn unless the output is known
     to be unused.  */
  if (find_reg_note (curr_insn, REG_UNUSED, out_rtx) == NULL_RTX)
    {
      reg = SUBREG_P (out_rtx) ? SUBREG_REG (out_rtx) : out_rtx;
      start_sequence ();
      /* If we had strict_low_part, use it also in reload to keep other
	 parts unchanged but do it only for regs as strict_low_part
	 has no sense for memory and probably there is no insn pattern
	 to match the reload insn in memory case.  */
      if (out >= 0 && curr_static_id->operand[out].strict_low && REG_P (reg))
	out_rtx = gen_rtx_STRICT_LOW_PART (VOIDmode, out_rtx);
      lra_emit_move (out_rtx, copy_rtx (new_out_reg));
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  *curr_id->operand_loc[out] = new_out_reg;
  lra_update_dup (curr_id, out);
}
1133 
/* Return register class which is union of all reg classes in insn
   constraint alternative string starting with P.  Scanning stops at
   the end of the alternative (',' or '#') or at the terminating
   NUL.  */
static enum reg_class
reg_class_from_constraints (const char *p)
{
  int c, len;
  enum reg_class op_class = NO_REGS;

  do
    switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
      {
      case '#':
      case ',':
	return op_class;

      case 'g':
	op_class = reg_class_subunion[op_class][GENERAL_REGS];
	break;

      default:
	enum constraint_num cn = lookup_constraint (p);
	enum reg_class cl = reg_class_for_constraint (cn);
	if (cl == NO_REGS)
	  {
	    /* An address constraint contributes the class of
	       registers usable as an address base.  Other non-register
	       constraints contribute nothing.  */
	    if (insn_extra_address_constraint (cn))
	      op_class
		= (reg_class_subunion
		   [op_class][base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					      ADDRESS, SCRATCH)]);
	    break;
	  }

	op_class = reg_class_subunion[op_class][cl];
	break;
      }
  while ((p += len), c);
  return op_class;
}
1172 
1173 /* If OP is a register, return the class of the register as per
1174    get_reg_class, otherwise return NO_REGS.  */
1175 static inline enum reg_class
get_op_class(rtx op)1176 get_op_class (rtx op)
1177 {
1178   return REG_P (op) ? get_reg_class (REGNO (op)) : NO_REGS;
1179 }
1180 
1181 /* Return generated insn mem_pseudo:=val if TO_P or val:=mem_pseudo
1182    otherwise.  If modes of MEM_PSEUDO and VAL are different, use
1183    SUBREG for VAL to make them equal.  */
1184 static rtx_insn *
emit_spill_move(bool to_p,rtx mem_pseudo,rtx val)1185 emit_spill_move (bool to_p, rtx mem_pseudo, rtx val)
1186 {
1187   if (GET_MODE (mem_pseudo) != GET_MODE (val))
1188     {
1189       /* Usually size of mem_pseudo is greater than val size but in
1190 	 rare cases it can be less as it can be defined by target
1191 	 dependent macro HARD_REGNO_CALLER_SAVE_MODE.  */
1192       if (! MEM_P (val))
1193 	{
1194 	  val = gen_lowpart_SUBREG (GET_MODE (mem_pseudo),
1195 				    GET_CODE (val) == SUBREG
1196 				    ? SUBREG_REG (val) : val);
1197 	  LRA_SUBREG_P (val) = 1;
1198 	}
1199       else
1200 	{
1201 	  mem_pseudo = gen_lowpart_SUBREG (GET_MODE (val), mem_pseudo);
1202 	  LRA_SUBREG_P (mem_pseudo) = 1;
1203 	}
1204     }
1205   return to_p ? gen_move_insn (mem_pseudo, val)
1206 	      : gen_move_insn (val, mem_pseudo);
1207 }
1208 
/* Process a special case insn (register move), return true if we
   don't need to process it anymore.  INSN should be a single set
   insn.  Set up that RTL was changed through CHANGE_P and that hook
   TARGET_SECONDARY_MEMORY_NEEDED says to use secondary memory through
   SEC_MEM_P.  */
static bool
check_and_process_move (bool *change_p, bool *sec_mem_p ATTRIBUTE_UNUSED)
{
  int sregno, dregno;
  rtx dest, src, dreg, sreg, new_reg, scratch_reg;
  rtx_insn *before;
  enum reg_class dclass, sclass, secondary_class;
  secondary_reload_info sri;

  lra_assert (curr_insn_set != NULL_RTX);
  dreg = dest = SET_DEST (curr_insn_set);
  sreg = src = SET_SRC (curr_insn_set);
  /* Look through subregs to the underlying reg or mem.  */
  if (GET_CODE (dest) == SUBREG)
    dreg = SUBREG_REG (dest);
  if (GET_CODE (src) == SUBREG)
    sreg = SUBREG_REG (src);
  if (! (REG_P (dreg) || MEM_P (dreg)) || ! (REG_P (sreg) || MEM_P (sreg)))
    return false;
  sclass = dclass = NO_REGS;
  if (REG_P (dreg))
    dclass = get_reg_class (REGNO (dreg));
  gcc_assert (dclass < LIM_REG_CLASSES);
  if (dclass == ALL_REGS)
    /* ALL_REGS is used for new pseudos created by transformations
       like reload of SUBREG_REG (see function
       simplify_operand_subreg).  We don't know their class yet.  We
       should figure out the class from processing the insn
       constraints not in this fast path function.  Even if ALL_REGS
       were a right class for the pseudo, secondary_... hooks usually
       are not define for ALL_REGS.  */
    return false;
  if (REG_P (sreg))
    sclass = get_reg_class (REGNO (sreg));
  gcc_assert (sclass < LIM_REG_CLASSES);
  if (sclass == ALL_REGS)
    /* See comments above.  */
    return false;
  if (sclass == NO_REGS && dclass == NO_REGS)
    return false;
  /* Let the caller handle the move through secondary memory when the
     target requires it.  */
  if (targetm.secondary_memory_needed (GET_MODE (src), sclass, dclass)
      && ((sclass != NO_REGS && dclass != NO_REGS)
	  || (GET_MODE (src)
	      != targetm.secondary_memory_needed_mode (GET_MODE (src)))))
    {
      *sec_mem_p = true;
      return false;
    }
  if (! REG_P (dreg) || ! REG_P (sreg))
    return false;
  sri.prev_sri = NULL;
  sri.icode = CODE_FOR_nothing;
  sri.extra_cost = 0;
  secondary_class = NO_REGS;
  /* Set up hard register for a reload pseudo for hook
     secondary_reload because some targets just ignore unassigned
     pseudos in the hook.  */
  if (dclass != NO_REGS && lra_get_regno_hard_regno (REGNO (dreg)) < 0)
    {
      dregno = REGNO (dreg);
      reg_renumber[dregno] = ira_class_hard_regs[dclass][0];
    }
  else
    dregno = -1;
  if (sclass != NO_REGS && lra_get_regno_hard_regno (REGNO (sreg)) < 0)
    {
      sregno = REGNO (sreg);
      reg_renumber[sregno] = ira_class_hard_regs[sclass][0];
    }
  else
    sregno = -1;
  /* Ask the target about an input secondary reload for the source.  */
  if (sclass != NO_REGS)
    secondary_class
      = (enum reg_class) targetm.secondary_reload (false, dest,
						   (reg_class_t) sclass,
						   GET_MODE (src), &sri);
  /* And about an output secondary reload for the destination.  */
  if (sclass == NO_REGS
      || ((secondary_class != NO_REGS || sri.icode != CODE_FOR_nothing)
	  && dclass != NO_REGS))
    {
      enum reg_class old_sclass = secondary_class;
      secondary_reload_info old_sri = sri;

      sri.prev_sri = NULL;
      sri.icode = CODE_FOR_nothing;
      sri.extra_cost = 0;
      secondary_class
	= (enum reg_class) targetm.secondary_reload (true, src,
						     (reg_class_t) dclass,
						     GET_MODE (src), &sri);
      /* Check the target hook consistency.  */
      lra_assert
	((secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	 || (old_sclass == NO_REGS && old_sri.icode == CODE_FOR_nothing)
	 || (secondary_class == old_sclass && sri.icode == old_sri.icode));
    }
  /* Undo the temporary hard register assignments made above for the
     hook's benefit.  */
  if (sregno >= 0)
    reg_renumber [sregno] = -1;
  if (dregno >= 0)
    reg_renumber [dregno] = -1;
  if (secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
    return false;
  *change_p = true;
  new_reg = NULL_RTX;
  if (secondary_class != NO_REGS)
    new_reg = lra_create_new_reg_with_unique_value (GET_MODE (src), NULL_RTX,
						    secondary_class,
						    "secondary");
  start_sequence ();
  if (sri.icode == CODE_FOR_nothing)
    lra_emit_move (new_reg, src);
  else
    {
      /* The target supplied an insn pattern (with a scratch operand)
	 to perform the secondary reload.  */
      enum reg_class scratch_class;

      scratch_class = (reg_class_from_constraints
		       (insn_data[sri.icode].operand[2].constraint));
      scratch_reg = (lra_create_new_reg_with_unique_value
		     (insn_data[sri.icode].operand[2].mode, NULL_RTX,
		      scratch_class, "scratch"));
      emit_insn (GEN_FCN (sri.icode) (new_reg != NULL_RTX ? new_reg : dest,
				      src, scratch_reg));
    }
  before = get_insns ();
  end_sequence ();
  lra_process_new_insns (curr_insn, before, NULL, "Inserting the move");
  if (new_reg != NULL_RTX)
    SET_SRC (curr_insn_set) = new_reg;
  else
    {
      /* The emitted sequence did the whole move; the original insn is
	 now redundant.  */
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, "Deleting move %u\n", INSN_UID (curr_insn));
	  dump_insn_slim (lra_dump_file, curr_insn);
	}
      lra_set_insn_deleted (curr_insn);
      return true;
    }
  return false;
}
1353 
/* The following data describe the result of process_alt_operands.
   The data are used in curr_insn_transform to generate reloads.  */

/* The chosen reg classes which should be used for the corresponding
   operands.  */
static enum reg_class goal_alt[MAX_RECOG_OPERANDS];
/* True if the operand should be the same as another operand and that
   other operand does not need a reload.  */
static bool goal_alt_match_win[MAX_RECOG_OPERANDS];
/* True if the operand does not need a reload.  */
static bool goal_alt_win[MAX_RECOG_OPERANDS];
/* True if the operand can be offsetable memory.  */
static bool goal_alt_offmemok[MAX_RECOG_OPERANDS];
/* The number of an operand to which given operand can be matched to.  */
static int goal_alt_matches[MAX_RECOG_OPERANDS];
/* The number of elements in the following array.  */
static int goal_alt_dont_inherit_ops_num;
/* Numbers of operands whose reload pseudos should not be inherited.  */
static int goal_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
/* True if the insn commutative operands should be swapped.  */
static bool goal_alt_swapped;
/* The chosen insn alternative.  */
static int goal_alt_number;

/* True if the corresponding operand is the result of an equivalence
   substitution.  (The identifier keeps a historical misspelling of
   "substitution".)  */
static bool equiv_substition_p[MAX_RECOG_OPERANDS];

/* The following five variables are used to choose the best insn
   alternative.  They reflect final characteristics of the best
   alternative.  */

/* Number of necessary reloads and overall cost reflecting the
   previous value and other unpleasantness of the best alternative.  */
static int best_losers, best_overall;
/* Overall number of hard registers used for reloads.  For example, on
   some targets we need 2 general registers to reload DFmode and only
   one floating point register.  */
static int best_reload_nregs;
/* Overall number reflecting distances of previous reloading the same
   value.  The distances are counted from the current BB start.  It is
   used to improve inheritance chances.  */
static int best_reload_sum;

/* True if the current insn should have no correspondingly input or
   output reloads.  */
static bool no_input_reloads_p, no_output_reloads_p;

/* True if we swapped the commutative operands in the current
   insn.  */
static int curr_swapped;
1405 
/* If CHECK_ONLY_P is false, arrange for address element *LOC to be a
   register of class CL.  Add any input reloads to list BEFORE.  AFTER
   is nonnull if *LOC is an automodified value; handle that case by
   adding the required output reloads to list AFTER.  Return true if
   the RTL was changed.

   If CHECK_ONLY_P is true, check that the *LOC is a correct address
   register.  Return false if the address register is correct.  */
static bool
process_addr_reg (rtx *loc, bool check_only_p, rtx_insn **before, rtx_insn **after,
		  enum reg_class cl)
{
  int regno;
  enum reg_class rclass, new_class;
  rtx reg;
  rtx new_reg;
  machine_mode mode;
  bool subreg_p, before_p = false;

  subreg_p = GET_CODE (*loc) == SUBREG;
  if (subreg_p)
    {
      reg = SUBREG_REG (*loc);
      mode = GET_MODE (reg);

      /* For mode with size bigger than ptr_mode, there unlikely to be "mov"
	 between two registers with different classes, but there normally will
	 be "mov" which transfers element of vector register into the general
	 register, and this normally will be a subreg which should be reloaded
	 as a whole.  This is particularly likely to be triggered when
	 -fno-split-wide-types specified.  */
      if (!REG_P (reg)
	  || in_class_p (reg, cl, &new_class)
	  || known_le (GET_MODE_SIZE (mode), GET_MODE_SIZE (ptr_mode)))
       loc = &SUBREG_REG (*loc);
    }

  reg = *loc;
  mode = GET_MODE (reg);
  if (! REG_P (reg))
    {
      if (check_only_p)
	return true;
      /* Always reload memory in an address even if the target supports
	 such addresses.  */
      new_reg = lra_create_new_reg_with_unique_value (mode, reg, cl, "address");
      before_p = true;
    }
  else
    {
      regno = REGNO (reg);
      rclass = get_reg_class (regno);
      /* Substitute an equivalent value for the pseudo if one exists;
	 copy it so later in-place address changes do not modify the
	 shared equivalence.  */
      if (! check_only_p
	  && (*loc = get_equiv_with_elimination (reg, curr_insn)) != reg)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "Changing pseudo %d in address of insn %u on equiv ",
		       REGNO (reg), INSN_UID (curr_insn));
	      dump_value_slim (lra_dump_file, *loc, 1);
	      fprintf (lra_dump_file, "\n");
	    }
	  *loc = copy_rtx (*loc);
	}
      if (*loc != reg || ! in_class_p (reg, cl, &new_class))
	{
	  if (check_only_p)
	    return true;
	  reg = *loc;
	  if (get_reload_reg (after == NULL ? OP_IN : OP_INOUT,
			      mode, reg, cl, subreg_p, "address", &new_reg))
	    before_p = true;
	}
      else if (new_class != NO_REGS && rclass != new_class)
	{
	  /* The pseudo is already usable; just narrow its class.  */
	  if (check_only_p)
	    return true;
	  lra_change_class (regno, new_class, "	   Change to", true);
	  return false;
	}
      else
	return false;
    }
  /* Emit the input reload in front of the current insn.  */
  if (before_p)
    {
      push_to_sequence (*before);
      lra_emit_move (new_reg, reg);
      *before = get_insns ();
      end_sequence ();
    }
  *loc = new_reg;
  /* For an automodified address, also store the new register back
     after the insn.  */
  if (after != NULL)
    {
      start_sequence ();
      lra_emit_move (before_p ? copy_rtx (reg) : reg, new_reg);
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  return true;
}
1508 
1509 /* Insert move insn in simplify_operand_subreg. BEFORE returns
1510    the insn to be inserted before curr insn. AFTER returns the
1511    the insn to be inserted after curr insn.  ORIGREG and NEWREG
1512    are the original reg and new reg for reload.  */
1513 static void
insert_move_for_subreg(rtx_insn ** before,rtx_insn ** after,rtx origreg,rtx newreg)1514 insert_move_for_subreg (rtx_insn **before, rtx_insn **after, rtx origreg,
1515 			rtx newreg)
1516 {
1517   if (before)
1518     {
1519       push_to_sequence (*before);
1520       lra_emit_move (newreg, origreg);
1521       *before = get_insns ();
1522       end_sequence ();
1523     }
1524   if (after)
1525     {
1526       start_sequence ();
1527       lra_emit_move (origreg, newreg);
1528       emit_insn (*after);
1529       *after = get_insns ();
1530       end_sequence ();
1531     }
1532 }
1533 
1534 static int valid_address_p (machine_mode mode, rtx addr, addr_space_t as);
1535 static bool process_address (int, bool, rtx_insn **, rtx_insn **);
1536 
1537 /* Make reloads for subreg in operand NOP with internal subreg mode
1538    REG_MODE, add new reloads for further processing.  Return true if
1539    any change was done.  */
1540 static bool
simplify_operand_subreg(int nop,machine_mode reg_mode)1541 simplify_operand_subreg (int nop, machine_mode reg_mode)
1542 {
1543   int hard_regno, inner_hard_regno;
1544   rtx_insn *before, *after;
1545   machine_mode mode, innermode;
1546   rtx reg, new_reg;
1547   rtx operand = *curr_id->operand_loc[nop];
1548   enum reg_class regclass;
1549   enum op_type type;
1550 
1551   before = after = NULL;
1552 
1553   if (GET_CODE (operand) != SUBREG)
1554     return false;
1555 
1556   mode = GET_MODE (operand);
1557   reg = SUBREG_REG (operand);
1558   innermode = GET_MODE (reg);
1559   type = curr_static_id->operand[nop].type;
1560   if (MEM_P (reg))
1561     {
1562       const bool addr_was_valid
1563 	= valid_address_p (innermode, XEXP (reg, 0), MEM_ADDR_SPACE (reg));
1564       alter_subreg (curr_id->operand_loc[nop], false);
1565       rtx subst = *curr_id->operand_loc[nop];
1566       lra_assert (MEM_P (subst));
1567       const bool addr_is_valid = valid_address_p (GET_MODE (subst),
1568 						  XEXP (subst, 0),
1569 						  MEM_ADDR_SPACE (subst));
1570       if (!addr_was_valid
1571 	  || addr_is_valid
1572 	  || ((get_constraint_type (lookup_constraint
1573 				    (curr_static_id->operand[nop].constraint))
1574 	       != CT_SPECIAL_MEMORY)
1575 	      /* We still can reload address and if the address is
1576 		 valid, we can remove subreg without reloading its
1577 		 inner memory.  */
1578 	      && valid_address_p (GET_MODE (subst),
1579 				  regno_reg_rtx
1580 				  [ira_class_hard_regs
1581 				   [base_reg_class (GET_MODE (subst),
1582 						    MEM_ADDR_SPACE (subst),
1583 						    ADDRESS, SCRATCH)][0]],
1584 				  MEM_ADDR_SPACE (subst))))
1585 	{
1586 	  /* If we change the address for a paradoxical subreg of memory, the
1587 	     new address might violate the necessary alignment or the access
1588 	     might be slow; take this into consideration.  We need not worry
1589 	     about accesses beyond allocated memory for paradoxical memory
1590 	     subregs as we don't substitute such equiv memory (see processing
1591 	     equivalences in function lra_constraints) and because for spilled
1592 	     pseudos we allocate stack memory enough for the biggest
1593 	     corresponding paradoxical subreg.
1594 
1595 	     However, do not blindly simplify a (subreg (mem ...)) for
1596 	     WORD_REGISTER_OPERATIONS targets as this may lead to loading junk
1597 	     data into a register when the inner is narrower than outer or
1598 	     missing important data from memory when the inner is wider than
1599 	     outer.  This rule only applies to modes that are no wider than
1600 	     a word.
1601 
1602 	     If valid memory becomes invalid after subreg elimination
1603 	     and address might be different we still have to reload
1604 	     memory.
1605 	  */
1606 	  if ((! addr_was_valid
1607 	       || addr_is_valid
1608 	       || known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (innermode)))
1609 	      && !(maybe_ne (GET_MODE_PRECISION (mode),
1610 			     GET_MODE_PRECISION (innermode))
1611 		   && known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD)
1612 		   && known_le (GET_MODE_SIZE (innermode), UNITS_PER_WORD)
1613 		   && WORD_REGISTER_OPERATIONS)
1614 	      && (!(MEM_ALIGN (subst) < GET_MODE_ALIGNMENT (mode)
1615 		    && targetm.slow_unaligned_access (mode, MEM_ALIGN (subst)))
1616 		  || (MEM_ALIGN (reg) < GET_MODE_ALIGNMENT (innermode)
1617 		      && targetm.slow_unaligned_access (innermode,
1618 							MEM_ALIGN (reg)))))
1619 	    return true;
1620 
1621 	  *curr_id->operand_loc[nop] = operand;
1622 
1623 	  /* But if the address was not valid, we cannot reload the MEM without
1624 	     reloading the address first.  */
1625 	  if (!addr_was_valid)
1626 	    process_address (nop, false, &before, &after);
1627 
1628 	  /* INNERMODE is fast, MODE slow.  Reload the mem in INNERMODE.  */
1629 	  enum reg_class rclass
1630 	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
1631 	  if (get_reload_reg (curr_static_id->operand[nop].type, innermode,
1632 			      reg, rclass, TRUE, "slow/invalid mem", &new_reg))
1633 	    {
1634 	      bool insert_before, insert_after;
1635 	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
1636 
1637 	      insert_before = (type != OP_OUT
1638 			       || partial_subreg_p (mode, innermode));
1639 	      insert_after = type != OP_IN;
1640 	      insert_move_for_subreg (insert_before ? &before : NULL,
1641 				      insert_after ? &after : NULL,
1642 				      reg, new_reg);
1643 	    }
1644 	  SUBREG_REG (operand) = new_reg;
1645 
1646 	  /* Convert to MODE.  */
1647 	  reg = operand;
1648 	  rclass
1649 	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
1650 	  if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
1651 			      rclass, TRUE, "slow/invalid mem", &new_reg))
1652 	    {
1653 	      bool insert_before, insert_after;
1654 	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
1655 
1656 	      insert_before = type != OP_OUT;
1657 	      insert_after = type != OP_IN;
1658 	      insert_move_for_subreg (insert_before ? &before : NULL,
1659 				      insert_after ? &after : NULL,
1660 				      reg, new_reg);
1661 	    }
1662 	  *curr_id->operand_loc[nop] = new_reg;
1663 	  lra_process_new_insns (curr_insn, before, after,
1664 				 "Inserting slow/invalid mem reload");
1665 	  return true;
1666 	}
1667 
1668       /* If the address was valid and became invalid, prefer to reload
1669 	 the memory.  Typical case is when the index scale should
1670 	 correspond the memory.  */
1671       *curr_id->operand_loc[nop] = operand;
1672       /* Do not return false here as the MEM_P (reg) will be processed
1673 	 later in this function.  */
1674     }
1675   else if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
1676     {
1677       alter_subreg (curr_id->operand_loc[nop], false);
1678       return true;
1679     }
1680   else if (CONSTANT_P (reg))
1681     {
1682       /* Try to simplify subreg of constant.  It is usually result of
1683 	 equivalence substitution.  */
1684       if (innermode == VOIDmode
1685 	  && (innermode = original_subreg_reg_mode[nop]) == VOIDmode)
1686 	innermode = curr_static_id->operand[nop].mode;
1687       if ((new_reg = simplify_subreg (mode, reg, innermode,
1688 				      SUBREG_BYTE (operand))) != NULL_RTX)
1689 	{
1690 	  *curr_id->operand_loc[nop] = new_reg;
1691 	  return true;
1692 	}
1693     }
1694   /* Put constant into memory when we have mixed modes.  It generates
1695      a better code in most cases as it does not need a secondary
1696      reload memory.  It also prevents LRA looping when LRA is using
1697      secondary reload memory again and again.  */
1698   if (CONSTANT_P (reg) && CONST_POOL_OK_P (reg_mode, reg)
1699       && SCALAR_INT_MODE_P (reg_mode) != SCALAR_INT_MODE_P (mode))
1700     {
1701       SUBREG_REG (operand) = force_const_mem (reg_mode, reg);
1702       alter_subreg (curr_id->operand_loc[nop], false);
1703       return true;
1704     }
1705   /* Force a reload of the SUBREG_REG if this is a constant or PLUS or
1706      if there may be a problem accessing OPERAND in the outer
1707      mode.  */
1708   if ((REG_P (reg)
1709        && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1710        && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
1711        /* Don't reload paradoxical subregs because we could be looping
1712 	  having repeatedly final regno out of hard regs range.  */
1713        && (hard_regno_nregs (hard_regno, innermode)
1714 	   >= hard_regno_nregs (hard_regno, mode))
1715        && simplify_subreg_regno (hard_regno, innermode,
1716 				 SUBREG_BYTE (operand), mode) < 0
1717        /* Don't reload subreg for matching reload.  It is actually
1718 	  valid subreg in LRA.  */
1719        && ! LRA_SUBREG_P (operand))
1720       || CONSTANT_P (reg) || GET_CODE (reg) == PLUS || MEM_P (reg))
1721     {
1722       enum reg_class rclass;
1723 
1724       if (REG_P (reg))
1725 	/* There is a big probability that we will get the same class
1726 	   for the new pseudo and we will get the same insn which
1727 	   means infinite looping.  So spill the new pseudo.  */
1728 	rclass = NO_REGS;
1729       else
1730 	/* The class will be defined later in curr_insn_transform.  */
1731 	rclass
1732 	  = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
1733 
1734       if (get_reload_reg (curr_static_id->operand[nop].type, reg_mode, reg,
1735 			  rclass, TRUE, "subreg reg", &new_reg))
1736 	{
1737 	  bool insert_before, insert_after;
1738 	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
1739 
1740 	  insert_before = (type != OP_OUT
1741 			   || read_modify_subreg_p (operand));
1742 	  insert_after = (type != OP_IN);
1743 	  insert_move_for_subreg (insert_before ? &before : NULL,
1744 				  insert_after ? &after : NULL,
1745 				  reg, new_reg);
1746 	}
1747       SUBREG_REG (operand) = new_reg;
1748       lra_process_new_insns (curr_insn, before, after,
1749 			     "Inserting subreg reload");
1750       return true;
1751     }
1752   /* Force a reload for a paradoxical subreg. For paradoxical subreg,
1753      IRA allocates hardreg to the inner pseudo reg according to its mode
1754      instead of the outermode, so the size of the hardreg may not be enough
1755      to contain the outermode operand, in that case we may need to insert
1756      reload for the reg. For the following two types of paradoxical subreg,
1757      we need to insert reload:
1758      1. If the op_type is OP_IN, and the hardreg could not be paired with
1759         other hardreg to contain the outermode operand
1760         (checked by in_hard_reg_set_p), we need to insert the reload.
1761      2. If the op_type is OP_OUT or OP_INOUT.
1762 
1763      Here is a paradoxical subreg example showing how the reload is generated:
1764 
1765      (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
1766         (subreg:TI (reg:DI 107 [ __comp ]) 0)) {*movti_internal_rex64}
1767 
1768      In IRA, reg107 is allocated to a DImode hardreg. We use x86-64 as example
1769      here, if reg107 is assigned to hardreg R15, because R15 is the last
1770      hardreg, compiler cannot find another hardreg to pair with R15 to
1771      contain TImode data. So we insert a TImode reload reg180 for it.
1772      After reload is inserted:
1773 
1774      (insn 283 0 0 (set (subreg:DI (reg:TI 180 [orig:107 __comp ] [107]) 0)
1775         (reg:DI 107 [ __comp ])) -1
1776      (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
1777         (subreg:TI (reg:TI 180 [orig:107 __comp ] [107]) 0)) {*movti_internal_rex64}
1778 
1779      Two reload hard registers will be allocated to reg180 to save TImode data
1780      in LRA_assign.
1781 
1782      For LRA pseudos this should normally be handled by the biggest_mode
1783      mechanism.  However, it's possible for new uses of an LRA pseudo
1784      to be introduced after we've allocated it, such as when undoing
1785      inheritance, and the allocated register might not then be appropriate
1786      for the new uses.  */
1787   else if (REG_P (reg)
1788 	   && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1789 	   && paradoxical_subreg_p (operand)
1790 	   && (inner_hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
1791 	   && ((hard_regno
1792 		= simplify_subreg_regno (inner_hard_regno, innermode,
1793 					 SUBREG_BYTE (operand), mode)) < 0
1794 	       || ((hard_regno_nregs (inner_hard_regno, innermode)
1795 		    < hard_regno_nregs (hard_regno, mode))
1796 		   && (regclass = lra_get_allocno_class (REGNO (reg)))
1797 		   && (type != OP_IN
1798 		       || !in_hard_reg_set_p (reg_class_contents[regclass],
1799 					      mode, hard_regno)
1800 		       || overlaps_hard_reg_set_p (lra_no_alloc_regs,
1801 						   mode, hard_regno)))))
1802     {
1803       /* The class will be defined later in curr_insn_transform.  */
1804       enum reg_class rclass
1805 	= (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
1806 
1807       if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
1808                           rclass, TRUE, "paradoxical subreg", &new_reg))
1809         {
1810 	  rtx subreg;
1811 	  bool insert_before, insert_after;
1812 
1813 	  PUT_MODE (new_reg, mode);
1814           subreg = gen_lowpart_SUBREG (innermode, new_reg);
1815 	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
1816 
1817 	  insert_before = (type != OP_OUT);
1818 	  insert_after = (type != OP_IN);
1819 	  insert_move_for_subreg (insert_before ? &before : NULL,
1820 				  insert_after ? &after : NULL,
1821 				  reg, subreg);
1822 	}
1823       SUBREG_REG (operand) = new_reg;
1824       lra_process_new_insns (curr_insn, before, after,
1825                              "Inserting paradoxical subreg reload");
1826       return true;
1827     }
1828   return false;
1829 }
1830 
1831 /* Return TRUE if X refers for a hard register from SET.  */
1832 static bool
uses_hard_regs_p(rtx x,HARD_REG_SET set)1833 uses_hard_regs_p (rtx x, HARD_REG_SET set)
1834 {
1835   int i, j, x_hard_regno;
1836   machine_mode mode;
1837   const char *fmt;
1838   enum rtx_code code;
1839 
1840   if (x == NULL_RTX)
1841     return false;
1842   code = GET_CODE (x);
1843   mode = GET_MODE (x);
1844 
1845   if (code == SUBREG)
1846     {
1847       /* For all SUBREGs we want to check whether the full multi-register
1848 	 overlaps the set.  For normal SUBREGs this means 'get_hard_regno' of
1849 	 the inner register, for paradoxical SUBREGs this means the
1850 	 'get_hard_regno' of the full SUBREG and for complete SUBREGs either is
1851 	 fine.  Use the wider mode for all cases.  */
1852       rtx subreg = SUBREG_REG (x);
1853       mode = wider_subreg_mode (x);
1854       if (mode == GET_MODE (subreg))
1855 	{
1856 	  x = subreg;
1857 	  code = GET_CODE (x);
1858 	}
1859     }
1860 
1861   if (REG_P (x) || SUBREG_P (x))
1862     {
1863       x_hard_regno = get_hard_regno (x, true);
1864       return (x_hard_regno >= 0
1865 	      && overlaps_hard_reg_set_p (set, mode, x_hard_regno));
1866     }
1867   if (MEM_P (x))
1868     {
1869       struct address_info ad;
1870 
1871       decompose_mem_address (&ad, x);
1872       if (ad.base_term != NULL && uses_hard_regs_p (*ad.base_term, set))
1873 	return true;
1874       if (ad.index_term != NULL && uses_hard_regs_p (*ad.index_term, set))
1875 	return true;
1876     }
1877   fmt = GET_RTX_FORMAT (code);
1878   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1879     {
1880       if (fmt[i] == 'e')
1881 	{
1882 	  if (uses_hard_regs_p (XEXP (x, i), set))
1883 	    return true;
1884 	}
1885       else if (fmt[i] == 'E')
1886 	{
1887 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1888 	    if (uses_hard_regs_p (XVECEXP (x, i, j), set))
1889 	      return true;
1890 	}
1891     }
1892   return false;
1893 }
1894 
1895 /* Return true if OP is a spilled pseudo. */
1896 static inline bool
spilled_pseudo_p(rtx op)1897 spilled_pseudo_p (rtx op)
1898 {
1899   return (REG_P (op)
1900 	  && REGNO (op) >= FIRST_PSEUDO_REGISTER && in_mem_p (REGNO (op)));
1901 }
1902 
1903 /* Return true if X is a general constant.  */
1904 static inline bool
general_constant_p(rtx x)1905 general_constant_p (rtx x)
1906 {
1907   return CONSTANT_P (x) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (x));
1908 }
1909 
1910 static bool
reg_in_class_p(rtx reg,enum reg_class cl)1911 reg_in_class_p (rtx reg, enum reg_class cl)
1912 {
1913   if (cl == NO_REGS)
1914     return get_reg_class (REGNO (reg)) == NO_REGS;
1915   return in_class_p (reg, cl, NULL);
1916 }
1917 
1918 /* Return true if SET of RCLASS contains no hard regs which can be
1919    used in MODE.  */
1920 static bool
prohibited_class_reg_set_mode_p(enum reg_class rclass,HARD_REG_SET & set,machine_mode mode)1921 prohibited_class_reg_set_mode_p (enum reg_class rclass,
1922 				 HARD_REG_SET &set,
1923 				 machine_mode mode)
1924 {
1925   HARD_REG_SET temp;
1926 
1927   lra_assert (hard_reg_set_subset_p (reg_class_contents[rclass], set));
1928   temp = set & ~lra_no_alloc_regs;
1929   return (hard_reg_set_subset_p
1930 	  (temp, ira_prohibited_class_mode_regs[rclass][mode]));
1931 }
1932 
1933 
/* Generation counter used to validate the per-class bookkeeping in
   update_and_check_small_class_inputs.  It must be incremented at the
   start of processing each insn alternative.  */
static unsigned int curr_small_class_check = 0;
1938 
1939 /* Update number of used inputs of class OP_CLASS for operand NOP
1940    of alternative NALT.  Return true if we have more such class operands
1941    than the number of available regs.  */
1942 static bool
update_and_check_small_class_inputs(int nop,int nalt,enum reg_class op_class)1943 update_and_check_small_class_inputs (int nop, int nalt,
1944 				     enum reg_class op_class)
1945 {
1946   static unsigned int small_class_check[LIM_REG_CLASSES];
1947   static int small_class_input_nums[LIM_REG_CLASSES];
1948 
1949   if (SMALL_REGISTER_CLASS_P (op_class)
1950       /* We are interesting in classes became small because of fixing
1951 	 some hard regs, e.g. by an user through GCC options.  */
1952       && hard_reg_set_intersect_p (reg_class_contents[op_class],
1953 				   ira_no_alloc_regs)
1954       && (curr_static_id->operand[nop].type != OP_OUT
1955 	  || TEST_BIT (curr_static_id->operand[nop].early_clobber_alts, nalt)))
1956     {
1957       if (small_class_check[op_class] == curr_small_class_check)
1958 	small_class_input_nums[op_class]++;
1959       else
1960 	{
1961 	  small_class_check[op_class] = curr_small_class_check;
1962 	  small_class_input_nums[op_class] = 1;
1963 	}
1964       if (small_class_input_nums[op_class] > ira_class_hard_regs_num[op_class])
1965 	return true;
1966     }
1967   return false;
1968 }
1969 
1970 /* Major function to choose the current insn alternative and what
1971    operands should be reloaded and how.	 If ONLY_ALTERNATIVE is not
1972    negative we should consider only this alternative.  Return false if
1973    we cannot choose the alternative or find how to reload the
1974    operands.  */
1975 static bool
process_alt_operands(int only_alternative)1976 process_alt_operands (int only_alternative)
1977 {
1978   bool ok_p = false;
1979   int nop, overall, nalt;
1980   int n_alternatives = curr_static_id->n_alternatives;
1981   int n_operands = curr_static_id->n_operands;
1982   /* LOSERS counts the operands that don't fit this alternative and
1983      would require loading.  */
1984   int losers;
1985   int addr_losers;
1986   /* REJECT is a count of how undesirable this alternative says it is
1987      if any reloading is required.  If the alternative matches exactly
1988      then REJECT is ignored, but otherwise it gets this much counted
1989      against it in addition to the reloading needed.  */
1990   int reject;
1991   /* This is defined by '!' or '?' alternative constraint and added to
1992      reject.  But in some cases it can be ignored.  */
1993   int static_reject;
1994   int op_reject;
1995   /* The number of elements in the following array.  */
1996   int early_clobbered_regs_num;
1997   /* Numbers of operands which are early clobber registers.  */
1998   int early_clobbered_nops[MAX_RECOG_OPERANDS];
1999   enum reg_class curr_alt[MAX_RECOG_OPERANDS];
2000   HARD_REG_SET curr_alt_set[MAX_RECOG_OPERANDS];
2001   bool curr_alt_match_win[MAX_RECOG_OPERANDS];
2002   bool curr_alt_win[MAX_RECOG_OPERANDS];
2003   bool curr_alt_offmemok[MAX_RECOG_OPERANDS];
2004   int curr_alt_matches[MAX_RECOG_OPERANDS];
2005   /* The number of elements in the following array.  */
2006   int curr_alt_dont_inherit_ops_num;
2007   /* Numbers of operands whose reload pseudos should not be inherited.	*/
2008   int curr_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
2009   rtx op;
2010   /* The register when the operand is a subreg of register, otherwise the
2011      operand itself.  */
2012   rtx no_subreg_reg_operand[MAX_RECOG_OPERANDS];
2013   /* The register if the operand is a register or subreg of register,
2014      otherwise NULL.  */
2015   rtx operand_reg[MAX_RECOG_OPERANDS];
2016   int hard_regno[MAX_RECOG_OPERANDS];
2017   machine_mode biggest_mode[MAX_RECOG_OPERANDS];
2018   int reload_nregs, reload_sum;
2019   bool costly_p;
2020   enum reg_class cl;
2021 
2022   /* Calculate some data common for all alternatives to speed up the
2023      function.	*/
2024   for (nop = 0; nop < n_operands; nop++)
2025     {
2026       rtx reg;
2027 
2028       op = no_subreg_reg_operand[nop] = *curr_id->operand_loc[nop];
2029       /* The real hard regno of the operand after the allocation.  */
2030       hard_regno[nop] = get_hard_regno (op, true);
2031 
2032       operand_reg[nop] = reg = op;
2033       biggest_mode[nop] = GET_MODE (op);
2034       if (GET_CODE (op) == SUBREG)
2035 	{
2036 	  biggest_mode[nop] = wider_subreg_mode (op);
2037 	  operand_reg[nop] = reg = SUBREG_REG (op);
2038 	}
2039       if (! REG_P (reg))
2040 	operand_reg[nop] = NULL_RTX;
2041       else if (REGNO (reg) >= FIRST_PSEUDO_REGISTER
2042 	       || ((int) REGNO (reg)
2043 		   == lra_get_elimination_hard_regno (REGNO (reg))))
2044 	no_subreg_reg_operand[nop] = reg;
2045       else
2046 	operand_reg[nop] = no_subreg_reg_operand[nop]
2047 	  /* Just use natural mode for elimination result.  It should
2048 	     be enough for extra constraints hooks.  */
2049 	  = regno_reg_rtx[hard_regno[nop]];
2050     }
2051 
2052   /* The constraints are made of several alternatives.	Each operand's
2053      constraint looks like foo,bar,... with commas separating the
2054      alternatives.  The first alternatives for all operands go
2055      together, the second alternatives go together, etc.
2056 
2057      First loop over alternatives.  */
2058   alternative_mask preferred = curr_id->preferred_alternatives;
2059   if (only_alternative >= 0)
2060     preferred &= ALTERNATIVE_BIT (only_alternative);
2061 
2062   for (nalt = 0; nalt < n_alternatives; nalt++)
2063     {
2064       /* Loop over operands for one constraint alternative.  */
2065       if (!TEST_BIT (preferred, nalt))
2066 	continue;
2067 
2068       bool matching_early_clobber[MAX_RECOG_OPERANDS];
2069       curr_small_class_check++;
2070       overall = losers = addr_losers = 0;
2071       static_reject = reject = reload_nregs = reload_sum = 0;
2072       for (nop = 0; nop < n_operands; nop++)
2073 	{
2074 	  int inc = (curr_static_id
2075 		     ->operand_alternative[nalt * n_operands + nop].reject);
2076 	  if (lra_dump_file != NULL && inc != 0)
2077 	    fprintf (lra_dump_file,
2078 		     "            Staticly defined alt reject+=%d\n", inc);
2079 	  static_reject += inc;
2080 	  matching_early_clobber[nop] = 0;
2081 	}
2082       reject += static_reject;
2083       early_clobbered_regs_num = 0;
2084 
2085       for (nop = 0; nop < n_operands; nop++)
2086 	{
2087 	  const char *p;
2088 	  char *end;
2089 	  int len, c, m, i, opalt_num, this_alternative_matches;
2090 	  bool win, did_match, offmemok, early_clobber_p;
2091 	  /* false => this operand can be reloaded somehow for this
2092 	     alternative.  */
2093 	  bool badop;
2094 	  /* true => this operand can be reloaded if the alternative
2095 	     allows regs.  */
2096 	  bool winreg;
2097 	  /* True if a constant forced into memory would be OK for
2098 	     this operand.  */
2099 	  bool constmemok;
2100 	  enum reg_class this_alternative, this_costly_alternative;
2101 	  HARD_REG_SET this_alternative_set, this_costly_alternative_set;
2102 	  bool this_alternative_match_win, this_alternative_win;
2103 	  bool this_alternative_offmemok;
2104 	  bool scratch_p;
2105 	  machine_mode mode;
2106 	  enum constraint_num cn;
2107 
2108 	  opalt_num = nalt * n_operands + nop;
2109 	  if (curr_static_id->operand_alternative[opalt_num].anything_ok)
2110 	    {
2111 	      /* Fast track for no constraints at all.	*/
2112 	      curr_alt[nop] = NO_REGS;
2113 	      CLEAR_HARD_REG_SET (curr_alt_set[nop]);
2114 	      curr_alt_win[nop] = true;
2115 	      curr_alt_match_win[nop] = false;
2116 	      curr_alt_offmemok[nop] = false;
2117 	      curr_alt_matches[nop] = -1;
2118 	      continue;
2119 	    }
2120 
2121 	  op = no_subreg_reg_operand[nop];
2122 	  mode = curr_operand_mode[nop];
2123 
2124 	  win = did_match = winreg = offmemok = constmemok = false;
2125 	  badop = true;
2126 
2127 	  early_clobber_p = false;
2128 	  p = curr_static_id->operand_alternative[opalt_num].constraint;
2129 
2130 	  this_costly_alternative = this_alternative = NO_REGS;
2131 	  /* We update set of possible hard regs besides its class
2132 	     because reg class might be inaccurate.  For example,
2133 	     union of LO_REGS (l), HI_REGS(h), and STACK_REG(k) in ARM
2134 	     is translated in HI_REGS because classes are merged by
2135 	     pairs and there is no accurate intermediate class.	 */
2136 	  CLEAR_HARD_REG_SET (this_alternative_set);
2137 	  CLEAR_HARD_REG_SET (this_costly_alternative_set);
2138 	  this_alternative_win = false;
2139 	  this_alternative_match_win = false;
2140 	  this_alternative_offmemok = false;
2141 	  this_alternative_matches = -1;
2142 
2143 	  /* An empty constraint should be excluded by the fast
2144 	     track.  */
2145 	  lra_assert (*p != 0 && *p != ',');
2146 
2147 	  op_reject = 0;
2148 	  /* Scan this alternative's specs for this operand; set WIN
2149 	     if the operand fits any letter in this alternative.
2150 	     Otherwise, clear BADOP if this operand could fit some
2151 	     letter after reloads, or set WINREG if this operand could
2152 	     fit after reloads provided the constraint allows some
2153 	     registers.	 */
2154 	  costly_p = false;
2155 	  do
2156 	    {
2157 	      switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
2158 		{
2159 		case '\0':
2160 		  len = 0;
2161 		  break;
2162 		case ',':
2163 		  c = '\0';
2164 		  break;
2165 
2166 		case '&':
2167 		  early_clobber_p = true;
2168 		  break;
2169 
2170 		case '$':
2171 		  op_reject += LRA_MAX_REJECT;
2172 		  break;
2173 		case '^':
2174 		  op_reject += LRA_LOSER_COST_FACTOR;
2175 		  break;
2176 
2177 		case '#':
2178 		  /* Ignore rest of this alternative.  */
2179 		  c = '\0';
2180 		  break;
2181 
2182 		case '0':  case '1':  case '2':	 case '3':  case '4':
2183 		case '5':  case '6':  case '7':	 case '8':  case '9':
2184 		  {
2185 		    int m_hregno;
2186 		    bool match_p;
2187 
2188 		    m = strtoul (p, &end, 10);
2189 		    p = end;
2190 		    len = 0;
2191 		    lra_assert (nop > m);
2192 
2193 		    /* Reject matches if we don't know which operand is
2194 		       bigger.  This situation would arguably be a bug in
2195 		       an .md pattern, but could also occur in a user asm.  */
2196 		    if (!ordered_p (GET_MODE_SIZE (biggest_mode[m]),
2197 				    GET_MODE_SIZE (biggest_mode[nop])))
2198 		      break;
2199 
2200 		    /* Don't match wrong asm insn operands for proper
2201 		       diagnostic later.  */
2202 		    if (INSN_CODE (curr_insn) < 0
2203 			&& (curr_operand_mode[m] == BLKmode
2204 			    || curr_operand_mode[nop] == BLKmode)
2205 			&& curr_operand_mode[m] != curr_operand_mode[nop])
2206 		      break;
2207 
2208 		    m_hregno = get_hard_regno (*curr_id->operand_loc[m], false);
2209 		    /* We are supposed to match a previous operand.
2210 		       If we do, we win if that one did.  If we do
2211 		       not, count both of the operands as losers.
2212 		       (This is too conservative, since most of the
2213 		       time only a single reload insn will be needed
2214 		       to make the two operands win.  As a result,
2215 		       this alternative may be rejected when it is
2216 		       actually desirable.)  */
2217 		    match_p = false;
2218 		    if (operands_match_p (*curr_id->operand_loc[nop],
2219 					  *curr_id->operand_loc[m], m_hregno))
2220 		      {
2221 			/* We should reject matching of an early
2222 			   clobber operand if the matching operand is
2223 			   not dying in the insn.  */
2224 			if (!TEST_BIT (curr_static_id->operand[m]
2225 				       .early_clobber_alts, nalt)
2226 			    || operand_reg[nop] == NULL_RTX
2227 			    || (find_regno_note (curr_insn, REG_DEAD,
2228 						 REGNO (op))
2229 				|| REGNO (op) == REGNO (operand_reg[m])))
2230 			  match_p = true;
2231 		      }
2232 		    if (match_p)
2233 		      {
2234 			/* If we are matching a non-offsettable
2235 			   address where an offsettable address was
2236 			   expected, then we must reject this
2237 			   combination, because we can't reload
2238 			   it.	*/
2239 			if (curr_alt_offmemok[m]
2240 			    && MEM_P (*curr_id->operand_loc[m])
2241 			    && curr_alt[m] == NO_REGS && ! curr_alt_win[m])
2242 			  continue;
2243 		      }
2244 		    else
2245 		      {
2246 			/* If the operands do not match and one
2247 			   operand is INOUT, we can not match them.
2248 			   Try other possibilities, e.g. other
2249 			   alternatives or commutative operand
2250 			   exchange.  */
2251 			if (curr_static_id->operand[nop].type == OP_INOUT
2252 			    || curr_static_id->operand[m].type == OP_INOUT)
2253 			  break;
2254 			/* Operands don't match.  If the operands are
2255 			   different user defined explicit hard
2256 			   registers, then we cannot make them match
2257 			   when one is early clobber operand.  */
2258 			if ((REG_P (*curr_id->operand_loc[nop])
2259 			     || SUBREG_P (*curr_id->operand_loc[nop]))
2260 			    && (REG_P (*curr_id->operand_loc[m])
2261 				|| SUBREG_P (*curr_id->operand_loc[m])))
2262 			  {
2263 			    rtx nop_reg = *curr_id->operand_loc[nop];
2264 			    if (SUBREG_P (nop_reg))
2265 			      nop_reg = SUBREG_REG (nop_reg);
2266 			    rtx m_reg = *curr_id->operand_loc[m];
2267 			    if (SUBREG_P (m_reg))
2268 			      m_reg = SUBREG_REG (m_reg);
2269 
2270 			    if (REG_P (nop_reg)
2271 				&& HARD_REGISTER_P (nop_reg)
2272 				&& REG_USERVAR_P (nop_reg)
2273 				&& REG_P (m_reg)
2274 				&& HARD_REGISTER_P (m_reg)
2275 				&& REG_USERVAR_P (m_reg))
2276 			      {
2277 				int i;
2278 
2279 				for (i = 0; i < early_clobbered_regs_num; i++)
2280 				  if (m == early_clobbered_nops[i])
2281 				    break;
2282 				if (i < early_clobbered_regs_num
2283 				    || early_clobber_p)
2284 				  break;
2285 			      }
2286 			  }
2287 			/* Both operands must allow a reload register,
2288 			   otherwise we cannot make them match.  */
2289 			if (curr_alt[m] == NO_REGS)
2290 			  break;
2291 			/* Retroactively mark the operand we had to
2292 			   match as a loser, if it wasn't already and
2293 			   it wasn't matched to a register constraint
2294 			   (e.g it might be matched by memory). */
2295 			if (curr_alt_win[m]
2296 			    && (operand_reg[m] == NULL_RTX
2297 				|| hard_regno[m] < 0))
2298 			  {
2299 			    losers++;
2300 			    reload_nregs
2301 			      += (ira_reg_class_max_nregs[curr_alt[m]]
2302 				  [GET_MODE (*curr_id->operand_loc[m])]);
2303 			  }
2304 
2305 			/* Prefer matching earlyclobber alternative as
2306 			   it results in less hard regs required for
2307 			   the insn than a non-matching earlyclobber
2308 			   alternative.  */
2309 			if (TEST_BIT (curr_static_id->operand[m]
2310 				      .early_clobber_alts, nalt))
2311 			  {
2312 			    if (lra_dump_file != NULL)
2313 			      fprintf
2314 				(lra_dump_file,
2315 				 "            %d Matching earlyclobber alt:"
2316 				 " reject--\n",
2317 				 nop);
2318 			    if (!matching_early_clobber[m])
2319 			      {
2320 				reject--;
2321 				matching_early_clobber[m] = 1;
2322 			      }
2323 			  }
2324 			/* Otherwise we prefer no matching
2325 			   alternatives because it gives more freedom
2326 			   in RA.  */
2327 			else if (operand_reg[nop] == NULL_RTX
2328 				 || (find_regno_note (curr_insn, REG_DEAD,
2329 						      REGNO (operand_reg[nop]))
2330 				     == NULL_RTX))
2331 			  {
2332 			    if (lra_dump_file != NULL)
2333 			      fprintf
2334 				(lra_dump_file,
2335 				 "            %d Matching alt: reject+=2\n",
2336 				 nop);
2337 			    reject += 2;
2338 			  }
2339 		      }
2340 		    /* If we have to reload this operand and some
2341 		       previous operand also had to match the same
2342 		       thing as this operand, we don't know how to do
2343 		       that.  */
2344 		    if (!match_p || !curr_alt_win[m])
2345 		      {
2346 			for (i = 0; i < nop; i++)
2347 			  if (curr_alt_matches[i] == m)
2348 			    break;
2349 			if (i < nop)
2350 			  break;
2351 		      }
2352 		    else
2353 		      did_match = true;
2354 
2355 		    this_alternative_matches = m;
2356 		    /* This can be fixed with reloads if the operand
2357 		       we are supposed to match can be fixed with
2358 		       reloads. */
2359 		    badop = false;
2360 		    this_alternative = curr_alt[m];
2361 		    this_alternative_set = curr_alt_set[m];
2362 		    winreg = this_alternative != NO_REGS;
2363 		    break;
2364 		  }
2365 
2366 		case 'g':
2367 		  if (MEM_P (op)
2368 		      || general_constant_p (op)
2369 		      || spilled_pseudo_p (op))
2370 		    win = true;
2371 		  cl = GENERAL_REGS;
2372 		  goto reg;
2373 
2374 		default:
2375 		  cn = lookup_constraint (p);
2376 		  switch (get_constraint_type (cn))
2377 		    {
2378 		    case CT_REGISTER:
2379 		      cl = reg_class_for_constraint (cn);
2380 		      if (cl != NO_REGS)
2381 			goto reg;
2382 		      break;
2383 
2384 		    case CT_CONST_INT:
2385 		      if (CONST_INT_P (op)
2386 			  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2387 			win = true;
2388 		      break;
2389 
2390 		    case CT_MEMORY:
2391 		      if (MEM_P (op)
2392 			  && satisfies_memory_constraint_p (op, cn))
2393 			win = true;
2394 		      else if (spilled_pseudo_p (op))
2395 			win = true;
2396 
2397 		      /* If we didn't already win, we can reload constants
2398 			 via force_const_mem or put the pseudo value into
2399 			 memory, or make other memory by reloading the
2400 			 address like for 'o'.  */
2401 		      if (CONST_POOL_OK_P (mode, op)
2402 			  || MEM_P (op) || REG_P (op)
2403 			  /* We can restore the equiv insn by a
2404 			     reload.  */
2405 			  || equiv_substition_p[nop])
2406 			badop = false;
2407 		      constmemok = true;
2408 		      offmemok = true;
2409 		      break;
2410 
2411 		    case CT_ADDRESS:
2412 		      /* An asm operand with an address constraint
2413 			 that doesn't satisfy address_operand has
2414 			 is_address cleared, so that we don't try to
2415 			 make a non-address fit.  */
2416 		      if (!curr_static_id->operand[nop].is_address)
2417 			break;
2418 		      /* If we didn't already win, we can reload the address
2419 			 into a base register.  */
2420 		      if (satisfies_address_constraint_p (op, cn))
2421 			win = true;
2422 		      cl = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2423 					   ADDRESS, SCRATCH);
2424 		      badop = false;
2425 		      goto reg;
2426 
2427 		    case CT_FIXED_FORM:
2428 		      if (constraint_satisfied_p (op, cn))
2429 			win = true;
2430 		      break;
2431 
2432 		    case CT_SPECIAL_MEMORY:
2433 		      if (MEM_P (op)
2434 			  && satisfies_memory_constraint_p (op, cn))
2435 			win = true;
2436 		      else if (spilled_pseudo_p (op))
2437 			win = true;
2438 		      break;
2439 		    }
2440 		  break;
2441 
2442 		reg:
2443 		  if (mode == BLKmode)
2444 		    break;
2445 		  this_alternative = reg_class_subunion[this_alternative][cl];
2446 		  this_alternative_set |= reg_class_contents[cl];
2447 		  if (costly_p)
2448 		    {
2449 		      this_costly_alternative
2450 			= reg_class_subunion[this_costly_alternative][cl];
2451 		      this_costly_alternative_set |= reg_class_contents[cl];
2452 		    }
2453 		  winreg = true;
2454 		  if (REG_P (op))
2455 		    {
2456 		      if (hard_regno[nop] >= 0
2457 			  && in_hard_reg_set_p (this_alternative_set,
2458 						mode, hard_regno[nop]))
2459 			win = true;
2460 		      else if (hard_regno[nop] < 0
2461 			       && in_class_p (op, this_alternative, NULL))
2462 			win = true;
2463 		    }
2464 		  break;
2465 		}
2466 	      if (c != ' ' && c != '\t')
2467 		costly_p = c == '*';
2468 	    }
2469 	  while ((p += len), c);
2470 
2471 	  scratch_p = (operand_reg[nop] != NULL_RTX
2472 		       && lra_former_scratch_p (REGNO (operand_reg[nop])));
2473 	  /* Record which operands fit this alternative.  */
2474 	  if (win)
2475 	    {
2476 	      this_alternative_win = true;
2477 	      if (operand_reg[nop] != NULL_RTX)
2478 		{
2479 		  if (hard_regno[nop] >= 0)
2480 		    {
2481 		      if (in_hard_reg_set_p (this_costly_alternative_set,
2482 					     mode, hard_regno[nop]))
2483 			{
2484 			  if (lra_dump_file != NULL)
2485 			    fprintf (lra_dump_file,
2486 				     "            %d Costly set: reject++\n",
2487 				     nop);
2488 			  reject++;
2489 			}
2490 		    }
2491 		  else
2492 		    {
2493 		      /* Prefer won reg to spilled pseudo under other
2494 			 equal conditions for possibe inheritance.  */
2495 		      if (! scratch_p)
2496 			{
2497 			  if (lra_dump_file != NULL)
2498 			    fprintf
2499 			      (lra_dump_file,
2500 			       "            %d Non pseudo reload: reject++\n",
2501 			       nop);
2502 			  reject++;
2503 			}
2504 		      if (in_class_p (operand_reg[nop],
2505 				      this_costly_alternative, NULL))
2506 			{
2507 			  if (lra_dump_file != NULL)
2508 			    fprintf
2509 			      (lra_dump_file,
2510 			       "            %d Non pseudo costly reload:"
2511 			       " reject++\n",
2512 			       nop);
2513 			  reject++;
2514 			}
2515 		    }
2516 		  /* We simulate the behavior of old reload here.
2517 		     Although scratches need hard registers and it
2518 		     might result in spilling other pseudos, no reload
2519 		     insns are generated for the scratches.  So it
2520 		     might cost something but probably less than old
2521 		     reload pass believes.  */
2522 		  if (scratch_p)
2523 		    {
2524 		      if (lra_dump_file != NULL)
2525 			fprintf (lra_dump_file,
2526 				 "            %d Scratch win: reject+=2\n",
2527 				 nop);
2528 		      reject += 2;
2529 		    }
2530 		}
2531 	    }
2532 	  else if (did_match)
2533 	    this_alternative_match_win = true;
2534 	  else
2535 	    {
2536 	      int const_to_mem = 0;
2537 	      bool no_regs_p;
2538 
2539 	      reject += op_reject;
2540 	      /* Never do output reload of stack pointer.  It makes
2541 		 impossible to do elimination when SP is changed in
2542 		 RTL.  */
2543 	      if (op == stack_pointer_rtx && ! frame_pointer_needed
2544 		  && curr_static_id->operand[nop].type != OP_IN)
2545 		goto fail;
2546 
2547 	      /* If this alternative asks for a specific reg class, see if there
2548 		 is at least one allocatable register in that class.  */
2549 	      no_regs_p
2550 		= (this_alternative == NO_REGS
2551 		   || (hard_reg_set_subset_p
2552 		       (reg_class_contents[this_alternative],
2553 			lra_no_alloc_regs)));
2554 
2555 	      /* For asms, verify that the class for this alternative is possible
2556 		 for the mode that is specified.  */
2557 	      if (!no_regs_p && INSN_CODE (curr_insn) < 0)
2558 		{
2559 		  int i;
2560 		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2561 		    if (targetm.hard_regno_mode_ok (i, mode)
2562 			&& in_hard_reg_set_p (reg_class_contents[this_alternative],
2563 					      mode, i))
2564 		      break;
2565 		  if (i == FIRST_PSEUDO_REGISTER)
2566 		    winreg = false;
2567 		}
2568 
2569 	      /* If this operand accepts a register, and if the
2570 		 register class has at least one allocatable register,
2571 		 then this operand can be reloaded.  */
2572 	      if (winreg && !no_regs_p)
2573 		badop = false;
2574 
2575 	      if (badop)
2576 		{
2577 		  if (lra_dump_file != NULL)
2578 		    fprintf (lra_dump_file,
2579 			     "            alt=%d: Bad operand -- refuse\n",
2580 			     nalt);
2581 		  goto fail;
2582 		}
2583 
2584 	      if (this_alternative != NO_REGS)
2585 		{
2586 		  HARD_REG_SET available_regs
2587 		    = (reg_class_contents[this_alternative]
2588 		       & ~((ira_prohibited_class_mode_regs
2589 			    [this_alternative][mode])
2590 			   | lra_no_alloc_regs));
2591 		  if (hard_reg_set_empty_p (available_regs))
2592 		    {
2593 		      /* There are no hard regs holding a value of given
2594 			 mode.  */
2595 		      if (offmemok)
2596 			{
2597 			  this_alternative = NO_REGS;
2598 			  if (lra_dump_file != NULL)
2599 			    fprintf (lra_dump_file,
2600 				     "            %d Using memory because of"
2601 				     " a bad mode: reject+=2\n",
2602 				     nop);
2603 			  reject += 2;
2604 			}
2605 		      else
2606 			{
2607 			  if (lra_dump_file != NULL)
2608 			    fprintf (lra_dump_file,
2609 				     "            alt=%d: Wrong mode -- refuse\n",
2610 				     nalt);
2611 			  goto fail;
2612 			}
2613 		    }
2614 		}
2615 
2616 	      /* If not assigned pseudo has a class which a subset of
2617 		 required reg class, it is a less costly alternative
2618 		 as the pseudo still can get a hard reg of necessary
2619 		 class.  */
2620 	      if (! no_regs_p && REG_P (op) && hard_regno[nop] < 0
2621 		  && (cl = get_reg_class (REGNO (op))) != NO_REGS
2622 		  && ira_class_subset_p[this_alternative][cl])
2623 		{
2624 		  if (lra_dump_file != NULL)
2625 		    fprintf
2626 		      (lra_dump_file,
2627 		       "            %d Super set class reg: reject-=3\n", nop);
2628 		  reject -= 3;
2629 		}
2630 
2631 	      this_alternative_offmemok = offmemok;
2632 	      if (this_costly_alternative != NO_REGS)
2633 		{
2634 		  if (lra_dump_file != NULL)
2635 		    fprintf (lra_dump_file,
2636 			     "            %d Costly loser: reject++\n", nop);
2637 		  reject++;
2638 		}
2639 	      /* If the operand is dying, has a matching constraint,
2640 		 and satisfies constraints of the matched operand
2641 		 which failed to satisfy the own constraints, most probably
2642 		 the reload for this operand will be gone.  */
2643 	      if (this_alternative_matches >= 0
2644 		  && !curr_alt_win[this_alternative_matches]
2645 		  && REG_P (op)
2646 		  && find_regno_note (curr_insn, REG_DEAD, REGNO (op))
2647 		  && (hard_regno[nop] >= 0
2648 		      ? in_hard_reg_set_p (this_alternative_set,
2649 					   mode, hard_regno[nop])
2650 		      : in_class_p (op, this_alternative, NULL)))
2651 		{
2652 		  if (lra_dump_file != NULL)
2653 		    fprintf
2654 		      (lra_dump_file,
2655 		       "            %d Dying matched operand reload: reject++\n",
2656 		       nop);
2657 		  reject++;
2658 		}
2659 	      else
2660 		{
2661 		  /* Strict_low_part requires to reload the register
2662 		     not the sub-register.  In this case we should
2663 		     check that a final reload hard reg can hold the
2664 		     value mode.  */
2665 		  if (curr_static_id->operand[nop].strict_low
2666 		      && REG_P (op)
2667 		      && hard_regno[nop] < 0
2668 		      && GET_CODE (*curr_id->operand_loc[nop]) == SUBREG
2669 		      && ira_class_hard_regs_num[this_alternative] > 0
2670 		      && (!targetm.hard_regno_mode_ok
2671 			  (ira_class_hard_regs[this_alternative][0],
2672 			   GET_MODE (*curr_id->operand_loc[nop]))))
2673 		    {
2674 		      if (lra_dump_file != NULL)
2675 			fprintf
2676 			  (lra_dump_file,
2677 			   "            alt=%d: Strict low subreg reload -- refuse\n",
2678 			   nalt);
2679 		      goto fail;
2680 		    }
2681 		  losers++;
2682 		}
2683 	      if (operand_reg[nop] != NULL_RTX
2684 		  /* Output operands and matched input operands are
2685 		     not inherited.  The following conditions do not
2686 		     exactly describe the previous statement but they
2687 		     are pretty close.  */
2688 		  && curr_static_id->operand[nop].type != OP_OUT
2689 		  && (this_alternative_matches < 0
2690 		      || curr_static_id->operand[nop].type != OP_IN))
2691 		{
2692 		  int last_reload = (lra_reg_info[ORIGINAL_REGNO
2693 						  (operand_reg[nop])]
2694 				     .last_reload);
2695 
2696 		  /* The value of reload_sum has sense only if we
2697 		     process insns in their order.  It happens only on
2698 		     the first constraints sub-pass when we do most of
2699 		     reload work.  */
2700 		  if (lra_constraint_iter == 1 && last_reload > bb_reload_num)
2701 		    reload_sum += last_reload - bb_reload_num;
2702 		}
2703 	      /* If this is a constant that is reloaded into the
2704 		 desired class by copying it to memory first, count
2705 		 that as another reload.  This is consistent with
2706 		 other code and is required to avoid choosing another
2707 		 alternative when the constant is moved into memory.
2708 		 Note that the test here is precisely the same as in
2709 		 the code below that calls force_const_mem.  */
2710 	      if (CONST_POOL_OK_P (mode, op)
2711 		  && ((targetm.preferred_reload_class
2712 		       (op, this_alternative) == NO_REGS)
2713 		      || no_input_reloads_p))
2714 		{
2715 		  const_to_mem = 1;
2716 		  if (! no_regs_p)
2717 		    losers++;
2718 		}
2719 
2720 	      /* Alternative loses if it requires a type of reload not
2721 		 permitted for this insn.  We can always reload
2722 		 objects with a REG_UNUSED note.  */
2723 	      if ((curr_static_id->operand[nop].type != OP_IN
2724 		   && no_output_reloads_p
2725 		   && ! find_reg_note (curr_insn, REG_UNUSED, op))
2726 		  || (curr_static_id->operand[nop].type != OP_OUT
2727 		      && no_input_reloads_p && ! const_to_mem)
2728 		  || (this_alternative_matches >= 0
2729 		      && (no_input_reloads_p
2730 			  || (no_output_reloads_p
2731 			      && (curr_static_id->operand
2732 				  [this_alternative_matches].type != OP_IN)
2733 			      && ! find_reg_note (curr_insn, REG_UNUSED,
2734 						  no_subreg_reg_operand
2735 						  [this_alternative_matches])))))
2736 		{
2737 		  if (lra_dump_file != NULL)
2738 		    fprintf
2739 		      (lra_dump_file,
2740 		       "            alt=%d: No input/otput reload -- refuse\n",
2741 		       nalt);
2742 		  goto fail;
2743 		}
2744 
2745 	      /* Alternative loses if it required class pseudo cannot
2746 		 hold value of required mode.  Such insns can be
2747 		 described by insn definitions with mode iterators.  */
2748 	      if (GET_MODE (*curr_id->operand_loc[nop]) != VOIDmode
2749 		  && ! hard_reg_set_empty_p (this_alternative_set)
2750 		  /* It is common practice for constraints to use a
2751 		     class which does not have actually enough regs to
2752 		     hold the value (e.g. x86 AREG for mode requiring
2753 		     more one general reg).  Therefore we have 2
2754 		     conditions to check that the reload pseudo cannot
2755 		     hold the mode value.  */
2756 		  && (!targetm.hard_regno_mode_ok
2757 		      (ira_class_hard_regs[this_alternative][0],
2758 		       GET_MODE (*curr_id->operand_loc[nop])))
2759 		  /* The above condition is not enough as the first
2760 		     reg in ira_class_hard_regs can be not aligned for
2761 		     multi-words mode values.  */
2762 		  && (prohibited_class_reg_set_mode_p
2763 		      (this_alternative, this_alternative_set,
2764 		       GET_MODE (*curr_id->operand_loc[nop]))))
2765 		{
2766 		  if (lra_dump_file != NULL)
2767 		    fprintf (lra_dump_file,
2768 			     "            alt=%d: reload pseudo for op %d "
2769 			     "cannot hold the mode value -- refuse\n",
2770 			     nalt, nop);
2771 		  goto fail;
2772 		}
2773 
2774 	      /* Check strong discouragement of reload of non-constant
2775 		 into class THIS_ALTERNATIVE.  */
2776 	      if (! CONSTANT_P (op) && ! no_regs_p
2777 		  && (targetm.preferred_reload_class
2778 		      (op, this_alternative) == NO_REGS
2779 		      || (curr_static_id->operand[nop].type == OP_OUT
2780 			  && (targetm.preferred_output_reload_class
2781 			      (op, this_alternative) == NO_REGS))))
2782 		{
2783 		  if (offmemok && REG_P (op))
2784 		    {
2785 		      if (lra_dump_file != NULL)
2786 			fprintf
2787 			  (lra_dump_file,
2788 			   "            %d Spill pseudo into memory: reject+=3\n",
2789 			   nop);
2790 		      reject += 3;
2791 		    }
2792 		  else
2793 		    {
2794 		      if (lra_dump_file != NULL)
2795 			fprintf
2796 			  (lra_dump_file,
2797 			   "            %d Non-prefered reload: reject+=%d\n",
2798 			   nop, LRA_MAX_REJECT);
2799 		      reject += LRA_MAX_REJECT;
2800 		    }
2801 		}
2802 
2803 	      if (! (MEM_P (op) && offmemok)
2804 		  && ! (const_to_mem && constmemok))
2805 		{
2806 		  /* We prefer to reload pseudos over reloading other
2807 		     things, since such reloads may be able to be
2808 		     eliminated later.  So bump REJECT in other cases.
2809 		     Don't do this in the case where we are forcing a
2810 		     constant into memory and it will then win since
2811 		     we don't want to have a different alternative
2812 		     match then.  */
2813 		  if (! (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2814 		    {
2815 		      if (lra_dump_file != NULL)
2816 			fprintf
2817 			  (lra_dump_file,
2818 			   "            %d Non-pseudo reload: reject+=2\n",
2819 			   nop);
2820 		      reject += 2;
2821 		    }
2822 
2823 		  if (! no_regs_p)
2824 		    reload_nregs
2825 		      += ira_reg_class_max_nregs[this_alternative][mode];
2826 
2827 		  if (SMALL_REGISTER_CLASS_P (this_alternative))
2828 		    {
2829 		      if (lra_dump_file != NULL)
2830 			fprintf
2831 			  (lra_dump_file,
2832 			   "            %d Small class reload: reject+=%d\n",
2833 			   nop, LRA_LOSER_COST_FACTOR / 2);
2834 		      reject += LRA_LOSER_COST_FACTOR / 2;
2835 		    }
2836 		}
2837 
2838 	      /* We are trying to spill pseudo into memory.  It is
2839 		 usually more costly than moving to a hard register
2840 		 although it might takes the same number of
2841 		 reloads.
2842 
2843 		 Non-pseudo spill may happen also.  Suppose a target allows both
2844 		 register and memory in the operand constraint alternatives,
2845 		 then it's typical that an eliminable register has a substition
2846 		 of "base + offset" which can either be reloaded by a simple
2847 		 "new_reg <= base + offset" which will match the register
2848 		 constraint, or a similar reg addition followed by further spill
2849 		 to and reload from memory which will match the memory
2850 		 constraint, but this memory spill will be much more costly
2851 		 usually.
2852 
2853 		 Code below increases the reject for both pseudo and non-pseudo
2854 		 spill.  */
2855 	      if (no_regs_p
2856 		  && !(MEM_P (op) && offmemok)
2857 		  && !(REG_P (op) && hard_regno[nop] < 0))
2858 		{
2859 		  if (lra_dump_file != NULL)
2860 		    fprintf
2861 		      (lra_dump_file,
2862 		       "            %d Spill %spseudo into memory: reject+=3\n",
2863 		       nop, REG_P (op) ? "" : "Non-");
2864 		  reject += 3;
2865 		  if (VECTOR_MODE_P (mode))
2866 		    {
2867 		      /* Spilling vectors into memory is usually more
2868 			 costly as they contain big values.  */
2869 		      if (lra_dump_file != NULL)
2870 			fprintf
2871 			  (lra_dump_file,
2872 			   "            %d Spill vector pseudo: reject+=2\n",
2873 			   nop);
2874 		      reject += 2;
2875 		    }
2876 		}
2877 
2878 	      /* When we use an operand requiring memory in given
2879 		 alternative, the insn should write *and* read the
2880 		 value to/from memory it is costly in comparison with
2881 		 an insn alternative which does not use memory
2882 		 (e.g. register or immediate operand).  We exclude
2883 		 memory operand for such case as we can satisfy the
2884 		 memory constraints by reloading address.  */
2885 	      if (no_regs_p && offmemok && !MEM_P (op))
2886 		{
2887 		  if (lra_dump_file != NULL)
2888 		    fprintf
2889 		      (lra_dump_file,
2890 		       "            Using memory insn operand %d: reject+=3\n",
2891 		       nop);
2892 		  reject += 3;
2893 		}
2894 
2895 	      /* If reload requires moving value through secondary
2896 		 memory, it will need one more insn at least.  */
2897 	      if (this_alternative != NO_REGS
2898 		  && REG_P (op) && (cl = get_reg_class (REGNO (op))) != NO_REGS
2899 		  && ((curr_static_id->operand[nop].type != OP_OUT
2900 		       && targetm.secondary_memory_needed (GET_MODE (op), cl,
2901 							   this_alternative))
2902 		      || (curr_static_id->operand[nop].type != OP_IN
2903 			  && (targetm.secondary_memory_needed
2904 			      (GET_MODE (op), this_alternative, cl)))))
2905 		losers++;
2906 
2907 	      if (MEM_P (op) && offmemok)
2908 		addr_losers++;
2909 	      else
2910 		{
2911 		  /* Input reloads can be inherited more often than
2912 		     output reloads can be removed, so penalize output
2913 		     reloads.  */
2914 		  if (!REG_P (op) || curr_static_id->operand[nop].type != OP_IN)
2915 		    {
2916 		      if (lra_dump_file != NULL)
2917 			fprintf
2918 			  (lra_dump_file,
2919 			   "            %d Non input pseudo reload: reject++\n",
2920 			   nop);
2921 		      reject++;
2922 		    }
2923 
2924 		  if (curr_static_id->operand[nop].type == OP_INOUT)
2925 		    {
2926 		      if (lra_dump_file != NULL)
2927 			fprintf
2928 			  (lra_dump_file,
2929 			   "            %d Input/Output reload: reject+=%d\n",
2930 			   nop, LRA_LOSER_COST_FACTOR);
2931 		      reject += LRA_LOSER_COST_FACTOR;
2932 		    }
2933 		}
2934 	    }
2935 
2936 	  if (early_clobber_p && ! scratch_p)
2937 	    {
2938 	      if (lra_dump_file != NULL)
2939 		fprintf (lra_dump_file,
2940 			 "            %d Early clobber: reject++\n", nop);
2941 	      reject++;
2942 	    }
2943 	  /* ??? We check early clobbers after processing all operands
2944 	     (see loop below) and there we update the costs more.
2945 	     Should we update the cost (may be approximately) here
2946 	     because of early clobber register reloads or it is a rare
2947 	     or non-important thing to be worth to do it.  */
2948 	  overall = (losers * LRA_LOSER_COST_FACTOR + reject
2949 		     - (addr_losers == losers ? static_reject : 0));
2950 	  if ((best_losers == 0 || losers != 0) && best_overall < overall)
2951             {
2952               if (lra_dump_file != NULL)
2953 		fprintf (lra_dump_file,
2954 			 "            alt=%d,overall=%d,losers=%d -- refuse\n",
2955 			 nalt, overall, losers);
2956               goto fail;
2957             }
2958 
2959 	  if (update_and_check_small_class_inputs (nop, nalt,
2960 						   this_alternative))
2961 	    {
2962 	      if (lra_dump_file != NULL)
2963 		fprintf (lra_dump_file,
2964 			 "            alt=%d, not enough small class regs -- refuse\n",
2965 			 nalt);
2966 	      goto fail;
2967 	    }
2968 	  curr_alt[nop] = this_alternative;
2969 	  curr_alt_set[nop] = this_alternative_set;
2970 	  curr_alt_win[nop] = this_alternative_win;
2971 	  curr_alt_match_win[nop] = this_alternative_match_win;
2972 	  curr_alt_offmemok[nop] = this_alternative_offmemok;
2973 	  curr_alt_matches[nop] = this_alternative_matches;
2974 
2975 	  if (this_alternative_matches >= 0
2976 	      && !did_match && !this_alternative_win)
2977 	    curr_alt_win[this_alternative_matches] = false;
2978 
2979 	  if (early_clobber_p && operand_reg[nop] != NULL_RTX)
2980 	    early_clobbered_nops[early_clobbered_regs_num++] = nop;
2981 	}
2982 
2983       if (curr_insn_set != NULL_RTX && n_operands == 2
2984 	  /* Prevent processing non-move insns.  */
2985 	  && (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
2986 	      || SET_SRC (curr_insn_set) == no_subreg_reg_operand[1])
2987 	  && ((! curr_alt_win[0] && ! curr_alt_win[1]
2988 	       && REG_P (no_subreg_reg_operand[0])
2989 	       && REG_P (no_subreg_reg_operand[1])
2990 	       && (reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
2991 		   || reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0])))
2992 	      || (! curr_alt_win[0] && curr_alt_win[1]
2993 		  && REG_P (no_subreg_reg_operand[1])
2994 		  /* Check that we reload memory not the memory
2995 		     address.  */
2996 		  && ! (curr_alt_offmemok[0]
2997 			&& MEM_P (no_subreg_reg_operand[0]))
2998 		  && reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0]))
2999 	      || (curr_alt_win[0] && ! curr_alt_win[1]
3000 		  && REG_P (no_subreg_reg_operand[0])
3001 		  /* Check that we reload memory not the memory
3002 		     address.  */
3003 		  && ! (curr_alt_offmemok[1]
3004 			&& MEM_P (no_subreg_reg_operand[1]))
3005 		  && reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
3006 		  && (! CONST_POOL_OK_P (curr_operand_mode[1],
3007 					 no_subreg_reg_operand[1])
3008 		      || (targetm.preferred_reload_class
3009 			  (no_subreg_reg_operand[1],
3010 			   (enum reg_class) curr_alt[1]) != NO_REGS))
3011 		  /* If it is a result of recent elimination in move
3012 		     insn we can transform it into an add still by
3013 		     using this alternative.  */
3014 		  && GET_CODE (no_subreg_reg_operand[1]) != PLUS
3015 		  /* Likewise if the source has been replaced with an
3016 		     equivalent value.  This only happens once -- the reload
3017 		     will use the equivalent value instead of the register it
3018 		     replaces -- so there should be no danger of cycling.  */
3019 		  && !equiv_substition_p[1])))
3020 	{
3021 	  /* We have a move insn and a new reload insn will be similar
3022 	     to the current insn.  We should avoid such situation as
3023 	     it results in LRA cycling.  */
3024 	  if (lra_dump_file != NULL)
3025 	    fprintf (lra_dump_file,
3026 		     "            Cycle danger: overall += LRA_MAX_REJECT\n");
3027 	  overall += LRA_MAX_REJECT;
3028 	}
3029       ok_p = true;
3030       curr_alt_dont_inherit_ops_num = 0;
3031       for (nop = 0; nop < early_clobbered_regs_num; nop++)
3032 	{
3033 	  int i, j, clobbered_hard_regno, first_conflict_j, last_conflict_j;
3034 	  HARD_REG_SET temp_set;
3035 
3036 	  i = early_clobbered_nops[nop];
3037 	  if ((! curr_alt_win[i] && ! curr_alt_match_win[i])
3038 	      || hard_regno[i] < 0)
3039 	    continue;
3040 	  lra_assert (operand_reg[i] != NULL_RTX);
3041 	  clobbered_hard_regno = hard_regno[i];
3042 	  CLEAR_HARD_REG_SET (temp_set);
3043 	  add_to_hard_reg_set (&temp_set, biggest_mode[i], clobbered_hard_regno);
3044 	  first_conflict_j = last_conflict_j = -1;
3045 	  for (j = 0; j < n_operands; j++)
3046 	    if (j == i
3047 		/* We don't want process insides of match_operator and
3048 		   match_parallel because otherwise we would process
3049 		   their operands once again generating a wrong
3050 		   code.  */
3051 		|| curr_static_id->operand[j].is_operator)
3052 	      continue;
3053 	    else if ((curr_alt_matches[j] == i && curr_alt_match_win[j])
3054 		     || (curr_alt_matches[i] == j && curr_alt_match_win[i]))
3055 	      continue;
3056 	    /* If we don't reload j-th operand, check conflicts.  */
3057 	    else if ((curr_alt_win[j] || curr_alt_match_win[j])
3058 		     && uses_hard_regs_p (*curr_id->operand_loc[j], temp_set))
3059 	      {
3060 		if (first_conflict_j < 0)
3061 		  first_conflict_j = j;
3062 		last_conflict_j = j;
3063 		/* Both the earlyclobber operand and conflicting operand
3064 		   cannot both be user defined hard registers.  */
3065 		if (HARD_REGISTER_P (operand_reg[i])
3066 		    && REG_USERVAR_P (operand_reg[i])
3067 		    && operand_reg[j] != NULL_RTX
3068 		    && HARD_REGISTER_P (operand_reg[j])
3069 		    && REG_USERVAR_P (operand_reg[j]))
3070 		  {
3071 		    /* For asm, let curr_insn_transform diagnose it.  */
3072 		    if (INSN_CODE (curr_insn) < 0)
3073 		      return false;
3074 		    fatal_insn ("unable to generate reloads for "
3075 				"impossible constraints:", curr_insn);
3076 		  }
3077 	      }
3078 	  if (last_conflict_j < 0)
3079 	    continue;
3080 
3081 	  /* If an earlyclobber operand conflicts with another non-matching
3082 	     operand (ie, they have been assigned the same hard register),
3083 	     then it is better to reload the other operand, as there may
3084 	     exist yet another operand with a matching constraint associated
3085 	     with the earlyclobber operand.  However, if one of the operands
3086 	     is an explicit use of a hard register, then we must reload the
3087 	     other non-hard register operand.  */
3088 	  if (HARD_REGISTER_P (operand_reg[i])
3089 	      || (first_conflict_j == last_conflict_j
3090 		  && operand_reg[last_conflict_j] != NULL_RTX
3091 		  && !curr_alt_match_win[last_conflict_j]
3092 		  && !HARD_REGISTER_P (operand_reg[last_conflict_j])))
3093 	    {
3094 	      curr_alt_win[last_conflict_j] = false;
3095 	      curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++]
3096 		= last_conflict_j;
3097 	      losers++;
3098 	      if (lra_dump_file != NULL)
3099 		fprintf
3100 		  (lra_dump_file,
3101 		   "            %d Conflict early clobber reload: reject--\n",
3102 		   i);
3103 	    }
3104 	  else
3105 	    {
3106 	      /* We need to reload early clobbered register and the
3107 		 matched registers.  */
3108 	      for (j = 0; j < n_operands; j++)
3109 		if (curr_alt_matches[j] == i)
3110 		  {
3111 		    curr_alt_match_win[j] = false;
3112 		    losers++;
3113 		    overall += LRA_LOSER_COST_FACTOR;
3114 		  }
3115 	      if (! curr_alt_match_win[i])
3116 		curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++] = i;
3117 	      else
3118 		{
3119 		  /* Remember pseudos used for match reloads are never
3120 		     inherited.  */
3121 		  lra_assert (curr_alt_matches[i] >= 0);
3122 		  curr_alt_win[curr_alt_matches[i]] = false;
3123 		}
3124 	      curr_alt_win[i] = curr_alt_match_win[i] = false;
3125 	      losers++;
3126 	      if (lra_dump_file != NULL)
3127 		fprintf
3128 		  (lra_dump_file,
3129 		   "            %d Matched conflict early clobber reloads: "
3130 		   "reject--\n",
3131 		   i);
3132 	    }
3133 	  /* Early clobber was already reflected in REJECT. */
3134 	  if (!matching_early_clobber[i])
3135 	    {
3136 	      lra_assert (reject > 0);
3137 	      reject--;
3138 	      matching_early_clobber[i] = 1;
3139 	    }
3140 	  overall += LRA_LOSER_COST_FACTOR - 1;
3141 	}
3142       if (lra_dump_file != NULL)
3143 	fprintf (lra_dump_file, "          alt=%d,overall=%d,losers=%d,rld_nregs=%d\n",
3144 		 nalt, overall, losers, reload_nregs);
3145 
3146       /* If this alternative can be made to work by reloading, and it
3147 	 needs less reloading than the others checked so far, record
3148 	 it as the chosen goal for reloading.  */
3149       if ((best_losers != 0 && losers == 0)
3150 	  || (((best_losers == 0 && losers == 0)
3151 	       || (best_losers != 0 && losers != 0))
3152 	      && (best_overall > overall
3153 		  || (best_overall == overall
3154 		      /* If the cost of the reloads is the same,
3155 			 prefer alternative which requires minimal
3156 			 number of reload regs.  */
3157 		      && (reload_nregs < best_reload_nregs
3158 			  || (reload_nregs == best_reload_nregs
3159 			      && (best_reload_sum < reload_sum
3160 				  || (best_reload_sum == reload_sum
3161 				      && nalt < goal_alt_number))))))))
3162 	{
3163 	  for (nop = 0; nop < n_operands; nop++)
3164 	    {
3165 	      goal_alt_win[nop] = curr_alt_win[nop];
3166 	      goal_alt_match_win[nop] = curr_alt_match_win[nop];
3167 	      goal_alt_matches[nop] = curr_alt_matches[nop];
3168 	      goal_alt[nop] = curr_alt[nop];
3169 	      goal_alt_offmemok[nop] = curr_alt_offmemok[nop];
3170 	    }
3171 	  goal_alt_dont_inherit_ops_num = curr_alt_dont_inherit_ops_num;
3172 	  for (nop = 0; nop < curr_alt_dont_inherit_ops_num; nop++)
3173 	    goal_alt_dont_inherit_ops[nop] = curr_alt_dont_inherit_ops[nop];
3174 	  goal_alt_swapped = curr_swapped;
3175 	  best_overall = overall;
3176 	  best_losers = losers;
3177 	  best_reload_nregs = reload_nregs;
3178 	  best_reload_sum = reload_sum;
3179 	  goal_alt_number = nalt;
3180 	}
3181       if (losers == 0)
3182 	/* Everything is satisfied.  Do not process alternatives
3183 	   anymore.  */
3184 	break;
3185     fail:
3186       ;
3187     }
3188   return ok_p;
3189 }
3190 
/* Make reload base reg from address AD.  Return the new address
   NEW_REG + DISP_TERM on success, or NULL_RTX if the resulting address
   is not valid or the insn loading the base into NEW_REG is not
   recognized by the backend (in which case the insns emitted here are
   deleted again).  */
static rtx
base_to_reg (struct address_info *ad)
{
  enum reg_class cl;
  int code = -1;
  rtx new_inner = NULL_RTX;
  rtx new_reg = NULL_RTX;
  rtx_insn *insn;
  /* Remember the emission point so we can roll back on failure.  */
  rtx_insn *last_insn = get_last_insn();

  /* Only a plain, unwrapped displacement term is supported here.  */
  lra_assert (ad->disp == ad->disp_term);
  /* Class of registers usable as a base register in this address
     context.  */
  cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
                       get_index_code (ad));
  /* Fresh reload pseudo that will hold the reloaded base.  */
  new_reg = lra_create_new_reg (GET_MODE (*ad->base), NULL_RTX,
                                cl, "base");
  /* Candidate replacement address: NEW_REG plus the original
     displacement (or just NEW_REG when there is no displacement).  */
  new_inner = simplify_gen_binary (PLUS, GET_MODE (new_reg), new_reg,
                                   ad->disp_term == NULL
                                   ? const0_rtx
                                   : *ad->disp_term);
  if (!valid_address_p (ad->mode, new_inner, ad->as))
    return NULL_RTX;
  /* Emit NEW_REG = old base and check the copy is a recognizable
     insn; otherwise undo everything emitted since LAST_INSN.  */
  insn = emit_insn (gen_rtx_SET (new_reg, *ad->base));
  code = recog_memoized (insn);
  if (code < 0)
    {
      delete_insns_since (last_insn);
      return NULL_RTX;
    }

  return new_inner;
}
3223 
3224 /* Make reload base reg + DISP from address AD.  Return the new pseudo.  */
3225 static rtx
base_plus_disp_to_reg(struct address_info * ad,rtx disp)3226 base_plus_disp_to_reg (struct address_info *ad, rtx disp)
3227 {
3228   enum reg_class cl;
3229   rtx new_reg;
3230 
3231   lra_assert (ad->base == ad->base_term);
3232   cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
3233 		       get_index_code (ad));
3234   new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX,
3235 				cl, "base + disp");
3236   lra_emit_add (new_reg, *ad->base_term, disp);
3237   return new_reg;
3238 }
3239 
3240 /* Make reload of index part of address AD.  Return the new
3241    pseudo.  */
3242 static rtx
index_part_to_reg(struct address_info * ad)3243 index_part_to_reg (struct address_info *ad)
3244 {
3245   rtx new_reg;
3246 
3247   new_reg = lra_create_new_reg (GET_MODE (*ad->index), NULL_RTX,
3248 				INDEX_REG_CLASS, "index term");
3249   expand_mult (GET_MODE (*ad->index), *ad->index_term,
3250 	       GEN_INT (get_index_scale (ad)), new_reg, 1);
3251   return new_reg;
3252 }
3253 
3254 /* Return true if we can add a displacement to address AD, even if that
3255    makes the address invalid.  The fix-up code requires any new address
3256    to be the sum of the BASE_TERM, INDEX and DISP_TERM fields.  */
3257 static bool
can_add_disp_p(struct address_info * ad)3258 can_add_disp_p (struct address_info *ad)
3259 {
3260   return (!ad->autoinc_p
3261 	  && ad->segment == NULL
3262 	  && ad->base == ad->base_term
3263 	  && ad->disp == ad->disp_term);
3264 }
3265 
/* Make equiv substitution in address AD.  Return true if a substitution
   was made.  */
static bool
equiv_address_substitution (struct address_info *ad)
{
  rtx base_reg, new_base_reg, index_reg, new_index_reg, *base_term, *index_term;
  /* Constant displacement accumulated from reg+offset equivalences.  */
  poly_int64 disp;
  HOST_WIDE_INT scale;
  bool change_p;

  /* Look through any SUBREG wrapping the base term to reach the
     register, and find its equivalence (after elimination).  */
  base_term = strip_subreg (ad->base_term);
  if (base_term == NULL)
    base_reg = new_base_reg = NULL_RTX;
  else
    {
      base_reg = *base_term;
      new_base_reg = get_equiv_with_elimination (base_reg, curr_insn);
    }
  /* Likewise for the index term.  */
  index_term = strip_subreg (ad->index_term);
  if (index_term == NULL)
    index_reg = new_index_reg = NULL_RTX;
  else
    {
      index_reg = *index_term;
      new_index_reg = get_equiv_with_elimination (index_reg, curr_insn);
    }
  /* Nothing to substitute if neither term changed.  */
  if (base_reg == new_base_reg && index_reg == new_index_reg)
    return false;
  disp = 0;
  change_p = false;
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, "Changing address in insn %d ",
	       INSN_UID (curr_insn));
      dump_value_slim (lra_dump_file, *ad->outer, 1);
    }
  if (base_reg != new_base_reg)
    {
      poly_int64 offset;
      if (REG_P (new_base_reg))
	{
	  /* Plain register equivalence: substitute it directly.  */
	  *base_term = new_base_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_base_reg) == PLUS
	       && REG_P (XEXP (new_base_reg, 0))
	       && poly_int_rtx_p (XEXP (new_base_reg, 1), &offset)
	       && can_add_disp_p (ad))
	{
	  /* reg + constant equivalence: use the register as the base
	     and fold the constant into the accumulated displacement.  */
	  disp += offset;
	  *base_term = XEXP (new_base_reg, 0);
	  change_p = true;
	}
      /* Keep a duplicated base term (if any) in sync.  */
      if (ad->base_term2 != NULL)
	*ad->base_term2 = *ad->base_term;
    }
  if (index_reg != new_index_reg)
    {
      poly_int64 offset;
      if (REG_P (new_index_reg))
	{
	  /* Plain register equivalence: substitute it directly.  */
	  *index_term = new_index_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_index_reg) == PLUS
	       && REG_P (XEXP (new_index_reg, 0))
	       && poly_int_rtx_p (XEXP (new_index_reg, 1), &offset)
	       && can_add_disp_p (ad)
	       && (scale = get_index_scale (ad)))
	{
	  /* reg + constant equivalence for the index: the constant
	     contributes offset * scale to the displacement.  */
	  disp += offset * scale;
	  *index_term = XEXP (new_index_reg, 0);
	  change_p = true;
	}
    }
  if (maybe_ne (disp, 0))
    {
      /* Merge the accumulated displacement into the existing one, or
	 graft it onto the inner address and re-decompose it.  */
      if (ad->disp != NULL)
	*ad->disp = plus_constant (GET_MODE (*ad->inner), *ad->disp, disp);
      else
	{
	  *ad->inner = plus_constant (GET_MODE (*ad->inner), *ad->inner, disp);
	  update_address (ad);
	}
      change_p = true;
    }
  if (lra_dump_file != NULL)
    {
      if (! change_p)
	fprintf (lra_dump_file, " -- no change\n");
      else
	{
	  fprintf (lra_dump_file, " on equiv ");
	  dump_value_slim (lra_dump_file, *ad->outer, 1);
	  fprintf (lra_dump_file, "\n");
	}
    }
  return change_p;
}
3365 
/* Major function to make reloads for an address in operand NOP or
   check its correctness (if CHECK_ONLY_P is true).  The supported
   cases are:

   1) an address that existed before LRA started, at which point it
   must have been valid.  These addresses are subject to elimination
   and may have become invalid due to the elimination offset being out
   of range.

   2) an address created by forcing a constant to memory
   (force_const_to_mem).  The initial form of these addresses might
   not be valid, and it is this function's job to make them valid.

   3) a frame address formed from a register and a (possibly zero)
   constant offset.  As above, these addresses might not be valid and
   this function must make them so.

   Add reloads to the lists *BEFORE and *AFTER.  We might need to add
   reloads to *AFTER because of inc/dec, {pre, post} modify in the
   address.  Return true for any RTL change.

   The function is a helper function which does not produce all
   transformations (when CHECK_ONLY_P is false) which can be
   necessary.  It does just basic steps.  To do all necessary
   transformations use function process_address.  */
static bool
process_address_1 (int nop, bool check_only_p,
		   rtx_insn **before, rtx_insn **after)
{
  struct address_info ad;
  rtx new_reg;
  HOST_WIDE_INT scale;
  rtx op = *curr_id->operand_loc[nop];
  const char *constraint = curr_static_id->operand[nop].constraint;
  enum constraint_num cn = lookup_constraint (constraint);
  bool change_p = false;

  /* A BLKmode MEM with a SCRATCH address is a placeholder and needs
     no address processing.  */
  if (MEM_P (op)
      && GET_MODE (op) == BLKmode
      && GET_CODE (XEXP (op, 0)) == SCRATCH)
    return false;

  if (insn_extra_address_constraint (cn)
      /* When we find an asm operand with an address constraint that
	 doesn't satisfy address_operand to begin with, we clear
	 is_address, so that we don't try to make a non-address fit.
	 If the asm statement got this far, it's because other
	 constraints are available, and we'll use them, disregarding
	 the unsatisfiable address ones.  */
      && curr_static_id->operand[nop].is_address)
    decompose_lea_address (&ad, curr_id->operand_loc[nop]);
  /* Do not attempt to decompose arbitrary addresses generated by combine
     for asm operands with loose constraints, e.g 'X'.  */
  else if (MEM_P (op)
	   && !(INSN_CODE (curr_insn) < 0
		&& get_constraint_type (cn) == CT_FIXED_FORM
	        && constraint_satisfied_p (op, cn)))
    decompose_mem_address (&ad, op);
  else if (GET_CODE (op) == SUBREG
	   && MEM_P (SUBREG_REG (op)))
    decompose_mem_address (&ad, SUBREG_REG (op));
  else
    return false;
  /* If INDEX_REG_CLASS is assigned to base_term already and isn't to
     index_term, swap them so to avoid assigning INDEX_REG_CLASS to both
     when INDEX_REG_CLASS is a single register class.  */
  if (ad.base_term != NULL
      && ad.index_term != NULL
      && ira_class_hard_regs_num[INDEX_REG_CLASS] == 1
      && REG_P (*ad.base_term)
      && REG_P (*ad.index_term)
      && in_class_p (*ad.base_term, INDEX_REG_CLASS, NULL)
      && ! in_class_p (*ad.index_term, INDEX_REG_CLASS, NULL))
    {
      std::swap (ad.base, ad.index);
      std::swap (ad.base_term, ad.index_term);
    }
  if (! check_only_p)
    change_p = equiv_address_substitution (&ad);
  /* Reload the base register if necessary.  For an auto-inc address
     whose base register does not die in this insn, the increment must
     also be reloaded after the insn, hence the *AFTER list.  */
  if (ad.base_term != NULL
      && (process_addr_reg
	  (ad.base_term, check_only_p, before,
	   (ad.autoinc_p
	    && !(REG_P (*ad.base_term)
		 && find_regno_note (curr_insn, REG_DEAD,
				     REGNO (*ad.base_term)) != NULL_RTX)
	    ? after : NULL),
	   base_reg_class (ad.mode, ad.as, ad.base_outer_code,
			   get_index_code (&ad)))))
    {
      change_p = true;
      if (ad.base_term2 != NULL)
	*ad.base_term2 = *ad.base_term;
    }
  if (ad.index_term != NULL
      && process_addr_reg (ad.index_term, check_only_p,
			   before, NULL, INDEX_REG_CLASS))
    change_p = true;

  /* Target hooks sometimes don't treat extra-constraint addresses as
     legitimate address_operands, so handle them specially.  */
  if (insn_extra_address_constraint (cn)
      && satisfies_address_constraint_p (&ad, cn))
    return change_p;

  if (check_only_p)
    return change_p;

  /* There are four cases where the shape of *AD.INNER may now be invalid:

     1) the original address was valid, but either elimination or
     equiv_address_substitution was applied and that made
     the address invalid.

     2) the address is an invalid symbolic address created by
     force_const_to_mem.

     3) the address is a frame address with an invalid offset.

     4) the address is a frame address with an invalid base.

     All these cases involve a non-autoinc address, so there is no
     point revalidating other types.  */
  if (ad.autoinc_p || valid_address_p (op, &ad, cn))
    return change_p;

  /* Any index existed before LRA started, so we can assume that the
     presence and shape of the index is valid.  */
  push_to_sequence (*before);
  lra_assert (ad.disp == ad.disp_term);
  if (ad.base == NULL)
    {
      if (ad.index == NULL)
	{
	  rtx_insn *insn;
	  rtx_insn *last = get_last_insn ();
	  int code = -1;
	  enum reg_class cl = base_reg_class (ad.mode, ad.as,
					      SCRATCH, SCRATCH);
	  rtx addr = *ad.inner;

	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "addr");
	  if (HAVE_lo_sum)
	    {
	      /* addr => lo_sum (new_base, addr), case (2) above.  */
	      insn = emit_insn (gen_rtx_SET
				(new_reg,
				 gen_rtx_HIGH (Pmode, copy_rtx (addr))));
	      code = recog_memoized (insn);
	      if (code >= 0)
		{
		  *ad.inner = gen_rtx_LO_SUM (Pmode, new_reg, addr);
		  if (!valid_address_p (op, &ad, cn))
		    {
		      /* Try to put lo_sum into register.  */
		      insn = emit_insn (gen_rtx_SET
					(new_reg,
					 gen_rtx_LO_SUM (Pmode, new_reg, addr)));
		      code = recog_memoized (insn);
		      if (code >= 0)
			{
			  *ad.inner = new_reg;
			  if (!valid_address_p (op, &ad, cn))
			    {
			      /* Neither lo_sum form worked: restore
				 the original address and fall through
				 to the plain-move fallback below.  */
			      *ad.inner = addr;
			      code = -1;
			    }
			}

		    }
		}
	      if (code < 0)
		delete_insns_since (last);
	    }

	  if (code < 0)
	    {
	      /* addr => new_base, case (2) above.  */
	      lra_emit_move (new_reg, addr);

	      /* Verify that every insn just emitted is recognizable.  */
	      for (insn = last == NULL_RTX ? get_insns () : NEXT_INSN (last);
		   insn != NULL_RTX;
		   insn = NEXT_INSN (insn))
		if (recog_memoized (insn) < 0)
		  break;
	      if (insn != NULL_RTX)
		{
		  /* Do nothing if we cannot generate right insns.
		     This is analogous to reload pass behavior.  */
		  delete_insns_since (last);
		  end_sequence ();
		  return false;
		}
	      *ad.inner = new_reg;
	    }
	}
      else
	{
	  /* index * scale + disp => new base + index * scale,
	     case (1) above.  */
	  enum reg_class cl = base_reg_class (ad.mode, ad.as, PLUS,
					      GET_CODE (*ad.index));

	  lra_assert (INDEX_REG_CLASS != NO_REGS);
	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "disp");
	  lra_emit_move (new_reg, *ad.disp);
	  *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
					   new_reg, *ad.index);
	}
    }
  else if (ad.index == NULL)
    {
      int regno;
      enum reg_class cl;
      rtx set;
      rtx_insn *insns, *last_insn;
      /* Try to reload base into register only if the base is invalid
         for the address but with valid offset, case (4) above.  */
      start_sequence ();
      new_reg = base_to_reg (&ad);

      /* base + disp => new base, cases (1) and (3) above.  */
      /* Another option would be to reload the displacement into an
	 index register.  However, postreload has code to optimize
	 address reloads that have the same base and different
	 displacements, so reloading into an index register would
	 not necessarily be a win.  */
      if (new_reg == NULL_RTX)
	{
	  /* See if the target can split the displacement into a
	     legitimate new displacement from a local anchor.  */
	  gcc_assert (ad.disp == ad.disp_term);
	  poly_int64 orig_offset;
	  rtx offset1, offset2;
	  if (poly_int_rtx_p (*ad.disp, &orig_offset)
	      && targetm.legitimize_address_displacement (&offset1, &offset2,
							  orig_offset,
							  ad.mode))
	    {
	      new_reg = base_plus_disp_to_reg (&ad, offset1);
	      new_reg = gen_rtx_PLUS (GET_MODE (new_reg), new_reg, offset2);
	    }
	  else
	    new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
	}
      insns = get_insns ();
      last_insn = get_last_insn ();
      /* If we generated at least two insns, try last insn source as
	 an address.  If we succeed, we generate one less insn.  */
      if (REG_P (new_reg)
	  && last_insn != insns
	  && (set = single_set (last_insn)) != NULL_RTX
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && REG_P (XEXP (SET_SRC (set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (set), 1)))
	{
	  *ad.inner = SET_SRC (set);
	  if (valid_address_p (op, &ad, cn))
	    {
	      *ad.base_term = XEXP (SET_SRC (set), 0);
	      *ad.disp_term = XEXP (SET_SRC (set), 1);
	      cl = base_reg_class (ad.mode, ad.as, ad.base_outer_code,
				   get_index_code (&ad));
	      regno = REGNO (*ad.base_term);
	      /* Make the allocno class of the base pseudo match its
		 new role as a base register.  */
	      if (regno >= FIRST_PSEUDO_REGISTER
		  && cl != lra_get_allocno_class (regno))
		lra_change_class (regno, cl, "      Change to", true);
	      new_reg = SET_SRC (set);
	      delete_insns_since (PREV_INSN (last_insn));
	    }
	}
      end_sequence ();
      emit_insn (insns);
      *ad.inner = new_reg;
    }
  else if (ad.disp_term != NULL)
    {
      /* base + scale * index + disp => new base + scale * index,
	 case (1) above.  */
      gcc_assert (ad.disp == ad.disp_term);
      new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       new_reg, *ad.index);
    }
  else if ((scale = get_index_scale (&ad)) == 1)
    {
      /* The last transformation to one reg will be made in
	 curr_insn_transform function.  */
      end_sequence ();
      return false;
    }
  else if (scale != 0)
    {
      /* base + scale * index => base + new_reg,
	 case (1) above.
      Index part of address may become invalid.  For example, we
      changed pseudo on the equivalent memory and a subreg of the
      pseudo onto the memory of different mode for which the scale is
      prohibited.  */
      new_reg = index_part_to_reg (&ad);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       *ad.base_term, new_reg);
    }
  else
    {
      enum reg_class cl = base_reg_class (ad.mode, ad.as,
					  SCRATCH, SCRATCH);
      rtx addr = *ad.inner;

      /* Fallback: move the whole address into a fresh base register.  */
      new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "addr");
      /* addr => new_base.  */
      lra_emit_move (new_reg, addr);
      *ad.inner = new_reg;
    }
  *before = get_insns ();
  end_sequence ();
  return true;
}
3684 
3685 /* If CHECK_ONLY_P is false, do address reloads until it is necessary.
3686    Use process_address_1 as a helper function.  Return true for any
3687    RTL changes.
3688 
3689    If CHECK_ONLY_P is true, just check address correctness.  Return
3690    false if the address correct.  */
3691 static bool
process_address(int nop,bool check_only_p,rtx_insn ** before,rtx_insn ** after)3692 process_address (int nop, bool check_only_p,
3693 		 rtx_insn **before, rtx_insn **after)
3694 {
3695   bool res = false;
3696 
3697   while (process_address_1 (nop, check_only_p, before, after))
3698     {
3699       if (check_only_p)
3700 	return true;
3701       res = true;
3702     }
3703   return res;
3704 }
3705 
/* Emit insns to reload VALUE into a new register.  VALUE is an
   auto-increment or auto-decrement RTX whose operand is a register or
   memory location; so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload
   value being incremented/decremented from.

   NEW_RCLASS is the register class for any pseudo created to hold the
   result.

   INC_AMOUNT is the number to increment or decrement by (always
   positive and ignored for POST_MODIFY/PRE_MODIFY).

   Return pseudo containing the result.	 */
static rtx
emit_inc (enum reg_class new_rclass, rtx in, rtx value, poly_int64 inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;
  rtx_insn *add_insn;
  int code;
  /* The actual place the incremented value is read from.  */
  rtx real_in = in == value ? incloc : in;
  rtx result;
  bool plus_p = true;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* The modify forms carry the increment explicitly as
	 (plus/minus incloc inc); extract the direction and amount.  */
      lra_assert (GET_CODE (XEXP (value, 1)) == PLUS
		  || GET_CODE (XEXP (value, 1)) == MINUS);
      lra_assert (rtx_equal_p (XEXP (XEXP (value, 1), 0), XEXP (value, 0)));
      plus_p = GET_CODE (XEXP (value, 1)) == PLUS;
      inc = XEXP (XEXP (value, 1), 1);
    }
  else
    {
      /* Plain inc/dec: negate the amount for the decrement forms.  */
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = gen_int_mode (inc_amount, GET_MODE (value));
    }

  /* For a pre-op on a register, the register itself can serve as the
     result; otherwise a fresh pseudo is needed.  */
  if (! post && REG_P (incloc))
    result = incloc;
  else
    result = lra_create_new_reg (GET_MODE (value), value, new_rclass,
				 "INC/DEC result");

  if (real_in != result)
    {
      /* First copy the location to the result register.  */
      lra_assert (REG_P (result));
      emit_insn (gen_move_insn (result, real_in));
    }

  /* We suppose that there are insns to add/sub with the constant
     increment permitted in {PRE,POST}_{DEC,INC,MODIFY}.  At least the
     old reload worked with this assumption.  If the assumption
     becomes wrong, we should use approach in function
     base_plus_disp_to_reg.  */
  if (in == value)
    {
      /* See if we can directly increment INCLOC.  */
      last = get_last_insn ();
      add_insn = emit_insn (plus_p
			    ? gen_add2_insn (incloc, inc)
			    : gen_sub2_insn (incloc, inc));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  if (! post && result != incloc)
	    emit_insn (gen_move_insn (result, incloc));
	  return result;
	}
      /* The direct add/sub was not recognizable: discard it and fall
	 through to incrementing in RESULT instead.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RESULT.
     The way we do this depends on whether this is pre- or
     post-increment.  For pre-increment, copy INCLOC to the reload
     register, increment it there, then save back.  */
  if (! post)
    {
      if (real_in != result)
	emit_insn (gen_move_insn (result, real_in));
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      if (result != incloc)
	emit_insn (gen_move_insn (incloc, result));
    }
  else
    {
      /* Post-increment.

	 Because this might be a jump insn or a compare, and because
	 RESULT may not be available after the insn in an input
	 reload, we must do the incrementing before the insn being
	 reloaded for.

	 We have already copied IN to RESULT.  Increment the copy in
	 RESULT, save that back, then decrement RESULT so it has
	 the original value.  */
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      emit_insn (gen_move_insn (incloc, result));
      /* Restore non-modified value for the result.  We prefer this
	 way because it does not require an additional hard
	 register.  */
      if (plus_p)
	{
	  poly_int64 offset;
	  if (poly_int_rtx_p (inc, &offset))
	    emit_insn (gen_add2_insn (result,
				      gen_int_mode (-offset,
						    GET_MODE (result))));
	  else
	    emit_insn (gen_sub2_insn (result, inc));
	}
      else
	emit_insn (gen_add2_insn (result, inc));
    }
  return result;
}
3834 
3835 /* Return true if the current move insn does not need processing as we
3836    already know that it satisfies its constraints.  */
3837 static bool
simple_move_p(void)3838 simple_move_p (void)
3839 {
3840   rtx dest, src;
3841   enum reg_class dclass, sclass;
3842 
3843   lra_assert (curr_insn_set != NULL_RTX);
3844   dest = SET_DEST (curr_insn_set);
3845   src = SET_SRC (curr_insn_set);
3846 
3847   /* If the instruction has multiple sets we need to process it even if it
3848      is single_set.  This can happen if one or more of the SETs are dead.
3849      See PR73650.  */
3850   if (multiple_sets (curr_insn))
3851     return false;
3852 
3853   return ((dclass = get_op_class (dest)) != NO_REGS
3854 	  && (sclass = get_op_class (src)) != NO_REGS
3855 	  /* The backend guarantees that register moves of cost 2
3856 	     never need reloads.  */
3857 	  && targetm.register_move_cost (GET_MODE (src), sclass, dclass) == 2);
3858  }
3859 
3860 /* Swap operands NOP and NOP + 1. */
3861 static inline void
swap_operands(int nop)3862 swap_operands (int nop)
3863 {
3864   std::swap (curr_operand_mode[nop], curr_operand_mode[nop + 1]);
3865   std::swap (original_subreg_reg_mode[nop], original_subreg_reg_mode[nop + 1]);
3866   std::swap (*curr_id->operand_loc[nop], *curr_id->operand_loc[nop + 1]);
3867   std::swap (equiv_substition_p[nop], equiv_substition_p[nop + 1]);
3868   /* Swap the duplicates too.  */
3869   lra_update_dup (curr_id, nop);
3870   lra_update_dup (curr_id, nop + 1);
3871 }
3872 
3873 /* Main entry point of the constraint code: search the body of the
3874    current insn to choose the best alternative.  It is mimicking insn
3875    alternative cost calculation model of former reload pass.  That is
3876    because machine descriptions were written to use this model.  This
3877    model can be changed in future.  Make commutative operand exchange
3878    if it is chosen.
3879 
3880    if CHECK_ONLY_P is false, do RTL changes to satisfy the
3881    constraints.  Return true if any change happened during function
3882    call.
3883 
3884    If CHECK_ONLY_P is true then don't do any transformation.  Just
3885    check that the insn satisfies all constraints.  If the insn does
3886    not satisfy any constraint, return true.  */
3887 static bool
curr_insn_transform(bool check_only_p)3888 curr_insn_transform (bool check_only_p)
3889 {
3890   int i, j, k;
3891   int n_operands;
3892   int n_alternatives;
3893   int n_outputs;
3894   int commutative;
3895   signed char goal_alt_matched[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
3896   signed char match_inputs[MAX_RECOG_OPERANDS + 1];
3897   signed char outputs[MAX_RECOG_OPERANDS + 1];
3898   rtx_insn *before, *after;
3899   bool alt_p = false;
3900   /* Flag that the insn has been changed through a transformation.  */
3901   bool change_p;
3902   bool sec_mem_p;
3903   bool use_sec_mem_p;
3904   int max_regno_before;
3905   int reused_alternative_num;
3906 
3907   curr_insn_set = single_set (curr_insn);
3908   if (curr_insn_set != NULL_RTX && simple_move_p ())
3909     {
3910       /* We assume that the corresponding insn alternative has no
3911 	 earlier clobbers.  If it is not the case, don't define move
3912 	 cost equal to 2 for the corresponding register classes.  */
3913       lra_set_used_insn_alternative (curr_insn, LRA_NON_CLOBBERED_ALT);
3914       return false;
3915     }
3916 
3917   no_input_reloads_p = no_output_reloads_p = false;
3918   goal_alt_number = -1;
3919   change_p = sec_mem_p = false;
3920   /* JUMP_INSNs and CALL_INSNs are not allowed to have any output
3921      reloads; neither are insns that SET cc0.  Insns that use CC0 are
3922      not allowed to have any input reloads.  */
3923   if (JUMP_P (curr_insn) || CALL_P (curr_insn))
3924     no_output_reloads_p = true;
3925 
3926   if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (curr_insn)))
3927     no_input_reloads_p = true;
3928   if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (curr_insn)))
3929     no_output_reloads_p = true;
3930 
3931   n_operands = curr_static_id->n_operands;
3932   n_alternatives = curr_static_id->n_alternatives;
3933 
3934   /* Just return "no reloads" if insn has no operands with
3935      constraints.  */
3936   if (n_operands == 0 || n_alternatives == 0)
3937     return false;
3938 
3939   max_regno_before = max_reg_num ();
3940 
3941   for (i = 0; i < n_operands; i++)
3942     {
3943       goal_alt_matched[i][0] = -1;
3944       goal_alt_matches[i] = -1;
3945     }
3946 
3947   commutative = curr_static_id->commutative;
3948 
3949   /* Now see what we need for pseudos that didn't get hard regs or got
3950      the wrong kind of hard reg.  For this, we must consider all the
3951      operands together against the register constraints.  */
3952 
3953   best_losers = best_overall = INT_MAX;
3954   best_reload_sum = 0;
3955 
3956   curr_swapped = false;
3957   goal_alt_swapped = false;
3958 
3959   if (! check_only_p)
3960     /* Make equivalence substitution and memory subreg elimination
3961        before address processing because an address legitimacy can
3962        depend on memory mode.  */
3963     for (i = 0; i < n_operands; i++)
3964       {
3965 	rtx op, subst, old;
3966 	bool op_change_p = false;
3967 
3968 	if (curr_static_id->operand[i].is_operator)
3969 	  continue;
3970 
3971 	old = op = *curr_id->operand_loc[i];
3972 	if (GET_CODE (old) == SUBREG)
3973 	  old = SUBREG_REG (old);
3974 	subst = get_equiv_with_elimination (old, curr_insn);
3975 	original_subreg_reg_mode[i] = VOIDmode;
3976 	equiv_substition_p[i] = false;
3977 	if (subst != old)
3978 	  {
3979 	    equiv_substition_p[i] = true;
3980 	    subst = copy_rtx (subst);
3981 	    lra_assert (REG_P (old));
3982 	    if (GET_CODE (op) != SUBREG)
3983 	      *curr_id->operand_loc[i] = subst;
3984 	    else
3985 	      {
3986 		SUBREG_REG (op) = subst;
3987 		if (GET_MODE (subst) == VOIDmode)
3988 		  original_subreg_reg_mode[i] = GET_MODE (old);
3989 	      }
3990 	    if (lra_dump_file != NULL)
3991 	      {
3992 		fprintf (lra_dump_file,
3993 			 "Changing pseudo %d in operand %i of insn %u on equiv ",
3994 			 REGNO (old), i, INSN_UID (curr_insn));
3995 		dump_value_slim (lra_dump_file, subst, 1);
3996 		fprintf (lra_dump_file, "\n");
3997 	      }
3998 	    op_change_p = change_p = true;
3999 	  }
4000 	if (simplify_operand_subreg (i, GET_MODE (old)) || op_change_p)
4001 	  {
4002 	    change_p = true;
4003 	    lra_update_dup (curr_id, i);
4004 	  }
4005       }
4006 
4007   /* Reload address registers and displacements.  We do it before
4008      finding an alternative because of memory constraints.  */
4009   before = after = NULL;
4010   for (i = 0; i < n_operands; i++)
4011     if (! curr_static_id->operand[i].is_operator
4012 	&& process_address (i, check_only_p, &before, &after))
4013       {
4014 	if (check_only_p)
4015 	  return true;
4016 	change_p = true;
4017 	lra_update_dup (curr_id, i);
4018       }
4019 
4020   if (change_p)
4021     /* If we've changed the instruction then any alternative that
4022        we chose previously may no longer be valid.  */
4023     lra_set_used_insn_alternative (curr_insn, LRA_UNKNOWN_ALT);
4024 
4025   if (! check_only_p && curr_insn_set != NULL_RTX
4026       && check_and_process_move (&change_p, &sec_mem_p))
4027     return change_p;
4028 
4029  try_swapped:
4030 
4031   reused_alternative_num = check_only_p ? LRA_UNKNOWN_ALT : curr_id->used_insn_alternative;
4032   if (lra_dump_file != NULL && reused_alternative_num >= 0)
4033     fprintf (lra_dump_file, "Reusing alternative %d for insn #%u\n",
4034 	     reused_alternative_num, INSN_UID (curr_insn));
4035 
4036   if (process_alt_operands (reused_alternative_num))
4037     alt_p = true;
4038 
4039   if (check_only_p)
4040     return ! alt_p || best_losers != 0;
4041 
4042   /* If insn is commutative (it's safe to exchange a certain pair of
4043      operands) then we need to try each alternative twice, the second
4044      time matching those two operands as if we had exchanged them.  To
4045      do this, really exchange them in operands.
4046 
4047      If we have just tried the alternatives the second time, return
4048      operands to normal and drop through.  */
4049 
4050   if (reused_alternative_num < 0 && commutative >= 0)
4051     {
4052       curr_swapped = !curr_swapped;
4053       if (curr_swapped)
4054 	{
4055 	  swap_operands (commutative);
4056 	  goto try_swapped;
4057 	}
4058       else
4059 	swap_operands (commutative);
4060     }
4061 
4062   if (! alt_p && ! sec_mem_p)
4063     {
4064       /* No alternative works with reloads??  */
4065       if (INSN_CODE (curr_insn) >= 0)
4066 	fatal_insn ("unable to generate reloads for:", curr_insn);
4067       error_for_asm (curr_insn,
4068 		     "inconsistent operand constraints in an %<asm%>");
4069       lra_asm_error_p = true;
4070       /* Avoid further trouble with this insn.  Don't generate use
4071 	 pattern here as we could use the insn SP offset.  */
4072       lra_set_insn_deleted (curr_insn);
4073       return true;
4074     }
4075 
4076   /* If the best alternative is with operands 1 and 2 swapped, swap
4077      them.  Update the operand numbers of any reloads already
4078      pushed.  */
4079 
4080   if (goal_alt_swapped)
4081     {
4082       if (lra_dump_file != NULL)
4083 	fprintf (lra_dump_file, "  Commutative operand exchange in insn %u\n",
4084 		 INSN_UID (curr_insn));
4085 
4086       /* Swap the duplicates too.  */
4087       swap_operands (commutative);
4088       change_p = true;
4089     }
4090 
4091   /* Some targets' TARGET_SECONDARY_MEMORY_NEEDED (e.g. x86) are defined
4092      too conservatively.  So we use the secondary memory only if there
4093      is no any alternative without reloads.  */
4094   use_sec_mem_p = false;
4095   if (! alt_p)
4096     use_sec_mem_p = true;
4097   else if (sec_mem_p)
4098     {
4099       for (i = 0; i < n_operands; i++)
4100 	if (! goal_alt_win[i] && ! goal_alt_match_win[i])
4101 	  break;
4102       use_sec_mem_p = i < n_operands;
4103     }
4104 
4105   if (use_sec_mem_p)
4106     {
4107       int in = -1, out = -1;
4108       rtx new_reg, src, dest, rld;
4109       machine_mode sec_mode, rld_mode;
4110 
4111       lra_assert (curr_insn_set != NULL_RTX && sec_mem_p);
4112       dest = SET_DEST (curr_insn_set);
4113       src = SET_SRC (curr_insn_set);
4114       for (i = 0; i < n_operands; i++)
4115 	if (*curr_id->operand_loc[i] == dest)
4116 	  out = i;
4117 	else if (*curr_id->operand_loc[i] == src)
4118 	  in = i;
4119       for (i = 0; i < curr_static_id->n_dups; i++)
4120 	if (out < 0 && *curr_id->dup_loc[i] == dest)
4121 	  out = curr_static_id->dup_num[i];
4122 	else if (in < 0 && *curr_id->dup_loc[i] == src)
4123 	  in = curr_static_id->dup_num[i];
4124       lra_assert (out >= 0 && in >= 0
4125 		  && curr_static_id->operand[out].type == OP_OUT
4126 		  && curr_static_id->operand[in].type == OP_IN);
4127       rld = partial_subreg_p (GET_MODE (src), GET_MODE (dest)) ? src : dest;
4128       rld_mode = GET_MODE (rld);
4129       sec_mode = targetm.secondary_memory_needed_mode (rld_mode);
4130       new_reg = lra_create_new_reg (sec_mode, NULL_RTX,
4131 				    NO_REGS, "secondary");
4132       /* If the mode is changed, it should be wider.  */
4133       lra_assert (!partial_subreg_p (sec_mode, rld_mode));
4134       if (sec_mode != rld_mode)
4135         {
4136 	  /* If the target says specifically to use another mode for
4137 	     secondary memory moves we cannot reuse the original
4138 	     insn.  */
4139 	  after = emit_spill_move (false, new_reg, dest);
4140 	  lra_process_new_insns (curr_insn, NULL, after,
4141 				 "Inserting the sec. move");
4142 	  /* We may have non null BEFORE here (e.g. after address
4143 	     processing.  */
4144 	  push_to_sequence (before);
4145 	  before = emit_spill_move (true, new_reg, src);
4146 	  emit_insn (before);
4147 	  before = get_insns ();
4148 	  end_sequence ();
4149 	  lra_process_new_insns (curr_insn, before, NULL, "Changing on");
4150 	  lra_set_insn_deleted (curr_insn);
4151 	}
4152       else if (dest == rld)
4153         {
4154 	  *curr_id->operand_loc[out] = new_reg;
4155 	  lra_update_dup (curr_id, out);
4156 	  after = emit_spill_move (false, new_reg, dest);
4157 	  lra_process_new_insns (curr_insn, NULL, after,
4158 				 "Inserting the sec. move");
4159 	}
4160       else
4161 	{
4162 	  *curr_id->operand_loc[in] = new_reg;
4163 	  lra_update_dup (curr_id, in);
4164 	  /* See comments above.  */
4165 	  push_to_sequence (before);
4166 	  before = emit_spill_move (true, new_reg, src);
4167 	  emit_insn (before);
4168 	  before = get_insns ();
4169 	  end_sequence ();
4170 	  lra_process_new_insns (curr_insn, before, NULL,
4171 				 "Inserting the sec. move");
4172 	}
4173       lra_update_insn_regno_info (curr_insn);
4174       return true;
4175     }
4176 
4177   lra_assert (goal_alt_number >= 0);
4178   lra_set_used_insn_alternative (curr_insn, goal_alt_number);
4179 
4180   if (lra_dump_file != NULL)
4181     {
4182       const char *p;
4183 
4184       fprintf (lra_dump_file, "	 Choosing alt %d in insn %u:",
4185 	       goal_alt_number, INSN_UID (curr_insn));
4186       for (i = 0; i < n_operands; i++)
4187 	{
4188 	  p = (curr_static_id->operand_alternative
4189 	       [goal_alt_number * n_operands + i].constraint);
4190 	  if (*p == '\0')
4191 	    continue;
4192 	  fprintf (lra_dump_file, "  (%d) ", i);
4193 	  for (; *p != '\0' && *p != ',' && *p != '#'; p++)
4194 	    fputc (*p, lra_dump_file);
4195 	}
4196       if (INSN_CODE (curr_insn) >= 0
4197           && (p = get_insn_name (INSN_CODE (curr_insn))) != NULL)
4198         fprintf (lra_dump_file, " {%s}", p);
4199       if (maybe_ne (curr_id->sp_offset, 0))
4200 	{
4201 	  fprintf (lra_dump_file, " (sp_off=");
4202 	  print_dec (curr_id->sp_offset, lra_dump_file);
4203 	  fprintf (lra_dump_file, ")");
4204 	}
4205       fprintf (lra_dump_file, "\n");
4206     }
4207 
4208   /* Right now, for any pair of operands I and J that are required to
4209      match, with J < I, goal_alt_matches[I] is J.  Add I to
4210      goal_alt_matched[J].  */
4211 
4212   for (i = 0; i < n_operands; i++)
4213     if ((j = goal_alt_matches[i]) >= 0)
4214       {
4215 	for (k = 0; goal_alt_matched[j][k] >= 0; k++)
4216 	  ;
4217 	/* We allow matching one output operand and several input
4218 	   operands.  */
4219 	lra_assert (k == 0
4220 		    || (curr_static_id->operand[j].type == OP_OUT
4221 			&& curr_static_id->operand[i].type == OP_IN
4222 			&& (curr_static_id->operand
4223 			    [goal_alt_matched[j][0]].type == OP_IN)));
4224 	goal_alt_matched[j][k] = i;
4225 	goal_alt_matched[j][k + 1] = -1;
4226       }
4227 
4228   for (i = 0; i < n_operands; i++)
4229     goal_alt_win[i] |= goal_alt_match_win[i];
4230 
4231   /* Any constants that aren't allowed and can't be reloaded into
4232      registers are here changed into memory references.	 */
4233   for (i = 0; i < n_operands; i++)
4234     if (goal_alt_win[i])
4235       {
4236 	int regno;
4237 	enum reg_class new_class;
4238 	rtx reg = *curr_id->operand_loc[i];
4239 
4240 	if (GET_CODE (reg) == SUBREG)
4241 	  reg = SUBREG_REG (reg);
4242 
4243 	if (REG_P (reg) && (regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
4244 	  {
4245 	    bool ok_p = in_class_p (reg, goal_alt[i], &new_class);
4246 
4247 	    if (new_class != NO_REGS && get_reg_class (regno) != new_class)
4248 	      {
4249 		lra_assert (ok_p);
4250 		lra_change_class (regno, new_class, "      Change to", true);
4251 	      }
4252 	  }
4253       }
4254     else
4255       {
4256 	const char *constraint;
4257 	char c;
4258 	rtx op = *curr_id->operand_loc[i];
4259 	rtx subreg = NULL_RTX;
4260 	machine_mode mode = curr_operand_mode[i];
4261 
4262 	if (GET_CODE (op) == SUBREG)
4263 	  {
4264 	    subreg = op;
4265 	    op = SUBREG_REG (op);
4266 	    mode = GET_MODE (op);
4267 	  }
4268 
4269 	if (CONST_POOL_OK_P (mode, op)
4270 	    && ((targetm.preferred_reload_class
4271 		 (op, (enum reg_class) goal_alt[i]) == NO_REGS)
4272 		|| no_input_reloads_p))
4273 	  {
4274 	    rtx tem = force_const_mem (mode, op);
4275 
4276 	    change_p = true;
4277 	    if (subreg != NULL_RTX)
4278 	      tem = gen_rtx_SUBREG (mode, tem, SUBREG_BYTE (subreg));
4279 
4280 	    *curr_id->operand_loc[i] = tem;
4281 	    lra_update_dup (curr_id, i);
4282 	    process_address (i, false, &before, &after);
4283 
4284 	    /* If the alternative accepts constant pool refs directly
4285 	       there will be no reload needed at all.  */
4286 	    if (subreg != NULL_RTX)
4287 	      continue;
4288 	    /* Skip alternatives before the one requested.  */
4289 	    constraint = (curr_static_id->operand_alternative
4290 			  [goal_alt_number * n_operands + i].constraint);
4291 	    for (;
4292 		 (c = *constraint) && c != ',' && c != '#';
4293 		 constraint += CONSTRAINT_LEN (c, constraint))
4294 	      {
4295 		enum constraint_num cn = lookup_constraint (constraint);
4296 		if ((insn_extra_memory_constraint (cn)
4297 		     || insn_extra_special_memory_constraint (cn))
4298 		    && satisfies_memory_constraint_p (tem, cn))
4299 		  break;
4300 	      }
4301 	    if (c == '\0' || c == ',' || c == '#')
4302 	      continue;
4303 
4304 	    goal_alt_win[i] = true;
4305 	  }
4306       }
4307 
4308   n_outputs = 0;
4309   for (i = 0; i < n_operands; i++)
4310     if (curr_static_id->operand[i].type == OP_OUT)
4311       outputs[n_outputs++] = i;
4312   outputs[n_outputs] = -1;
4313   for (i = 0; i < n_operands; i++)
4314     {
4315       int regno;
4316       bool optional_p = false;
4317       rtx old, new_reg;
4318       rtx op = *curr_id->operand_loc[i];
4319 
4320       if (goal_alt_win[i])
4321 	{
4322 	  if (goal_alt[i] == NO_REGS
4323 	      && REG_P (op)
4324 	      /* When we assign NO_REGS it means that we will not
4325 		 assign a hard register to the scratch pseudo by
4326 		 assignment pass and the scratch pseudo will be
4327 		 spilled.  Spilled scratch pseudos are transformed
4328 		 back to scratches at the LRA end.  */
4329 	      && lra_former_scratch_operand_p (curr_insn, i)
4330 	      && lra_former_scratch_p (REGNO (op)))
4331 	    {
4332 	      int regno = REGNO (op);
4333 	      lra_change_class (regno, NO_REGS, "      Change to", true);
4334 	      if (lra_get_regno_hard_regno (regno) >= 0)
4335 		/* We don't have to mark all insn affected by the
4336 		   spilled pseudo as there is only one such insn, the
4337 		   current one.  */
4338 		reg_renumber[regno] = -1;
4339 	      lra_assert (bitmap_single_bit_set_p
4340 			  (&lra_reg_info[REGNO (op)].insn_bitmap));
4341 	    }
4342 	  /* We can do an optional reload.  If the pseudo got a hard
4343 	     reg, we might improve the code through inheritance.  If
4344 	     it does not get a hard register we coalesce memory/memory
4345 	     moves later.  Ignore move insns to avoid cycling.  */
4346 	  if (! lra_simple_p
4347 	      && lra_undo_inheritance_iter < LRA_MAX_INHERITANCE_PASSES
4348 	      && goal_alt[i] != NO_REGS && REG_P (op)
4349 	      && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
4350 	      && regno < new_regno_start
4351 	      && ! lra_former_scratch_p (regno)
4352 	      && reg_renumber[regno] < 0
4353 	      /* Check that the optional reload pseudo will be able to
4354 		 hold given mode value.  */
4355 	      && ! (prohibited_class_reg_set_mode_p
4356 		    (goal_alt[i], reg_class_contents[goal_alt[i]],
4357 		     PSEUDO_REGNO_MODE (regno)))
4358 	      && (curr_insn_set == NULL_RTX
4359 		  || !((REG_P (SET_SRC (curr_insn_set))
4360 			|| MEM_P (SET_SRC (curr_insn_set))
4361 			|| GET_CODE (SET_SRC (curr_insn_set)) == SUBREG)
4362 		       && (REG_P (SET_DEST (curr_insn_set))
4363 			   || MEM_P (SET_DEST (curr_insn_set))
4364 			   || GET_CODE (SET_DEST (curr_insn_set)) == SUBREG))))
4365 	    optional_p = true;
4366 	  else if (goal_alt_matched[i][0] != -1
4367 		   && curr_static_id->operand[i].type == OP_OUT
4368 		   && (curr_static_id->operand_alternative
4369 		       [goal_alt_number * n_operands + i].earlyclobber)
4370 		   && REG_P (op))
4371 	    {
4372 	      for (j = 0; goal_alt_matched[i][j] != -1; j++)
4373 		{
4374 		  rtx op2 = *curr_id->operand_loc[goal_alt_matched[i][j]];
4375 
4376 		  if (REG_P (op2) && REGNO (op) != REGNO (op2))
4377 		    break;
4378 		}
4379 	      if (goal_alt_matched[i][j] != -1)
4380 		{
4381 		  /* Generate reloads for different output and matched
4382 		     input registers.  This is the easiest way to avoid
4383 		     creation of non-existing register conflicts in
4384 		     lra-lives.c.  */
4385 		  match_reload (i, goal_alt_matched[i], outputs, goal_alt[i], &before,
4386 				&after, TRUE);
4387 		}
4388 	      continue;
4389 	    }
4390 	  else
4391 	    continue;
4392 	}
4393 
4394       /* Operands that match previous ones have already been handled.  */
4395       if (goal_alt_matches[i] >= 0)
4396 	continue;
4397 
4398       /* We should not have an operand with a non-offsettable address
4399 	 appearing where an offsettable address will do.  It also may
4400 	 be a case when the address should be special in other words
4401 	 not a general one (e.g. it needs no index reg).  */
4402       if (goal_alt_matched[i][0] == -1 && goal_alt_offmemok[i] && MEM_P (op))
4403 	{
4404 	  enum reg_class rclass;
4405 	  rtx *loc = &XEXP (op, 0);
4406 	  enum rtx_code code = GET_CODE (*loc);
4407 
4408 	  push_to_sequence (before);
4409 	  rclass = base_reg_class (GET_MODE (op), MEM_ADDR_SPACE (op),
4410 				   MEM, SCRATCH);
4411 	  if (GET_RTX_CLASS (code) == RTX_AUTOINC)
4412 	    new_reg = emit_inc (rclass, *loc, *loc,
4413 				/* This value does not matter for MODIFY.  */
4414 				GET_MODE_SIZE (GET_MODE (op)));
4415 	  else if (get_reload_reg (OP_IN, Pmode, *loc, rclass, FALSE,
4416 				   "offsetable address", &new_reg))
4417 	    {
4418 	      rtx addr = *loc;
4419 	      enum rtx_code code = GET_CODE (addr);
4420 
4421 	      if (code == AND && CONST_INT_P (XEXP (addr, 1)))
4422 		/* (and ... (const_int -X)) is used to align to X bytes.  */
4423 		addr = XEXP (*loc, 0);
4424 	      lra_emit_move (new_reg, addr);
4425 	      if (addr != *loc)
4426 		emit_move_insn (new_reg, gen_rtx_AND (GET_MODE (new_reg), new_reg, XEXP (*loc, 1)));
4427 	    }
4428 	  before = get_insns ();
4429 	  end_sequence ();
4430 	  *loc = new_reg;
4431 	  lra_update_dup (curr_id, i);
4432 	}
4433       else if (goal_alt_matched[i][0] == -1)
4434 	{
4435 	  machine_mode mode;
4436 	  rtx reg, *loc;
4437 	  int hard_regno;
4438 	  enum op_type type = curr_static_id->operand[i].type;
4439 
4440 	  loc = curr_id->operand_loc[i];
4441 	  mode = curr_operand_mode[i];
4442 	  if (GET_CODE (*loc) == SUBREG)
4443 	    {
4444 	      reg = SUBREG_REG (*loc);
4445 	      poly_int64 byte = SUBREG_BYTE (*loc);
4446 	      if (REG_P (reg)
4447 		  /* Strict_low_part requires reloading the register and not
4448 		     just the subreg.  Likewise for a strict subreg no wider
4449 		     than a word for WORD_REGISTER_OPERATIONS targets.  */
4450 		  && (curr_static_id->operand[i].strict_low
4451 		      || (!paradoxical_subreg_p (mode, GET_MODE (reg))
4452 			  && (hard_regno
4453 			      = get_try_hard_regno (REGNO (reg))) >= 0
4454 			  && (simplify_subreg_regno
4455 			      (hard_regno,
4456 			       GET_MODE (reg), byte, mode) < 0)
4457 			  && (goal_alt[i] == NO_REGS
4458 			      || (simplify_subreg_regno
4459 				  (ira_class_hard_regs[goal_alt[i]][0],
4460 				   GET_MODE (reg), byte, mode) >= 0)))
4461 		      || (partial_subreg_p (mode, GET_MODE (reg))
4462 			  && known_le (GET_MODE_SIZE (GET_MODE (reg)),
4463 				       UNITS_PER_WORD)
4464 			  && WORD_REGISTER_OPERATIONS)))
4465 		{
4466 		  /* An OP_INOUT is required when reloading a subreg of a
4467 		     mode wider than a word to ensure that data beyond the
4468 		     word being reloaded is preserved.  Also automatically
4469 		     ensure that strict_low_part reloads are made into
4470 		     OP_INOUT which should already be true from the backend
4471 		     constraints.  */
4472 		  if (type == OP_OUT
4473 		      && (curr_static_id->operand[i].strict_low
4474 			  || read_modify_subreg_p (*loc)))
4475 		    type = OP_INOUT;
4476 		  loc = &SUBREG_REG (*loc);
4477 		  mode = GET_MODE (*loc);
4478 		}
4479 	    }
4480 	  old = *loc;
4481 	  if (get_reload_reg (type, mode, old, goal_alt[i],
4482 			      loc != curr_id->operand_loc[i], "", &new_reg)
4483 	      && type != OP_OUT)
4484 	    {
4485 	      push_to_sequence (before);
4486 	      lra_emit_move (new_reg, old);
4487 	      before = get_insns ();
4488 	      end_sequence ();
4489 	    }
4490 	  *loc = new_reg;
4491 	  if (type != OP_IN
4492 	      && find_reg_note (curr_insn, REG_UNUSED, old) == NULL_RTX)
4493 	    {
4494 	      start_sequence ();
4495 	      lra_emit_move (type == OP_INOUT ? copy_rtx (old) : old, new_reg);
4496 	      emit_insn (after);
4497 	      after = get_insns ();
4498 	      end_sequence ();
4499 	      *loc = new_reg;
4500 	    }
4501 	  for (j = 0; j < goal_alt_dont_inherit_ops_num; j++)
4502 	    if (goal_alt_dont_inherit_ops[j] == i)
4503 	      {
4504 		lra_set_regno_unique_value (REGNO (new_reg));
4505 		break;
4506 	      }
4507 	  lra_update_dup (curr_id, i);
4508 	}
4509       else if (curr_static_id->operand[i].type == OP_IN
4510 	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
4511 		   == OP_OUT
4512 		   || (curr_static_id->operand[goal_alt_matched[i][0]].type
4513 		       == OP_INOUT
4514 		       && (operands_match_p
4515 			   (*curr_id->operand_loc[i],
4516 			    *curr_id->operand_loc[goal_alt_matched[i][0]],
4517 			    -1)))))
4518 	{
4519 	  /* generate reloads for input and matched outputs.  */
4520 	  match_inputs[0] = i;
4521 	  match_inputs[1] = -1;
4522 	  match_reload (goal_alt_matched[i][0], match_inputs, outputs,
4523 			goal_alt[i], &before, &after,
4524 			curr_static_id->operand_alternative
4525 			[goal_alt_number * n_operands + goal_alt_matched[i][0]]
4526 			.earlyclobber);
4527 	}
4528       else if ((curr_static_id->operand[i].type == OP_OUT
4529 		|| (curr_static_id->operand[i].type == OP_INOUT
4530 		    && (operands_match_p
4531 			(*curr_id->operand_loc[i],
4532 			 *curr_id->operand_loc[goal_alt_matched[i][0]],
4533 			 -1))))
4534 	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
4535 		    == OP_IN))
4536 	/* Generate reloads for output and matched inputs.  */
4537 	match_reload (i, goal_alt_matched[i], outputs, goal_alt[i], &before,
4538 		      &after, curr_static_id->operand_alternative
4539 			      [goal_alt_number * n_operands + i].earlyclobber);
4540       else if (curr_static_id->operand[i].type == OP_IN
4541 	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
4542 		   == OP_IN))
4543 	{
4544 	  /* Generate reloads for matched inputs.  */
4545 	  match_inputs[0] = i;
4546 	  for (j = 0; (k = goal_alt_matched[i][j]) >= 0; j++)
4547 	    match_inputs[j + 1] = k;
4548 	  match_inputs[j + 1] = -1;
4549 	  match_reload (-1, match_inputs, outputs, goal_alt[i], &before,
4550 			&after, false);
4551 	}
4552       else
4553 	/* We must generate code in any case when function
4554 	   process_alt_operands decides that it is possible.  */
4555 	gcc_unreachable ();
4556 
4557       if (optional_p)
4558 	{
4559 	  rtx reg = op;
4560 
4561 	  lra_assert (REG_P (reg));
4562 	  regno = REGNO (reg);
4563 	  op = *curr_id->operand_loc[i]; /* Substitution.  */
4564 	  if (GET_CODE (op) == SUBREG)
4565 	    op = SUBREG_REG (op);
4566 	  gcc_assert (REG_P (op) && (int) REGNO (op) >= new_regno_start);
4567 	  bitmap_set_bit (&lra_optional_reload_pseudos, REGNO (op));
4568 	  lra_reg_info[REGNO (op)].restore_rtx = reg;
4569 	  if (lra_dump_file != NULL)
4570 	    fprintf (lra_dump_file,
4571 		     "      Making reload reg %d for reg %d optional\n",
4572 		     REGNO (op), regno);
4573 	}
4574     }
4575   if (before != NULL_RTX || after != NULL_RTX
4576       || max_regno_before != max_reg_num ())
4577     change_p = true;
4578   if (change_p)
4579     {
4580       lra_update_operator_dups (curr_id);
4581       /* Something changes -- process the insn.	 */
4582       lra_update_insn_regno_info (curr_insn);
4583     }
4584   lra_process_new_insns (curr_insn, before, after, "Inserting insn reload");
4585   return change_p;
4586 }
4587 
4588 /* Return true if INSN satisfies all constraints.  In other words, no
4589    reload insns are needed.  */
4590 bool
lra_constrain_insn(rtx_insn * insn)4591 lra_constrain_insn (rtx_insn *insn)
4592 {
4593   int saved_new_regno_start = new_regno_start;
4594   int saved_new_insn_uid_start = new_insn_uid_start;
4595   bool change_p;
4596 
4597   curr_insn = insn;
4598   curr_id = lra_get_insn_recog_data (curr_insn);
4599   curr_static_id = curr_id->insn_static_data;
4600   new_insn_uid_start = get_max_uid ();
4601   new_regno_start = max_reg_num ();
4602   change_p = curr_insn_transform (true);
4603   new_regno_start = saved_new_regno_start;
4604   new_insn_uid_start = saved_new_insn_uid_start;
4605   return ! change_p;
4606 }
4607 
4608 /* Return true if X is in LIST.	 */
4609 static bool
in_list_p(rtx x,rtx list)4610 in_list_p (rtx x, rtx list)
4611 {
4612   for (; list != NULL_RTX; list = XEXP (list, 1))
4613     if (XEXP (list, 0) == x)
4614       return true;
4615   return false;
4616 }
4617 
4618 /* Return true if X contains an allocatable hard register (if
4619    HARD_REG_P) or a (spilled if SPILLED_P) pseudo.  */
4620 static bool
contains_reg_p(rtx x,bool hard_reg_p,bool spilled_p)4621 contains_reg_p (rtx x, bool hard_reg_p, bool spilled_p)
4622 {
4623   int i, j;
4624   const char *fmt;
4625   enum rtx_code code;
4626 
4627   code = GET_CODE (x);
4628   if (REG_P (x))
4629     {
4630       int regno = REGNO (x);
4631       HARD_REG_SET alloc_regs;
4632 
4633       if (hard_reg_p)
4634 	{
4635 	  if (regno >= FIRST_PSEUDO_REGISTER)
4636 	    regno = lra_get_regno_hard_regno (regno);
4637 	  if (regno < 0)
4638 	    return false;
4639 	  alloc_regs = ~lra_no_alloc_regs;
4640 	  return overlaps_hard_reg_set_p (alloc_regs, GET_MODE (x), regno);
4641 	}
4642       else
4643 	{
4644 	  if (regno < FIRST_PSEUDO_REGISTER)
4645 	    return false;
4646 	  if (! spilled_p)
4647 	    return true;
4648 	  return lra_get_regno_hard_regno (regno) < 0;
4649 	}
4650     }
4651   fmt = GET_RTX_FORMAT (code);
4652   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4653     {
4654       if (fmt[i] == 'e')
4655 	{
4656 	  if (contains_reg_p (XEXP (x, i), hard_reg_p, spilled_p))
4657 	    return true;
4658 	}
4659       else if (fmt[i] == 'E')
4660 	{
4661 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4662 	    if (contains_reg_p (XVECEXP (x, i, j), hard_reg_p, spilled_p))
4663 	      return true;
4664 	}
4665     }
4666   return false;
4667 }
4668 
4669 /* Process all regs in location *LOC and change them on equivalent
4670    substitution.  Return true if any change was done.  */
4671 static bool
loc_equivalence_change_p(rtx * loc)4672 loc_equivalence_change_p (rtx *loc)
4673 {
4674   rtx subst, reg, x = *loc;
4675   bool result = false;
4676   enum rtx_code code = GET_CODE (x);
4677   const char *fmt;
4678   int i, j;
4679 
4680   if (code == SUBREG)
4681     {
4682       reg = SUBREG_REG (x);
4683       if ((subst = get_equiv_with_elimination (reg, curr_insn)) != reg
4684 	  && GET_MODE (subst) == VOIDmode)
4685 	{
4686 	  /* We cannot reload debug location.  Simplify subreg here
4687 	     while we know the inner mode.  */
4688 	  *loc = simplify_gen_subreg (GET_MODE (x), subst,
4689 				      GET_MODE (reg), SUBREG_BYTE (x));
4690 	  return true;
4691 	}
4692     }
4693   if (code == REG && (subst = get_equiv_with_elimination (x, curr_insn)) != x)
4694     {
4695       *loc = subst;
4696       return true;
4697     }
4698 
4699   /* Scan all the operand sub-expressions.  */
4700   fmt = GET_RTX_FORMAT (code);
4701   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4702     {
4703       if (fmt[i] == 'e')
4704 	result = loc_equivalence_change_p (&XEXP (x, i)) || result;
4705       else if (fmt[i] == 'E')
4706 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4707 	  result
4708 	    = loc_equivalence_change_p (&XVECEXP (x, i, j)) || result;
4709     }
4710   return result;
4711 }
4712 
4713 /* Similar to loc_equivalence_change_p, but for use as
4714    simplify_replace_fn_rtx callback.  DATA is insn for which the
4715    elimination is done.  If it null we don't do the elimination.  */
4716 static rtx
loc_equivalence_callback(rtx loc,const_rtx,void * data)4717 loc_equivalence_callback (rtx loc, const_rtx, void *data)
4718 {
4719   if (!REG_P (loc))
4720     return NULL_RTX;
4721 
4722   rtx subst = (data == NULL
4723 	       ? get_equiv (loc) : get_equiv_with_elimination (loc, (rtx_insn *) data));
4724   if (subst != loc)
4725     return subst;
4726 
4727   return NULL_RTX;
4728 }
4729 
/* Maximum number of generated reload insns per insn.  It is for
   preventing this pass from cycling in a pathological case.  */
#define MAX_RELOAD_INSNS_NUMBER LRA_MAX_INSN_RELOADS

/* The current iteration number of this LRA pass.  */
int lra_constraint_iter;

/* True if we should during the assignment sub-pass check assignment
   correctness for all pseudos and spill some of them to correct
   conflicts.  It can be necessary when we substitute an equiv which
   needs checking register allocation correctness because the
   equivalent value contains allocatable hard registers, or when we
   restore a multi-register pseudo, or when we change the insn code
   and its operand became an INOUT operand when it was an IN one
   before.  */
bool check_and_force_assignment_correctness_p;
4745 
4746 /* Return true if REGNO is referenced in more than one block.  */
4747 static bool
multi_block_pseudo_p(int regno)4748 multi_block_pseudo_p (int regno)
4749 {
4750   basic_block bb = NULL;
4751   unsigned int uid;
4752   bitmap_iterator bi;
4753 
4754   if (regno < FIRST_PSEUDO_REGISTER)
4755     return false;
4756 
4757     EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
4758       if (bb == NULL)
4759 	bb = BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn);
4760       else if (BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn) != bb)
4761 	return true;
4762     return false;
4763 }
4764 
4765 /* Return true if LIST contains a deleted insn.  */
4766 static bool
contains_deleted_insn_p(rtx_insn_list * list)4767 contains_deleted_insn_p (rtx_insn_list *list)
4768 {
4769   for (; list != NULL_RTX; list = list->next ())
4770     if (NOTE_P (list->insn ())
4771 	&& NOTE_KIND (list->insn ()) == NOTE_INSN_DELETED)
4772       return true;
4773   return false;
4774 }
4775 
4776 /* Return true if X contains a pseudo dying in INSN.  */
4777 static bool
dead_pseudo_p(rtx x,rtx_insn * insn)4778 dead_pseudo_p (rtx x, rtx_insn *insn)
4779 {
4780   int i, j;
4781   const char *fmt;
4782   enum rtx_code code;
4783 
4784   if (REG_P (x))
4785     return (insn != NULL_RTX
4786 	    && find_regno_note (insn, REG_DEAD, REGNO (x)) != NULL_RTX);
4787   code = GET_CODE (x);
4788   fmt = GET_RTX_FORMAT (code);
4789   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4790     {
4791       if (fmt[i] == 'e')
4792 	{
4793 	  if (dead_pseudo_p (XEXP (x, i), insn))
4794 	    return true;
4795 	}
4796       else if (fmt[i] == 'E')
4797 	{
4798 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4799 	    if (dead_pseudo_p (XVECEXP (x, i, j), insn))
4800 	      return true;
4801 	}
4802     }
4803   return false;
4804 }
4805 
4806 /* Return true if INSN contains a dying pseudo in INSN right hand
4807    side.  */
4808 static bool
insn_rhs_dead_pseudo_p(rtx_insn * insn)4809 insn_rhs_dead_pseudo_p (rtx_insn *insn)
4810 {
4811   rtx set = single_set (insn);
4812 
4813   gcc_assert (set != NULL);
4814   return dead_pseudo_p (SET_SRC (set), insn);
4815 }
4816 
4817 /* Return true if any init insn of REGNO contains a dying pseudo in
4818    insn right hand side.  */
4819 static bool
init_insn_rhs_dead_pseudo_p(int regno)4820 init_insn_rhs_dead_pseudo_p (int regno)
4821 {
4822   rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
4823 
4824   if (insns == NULL)
4825     return false;
4826   for (; insns != NULL_RTX; insns = insns->next ())
4827     if (insn_rhs_dead_pseudo_p (insns->insn ()))
4828       return true;
4829   return false;
4830 }
4831 
4832 /* Return TRUE if REGNO has a reverse equivalence.  The equivalence is
4833    reverse only if we have one init insn with given REGNO as a
4834    source.  */
4835 static bool
reverse_equiv_p(int regno)4836 reverse_equiv_p (int regno)
4837 {
4838   rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
4839   rtx set;
4840 
4841   if (insns == NULL)
4842     return false;
4843   if (! INSN_P (insns->insn ())
4844       || insns->next () != NULL)
4845     return false;
4846   if ((set = single_set (insns->insn ())) == NULL_RTX)
4847     return false;
4848   return REG_P (SET_SRC (set)) && (int) REGNO (SET_SRC (set)) == regno;
4849 }
4850 
4851 /* Return TRUE if REGNO was reloaded in an equivalence init insn.  We
4852    call this function only for non-reverse equivalence.  */
4853 static bool
contains_reloaded_insn_p(int regno)4854 contains_reloaded_insn_p (int regno)
4855 {
4856   rtx set;
4857   rtx_insn_list *list = ira_reg_equiv[regno].init_insns;
4858 
4859   for (; list != NULL; list = list->next ())
4860     if ((set = single_set (list->insn ())) == NULL_RTX
4861 	|| ! REG_P (SET_DEST (set))
4862 	|| (int) REGNO (SET_DEST (set)) != regno)
4863       return true;
4864   return false;
4865 }
4866 
4867 /* Entry function of LRA constraint pass.  Return true if the
4868    constraint pass did change the code.	 */
4869 bool
lra_constraints(bool first_p)4870 lra_constraints (bool first_p)
4871 {
4872   bool changed_p;
4873   int i, hard_regno, new_insns_num;
4874   unsigned int min_len, new_min_len, uid;
4875   rtx set, x, reg, dest_reg;
4876   basic_block last_bb;
4877   bitmap_iterator bi;
4878 
4879   lra_constraint_iter++;
4880   if (lra_dump_file != NULL)
4881     fprintf (lra_dump_file, "\n********** Local #%d: **********\n\n",
4882 	     lra_constraint_iter);
4883   changed_p = false;
4884   if (pic_offset_table_rtx
4885       && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
4886     check_and_force_assignment_correctness_p = true;
4887   else if (first_p)
4888     /* On the first iteration we should check IRA assignment
4889        correctness.  In rare cases, the assignments can be wrong as
4890        early clobbers operands are ignored in IRA or usages of
4891        paradoxical sub-registers are not taken into account by
4892        IRA.  */
4893     check_and_force_assignment_correctness_p = true;
4894   new_insn_uid_start = get_max_uid ();
4895   new_regno_start = first_p ? lra_constraint_new_regno_start : max_reg_num ();
4896   /* Mark used hard regs for target stack size calculations.  */
4897   for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
4898     if (lra_reg_info[i].nrefs != 0
4899 	&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
4900       {
4901 	int j, nregs;
4902 
4903 	nregs = hard_regno_nregs (hard_regno, lra_reg_info[i].biggest_mode);
4904 	for (j = 0; j < nregs; j++)
4905 	  df_set_regs_ever_live (hard_regno + j, true);
4906       }
4907   /* Do elimination before the equivalence processing as we can spill
4908      some pseudos during elimination.  */
4909   lra_eliminate (false, first_p);
4910   auto_bitmap equiv_insn_bitmap (&reg_obstack);
4911   for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
4912     if (lra_reg_info[i].nrefs != 0)
4913       {
4914 	ira_reg_equiv[i].profitable_p = true;
4915 	reg = regno_reg_rtx[i];
4916 	if (lra_get_regno_hard_regno (i) < 0 && (x = get_equiv (reg)) != reg)
4917 	  {
4918 	    bool pseudo_p = contains_reg_p (x, false, false);
4919 
4920 	    /* After RTL transformation, we cannot guarantee that
4921 	       pseudo in the substitution was not reloaded which might
4922 	       make equivalence invalid.  For example, in reverse
4923 	       equiv of p0
4924 
4925 	       p0 <- ...
4926 	       ...
4927 	       equiv_mem <- p0
4928 
4929 	       the memory address register was reloaded before the 2nd
4930 	       insn.  */
4931 	    if ((! first_p && pseudo_p)
4932 		/* We don't use DF for compilation speed sake.  So it
4933 		   is problematic to update live info when we use an
4934 		   equivalence containing pseudos in more than one
4935 		   BB.  */
4936 		|| (pseudo_p && multi_block_pseudo_p (i))
4937 		/* If an init insn was deleted for some reason, cancel
4938 		   the equiv.  We could update the equiv insns after
4939 		   transformations including an equiv insn deletion
4940 		   but it is not worthy as such cases are extremely
4941 		   rare.  */
4942 		|| contains_deleted_insn_p (ira_reg_equiv[i].init_insns)
4943 		/* If it is not a reverse equivalence, we check that a
4944 		   pseudo in rhs of the init insn is not dying in the
4945 		   insn.  Otherwise, the live info at the beginning of
4946 		   the corresponding BB might be wrong after we
4947 		   removed the insn.  When the equiv can be a
4948 		   constant, the right hand side of the init insn can
4949 		   be a pseudo.  */
4950 		|| (! reverse_equiv_p (i)
4951 		    && (init_insn_rhs_dead_pseudo_p (i)
4952 			/* If we reloaded the pseudo in an equivalence
4953 			   init insn, we cannot remove the equiv init
4954 			   insns and the init insns might write into
4955 			   const memory in this case.  */
4956 			|| contains_reloaded_insn_p (i)))
4957 		/* Prevent access beyond equivalent memory for
4958 		   paradoxical subregs.  */
4959 		|| (MEM_P (x)
4960 		    && maybe_gt (GET_MODE_SIZE (lra_reg_info[i].biggest_mode),
4961 				 GET_MODE_SIZE (GET_MODE (x))))
4962 		|| (pic_offset_table_rtx
4963 		    && ((CONST_POOL_OK_P (PSEUDO_REGNO_MODE (i), x)
4964 			 && (targetm.preferred_reload_class
4965 			     (x, lra_get_allocno_class (i)) == NO_REGS))
4966 			|| contains_symbol_ref_p (x))))
4967 	      ira_reg_equiv[i].defined_p = false;
4968 	    if (contains_reg_p (x, false, true))
4969 	      ira_reg_equiv[i].profitable_p = false;
4970 	    if (get_equiv (reg) != reg)
4971 	      bitmap_ior_into (equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
4972 	  }
4973       }
4974   for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
4975     update_equiv (i);
4976   /* We should add all insns containing pseudos which should be
4977      substituted by their equivalences.  */
4978   EXECUTE_IF_SET_IN_BITMAP (equiv_insn_bitmap, 0, uid, bi)
4979     lra_push_insn_by_uid (uid);
4980   min_len = lra_insn_stack_length ();
4981   new_insns_num = 0;
4982   last_bb = NULL;
4983   changed_p = false;
4984   while ((new_min_len = lra_insn_stack_length ()) != 0)
4985     {
4986       curr_insn = lra_pop_insn ();
4987       --new_min_len;
4988       curr_bb = BLOCK_FOR_INSN (curr_insn);
4989       if (curr_bb != last_bb)
4990 	{
4991 	  last_bb = curr_bb;
4992 	  bb_reload_num = lra_curr_reload_num;
4993 	}
4994       if (min_len > new_min_len)
4995 	{
4996 	  min_len = new_min_len;
4997 	  new_insns_num = 0;
4998 	}
4999       if (new_insns_num > MAX_RELOAD_INSNS_NUMBER)
5000 	internal_error
5001 	  ("maximum number of generated reload insns per insn achieved (%d)",
5002 	   MAX_RELOAD_INSNS_NUMBER);
5003       new_insns_num++;
5004       if (DEBUG_INSN_P (curr_insn))
5005 	{
5006 	  /* We need to check equivalence in debug insn and change
5007 	     pseudo to the equivalent value if necessary.  */
5008 	  curr_id = lra_get_insn_recog_data (curr_insn);
5009 	  if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn)))
5010 	    {
5011 	      rtx old = *curr_id->operand_loc[0];
5012 	      *curr_id->operand_loc[0]
5013 		= simplify_replace_fn_rtx (old, NULL_RTX,
5014 					   loc_equivalence_callback, curr_insn);
5015 	      if (old != *curr_id->operand_loc[0])
5016 		{
5017 		  lra_update_insn_regno_info (curr_insn);
5018 		  changed_p = true;
5019 		}
5020 	    }
5021 	}
5022       else if (INSN_P (curr_insn))
5023 	{
5024 	  if ((set = single_set (curr_insn)) != NULL_RTX)
5025 	    {
5026 	      dest_reg = SET_DEST (set);
5027 	      /* The equivalence pseudo could be set up as SUBREG in a
5028 		 case when it is a call restore insn in a mode
5029 		 different from the pseudo mode.  */
5030 	      if (GET_CODE (dest_reg) == SUBREG)
5031 		dest_reg = SUBREG_REG (dest_reg);
5032 	      if ((REG_P (dest_reg)
5033 		   && (x = get_equiv (dest_reg)) != dest_reg
5034 		   /* Remove insns which set up a pseudo whose value
5035 		      cannot be changed.  Such insns might be not in
5036 		      init_insns because we don't update equiv data
5037 		      during insn transformations.
5038 
5039 		      As an example, let suppose that a pseudo got
5040 		      hard register and on the 1st pass was not
5041 		      changed to equivalent constant.  We generate an
5042 		      additional insn setting up the pseudo because of
5043 		      secondary memory movement.  Then the pseudo is
5044 		      spilled and we use the equiv constant.  In this
5045 		      case we should remove the additional insn and
5046 		      this insn is not init_insns list.  */
5047 		   && (! MEM_P (x) || MEM_READONLY_P (x)
5048 		       /* Check that this is actually an insn setting
5049 			  up the equivalence.  */
5050 		       || in_list_p (curr_insn,
5051 				     ira_reg_equiv
5052 				     [REGNO (dest_reg)].init_insns)))
5053 		  || (((x = get_equiv (SET_SRC (set))) != SET_SRC (set))
5054 		      && in_list_p (curr_insn,
5055 				    ira_reg_equiv
5056 				    [REGNO (SET_SRC (set))].init_insns)))
5057 		{
5058 		  /* This is equiv init insn of pseudo which did not get a
5059 		     hard register -- remove the insn.	*/
5060 		  if (lra_dump_file != NULL)
5061 		    {
5062 		      fprintf (lra_dump_file,
5063 			       "      Removing equiv init insn %i (freq=%d)\n",
5064 			       INSN_UID (curr_insn),
5065 			       REG_FREQ_FROM_BB (BLOCK_FOR_INSN (curr_insn)));
5066 		      dump_insn_slim (lra_dump_file, curr_insn);
5067 		    }
5068 		  if (contains_reg_p (x, true, false))
5069 		    check_and_force_assignment_correctness_p = true;
5070 		  lra_set_insn_deleted (curr_insn);
5071 		  continue;
5072 		}
5073 	    }
5074 	  curr_id = lra_get_insn_recog_data (curr_insn);
5075 	  curr_static_id = curr_id->insn_static_data;
5076 	  init_curr_insn_input_reloads ();
5077 	  init_curr_operand_mode ();
5078 	  if (curr_insn_transform (false))
5079 	    changed_p = true;
5080 	  /* Check non-transformed insns too for equiv change as USE
5081 	     or CLOBBER don't need reloads but can contain pseudos
5082 	     being changed on their equivalences.  */
5083 	  else if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn))
5084 		   && loc_equivalence_change_p (&PATTERN (curr_insn)))
5085 	    {
5086 	      lra_update_insn_regno_info (curr_insn);
5087 	      changed_p = true;
5088 	    }
5089 	}
5090     }
5091 
5092   /* If we used a new hard regno, changed_p should be true because the
5093      hard reg is assigned to a new pseudo.  */
5094   if (flag_checking && !changed_p)
5095     {
5096       for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
5097 	if (lra_reg_info[i].nrefs != 0
5098 	    && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
5099 	  {
5100 	    int j, nregs = hard_regno_nregs (hard_regno,
5101 					     PSEUDO_REGNO_MODE (i));
5102 
5103 	    for (j = 0; j < nregs; j++)
5104 	      lra_assert (df_regs_ever_live_p (hard_regno + j));
5105 	  }
5106     }
5107   return changed_p;
5108 }
5109 
5110 static void initiate_invariants (void);
5111 static void finish_invariants (void);
5112 
5113 /* Initiate the LRA constraint pass.  It is done once per
5114    function.  */
5115 void
lra_constraints_init(void)5116 lra_constraints_init (void)
5117 {
5118   initiate_invariants ();
5119 }
5120 
5121 /* Finalize the LRA constraint pass.  It is done once per
5122    function.  */
5123 void
lra_constraints_finish(void)5124 lra_constraints_finish (void)
5125 {
5126   finish_invariants ();
5127 }
5128 
5129 
5130 
/* Structure describing invariants for inheritance.  */
struct lra_invariant
{
  /* The order number of the invariant (its index in INVARIANTS).  */
  int num;
  /* The invariant RTX.  */
  rtx invariant_rtx;
  /* The origin insn of the invariant.  */
  rtx_insn *insn;
};

typedef lra_invariant invariant_t;
typedef invariant_t *invariant_ptr_t;
typedef const invariant_t *const_invariant_ptr_t;

/* Vector of all inheritance invariants seen so far.  */
static vec<invariant_ptr_t> invariants;

/* Allocation pool for the invariants.  */
static object_allocator<lra_invariant> *invariants_pool;

/* Hash table for the invariants, keyed by invariant RTX.  */
static htab_t invariant_table;
5154 
5155 /* Hash function for INVARIANT.  */
5156 static hashval_t
invariant_hash(const void * invariant)5157 invariant_hash (const void *invariant)
5158 {
5159   rtx inv = ((const_invariant_ptr_t) invariant)->invariant_rtx;
5160   return lra_rtx_hash (inv);
5161 }
5162 
5163 /* Equal function for invariants INVARIANT1 and INVARIANT2.  */
5164 static int
invariant_eq_p(const void * invariant1,const void * invariant2)5165 invariant_eq_p (const void *invariant1, const void *invariant2)
5166 {
5167   rtx inv1 = ((const_invariant_ptr_t) invariant1)->invariant_rtx;
5168   rtx inv2 = ((const_invariant_ptr_t) invariant2)->invariant_rtx;
5169 
5170   return rtx_equal_p (inv1, inv2);
5171 }
5172 
5173 /* Insert INVARIANT_RTX into the table if it is not there yet.  Return
5174    invariant which is in the table.  */
5175 static invariant_ptr_t
insert_invariant(rtx invariant_rtx)5176 insert_invariant (rtx invariant_rtx)
5177 {
5178   void **entry_ptr;
5179   invariant_t invariant;
5180   invariant_ptr_t invariant_ptr;
5181 
5182   invariant.invariant_rtx = invariant_rtx;
5183   entry_ptr = htab_find_slot (invariant_table, &invariant, INSERT);
5184   if (*entry_ptr == NULL)
5185     {
5186       invariant_ptr = invariants_pool->allocate ();
5187       invariant_ptr->invariant_rtx = invariant_rtx;
5188       invariant_ptr->insn = NULL;
5189       invariants.safe_push (invariant_ptr);
5190       *entry_ptr = (void *) invariant_ptr;
5191     }
5192   return (invariant_ptr_t) *entry_ptr;
5193 }
5194 
5195 /* Initiate the invariant table.  */
5196 static void
initiate_invariants(void)5197 initiate_invariants (void)
5198 {
5199   invariants.create (100);
5200   invariants_pool
5201     = new object_allocator<lra_invariant> ("Inheritance invariants");
5202   invariant_table = htab_create (100, invariant_hash, invariant_eq_p, NULL);
5203 }
5204 
5205 /* Finish the invariant table.  */
5206 static void
finish_invariants(void)5207 finish_invariants (void)
5208 {
5209   htab_delete (invariant_table);
5210   delete invariants_pool;
5211   invariants.release ();
5212 }
5213 
5214 /* Make the invariant table empty.  */
5215 static void
clear_invariants(void)5216 clear_invariants (void)
5217 {
5218   htab_empty (invariant_table);
5219   invariants_pool->release ();
5220   invariants.truncate (0);
5221 }
5222 
5223 
5224 
5225 /* This page contains code to do inheritance/split
5226    transformations.  */
5227 
/* Number of reloads passed so far in current EBB.  */
static int reloads_num;

/* Number of calls passed so far in current EBB.  */
static int calls_num;

/* Index ID is the CALLS_NUM associated with the last call we saw with
   ABI identifier ID.  */
static int last_call_for_abi[NUM_ABI_IDS];

/* Which registers have been fully or partially clobbered by a call
   since they were last used.  */
static HARD_REG_SET full_and_partial_call_clobbers;

/* Current reload pseudo check for validity of elements in
   USAGE_INSNS.	 */
static int curr_usage_insns_check;

/* Info about last usage of registers in EBB to do inheritance/split
   transformation.  Inheritance transformation is done from a spilled
   pseudo and split transformations from a hard register or a pseudo
   assigned to a hard register.	 */
struct usage_insns
{
  /* If the value is equal to CURR_USAGE_INSNS_CHECK, then the member
     value INSNS is valid.  The insns is chain of optional debug insns
     and a finishing non-debug insn using the corresponding reg.  The
     value is also used to mark the registers which are set up in the
     current insn.  The negated insn uid is used for this.  */
  int check;
  /* Value of global reloads_num at the last insn in INSNS.  */
  int reloads_num;
  /* Value of global calls_num at the last insn in INSNS.  */
  int calls_num;
  /* It can be true only for splitting.	 And it means that the restore
     insn should be put after insn given by the following member.  */
  bool after_p;
  /* Next insns in the current EBB which use the original reg and the
     original reg value is not changed between the current insn and
     the next insns.  In other words, e.g. for inheritance, if we need
     to use the original reg value again in the next insns we can try
     to use the value in a hard register from a reload insn of the
     current insn.  */
  rtx insns;
};

/* Map: regno -> corresponding pseudo usage insns.  */
static struct usage_insns *usage_insns;
5276 
5277 static void
setup_next_usage_insn(int regno,rtx insn,int reloads_num,bool after_p)5278 setup_next_usage_insn (int regno, rtx insn, int reloads_num, bool after_p)
5279 {
5280   usage_insns[regno].check = curr_usage_insns_check;
5281   usage_insns[regno].insns = insn;
5282   usage_insns[regno].reloads_num = reloads_num;
5283   usage_insns[regno].calls_num = calls_num;
5284   usage_insns[regno].after_p = after_p;
5285   if (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0)
5286     remove_from_hard_reg_set (&full_and_partial_call_clobbers,
5287 			      PSEUDO_REGNO_MODE (regno),
5288 			      reg_renumber[regno]);
5289 }
5290 
5291 /* The function is used to form list REGNO usages which consists of
5292    optional debug insns finished by a non-debug insn using REGNO.
5293    RELOADS_NUM is current number of reload insns processed so far.  */
5294 static void
add_next_usage_insn(int regno,rtx_insn * insn,int reloads_num)5295 add_next_usage_insn (int regno, rtx_insn *insn, int reloads_num)
5296 {
5297   rtx next_usage_insns;
5298 
5299   if (usage_insns[regno].check == curr_usage_insns_check
5300       && (next_usage_insns = usage_insns[regno].insns) != NULL_RTX
5301       && DEBUG_INSN_P (insn))
5302     {
5303       /* Check that we did not add the debug insn yet.	*/
5304       if (next_usage_insns != insn
5305 	  && (GET_CODE (next_usage_insns) != INSN_LIST
5306 	      || XEXP (next_usage_insns, 0) != insn))
5307 	usage_insns[regno].insns = gen_rtx_INSN_LIST (VOIDmode, insn,
5308 						      next_usage_insns);
5309     }
5310   else if (NONDEBUG_INSN_P (insn))
5311     setup_next_usage_insn (regno, insn, reloads_num, false);
5312   else
5313     usage_insns[regno].check = 0;
5314 }
5315 
5316 /* Return first non-debug insn in list USAGE_INSNS.  */
5317 static rtx_insn *
skip_usage_debug_insns(rtx usage_insns)5318 skip_usage_debug_insns (rtx usage_insns)
5319 {
5320   rtx insn;
5321 
5322   /* Skip debug insns.  */
5323   for (insn = usage_insns;
5324        insn != NULL_RTX && GET_CODE (insn) == INSN_LIST;
5325        insn = XEXP (insn, 1))
5326     ;
5327   return safe_as_a <rtx_insn *> (insn);
5328 }
5329 
5330 /* Return true if we need secondary memory moves for insn in
5331    USAGE_INSNS after inserting inherited pseudo of class INHER_CL
5332    into the insn.  */
5333 static bool
check_secondary_memory_needed_p(enum reg_class inher_cl ATTRIBUTE_UNUSED,rtx usage_insns ATTRIBUTE_UNUSED)5334 check_secondary_memory_needed_p (enum reg_class inher_cl ATTRIBUTE_UNUSED,
5335 				 rtx usage_insns ATTRIBUTE_UNUSED)
5336 {
5337   rtx_insn *insn;
5338   rtx set, dest;
5339   enum reg_class cl;
5340 
5341   if (inher_cl == ALL_REGS
5342       || (insn = skip_usage_debug_insns (usage_insns)) == NULL_RTX)
5343     return false;
5344   lra_assert (INSN_P (insn));
5345   if ((set = single_set (insn)) == NULL_RTX || ! REG_P (SET_DEST (set)))
5346     return false;
5347   dest = SET_DEST (set);
5348   if (! REG_P (dest))
5349     return false;
5350   lra_assert (inher_cl != NO_REGS);
5351   cl = get_reg_class (REGNO (dest));
5352   return (cl != NO_REGS && cl != ALL_REGS
5353 	  && targetm.secondary_memory_needed (GET_MODE (dest), inher_cl, cl));
5354 }
5355 
/* Registers involved in inheritance/split in the current EBB
   (inheritance/split pseudos and original registers).	*/
static bitmap_head check_only_regs;

/* Reload pseudos cannot be involved in invariant inheritance in the
   current EBB.  */
static bitmap_head invalid_invariant_regs;
5363 
/* Do inheritance transformations for insn INSN, which defines (if
   DEF_P) or uses ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which
   instruction in the EBB next uses ORIGINAL_REGNO; it has the same
   form as the "insns" field of usage_insns.  Return true if we
   succeed in such transformation.

   The transformations look like:

     p <- ...		  i <- ...
     ...		  p <- i    (new insn)
     ...	     =>
     <- ... p ...	  <- ... i ...
   or
     ...		  i <- p    (new insn)
     <- ... p ...	  <- ... i ...
     ...	     =>
     <- ... p ...	  <- ... i ...
   where p is a spilled original pseudo and i is a new inheritance pseudo.


   The inheritance pseudo has the smallest class of two classes CL and
   class of ORIGINAL REGNO.  */
static bool
inherit_reload_reg (bool def_p, int original_regno,
		    enum reg_class cl, rtx_insn *insn, rtx next_usage_insns)
{
  /* Inheritance adds extra move insns, so it is a speed optimization
     only.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  enum reg_class rclass = lra_get_allocno_class (original_regno);
  rtx original_reg = regno_reg_rtx[original_regno];
  rtx new_reg, usage_insn;
  rtx_insn *new_insns;

  lra_assert (! usage_insns[original_regno].after_p);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
  /* A hard register from CL must be usable for RCLASS values.  */
  if (! ira_reg_classes_intersect_p[cl][rclass])
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for %d "
		   "because of disjoint classes %s and %s\n",
		   original_regno, reg_class_names[cl],
		   reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  if ((ira_class_subset_p[cl][rclass] && cl != rclass)
      /* We don't use a subset of two classes because it can be
	 NO_REGS.  This transformation is still profitable in most
	 cases even if the classes are not intersected as register
	 move is probably cheaper than a memory load.  */
      || ira_class_hard_regs_num[cl] < ira_class_hard_regs_num[rclass])
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "    Use smallest class of %s and %s\n",
		 reg_class_names[cl], reg_class_names[rclass]);

      rclass = cl;
    }
  if (check_secondary_memory_needed_p (rclass, next_usage_insns))
    {
      /* Reject inheritance resulting in secondary memory moves.
	 Otherwise, there is a danger in LRA cycling.  Also such
	 transformation will be unprofitable.  */
      if (lra_dump_file != NULL)
	{
	  rtx_insn *insn = skip_usage_debug_insns (next_usage_insns);
	  rtx set = single_set (insn);

	  lra_assert (set != NULL_RTX);

	  rtx dest = SET_DEST (set);

	  lra_assert (REG_P (dest));
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for insn %d(%s)<-%d(%s) "
		   "as secondary mem is needed\n",
		   REGNO (dest), reg_class_names[get_reg_class (REGNO (dest))],
		   original_regno, reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
				rclass, "inheritance");
  start_sequence ();
  if (def_p)
    lra_emit_move (original_reg, new_reg);
  else
    lra_emit_move (new_reg, original_reg);
  new_insns = get_insns ();
  end_sequence ();
  /* The inheritance move must be a single insn; more would make the
     transformation unprofitable.  */
  if (NEXT_INSN (new_insns) != NULL_RTX)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance %d->%d "
		   "as it results in 2 or more insns:\n",
		   original_regno, REGNO (new_reg));
	  dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  lra_substitute_pseudo_within_insn (insn, original_regno, new_reg, false);
  lra_update_insn_regno_info (insn);
  if (! def_p)
    /* We now have a new usage insn for original regno.  */
    setup_next_usage_insn (original_regno, new_insns, reloads_num, false);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "    Original reg change %d->%d (bb%d):\n",
	     original_regno, REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
  lra_reg_info[REGNO (new_reg)].restore_rtx = regno_reg_rtx[original_regno];
  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
  bitmap_set_bit (&check_only_regs, original_regno);
  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
  if (def_p)
    lra_process_new_insns (insn, NULL, new_insns,
			   "Add original<-inheritance");
  else
    lra_process_new_insns (insn, new_insns, NULL,
			   "Add inheritance<-original");
  /* Rewrite every recorded future use of the original pseudo (a chain
     of optional debug insns terminated by a non-debug insn) to use
     the inheritance pseudo instead.  */
  while (next_usage_insns != NULL_RTX)
    {
      if (GET_CODE (next_usage_insns) != INSN_LIST)
	{
	  usage_insn = next_usage_insns;
	  lra_assert (NONDEBUG_INSN_P (usage_insn));
	  next_usage_insns = NULL;
	}
      else
	{
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	}
      lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
			     DEBUG_INSN_P (usage_insn));
      lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
      if (lra_dump_file != NULL)
	{
	  basic_block bb = BLOCK_FOR_INSN (usage_insn);
	  fprintf (lra_dump_file,
		   "    Inheritance reuse change %d->%d (bb%d):\n",
		   original_regno, REGNO (new_reg),
		   bb ? bb->index : -1);
	  dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
	}
    }
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
  return true;
}
5527 
5528 /* Return true if we need a caller save/restore for pseudo REGNO which
5529    was assigned to a hard register.  */
5530 static inline bool
need_for_call_save_p(int regno)5531 need_for_call_save_p (int regno)
5532 {
5533   lra_assert (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0);
5534   if (usage_insns[regno].calls_num < calls_num)
5535     {
5536       unsigned int abis = 0;
5537       for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
5538 	if (last_call_for_abi[i] > usage_insns[regno].calls_num)
5539 	  abis |= 1 << i;
5540       gcc_assert (abis);
5541       if (call_clobbered_in_region_p (abis, full_and_partial_call_clobbers,
5542 				      PSEUDO_REGNO_MODE (regno),
5543 				      reg_renumber[regno]))
5544 	return true;
5545     }
5546   return false;
5547 }
5548 
/* Global registers occurring in the current EBB.  Consulted by
   need_for_split_p when deciding whether a pseudo is worth
   splitting.  */
static bitmap_head ebb_global_regs;
5551 
5552 /* Return true if we need a split for hard register REGNO or pseudo
5553    REGNO which was assigned to a hard register.
5554    POTENTIAL_RELOAD_HARD_REGS contains hard registers which might be
5555    used for reloads since the EBB end.	It is an approximation of the
5556    used hard registers in the split range.  The exact value would
5557    require expensive calculations.  If we were aggressive with
5558    splitting because of the approximation, the split pseudo will save
5559    the same hard register assignment and will be removed in the undo
5560    pass.  We still need the approximation because too aggressive
5561    splitting would result in too inaccurate cost calculation in the
5562    assignment pass because of too many generated moves which will be
5563    probably removed in the undo pass.  */
5564 static inline bool
need_for_split_p(HARD_REG_SET potential_reload_hard_regs,int regno)5565 need_for_split_p (HARD_REG_SET potential_reload_hard_regs, int regno)
5566 {
5567   int hard_regno = regno < FIRST_PSEUDO_REGISTER ? regno : reg_renumber[regno];
5568 
5569   lra_assert (hard_regno >= 0);
5570   return ((TEST_HARD_REG_BIT (potential_reload_hard_regs, hard_regno)
5571 	   /* Don't split eliminable hard registers, otherwise we can
5572 	      split hard registers like hard frame pointer, which
5573 	      lives on BB start/end according to DF-infrastructure,
5574 	      when there is a pseudo assigned to the register and
5575 	      living in the same BB.  */
5576 	   && (regno >= FIRST_PSEUDO_REGISTER
5577 	       || ! TEST_HARD_REG_BIT (eliminable_regset, hard_regno))
5578 	   && ! TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno)
5579 	   /* Don't split call clobbered hard regs living through
5580 	      calls, otherwise we might have a check problem in the
5581 	      assign sub-pass as in the most cases (exception is a
5582 	      situation when check_and_force_assignment_correctness_p value is
5583 	      true) the assign pass assumes that all pseudos living
5584 	      through calls are assigned to call saved hard regs.  */
5585 	   && (regno >= FIRST_PSEUDO_REGISTER
5586 	       || !TEST_HARD_REG_BIT (full_and_partial_call_clobbers, regno))
5587 	   /* We need at least 2 reloads to make pseudo splitting
5588 	      profitable.  We should provide hard regno splitting in
5589 	      any case to solve 1st insn scheduling problem when
5590 	      moving hard register definition up might result in
5591 	      impossibility to find hard register for reload pseudo of
5592 	      small register class.  */
5593 	   && (usage_insns[regno].reloads_num
5594 	       + (regno < FIRST_PSEUDO_REGISTER ? 0 : 3) < reloads_num)
5595 	   && (regno < FIRST_PSEUDO_REGISTER
5596 	       /* For short living pseudos, spilling + inheritance can
5597 		  be considered a substitution for splitting.
5598 		  Therefore we do not splitting for local pseudos.  It
5599 		  decreases also aggressiveness of splitting.  The
5600 		  minimal number of references is chosen taking into
5601 		  account that for 2 references splitting has no sense
5602 		  as we can just spill the pseudo.  */
5603 	       || (regno >= FIRST_PSEUDO_REGISTER
5604 		   && lra_reg_info[regno].nrefs > 3
5605 		   && bitmap_bit_p (&ebb_global_regs, regno))))
5606 	  || (regno >= FIRST_PSEUDO_REGISTER && need_for_call_save_p (regno)));
5607 }
5608 
5609 /* Return class for the split pseudo created from original pseudo with
5610    ALLOCNO_CLASS and MODE which got a hard register HARD_REGNO.	 We
5611    choose subclass of ALLOCNO_CLASS which contains HARD_REGNO and
5612    results in no secondary memory movements.  */
5613 static enum reg_class
choose_split_class(enum reg_class allocno_class,int hard_regno ATTRIBUTE_UNUSED,machine_mode mode ATTRIBUTE_UNUSED)5614 choose_split_class (enum reg_class allocno_class,
5615 		    int hard_regno ATTRIBUTE_UNUSED,
5616 		    machine_mode mode ATTRIBUTE_UNUSED)
5617 {
5618   int i;
5619   enum reg_class cl, best_cl = NO_REGS;
5620   enum reg_class hard_reg_class ATTRIBUTE_UNUSED
5621     = REGNO_REG_CLASS (hard_regno);
5622 
5623   if (! targetm.secondary_memory_needed (mode, allocno_class, allocno_class)
5624       && TEST_HARD_REG_BIT (reg_class_contents[allocno_class], hard_regno))
5625     return allocno_class;
5626   for (i = 0;
5627        (cl = reg_class_subclasses[allocno_class][i]) != LIM_REG_CLASSES;
5628        i++)
5629     if (! targetm.secondary_memory_needed (mode, cl, hard_reg_class)
5630 	&& ! targetm.secondary_memory_needed (mode, hard_reg_class, cl)
5631 	&& TEST_HARD_REG_BIT (reg_class_contents[cl], hard_regno)
5632 	&& (best_cl == NO_REGS
5633 	    || ira_class_hard_regs_num[best_cl] < ira_class_hard_regs_num[cl]))
5634       best_cl = cl;
5635   return best_cl;
5636 }
5637 
5638 /* Copy any equivalence information from ORIGINAL_REGNO to NEW_REGNO.
5639    It only makes sense to call this function if NEW_REGNO is always
5640    equal to ORIGINAL_REGNO.  */
5641 
5642 static void
lra_copy_reg_equiv(unsigned int new_regno,unsigned int original_regno)5643 lra_copy_reg_equiv (unsigned int new_regno, unsigned int original_regno)
5644 {
5645   if (!ira_reg_equiv[original_regno].defined_p)
5646     return;
5647 
5648   ira_expand_reg_equiv ();
5649   ira_reg_equiv[new_regno].defined_p = true;
5650   if (ira_reg_equiv[original_regno].memory)
5651     ira_reg_equiv[new_regno].memory
5652       = copy_rtx (ira_reg_equiv[original_regno].memory);
5653   if (ira_reg_equiv[original_regno].constant)
5654     ira_reg_equiv[new_regno].constant
5655       = copy_rtx (ira_reg_equiv[original_regno].constant);
5656   if (ira_reg_equiv[original_regno].invariant)
5657     ira_reg_equiv[new_regno].invariant
5658       = copy_rtx (ira_reg_equiv[original_regno].invariant);
5659 }
5660 
5661 /* Do split transformations for insn INSN, which defines or uses
5662    ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which instruction in
5663    the EBB next uses ORIGINAL_REGNO; it has the same form as the
5664    "insns" field of usage_insns.  If TO is not NULL, we don't use
5665    usage_insns, we put restore insns after TO insn.  It is a case when
5666    we call it from lra_split_hard_reg_for, outside the inheritance
5667    pass.
5668 
5669    The transformations look like:
5670 
5671      p <- ...		  p <- ...
5672      ...		  s <- p    (new insn -- save)
5673      ...	     =>
5674      ...		  p <- s    (new insn -- restore)
5675      <- ... p ...	  <- ... p ...
5676    or
5677      <- ... p ...	  <- ... p ...
5678      ...		  s <- p    (new insn -- save)
5679      ...	     =>
5680      ...		  p <- s    (new insn -- restore)
5681      <- ... p ...	  <- ... p ...
5682 
5683    where p is an original pseudo got a hard register or a hard
5684    register and s is a new split pseudo.  The save is put before INSN
5685    if BEFORE_P is true.	 Return true if we succeed in such
5686    transformation.  */
5687 static bool
split_reg(bool before_p,int original_regno,rtx_insn * insn,rtx next_usage_insns,rtx_insn * to)5688 split_reg (bool before_p, int original_regno, rtx_insn *insn,
5689 	   rtx next_usage_insns, rtx_insn *to)
5690 {
5691   enum reg_class rclass;
5692   rtx original_reg;
5693   int hard_regno, nregs;
5694   rtx new_reg, usage_insn;
5695   rtx_insn *restore, *save;
5696   bool after_p;
5697   bool call_save_p;
5698   machine_mode mode;
5699 
5700   if (original_regno < FIRST_PSEUDO_REGISTER)
5701     {
5702       rclass = ira_allocno_class_translate[REGNO_REG_CLASS (original_regno)];
5703       hard_regno = original_regno;
5704       call_save_p = false;
5705       nregs = 1;
5706       mode = lra_reg_info[hard_regno].biggest_mode;
5707       machine_mode reg_rtx_mode = GET_MODE (regno_reg_rtx[hard_regno]);
5708       /* A reg can have a biggest_mode of VOIDmode if it was only ever seen
5709 	 as part of a multi-word register.  In that case, or if the biggest
5710 	 mode was larger than a register, just use the reg_rtx.  Otherwise,
5711 	 limit the size to that of the biggest access in the function.  */
5712       if (mode == VOIDmode
5713 	  || paradoxical_subreg_p (mode, reg_rtx_mode))
5714 	{
5715 	  original_reg = regno_reg_rtx[hard_regno];
5716 	  mode = reg_rtx_mode;
5717 	}
5718       else
5719 	original_reg = gen_rtx_REG (mode, hard_regno);
5720     }
5721   else
5722     {
5723       mode = PSEUDO_REGNO_MODE (original_regno);
5724       hard_regno = reg_renumber[original_regno];
5725       nregs = hard_regno_nregs (hard_regno, mode);
5726       rclass = lra_get_allocno_class (original_regno);
5727       original_reg = regno_reg_rtx[original_regno];
5728       call_save_p = need_for_call_save_p (original_regno);
5729     }
5730   lra_assert (hard_regno >= 0);
5731   if (lra_dump_file != NULL)
5732     fprintf (lra_dump_file,
5733 	     "	  ((((((((((((((((((((((((((((((((((((((((((((((((\n");
5734 
5735   if (call_save_p)
5736     {
5737       mode = HARD_REGNO_CALLER_SAVE_MODE (hard_regno,
5738 					  hard_regno_nregs (hard_regno, mode),
5739 					  mode);
5740       new_reg = lra_create_new_reg (mode, NULL_RTX, NO_REGS, "save");
5741     }
5742   else
5743     {
5744       rclass = choose_split_class (rclass, hard_regno, mode);
5745       if (rclass == NO_REGS)
5746 	{
5747 	  if (lra_dump_file != NULL)
5748 	    {
5749 	      fprintf (lra_dump_file,
5750 		       "    Rejecting split of %d(%s): "
5751 		       "no good reg class for %d(%s)\n",
5752 		       original_regno,
5753 		       reg_class_names[lra_get_allocno_class (original_regno)],
5754 		       hard_regno,
5755 		       reg_class_names[REGNO_REG_CLASS (hard_regno)]);
5756 	      fprintf
5757 		(lra_dump_file,
5758 		 "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
5759 	    }
5760 	  return false;
5761 	}
5762       /* Split_if_necessary can split hard registers used as part of a
5763 	 multi-register mode but splits each register individually.  The
5764 	 mode used for each independent register may not be supported
5765 	 so reject the split.  Splitting the wider mode should theoretically
5766 	 be possible but is not implemented.  */
5767       if (!targetm.hard_regno_mode_ok (hard_regno, mode))
5768 	{
5769 	  if (lra_dump_file != NULL)
5770 	    {
5771 	      fprintf (lra_dump_file,
5772 		       "    Rejecting split of %d(%s): unsuitable mode %s\n",
5773 		       original_regno,
5774 		       reg_class_names[lra_get_allocno_class (original_regno)],
5775 		       GET_MODE_NAME (mode));
5776 	      fprintf
5777 		(lra_dump_file,
5778 		 "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
5779 	    }
5780 	  return false;
5781 	}
5782       new_reg = lra_create_new_reg (mode, original_reg, rclass, "split");
5783       reg_renumber[REGNO (new_reg)] = hard_regno;
5784     }
5785   int new_regno = REGNO (new_reg);
5786   save = emit_spill_move (true, new_reg, original_reg);
5787   if (NEXT_INSN (save) != NULL_RTX && !call_save_p)
5788     {
5789       if (lra_dump_file != NULL)
5790 	{
5791 	  fprintf
5792 	    (lra_dump_file,
5793 	     "	  Rejecting split %d->%d resulting in > 2 save insns:\n",
5794 	     original_regno, new_regno);
5795 	  dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
5796 	  fprintf (lra_dump_file,
5797 		   "	))))))))))))))))))))))))))))))))))))))))))))))))\n");
5798 	}
5799       return false;
5800     }
5801   restore = emit_spill_move (false, new_reg, original_reg);
5802   if (NEXT_INSN (restore) != NULL_RTX && !call_save_p)
5803     {
5804       if (lra_dump_file != NULL)
5805 	{
5806 	  fprintf (lra_dump_file,
5807 		   "	Rejecting split %d->%d "
5808 		   "resulting in > 2 restore insns:\n",
5809 		   original_regno, new_regno);
5810 	  dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
5811 	  fprintf (lra_dump_file,
5812 		   "	))))))))))))))))))))))))))))))))))))))))))))))))\n");
5813 	}
5814       return false;
5815     }
5816   /* Transfer equivalence information to the spill register, so that
5817      if we fail to allocate the spill register, we have the option of
5818      rematerializing the original value instead of spilling to the stack.  */
5819   if (!HARD_REGISTER_NUM_P (original_regno)
5820       && mode == PSEUDO_REGNO_MODE (original_regno))
5821     lra_copy_reg_equiv (new_regno, original_regno);
5822   lra_reg_info[new_regno].restore_rtx = regno_reg_rtx[original_regno];
5823   bitmap_set_bit (&lra_split_regs, new_regno);
5824   if (to != NULL)
5825     {
5826       lra_assert (next_usage_insns == NULL);
5827       usage_insn = to;
5828       after_p = TRUE;
5829     }
5830   else
5831     {
5832       /* We need check_only_regs only inside the inheritance pass.  */
5833       bitmap_set_bit (&check_only_regs, new_regno);
5834       bitmap_set_bit (&check_only_regs, original_regno);
5835       after_p = usage_insns[original_regno].after_p;
5836       for (;;)
5837 	{
5838 	  if (GET_CODE (next_usage_insns) != INSN_LIST)
5839 	    {
5840 	      usage_insn = next_usage_insns;
5841 	      break;
5842 	    }
5843 	  usage_insn = XEXP (next_usage_insns, 0);
5844 	  lra_assert (DEBUG_INSN_P (usage_insn));
5845 	  next_usage_insns = XEXP (next_usage_insns, 1);
5846 	  lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
5847 				 true);
5848 	  lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
5849 	  if (lra_dump_file != NULL)
5850 	    {
5851 	      fprintf (lra_dump_file, "    Split reuse change %d->%d:\n",
5852 		       original_regno, new_regno);
5853 	      dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
5854 	    }
5855 	}
5856     }
5857   lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
5858   lra_assert (usage_insn != insn || (after_p && before_p));
5859   lra_process_new_insns (as_a <rtx_insn *> (usage_insn),
5860 			 after_p ? NULL : restore,
5861 			 after_p ? restore : NULL,
5862 			 call_save_p
5863 			 ?  "Add reg<-save" : "Add reg<-split");
5864   lra_process_new_insns (insn, before_p ? save : NULL,
5865 			 before_p ? NULL : save,
5866 			 call_save_p
5867 			 ?  "Add save<-reg" : "Add split<-reg");
5868   if (nregs > 1)
5869     /* If we are trying to split multi-register.  We should check
5870        conflicts on the next assignment sub-pass.  IRA can allocate on
5871        sub-register levels, LRA do this on pseudos level right now and
5872        this discrepancy may create allocation conflicts after
5873        splitting.  */
5874     check_and_force_assignment_correctness_p = true;
5875   if (lra_dump_file != NULL)
5876     fprintf (lra_dump_file,
5877 	     "	  ))))))))))))))))))))))))))))))))))))))))))))))))\n");
5878   return true;
5879 }
5880 
/* Split a hard reg for reload pseudo REGNO having RCLASS and living
   in the range [FROM, TO].  Return true if did a split.  Otherwise,
   return false.  */
bool
spill_hard_reg_in_range (int regno, enum reg_class rclass, rtx_insn *from, rtx_insn *to)
{
  int i, hard_regno;
  int rclass_size;
  rtx_insn *insn;
  unsigned int uid;
  bitmap_iterator bi;
  HARD_REG_SET ignore;

  lra_assert (from != NULL && to != NULL);
  /* Collect in IGNORE every hard register mentioned, either as a
     (sub)operand or as an explicit hard reg, by any insn referencing
     REGNO: such a register cannot be chosen for the split.  */
  CLEAR_HARD_REG_SET (ignore);
  EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
    {
      lra_insn_recog_data_t id = lra_insn_recog_data[uid];
      struct lra_static_insn_data *static_id = id->insn_static_data;
      struct lra_insn_reg *reg;

      for (reg = id->regs; reg != NULL; reg = reg->next)
	if (reg->regno < FIRST_PSEUDO_REGISTER)
	  SET_HARD_REG_BIT (ignore, reg->regno);
      for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
	SET_HARD_REG_BIT (ignore, reg->regno);
    }
  /* Try the hard registers of RCLASS in allocation order.  Only
     registers conflicting with REGNO are interesting (a
     non-conflicting register would not free anything for REGNO), and
     registers in IGNORE are excluded.  */
  rclass_size = ira_class_hard_regs_num[rclass];
  for (i = 0; i < rclass_size; i++)
    {
      hard_regno = ira_class_hard_regs[rclass][i];
      if (! TEST_HARD_REG_BIT (lra_reg_info[regno].conflict_hard_regs, hard_regno)
	  || TEST_HARD_REG_BIT (ignore, hard_regno))
	continue;
      /* Scan the whole range [FROM, TO]; break out early if some insn
	 in the range already references HARD_REGNO.  */
      for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
	{
	  struct lra_static_insn_data *static_id;
	  struct lra_insn_reg *reg;

	  if (!INSN_P (insn))
	      continue;
	  if (bitmap_bit_p (&lra_reg_info[hard_regno].insn_bitmap,
			    INSN_UID (insn)))
	    break;
	  static_id = lra_get_insn_recog_data (insn)->insn_static_data;
	  for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
	    if (reg->regno == hard_regno)
	      break;
	  /* REG != NULL here means the inner loop found an explicit
	     use of HARD_REGNO in INSN.  */
	  if (reg != NULL)
	    break;
	}
      /* If we broke out of the scan early, HARD_REGNO is occupied
	 somewhere in the range -- try the next candidate.  */
      if (insn != NEXT_INSN (to))
	continue;
      if (split_reg (TRUE, hard_regno, from, NULL, to))
	return true;
    }
  return false;
}
5939 
5940 /* Recognize that we need a split transformation for insn INSN, which
5941    defines or uses REGNO in its insn biggest MODE (we use it only if
5942    REGNO is a hard register).  POTENTIAL_RELOAD_HARD_REGS contains
5943    hard registers which might be used for reloads since the EBB end.
5944    Put the save before INSN if BEFORE_P is true.  MAX_UID is maximla
5945    uid before starting INSN processing.  Return true if we succeed in
5946    such transformation.  */
5947 static bool
split_if_necessary(int regno,machine_mode mode,HARD_REG_SET potential_reload_hard_regs,bool before_p,rtx_insn * insn,int max_uid)5948 split_if_necessary (int regno, machine_mode mode,
5949 		    HARD_REG_SET potential_reload_hard_regs,
5950 		    bool before_p, rtx_insn *insn, int max_uid)
5951 {
5952   bool res = false;
5953   int i, nregs = 1;
5954   rtx next_usage_insns;
5955 
5956   if (regno < FIRST_PSEUDO_REGISTER)
5957     nregs = hard_regno_nregs (regno, mode);
5958   for (i = 0; i < nregs; i++)
5959     if (usage_insns[regno + i].check == curr_usage_insns_check
5960 	&& (next_usage_insns = usage_insns[regno + i].insns) != NULL_RTX
5961 	/* To avoid processing the register twice or more.  */
5962 	&& ((GET_CODE (next_usage_insns) != INSN_LIST
5963 	     && INSN_UID (next_usage_insns) < max_uid)
5964 	    || (GET_CODE (next_usage_insns) == INSN_LIST
5965 		&& (INSN_UID (XEXP (next_usage_insns, 0)) < max_uid)))
5966 	&& need_for_split_p (potential_reload_hard_regs, regno + i)
5967 	&& split_reg (before_p, regno + i, insn, next_usage_insns, NULL))
5968     res = true;
5969   return res;
5970 }
5971 
5972 /* Return TRUE if rtx X is considered as an invariant for
5973    inheritance.  */
5974 static bool
invariant_p(const_rtx x)5975 invariant_p (const_rtx x)
5976 {
5977   machine_mode mode;
5978   const char *fmt;
5979   enum rtx_code code;
5980   int i, j;
5981 
5982   if (side_effects_p (x))
5983     return false;
5984 
5985   code = GET_CODE (x);
5986   mode = GET_MODE (x);
5987   if (code == SUBREG)
5988     {
5989       x = SUBREG_REG (x);
5990       code = GET_CODE (x);
5991       mode = wider_subreg_mode (mode, GET_MODE (x));
5992     }
5993 
5994   if (MEM_P (x))
5995     return false;
5996 
5997   if (REG_P (x))
5998     {
5999       int i, nregs, regno = REGNO (x);
6000 
6001       if (regno >= FIRST_PSEUDO_REGISTER || regno == STACK_POINTER_REGNUM
6002 	  || TEST_HARD_REG_BIT (eliminable_regset, regno)
6003 	  || GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
6004 	return false;
6005       nregs = hard_regno_nregs (regno, mode);
6006       for (i = 0; i < nregs; i++)
6007 	if (! fixed_regs[regno + i]
6008 	    /* A hard register may be clobbered in the current insn
6009 	       but we can ignore this case because if the hard
6010 	       register is used it should be set somewhere after the
6011 	       clobber.  */
6012 	    || bitmap_bit_p (&invalid_invariant_regs, regno + i))
6013 	  return false;
6014     }
6015   fmt = GET_RTX_FORMAT (code);
6016   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6017     {
6018       if (fmt[i] == 'e')
6019 	{
6020 	  if (! invariant_p (XEXP (x, i)))
6021 	    return false;
6022 	}
6023       else if (fmt[i] == 'E')
6024 	{
6025 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6026 	    if (! invariant_p (XVECEXP (x, i, j)))
6027 	      return false;
6028 	}
6029     }
6030   return true;
6031 }
6032 
/* We have 'dest_reg <- invariant'.  Let us try to make an invariant
   inheritance transformation (using dest_reg instead invariant in a
   subsequent insn).  */
static bool
process_invariant_for_inheritance (rtx dst_reg, rtx invariant_rtx)
{
  invariant_ptr_t invariant_ptr;
  rtx_insn *insn, *new_insns;
  rtx insn_set, insn_reg, new_reg;
  int insn_regno;
  bool succ_p = false;
  int dst_regno = REGNO (dst_reg);
  machine_mode dst_mode = GET_MODE (dst_reg);
  enum reg_class cl = lra_get_allocno_class (dst_regno), insn_reg_cl;

  /* Look the invariant up in the invariant table.  INSN is the
     previously recorded insn computing the same invariant, if any;
     since the caller walks the EBB backwards, INSN comes later in the
     insn stream than the current insn.  */
  invariant_ptr = insert_invariant (invariant_rtx);
  if ((insn = invariant_ptr->insn) != NULL_RTX)
    {
      /* We have a subsequent insn using the invariant.  */
      insn_set = single_set (insn);
      lra_assert (insn_set != NULL);
      insn_reg = SET_DEST (insn_set);
      lra_assert (REG_P (insn_reg));
      insn_regno = REGNO (insn_reg);
      insn_reg_cl = lra_get_allocno_class (insn_regno);

      if (dst_mode == GET_MODE (insn_reg)
	  /* We should consider only result move reg insns which are
	     cheap.  */
	  && targetm.register_move_cost (dst_mode, cl, insn_reg_cl) == 2
	  && targetm.register_move_cost (dst_mode, cl, cl) == 2)
	{
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file,
		     "    [[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[\n");
	  /* Create an inheritance pseudo to carry the invariant
	     value, and remember INSN's pattern as its restore rtx so
	     the transformation can be undone if the pseudo does not
	     get a hard register.  */
	  new_reg = lra_create_new_reg (dst_mode, dst_reg,
					cl, "invariant inheritance");
	  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
	  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
	  lra_reg_info[REGNO (new_reg)].restore_rtx = PATTERN (insn);
	  /* Emit 'new_reg <- dst_reg' after the current insn.  */
	  start_sequence ();
	  lra_emit_move (new_reg, dst_reg);
	  new_insns = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (curr_insn, NULL, new_insns,
				 "Add invariant inheritance<-original");
	  /* Replace the later recomputation of the invariant with a
	     plain move 'insn_reg <- new_reg', then delete it.  */
	  start_sequence ();
	  lra_emit_move (SET_DEST (insn_set), new_reg);
	  new_insns = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (insn, NULL, new_insns,
				 "Changing reload<-inheritance");
	  lra_set_insn_deleted (insn);
	  succ_p = true;
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Invariant inheritance reuse change %d (bb%d):\n",
		       REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
	      dump_insn_slim (lra_dump_file, insn);
	      fprintf (lra_dump_file,
		       "	  ]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]\n");
	    }
	}
    }
  /* Record the current insn as the latest one (seen so far in the
     backward walk) computing this invariant.  */
  invariant_ptr->insn = curr_insn;
  return succ_p;
}
6101 
/* The registers from check_only_regs that are live at the current
   program point in the current EBB.  */
6104 static bitmap_head live_regs;
6105 
/* Update live info in EBB given by its HEAD and TAIL insns after
   inheritance/split transformation.  The function removes dead moves
   too.  The EBB is walked backwards; only registers from
   check_only_regs are tracked (in live_regs).  */
static void
update_ebb_live_info (rtx_insn *head, rtx_insn *tail)
{
  unsigned int j;
  int i, regno;
  bool live_p;
  rtx_insn *prev_insn;
  rtx set;
  bool remove_p;
  basic_block last_bb, prev_bb, curr_bb;
  bitmap_iterator bi;
  struct lra_insn_reg *reg;
  edge e;
  edge_iterator ei;

  last_bb = BLOCK_FOR_INSN (tail);
  prev_bb = NULL;
  for (curr_insn = tail;
       curr_insn != PREV_INSN (head);
       curr_insn = prev_insn)
    {
      prev_insn = PREV_INSN (curr_insn);
      /* We need to process empty blocks too.  They contain
	 NOTE_INSN_BASIC_BLOCK referring for the basic block.  */
      if (NOTE_P (curr_insn) && NOTE_KIND (curr_insn) != NOTE_INSN_BASIC_BLOCK)
	continue;
      curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (curr_bb != prev_bb)
	{
	  /* We crossed a basic block boundary (walking backwards):
	     flush the liveness computed so far into the df info.  */
	  if (prev_bb != NULL)
	    {
	      /* Update df_get_live_in (prev_bb):  */
	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
		if (bitmap_bit_p (&live_regs, j))
		  bitmap_set_bit (df_get_live_in (prev_bb), j);
		else
		  bitmap_clear_bit (df_get_live_in (prev_bb), j);
	    }
	  if (curr_bb != last_bb)
	    {
	      /* Update df_get_live_out (curr_bb):  */
	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
		{
		  live_p = bitmap_bit_p (&live_regs, j);
		  if (! live_p)
		    /* A register dead on the EBB path may still be
		       live out through some other successor edge.  */
		    FOR_EACH_EDGE (e, ei, curr_bb->succs)
		      if (bitmap_bit_p (df_get_live_in (e->dest), j))
			{
			  live_p = true;
			  break;
			}
		  if (live_p)
		    bitmap_set_bit (df_get_live_out (curr_bb), j);
		  else
		    bitmap_clear_bit (df_get_live_out (curr_bb), j);
		}
	    }
	  prev_bb = curr_bb;
	  /* Restart the backward scan of CURR_BB from its live-out
	     set, restricted to the registers being checked.  */
	  bitmap_and (&live_regs, &check_only_regs, df_get_live_out (curr_bb));
	}
      if (! NONDEBUG_INSN_P (curr_insn))
	continue;
      curr_id = lra_get_insn_recog_data (curr_insn);
      curr_static_id = curr_id->insn_static_data;
      remove_p = false;
      /* A single-set insn writing a tracked pseudo that is dead after
	 this point is a dead store candidate (decided before the
	 def/use updates below; actually removed at the end).  */
      if ((set = single_set (curr_insn)) != NULL_RTX
	  && REG_P (SET_DEST (set))
	  && (regno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
	  && SET_DEST (set) != pic_offset_table_rtx
	  && bitmap_bit_p (&check_only_regs, regno)
	  && ! bitmap_bit_p (&live_regs, regno))
	remove_p = true;
      /* See which defined values die here.  */
      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	if (reg->type == OP_OUT && ! reg->subreg_p)
	  bitmap_clear_bit (&live_regs, reg->regno);
      for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type == OP_OUT && ! reg->subreg_p)
	  bitmap_clear_bit (&live_regs, reg->regno);
      if (curr_id->arg_hard_regs != NULL)
	/* Make clobbered argument hard registers die.  Clobbered arg
	   hard regs are recorded biased by FIRST_PSEUDO_REGISTER.  */
	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    bitmap_clear_bit (&live_regs, regno - FIRST_PSEUDO_REGISTER);
      /* Mark each used value as live.  */
      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_OUT
	    && bitmap_bit_p (&check_only_regs, reg->regno))
	  bitmap_set_bit (&live_regs, reg->regno);
      for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_OUT
	    && bitmap_bit_p (&check_only_regs, reg->regno))
	  bitmap_set_bit (&live_regs, reg->regno);
      if (curr_id->arg_hard_regs != NULL)
	/* Make used argument hard registers live.  */
	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	  if (regno < FIRST_PSEUDO_REGISTER
	      && bitmap_bit_p (&check_only_regs, regno))
	    bitmap_set_bit (&live_regs, regno);
      /* It is quite important to remove dead move insns because it
	 means removing dead store.  We don't need to process them for
	 constraints.  */
      if (remove_p)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "	    Removing dead insn:\n ");
	      dump_insn_slim (lra_dump_file, curr_insn);
	    }
	  lra_set_insn_deleted (curr_insn);
	}
    }
}
6222 
/* The structure describes info to do an inheritance for the current
   insn.  We need to collect such info first before doing the
   transformations because the transformations change the insn
   internal representation.  */
struct to_inherit
{
  /* Original regno.  */
  int regno;
  /* Subsequent insns which can inherit original reg value.  */
  rtx insns;
};

/* Array containing all info for doing inheritance from the current
   insn.  */
static struct to_inherit to_inherit[LRA_MAX_INSN_RELOADS];

/* Number of elements in the previous array.  */
static int to_inherit_num;
6241 
6242 /* Add inheritance info REGNO and INSNS. Their meaning is described in
6243    structure to_inherit.  */
6244 static void
add_to_inherit(int regno,rtx insns)6245 add_to_inherit (int regno, rtx insns)
6246 {
6247   int i;
6248 
6249   for (i = 0; i < to_inherit_num; i++)
6250     if (to_inherit[i].regno == regno)
6251       return;
6252   lra_assert (to_inherit_num < LRA_MAX_INSN_RELOADS);
6253   to_inherit[to_inherit_num].regno = regno;
6254   to_inherit[to_inherit_num++].insns = insns;
6255 }
6256 
6257 /* Return the last non-debug insn in basic block BB, or the block begin
6258    note if none.  */
6259 static rtx_insn *
get_last_insertion_point(basic_block bb)6260 get_last_insertion_point (basic_block bb)
6261 {
6262   rtx_insn *insn;
6263 
6264   FOR_BB_INSNS_REVERSE (bb, insn)
6265     if (NONDEBUG_INSN_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
6266       return insn;
6267   gcc_unreachable ();
6268 }
6269 
6270 /* Set up RES by registers living on edges FROM except the edge (FROM,
6271    TO) or by registers set up in a jump insn in BB FROM.  */
6272 static void
get_live_on_other_edges(basic_block from,basic_block to,bitmap res)6273 get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
6274 {
6275   rtx_insn *last;
6276   struct lra_insn_reg *reg;
6277   edge e;
6278   edge_iterator ei;
6279 
6280   lra_assert (to != NULL);
6281   bitmap_clear (res);
6282   FOR_EACH_EDGE (e, ei, from->succs)
6283     if (e->dest != to)
6284       bitmap_ior_into (res, df_get_live_in (e->dest));
6285   last = get_last_insertion_point (from);
6286   if (! JUMP_P (last))
6287     return;
6288   curr_id = lra_get_insn_recog_data (last);
6289   for (reg = curr_id->regs; reg != NULL; reg = reg->next)
6290     if (reg->type != OP_IN)
6291       bitmap_set_bit (res, reg->regno);
6292 }
6293 
6294 /* Used as a temporary results of some bitmap calculations.  */
6295 static bitmap_head temp_bitmap;
6296 
6297 /* We split for reloads of small class of hard regs.  The following
6298    defines how many hard regs the class should have to be qualified as
6299    small.  The code is mostly oriented to x86/x86-64 architecture
6300    where some insns need to use only specific register or pair of
6301    registers and these register can live in RTL explicitly, e.g. for
6302    parameter passing.  */
6303 static const int max_small_class_regs_num = 2;
6304 
6305 /* Do inheritance/split transformations in EBB starting with HEAD and
6306    finishing on TAIL.  We process EBB insns in the reverse order.
6307    Return true if we did any inheritance/split transformation in the
6308    EBB.
6309 
6310    We should avoid excessive splitting which results in worse code
6311    because of inaccurate cost calculations for spilling new split
6312    pseudos in such case.  To achieve this we do splitting only if
6313    register pressure is high in given basic block and there are reload
6314    pseudos requiring hard registers.  We could do more register
6315    pressure calculations at any given program point to avoid necessary
6316    splitting even more but it is to expensive and the current approach
6317    works well enough.  */
6318 static bool
inherit_in_ebb(rtx_insn * head,rtx_insn * tail)6319 inherit_in_ebb (rtx_insn *head, rtx_insn *tail)
6320 {
6321   int i, src_regno, dst_regno, nregs;
6322   bool change_p, succ_p, update_reloads_num_p;
6323   rtx_insn *prev_insn, *last_insn;
6324   rtx next_usage_insns, curr_set;
6325   enum reg_class cl;
6326   struct lra_insn_reg *reg;
6327   basic_block last_processed_bb, curr_bb = NULL;
6328   HARD_REG_SET potential_reload_hard_regs, live_hard_regs;
6329   bitmap to_process;
6330   unsigned int j;
6331   bitmap_iterator bi;
6332   bool head_p, after_p;
6333 
6334   change_p = false;
6335   curr_usage_insns_check++;
6336   clear_invariants ();
6337   reloads_num = calls_num = 0;
6338   for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
6339     last_call_for_abi[i] = 0;
6340   CLEAR_HARD_REG_SET (full_and_partial_call_clobbers);
6341   bitmap_clear (&check_only_regs);
6342   bitmap_clear (&invalid_invariant_regs);
6343   last_processed_bb = NULL;
6344   CLEAR_HARD_REG_SET (potential_reload_hard_regs);
6345   live_hard_regs = eliminable_regset | lra_no_alloc_regs;
6346   /* We don't process new insns generated in the loop.	*/
6347   for (curr_insn = tail; curr_insn != PREV_INSN (head); curr_insn = prev_insn)
6348     {
6349       prev_insn = PREV_INSN (curr_insn);
6350       if (BLOCK_FOR_INSN (curr_insn) != NULL)
6351 	curr_bb = BLOCK_FOR_INSN (curr_insn);
6352       if (last_processed_bb != curr_bb)
6353 	{
6354 	  /* We are at the end of BB.  Add qualified living
6355 	     pseudos for potential splitting.  */
6356 	  to_process = df_get_live_out (curr_bb);
6357 	  if (last_processed_bb != NULL)
6358 	    {
6359 	      /* We are somewhere in the middle of EBB.	 */
6360 	      get_live_on_other_edges (curr_bb, last_processed_bb,
6361 				       &temp_bitmap);
6362 	      to_process = &temp_bitmap;
6363 	    }
6364 	  last_processed_bb = curr_bb;
6365 	  last_insn = get_last_insertion_point (curr_bb);
6366 	  after_p = (! JUMP_P (last_insn)
6367 		     && (! CALL_P (last_insn)
6368 			 || (find_reg_note (last_insn,
6369 					   REG_NORETURN, NULL_RTX) == NULL_RTX
6370 			     && ! SIBLING_CALL_P (last_insn))));
6371 	  CLEAR_HARD_REG_SET (potential_reload_hard_regs);
6372 	  EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
6373 	    {
6374 	      if ((int) j >= lra_constraint_new_regno_start)
6375 		break;
6376 	      if (j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
6377 		{
6378 		  if (j < FIRST_PSEUDO_REGISTER)
6379 		    SET_HARD_REG_BIT (live_hard_regs, j);
6380 		  else
6381 		    add_to_hard_reg_set (&live_hard_regs,
6382 					 PSEUDO_REGNO_MODE (j),
6383 					 reg_renumber[j]);
6384 		  setup_next_usage_insn (j, last_insn, reloads_num, after_p);
6385 		}
6386 	    }
6387 	}
6388       src_regno = dst_regno = -1;
6389       curr_set = single_set (curr_insn);
6390       if (curr_set != NULL_RTX && REG_P (SET_DEST (curr_set)))
6391 	dst_regno = REGNO (SET_DEST (curr_set));
6392       if (curr_set != NULL_RTX && REG_P (SET_SRC (curr_set)))
6393 	src_regno = REGNO (SET_SRC (curr_set));
6394       update_reloads_num_p = true;
6395       if (src_regno < lra_constraint_new_regno_start
6396 	  && src_regno >= FIRST_PSEUDO_REGISTER
6397 	  && reg_renumber[src_regno] < 0
6398 	  && dst_regno >= lra_constraint_new_regno_start
6399 	  && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS)
6400 	{
6401 	  /* 'reload_pseudo <- original_pseudo'.  */
6402 	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
6403 	    reloads_num++;
6404 	  update_reloads_num_p = false;
6405 	  succ_p = false;
6406 	  if (usage_insns[src_regno].check == curr_usage_insns_check
6407 	      && (next_usage_insns = usage_insns[src_regno].insns) != NULL_RTX)
6408 	    succ_p = inherit_reload_reg (false, src_regno, cl,
6409 					 curr_insn, next_usage_insns);
6410 	  if (succ_p)
6411 	    change_p = true;
6412 	  else
6413 	    setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
6414 	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
6415 	    potential_reload_hard_regs |= reg_class_contents[cl];
6416 	}
6417       else if (src_regno < 0
6418 	       && dst_regno >= lra_constraint_new_regno_start
6419 	       && invariant_p (SET_SRC (curr_set))
6420 	       && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS
6421 	       && ! bitmap_bit_p (&invalid_invariant_regs, dst_regno)
6422 	       && ! bitmap_bit_p (&invalid_invariant_regs,
6423 				  ORIGINAL_REGNO(regno_reg_rtx[dst_regno])))
6424 	{
6425 	  /* 'reload_pseudo <- invariant'.  */
6426 	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
6427 	    reloads_num++;
6428 	  update_reloads_num_p = false;
6429 	  if (process_invariant_for_inheritance (SET_DEST (curr_set), SET_SRC (curr_set)))
6430 	    change_p = true;
6431 	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
6432 	    potential_reload_hard_regs |= reg_class_contents[cl];
6433 	}
6434       else if (src_regno >= lra_constraint_new_regno_start
6435 	       && dst_regno < lra_constraint_new_regno_start
6436 	       && dst_regno >= FIRST_PSEUDO_REGISTER
6437 	       && reg_renumber[dst_regno] < 0
6438 	       && (cl = lra_get_allocno_class (src_regno)) != NO_REGS
6439 	       && usage_insns[dst_regno].check == curr_usage_insns_check
6440 	       && (next_usage_insns
6441 		   = usage_insns[dst_regno].insns) != NULL_RTX)
6442 	{
6443 	  if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
6444 	    reloads_num++;
6445 	  update_reloads_num_p = false;
6446 	  /* 'original_pseudo <- reload_pseudo'.  */
6447 	  if (! JUMP_P (curr_insn)
6448 	      && inherit_reload_reg (true, dst_regno, cl,
6449 				     curr_insn, next_usage_insns))
6450 	    change_p = true;
6451 	  /* Invalidate.  */
6452 	  usage_insns[dst_regno].check = 0;
6453 	  if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
6454 	    potential_reload_hard_regs |= reg_class_contents[cl];
6455 	}
6456       else if (INSN_P (curr_insn))
6457 	{
6458 	  int iter;
6459 	  int max_uid = get_max_uid ();
6460 
6461 	  curr_id = lra_get_insn_recog_data (curr_insn);
6462 	  curr_static_id = curr_id->insn_static_data;
6463 	  to_inherit_num = 0;
6464 	  /* Process insn definitions.	*/
6465 	  for (iter = 0; iter < 2; iter++)
6466 	    for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
6467 		 reg != NULL;
6468 		 reg = reg->next)
6469 	      if (reg->type != OP_IN
6470 		  && (dst_regno = reg->regno) < lra_constraint_new_regno_start)
6471 		{
6472 		  if (dst_regno >= FIRST_PSEUDO_REGISTER && reg->type == OP_OUT
6473 		      && reg_renumber[dst_regno] < 0 && ! reg->subreg_p
6474 		      && usage_insns[dst_regno].check == curr_usage_insns_check
6475 		      && (next_usage_insns
6476 			  = usage_insns[dst_regno].insns) != NULL_RTX)
6477 		    {
6478 		      struct lra_insn_reg *r;
6479 
6480 		      for (r = curr_id->regs; r != NULL; r = r->next)
6481 			if (r->type != OP_OUT && r->regno == dst_regno)
6482 			  break;
6483 		      /* Don't do inheritance if the pseudo is also
6484 			 used in the insn.  */
6485 		      if (r == NULL)
6486 			/* We cannot do inheritance right now
6487 			   because the current insn reg info (chain
6488 			   regs) can change after that.  */
6489 			add_to_inherit (dst_regno, next_usage_insns);
6490 		    }
6491 		  /* We cannot process one reg twice here because of
6492 		     usage_insns invalidation.  */
6493 		  if ((dst_regno < FIRST_PSEUDO_REGISTER
6494 		       || reg_renumber[dst_regno] >= 0)
6495 		      && ! reg->subreg_p && reg->type != OP_IN)
6496 		    {
6497 		      HARD_REG_SET s;
6498 
6499 		      if (split_if_necessary (dst_regno, reg->biggest_mode,
6500 					      potential_reload_hard_regs,
6501 					      false, curr_insn, max_uid))
6502 			change_p = true;
6503 		      CLEAR_HARD_REG_SET (s);
6504 		      if (dst_regno < FIRST_PSEUDO_REGISTER)
6505 			add_to_hard_reg_set (&s, reg->biggest_mode, dst_regno);
6506 		      else
6507 			add_to_hard_reg_set (&s, PSEUDO_REGNO_MODE (dst_regno),
6508 					     reg_renumber[dst_regno]);
6509 		      live_hard_regs &= ~s;
6510 		      potential_reload_hard_regs &= ~s;
6511 		    }
6512 		  /* We should invalidate potential inheritance or
6513 		     splitting for the current insn usages to the next
6514 		     usage insns (see code below) as the output pseudo
6515 		     prevents this.  */
6516 		  if ((dst_regno >= FIRST_PSEUDO_REGISTER
6517 		       && reg_renumber[dst_regno] < 0)
6518 		      || (reg->type == OP_OUT && ! reg->subreg_p
6519 			  && (dst_regno < FIRST_PSEUDO_REGISTER
6520 			      || reg_renumber[dst_regno] >= 0)))
6521 		    {
6522 		      /* Invalidate and mark definitions.  */
6523 		      if (dst_regno >= FIRST_PSEUDO_REGISTER)
6524 			usage_insns[dst_regno].check = -(int) INSN_UID (curr_insn);
6525 		      else
6526 			{
6527 			  nregs = hard_regno_nregs (dst_regno,
6528 						    reg->biggest_mode);
6529 			  for (i = 0; i < nregs; i++)
6530 			    usage_insns[dst_regno + i].check
6531 			      = -(int) INSN_UID (curr_insn);
6532 			}
6533 		    }
6534 		}
6535 	  /* Process clobbered call regs.  */
6536 	  if (curr_id->arg_hard_regs != NULL)
6537 	    for (i = 0; (dst_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
6538 	      if (dst_regno >= FIRST_PSEUDO_REGISTER)
6539 		usage_insns[dst_regno - FIRST_PSEUDO_REGISTER].check
6540 		  = -(int) INSN_UID (curr_insn);
6541 	  if (! JUMP_P (curr_insn))
6542 	    for (i = 0; i < to_inherit_num; i++)
6543 	      if (inherit_reload_reg (true, to_inherit[i].regno,
6544 				      ALL_REGS, curr_insn,
6545 				      to_inherit[i].insns))
6546 	      change_p = true;
6547 	  if (CALL_P (curr_insn))
6548 	    {
6549 	      rtx cheap, pat, dest;
6550 	      rtx_insn *restore;
6551 	      int regno, hard_regno;
6552 
6553 	      calls_num++;
6554 	      function_abi callee_abi = insn_callee_abi (curr_insn);
6555 	      last_call_for_abi[callee_abi.id ()] = calls_num;
6556 	      full_and_partial_call_clobbers
6557 		|= callee_abi.full_and_partial_reg_clobbers ();
6558 	      if ((cheap = find_reg_note (curr_insn,
6559 					  REG_RETURNED, NULL_RTX)) != NULL_RTX
6560 		  && ((cheap = XEXP (cheap, 0)), true)
6561 		  && (regno = REGNO (cheap)) >= FIRST_PSEUDO_REGISTER
6562 		  && (hard_regno = reg_renumber[regno]) >= 0
6563 		  && usage_insns[regno].check == curr_usage_insns_check
6564 		  /* If there are pending saves/restores, the
6565 		     optimization is not worth.	 */
6566 		  && usage_insns[regno].calls_num == calls_num - 1
6567 		  && callee_abi.clobbers_reg_p (GET_MODE (cheap), hard_regno))
6568 		{
6569 		  /* Restore the pseudo from the call result as
6570 		     REG_RETURNED note says that the pseudo value is
6571 		     in the call result and the pseudo is an argument
6572 		     of the call.  */
6573 		  pat = PATTERN (curr_insn);
6574 		  if (GET_CODE (pat) == PARALLEL)
6575 		    pat = XVECEXP (pat, 0, 0);
6576 		  dest = SET_DEST (pat);
6577 		  /* For multiple return values dest is PARALLEL.
6578 		     Currently we handle only single return value case.  */
6579 		  if (REG_P (dest))
6580 		    {
6581 		      start_sequence ();
6582 		      emit_move_insn (cheap, copy_rtx (dest));
6583 		      restore = get_insns ();
6584 		      end_sequence ();
6585 		      lra_process_new_insns (curr_insn, NULL, restore,
6586 					     "Inserting call parameter restore");
6587 		      /* We don't need to save/restore of the pseudo from
6588 			 this call.	 */
6589 		      usage_insns[regno].calls_num = calls_num;
6590 		      remove_from_hard_reg_set
6591 			(&full_and_partial_call_clobbers,
6592 			 GET_MODE (cheap), hard_regno);
6593 		      bitmap_set_bit (&check_only_regs, regno);
6594 		    }
6595 		}
6596 	    }
6597 	  to_inherit_num = 0;
6598 	  /* Process insn usages.  */
6599 	  for (iter = 0; iter < 2; iter++)
6600 	    for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
6601 		 reg != NULL;
6602 		 reg = reg->next)
6603 	      if ((reg->type != OP_OUT
6604 		   || (reg->type == OP_OUT && reg->subreg_p))
6605 		  && (src_regno = reg->regno) < lra_constraint_new_regno_start)
6606 		{
6607 		  if (src_regno >= FIRST_PSEUDO_REGISTER
6608 		      && reg_renumber[src_regno] < 0 && reg->type == OP_IN)
6609 		    {
6610 		      if (usage_insns[src_regno].check == curr_usage_insns_check
6611 			  && (next_usage_insns
6612 			      = usage_insns[src_regno].insns) != NULL_RTX
6613 			  && NONDEBUG_INSN_P (curr_insn))
6614 			add_to_inherit (src_regno, next_usage_insns);
6615 		      else if (usage_insns[src_regno].check
6616 			       != -(int) INSN_UID (curr_insn))
6617 			/* Add usages but only if the reg is not set up
6618 			   in the same insn.  */
6619 			add_next_usage_insn (src_regno, curr_insn, reloads_num);
6620 		    }
6621 		  else if (src_regno < FIRST_PSEUDO_REGISTER
6622 			   || reg_renumber[src_regno] >= 0)
6623 		    {
6624 		      bool before_p;
6625 		      rtx_insn *use_insn = curr_insn;
6626 
6627 		      before_p = (JUMP_P (curr_insn)
6628 				  || (CALL_P (curr_insn) && reg->type == OP_IN));
6629 		      if (NONDEBUG_INSN_P (curr_insn)
6630 			  && (! JUMP_P (curr_insn) || reg->type == OP_IN)
6631 			  && split_if_necessary (src_regno, reg->biggest_mode,
6632 						 potential_reload_hard_regs,
6633 						 before_p, curr_insn, max_uid))
6634 			{
6635 			  if (reg->subreg_p)
6636 			    check_and_force_assignment_correctness_p = true;
6637 			  change_p = true;
6638 			  /* Invalidate. */
6639 			  usage_insns[src_regno].check = 0;
6640 			  if (before_p)
6641 			    use_insn = PREV_INSN (curr_insn);
6642 			}
6643 		      if (NONDEBUG_INSN_P (curr_insn))
6644 			{
6645 			  if (src_regno < FIRST_PSEUDO_REGISTER)
6646 			    add_to_hard_reg_set (&live_hard_regs,
6647 						 reg->biggest_mode, src_regno);
6648 			  else
6649 			    add_to_hard_reg_set (&live_hard_regs,
6650 						 PSEUDO_REGNO_MODE (src_regno),
6651 						 reg_renumber[src_regno]);
6652 			}
6653 		      if (src_regno >= FIRST_PSEUDO_REGISTER)
6654 			add_next_usage_insn (src_regno, use_insn, reloads_num);
6655 		      else
6656 			{
6657 			  for (i = 0; i < hard_regno_nregs (src_regno, reg->biggest_mode); i++)
6658 			    add_next_usage_insn (src_regno + i, use_insn, reloads_num);
6659 			}
6660 		    }
6661 		}
6662 	  /* Process used call regs.  */
6663 	  if (curr_id->arg_hard_regs != NULL)
6664 	    for (i = 0; (src_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
6665 	      if (src_regno < FIRST_PSEUDO_REGISTER)
6666 		{
6667 	           SET_HARD_REG_BIT (live_hard_regs, src_regno);
6668 	           add_next_usage_insn (src_regno, curr_insn, reloads_num);
6669 		}
6670 	  for (i = 0; i < to_inherit_num; i++)
6671 	    {
6672 	      src_regno = to_inherit[i].regno;
6673 	      if (inherit_reload_reg (false, src_regno, ALL_REGS,
6674 				      curr_insn, to_inherit[i].insns))
6675 		change_p = true;
6676 	      else
6677 		setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
6678 	    }
6679 	}
6680       if (update_reloads_num_p
6681 	  && NONDEBUG_INSN_P (curr_insn) && curr_set != NULL_RTX)
6682 	{
6683 	  int regno = -1;
6684 	  if ((REG_P (SET_DEST (curr_set))
6685 	       && (regno = REGNO (SET_DEST (curr_set))) >= lra_constraint_new_regno_start
6686 	       && reg_renumber[regno] < 0
6687 	       && (cl = lra_get_allocno_class (regno)) != NO_REGS)
6688 	      || (REG_P (SET_SRC (curr_set))
6689 	          && (regno = REGNO (SET_SRC (curr_set))) >= lra_constraint_new_regno_start
6690 	          && reg_renumber[regno] < 0
6691 	          && (cl = lra_get_allocno_class (regno)) != NO_REGS))
6692 	    {
6693 	      if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
6694 		reloads_num++;
6695 	      if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
6696 		potential_reload_hard_regs |= reg_class_contents[cl];
6697 	    }
6698 	}
6699       if (NONDEBUG_INSN_P (curr_insn))
6700 	{
6701 	  int regno;
6702 
6703 	  /* Invalidate invariants with changed regs.  */
6704 	  curr_id = lra_get_insn_recog_data (curr_insn);
6705 	  for (reg = curr_id->regs; reg != NULL; reg = reg->next)
6706 	    if (reg->type != OP_IN)
6707 	      {
6708 		bitmap_set_bit (&invalid_invariant_regs, reg->regno);
6709 		bitmap_set_bit (&invalid_invariant_regs,
6710 				ORIGINAL_REGNO (regno_reg_rtx[reg->regno]));
6711 	      }
6712 	  curr_static_id = curr_id->insn_static_data;
6713 	  for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
6714 	    if (reg->type != OP_IN)
6715 	      bitmap_set_bit (&invalid_invariant_regs, reg->regno);
6716 	  if (curr_id->arg_hard_regs != NULL)
6717 	    for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
6718 	      if (regno >= FIRST_PSEUDO_REGISTER)
6719 		bitmap_set_bit (&invalid_invariant_regs,
6720 				regno - FIRST_PSEUDO_REGISTER);
6721 	}
6722       /* We reached the start of the current basic block.  */
6723       if (prev_insn == NULL_RTX || prev_insn == PREV_INSN (head)
6724 	  || BLOCK_FOR_INSN (prev_insn) != curr_bb)
6725 	{
	  /* We reached the beginning of the current block -- do the
	     rest of the splitting in the current BB.  */
6728 	  to_process = df_get_live_in (curr_bb);
6729 	  if (BLOCK_FOR_INSN (head) != curr_bb)
6730 	    {
6731 	      /* We are somewhere in the middle of EBB.	 */
6732 	      get_live_on_other_edges (EDGE_PRED (curr_bb, 0)->src,
6733 				       curr_bb, &temp_bitmap);
6734 	      to_process = &temp_bitmap;
6735 	    }
6736 	  head_p = true;
6737 	  EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
6738 	    {
6739 	      if ((int) j >= lra_constraint_new_regno_start)
6740 		break;
6741 	      if (((int) j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
6742 		  && usage_insns[j].check == curr_usage_insns_check
6743 		  && (next_usage_insns = usage_insns[j].insns) != NULL_RTX)
6744 		{
6745 		  if (need_for_split_p (potential_reload_hard_regs, j))
6746 		    {
6747 		      if (lra_dump_file != NULL && head_p)
6748 			{
6749 			  fprintf (lra_dump_file,
6750 				   "  ----------------------------------\n");
6751 			  head_p = false;
6752 			}
6753 		      if (split_reg (false, j, bb_note (curr_bb),
6754 				     next_usage_insns, NULL))
6755 			change_p = true;
6756 		    }
6757 		  usage_insns[j].check = 0;
6758 		}
6759 	    }
6760 	}
6761     }
6762   return change_p;
6763 }
6764 
6765 /* This value affects EBB forming.  If probability of edge from EBB to
6766    a BB is not greater than the following value, we don't add the BB
6767    to EBB.  */
6768 #define EBB_PROBABILITY_CUTOFF \
6769   ((REG_BR_PROB_BASE * param_lra_inheritance_ebb_probability_cutoff) / 100)
6770 
6771 /* Current number of inheritance/split iteration.  */
6772 int lra_inheritance_iter;
6773 
6774 /* Entry function for inheritance/split pass.  */
6775 void
lra_inheritance(void)6776 lra_inheritance (void)
6777 {
6778   int i;
6779   basic_block bb, start_bb;
6780   edge e;
6781 
6782   lra_inheritance_iter++;
6783   if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
6784     return;
6785   timevar_push (TV_LRA_INHERITANCE);
6786   if (lra_dump_file != NULL)
6787     fprintf (lra_dump_file, "\n********** Inheritance #%d: **********\n\n",
6788 	     lra_inheritance_iter);
6789   curr_usage_insns_check = 0;
6790   usage_insns = XNEWVEC (struct usage_insns, lra_constraint_new_regno_start);
6791   for (i = 0; i < lra_constraint_new_regno_start; i++)
6792     usage_insns[i].check = 0;
6793   bitmap_initialize (&check_only_regs, &reg_obstack);
6794   bitmap_initialize (&invalid_invariant_regs, &reg_obstack);
6795   bitmap_initialize (&live_regs, &reg_obstack);
6796   bitmap_initialize (&temp_bitmap, &reg_obstack);
6797   bitmap_initialize (&ebb_global_regs, &reg_obstack);
6798   FOR_EACH_BB_FN (bb, cfun)
6799     {
6800       start_bb = bb;
6801       if (lra_dump_file != NULL)
6802 	fprintf (lra_dump_file, "EBB");
6803       /* Form a EBB starting with BB.  */
6804       bitmap_clear (&ebb_global_regs);
6805       bitmap_ior_into (&ebb_global_regs, df_get_live_in (bb));
6806       for (;;)
6807 	{
6808 	  if (lra_dump_file != NULL)
6809 	    fprintf (lra_dump_file, " %d", bb->index);
6810 	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
6811 	      || LABEL_P (BB_HEAD (bb->next_bb)))
6812 	    break;
6813 	  e = find_fallthru_edge (bb->succs);
6814 	  if (! e)
6815 	    break;
6816 	  if (e->probability.initialized_p ()
6817 	      && e->probability.to_reg_br_prob_base () < EBB_PROBABILITY_CUTOFF)
6818 	    break;
6819 	  bb = bb->next_bb;
6820 	}
6821       bitmap_ior_into (&ebb_global_regs, df_get_live_out (bb));
6822       if (lra_dump_file != NULL)
6823 	fprintf (lra_dump_file, "\n");
6824       if (inherit_in_ebb (BB_HEAD (start_bb), BB_END (bb)))
6825 	/* Remember that the EBB head and tail can change in
6826 	   inherit_in_ebb.  */
6827 	update_ebb_live_info (BB_HEAD (start_bb), BB_END (bb));
6828     }
6829   bitmap_release (&ebb_global_regs);
6830   bitmap_release (&temp_bitmap);
6831   bitmap_release (&live_regs);
6832   bitmap_release (&invalid_invariant_regs);
6833   bitmap_release (&check_only_regs);
6834   free (usage_insns);
6835 
6836   timevar_pop (TV_LRA_INHERITANCE);
6837 }
6838 
6839 
6840 
6841 /* This page contains code to undo failed inheritance/split
6842    transformations.  */
6843 
6844 /* Current number of iteration undoing inheritance/split.  */
6845 int lra_undo_inheritance_iter;
6846 
6847 /* Fix BB live info LIVE after removing pseudos created on pass doing
6848    inheritance/split which are REMOVED_PSEUDOS.	 */
6849 static void
fix_bb_live_info(bitmap live,bitmap removed_pseudos)6850 fix_bb_live_info (bitmap live, bitmap removed_pseudos)
6851 {
6852   unsigned int regno;
6853   bitmap_iterator bi;
6854 
6855   EXECUTE_IF_SET_IN_BITMAP (removed_pseudos, 0, regno, bi)
6856     if (bitmap_clear_bit (live, regno)
6857 	&& REG_P (lra_reg_info[regno].restore_rtx))
6858       bitmap_set_bit (live, REGNO (lra_reg_info[regno].restore_rtx));
6859 }
6860 
6861 /* Return regno of the (subreg of) REG. Otherwise, return a negative
6862    number.  */
6863 static int
get_regno(rtx reg)6864 get_regno (rtx reg)
6865 {
6866   if (GET_CODE (reg) == SUBREG)
6867     reg = SUBREG_REG (reg);
6868   if (REG_P (reg))
6869     return REGNO (reg);
6870   return -1;
6871 }
6872 
6873 /* Delete a move INSN with destination reg DREGNO and a previous
6874    clobber insn with the same regno.  The inheritance/split code can
6875    generate moves with preceding clobber and when we delete such moves
6876    we should delete the clobber insn too to keep the correct life
6877    info.  */
6878 static void
delete_move_and_clobber(rtx_insn * insn,int dregno)6879 delete_move_and_clobber (rtx_insn *insn, int dregno)
6880 {
6881   rtx_insn *prev_insn = PREV_INSN (insn);
6882 
6883   lra_set_insn_deleted (insn);
6884   lra_assert (dregno >= 0);
6885   if (prev_insn != NULL && NONDEBUG_INSN_P (prev_insn)
6886       && GET_CODE (PATTERN (prev_insn)) == CLOBBER
6887       && dregno == get_regno (XEXP (PATTERN (prev_insn), 0)))
6888     lra_set_insn_deleted (prev_insn);
6889 }
6890 
/* Remove inheritance/split pseudos which are in REMOVE_PSEUDOS and
   return true if we did any change.  The undo transformations for
   inheritance look like
      i <- i2
      p <- i	  =>   p <- i2
   or removing
      p <- i, i <- p, and i <- i3
   where p is original pseudo from which inheritance pseudo i was
   created, i and i3 are removed inheritance pseudos, i2 is another
   not removed inheritance pseudo.  All split pseudos or other
   occurrences of removed inheritance pseudos are changed on the
   corresponding original pseudos.

   The function also schedules insns changed and created during
   inheritance/split pass for processing by the subsequent constraint
   pass.  */
static bool
remove_inheritance_pseudos (bitmap remove_pseudos)
{
  basic_block bb;
  int regno, sregno, prev_sregno, dregno;
  rtx restore_rtx;
  rtx set, prev_set;
  rtx_insn *prev_insn;
  bool change_p, done_p;

  change_p = ! bitmap_empty_p (remove_pseudos);
  /* We cannot finish the function right away even if CHANGE_P is
     false because we need to mark insns affected by previous
     inheritance/split pass for processing by the subsequent
     constraint pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Replace removed pseudos by their originals in the BB
	 boundary live info.  */
      fix_bb_live_info (df_get_live_in (bb), remove_pseudos);
      fix_bb_live_info (df_get_live_out (bb), remove_pseudos);
      FOR_BB_INSNS_REVERSE (bb, curr_insn)
	{
	  if (! INSN_P (curr_insn))
	    continue;
	  done_p = false;
	  sregno = dregno = -1;
	  if (change_p && NONDEBUG_INSN_P (curr_insn)
	      && (set = single_set (curr_insn)) != NULL_RTX)
	    {
	      dregno = get_regno (SET_DEST (set));
	      sregno = get_regno (SET_SRC (set));
	    }

	  if (sregno >= 0 && dregno >= 0)
	    {
	      /* A removed pseudo whose restore_rtx is not a REG holds
		 an invariant (see below for the two invariant
		 cases).  */
	      if (bitmap_bit_p (remove_pseudos, dregno)
		  && ! REG_P (lra_reg_info[dregno].restore_rtx))
		{
		  /* invariant inheritance pseudo <- original pseudo */
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "	   Removing invariant inheritance:\n");
		      dump_insn_slim (lra_dump_file, curr_insn);
		      fprintf (lra_dump_file, "\n");
		    }
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if (bitmap_bit_p (remove_pseudos, sregno)
		       && ! REG_P (lra_reg_info[sregno].restore_rtx))
		{
		  /* reload pseudo <- invariant inheritance pseudo */
		  start_sequence ();
		  /* We cannot just change the source.  It might be
		     an insn different from the move.  */
		  emit_insn (lra_reg_info[sregno].restore_rtx);
		  rtx_insn *new_insns = get_insns ();
		  end_sequence ();
		  lra_assert (single_set (new_insns) != NULL
			      && SET_DEST (set) == SET_DEST (single_set (new_insns)));
		  lra_process_new_insns (curr_insn, NULL, new_insns,
					 "Changing reload<-invariant inheritance");
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if ((bitmap_bit_p (remove_pseudos, sregno)
			&& (get_regno (lra_reg_info[sregno].restore_rtx) == dregno
			    || (bitmap_bit_p (remove_pseudos, dregno)
				&& get_regno (lra_reg_info[sregno].restore_rtx) >= 0
				&& (get_regno (lra_reg_info[sregno].restore_rtx)
				    == get_regno (lra_reg_info[dregno].restore_rtx)))))
		       || (bitmap_bit_p (remove_pseudos, dregno)
			   && get_regno (lra_reg_info[dregno].restore_rtx) == sregno))
		/* One of the following cases:
		     original <- removed inheritance pseudo
		     removed inherit pseudo <- another removed inherit pseudo
		     removed inherit pseudo <- original pseudo
		   Or
		     removed_split_pseudo <- original_reg
		     original_reg <- removed_split_pseudo */
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "	   Removing %s:\n",
			       bitmap_bit_p (&lra_split_regs, sregno)
			       || bitmap_bit_p (&lra_split_regs, dregno)
			       ? "split" : "inheritance");
		      dump_insn_slim (lra_dump_file, curr_insn);
		    }
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if (bitmap_bit_p (remove_pseudos, sregno)
		       && bitmap_bit_p (&lra_inheritance_pseudos, sregno))
		{
		  /* Search the following pattern:
		       inherit_or_split_pseudo1 <- inherit_or_split_pseudo2
		       original_pseudo <- inherit_or_split_pseudo1
		    where the 2nd insn is the current insn and
		    inherit_or_split_pseudo2 is not removed.  If it is found,
		    change the current insn onto:
		       original_pseudo <- inherit_or_split_pseudo2.  */
		  for (prev_insn = PREV_INSN (curr_insn);
		       prev_insn != NULL_RTX && ! NONDEBUG_INSN_P (prev_insn);
		       prev_insn = PREV_INSN (prev_insn))
		    ;
		  if (prev_insn != NULL_RTX && BLOCK_FOR_INSN (prev_insn) == bb
		      && (prev_set = single_set (prev_insn)) != NULL_RTX
		      /* There should be no subregs in insn we are
			 searching because only the original reg might
			 be in subreg when we changed the mode of
			 load/store for splitting.  */
		      && REG_P (SET_DEST (prev_set))
		      && REG_P (SET_SRC (prev_set))
		      && (int) REGNO (SET_DEST (prev_set)) == sregno
		      && ((prev_sregno = REGNO (SET_SRC (prev_set)))
			  >= FIRST_PSEUDO_REGISTER)
		      && (lra_reg_info[prev_sregno].restore_rtx == NULL_RTX
			  ||
			  /* As we consider chain of inheritance or
			     splitting described in above comment we should
			     check that sregno and prev_sregno were
			     inheritance/split pseudos created from the
			     same original regno.  */
			  (get_regno (lra_reg_info[sregno].restore_rtx) >= 0
			   && (get_regno (lra_reg_info[sregno].restore_rtx)
			       == get_regno (lra_reg_info[prev_sregno].restore_rtx))))
		      && ! bitmap_bit_p (remove_pseudos, prev_sregno))
		    {
		      lra_assert (GET_MODE (SET_SRC (prev_set))
				  == GET_MODE (regno_reg_rtx[sregno]));
		      /* Although we have a single set, the insn can
			 contain more than one sregno register occurrence
			 as a source.  Change all occurrences.  */
		      lra_substitute_pseudo_within_insn (curr_insn, sregno,
							 SET_SRC (prev_set),
							 false);
		      /* As we are finishing with processing the insn
			 here, check the destination too as it might be
			 an inheritance pseudo for another pseudo.  */
		      if (bitmap_bit_p (remove_pseudos, dregno)
			  && bitmap_bit_p (&lra_inheritance_pseudos, dregno)
			  && (restore_rtx
			      = lra_reg_info[dregno].restore_rtx) != NULL_RTX)
			{
			  if (GET_CODE (SET_DEST (set)) == SUBREG)
			    SUBREG_REG (SET_DEST (set)) = restore_rtx;
			  else
			    SET_DEST (set) = restore_rtx;
			}
		      lra_push_insn_and_update_insn_regno_info (curr_insn);
		      lra_set_used_insn_alternative_by_uid
			(INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
		      done_p = true;
		      if (lra_dump_file != NULL)
			{
			  fprintf (lra_dump_file, "    Change reload insn:\n");
			  dump_insn_slim (lra_dump_file, curr_insn);
			}
		    }
		}
	    }
	  if (! done_p)
	    {
	      /* The insn was not deleted or rewritten above.
		 Substitute originals for any removed pseudos still
		 referenced, and reschedule the insn for the
		 constraint pass if it still uses kept ones.  */
	      struct lra_insn_reg *reg;
	      bool restored_regs_p = false;
	      bool kept_regs_p = false;

	      curr_id = lra_get_insn_recog_data (curr_insn);
	      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
		{
		  regno = reg->regno;
		  restore_rtx = lra_reg_info[regno].restore_rtx;
		  if (restore_rtx != NULL_RTX)
		    {
		      if (change_p && bitmap_bit_p (remove_pseudos, regno))
			{
			  lra_substitute_pseudo_within_insn
			    (curr_insn, regno, restore_rtx, false);
			  restored_regs_p = true;
			}
		      else
			kept_regs_p = true;
		    }
		}
	      if (NONDEBUG_INSN_P (curr_insn) && kept_regs_p)
		{
		  /* The instruction has changed since the previous
		     constraints pass.  */
		  lra_push_insn_and_update_insn_regno_info (curr_insn);
		  lra_set_used_insn_alternative_by_uid
		    (INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
		}
	      else if (restored_regs_p)
		/* The instruction has been restored to the form that
		   it had during the previous constraints pass.  */
		lra_update_insn_regno_info (curr_insn);
	      if (restored_regs_p && lra_dump_file != NULL)
		{
		  fprintf (lra_dump_file, "   Insn after restoring regs:\n");
		  dump_insn_slim (lra_dump_file, curr_insn);
		}
	    }
	}
    }
  return change_p;
}
7113 
/* If optional reload pseudos failed to get a hard register or were not
   inherited, it is better to remove optional reloads.  We do this
   transformation after undoing inheritance to figure out necessity to
   remove optional reloads easier.  Return true if we do any
   change.  */
static bool
undo_optional_reloads (void)
{
  bool change_p, keep_p;
  unsigned int regno, uid;
  bitmap_iterator bi, bi2;
  rtx_insn *insn;
  rtx set, src, dest;
  auto_bitmap removed_optional_reload_pseudos (&reg_obstack);

  /* First pass: start from all optional reload pseudos and clear out
     the ones that are worth keeping.  */
  bitmap_copy (removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    {
      keep_p = false;
      /* Keep optional reloads from previous subpasses.  */
      if (lra_reg_info[regno].restore_rtx == NULL_RTX
	  /* If the original pseudo changed its allocation, just
	     removing the optional pseudo is dangerous as the original
	     pseudo will have longer live range.  */
	  || reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] >= 0)
	keep_p = true;
      else if (reg_renumber[regno] >= 0)
	/* The optional reload got a hard register: keep it if it was
	   also inherited.  Scan all insns referencing it.  */
	EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi2)
	  {
	    insn = lra_insn_recog_data[uid]->insn;
	    if ((set = single_set (insn)) == NULL_RTX)
	      continue;
	    src = SET_SRC (set);
	    dest = SET_DEST (set);
	    if (! REG_P (src) || ! REG_P (dest))
	      continue;
	    if (REGNO (dest) == regno
		/* Ignore insn for optional reloads itself.  */
		&& REGNO (lra_reg_info[regno].restore_rtx) != REGNO (src)
		/* Check only inheritance on last inheritance pass.  */
		&& (int) REGNO (src) >= new_regno_start
		/* Check that the optional reload was inherited.  */
		&& bitmap_bit_p (&lra_inheritance_pseudos, REGNO (src)))
	      {
		keep_p = true;
		break;
	      }
	  }
      if (keep_p)
	{
	  bitmap_clear_bit (removed_optional_reload_pseudos, regno);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
	}
    }
  change_p = ! bitmap_empty_p (removed_optional_reload_pseudos);
  auto_bitmap insn_bitmap (&reg_obstack);
  /* Second pass: for each pseudo still marked for removal, delete its
     reload moves and restore the original register in all remaining
     insns referencing it.  */
  EXECUTE_IF_SET_IN_BITMAP (removed_optional_reload_pseudos, 0, regno, bi)
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
      /* Work on a copy: deleting/substituting below modifies the
	 pseudo's insn_bitmap while we iterate.  */
      bitmap_copy (insn_bitmap, &lra_reg_info[regno].insn_bitmap);
      EXECUTE_IF_SET_IN_BITMAP (insn_bitmap, 0, uid, bi2)
	{
	  insn = lra_insn_recog_data[uid]->insn;
	  if ((set = single_set (insn)) != NULL_RTX)
	    {
	      src = SET_SRC (set);
	      dest = SET_DEST (set);
	      /* A move between the optional reload pseudo and its
		 original register is deleted outright.  */
	      if (REG_P (src) && REG_P (dest)
		  && ((REGNO (src) == regno
		       && (REGNO (lra_reg_info[regno].restore_rtx)
			   == REGNO (dest)))
		      || (REGNO (dest) == regno
			  && (REGNO (lra_reg_info[regno].restore_rtx)
			      == REGNO (src)))))
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "  Deleting move %u\n",
			       INSN_UID (insn));
		      dump_insn_slim (lra_dump_file, insn);
		    }
		  delete_move_and_clobber (insn, REGNO (dest));
		  continue;
		}
	      /* We should not worry about generating memory-memory
		 moves here as if the corresponding inheritance did
		 not work (inheritance pseudo did not get a hard reg),
		 we remove the inheritance pseudo and the optional
		 reload.  */
	    }
	  lra_substitute_pseudo_within_insn
	    (insn, regno, lra_reg_info[regno].restore_rtx, false);
	  lra_update_insn_regno_info (insn);
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "  Restoring original insn:\n");
	      dump_insn_slim (lra_dump_file, insn);
	    }
	}
    }
  /* Clear restore_regnos.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    lra_reg_info[regno].restore_rtx = NULL_RTX;
  return change_p;
}
7222 
7223 /* Entry function for undoing inheritance/split transformation.	 Return true
7224    if we did any RTL change in this pass.  */
7225 bool
lra_undo_inheritance(void)7226 lra_undo_inheritance (void)
7227 {
7228   unsigned int regno;
7229   int hard_regno;
7230   int n_all_inherit, n_inherit, n_all_split, n_split;
7231   rtx restore_rtx;
7232   bitmap_iterator bi;
7233   bool change_p;
7234 
7235   lra_undo_inheritance_iter++;
7236   if (lra_undo_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
7237     return false;
7238   if (lra_dump_file != NULL)
7239     fprintf (lra_dump_file,
7240 	     "\n********** Undoing inheritance #%d: **********\n\n",
7241 	     lra_undo_inheritance_iter);
7242   auto_bitmap remove_pseudos (&reg_obstack);
7243   n_inherit = n_all_inherit = 0;
7244   EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
7245     if (lra_reg_info[regno].restore_rtx != NULL_RTX)
7246       {
7247 	n_all_inherit++;
7248 	if (reg_renumber[regno] < 0
7249 	    /* If the original pseudo changed its allocation, just
7250 	       removing inheritance is dangerous as for changing
7251 	       allocation we used shorter live-ranges.  */
7252 	    && (! REG_P (lra_reg_info[regno].restore_rtx)
7253 		|| reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] < 0))
7254 	  bitmap_set_bit (remove_pseudos, regno);
7255 	else
7256 	  n_inherit++;
7257       }
7258   if (lra_dump_file != NULL && n_all_inherit != 0)
7259     fprintf (lra_dump_file, "Inherit %d out of %d (%.2f%%)\n",
7260 	     n_inherit, n_all_inherit,
7261 	     (double) n_inherit / n_all_inherit * 100);
7262   n_split = n_all_split = 0;
7263   EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
7264     if ((restore_rtx = lra_reg_info[regno].restore_rtx) != NULL_RTX)
7265       {
7266 	int restore_regno = REGNO (restore_rtx);
7267 
7268 	n_all_split++;
7269 	hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
7270 		      ? reg_renumber[restore_regno] : restore_regno);
7271 	if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
7272 	  bitmap_set_bit (remove_pseudos, regno);
7273 	else
7274 	  {
7275 	    n_split++;
7276 	    if (lra_dump_file != NULL)
7277 	      fprintf (lra_dump_file, "	     Keep split r%d (orig=r%d)\n",
7278 		       regno, restore_regno);
7279 	  }
7280       }
7281   if (lra_dump_file != NULL && n_all_split != 0)
7282     fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
7283 	     n_split, n_all_split,
7284 	     (double) n_split / n_all_split * 100);
7285   change_p = remove_inheritance_pseudos (remove_pseudos);
7286   /* Clear restore_regnos.  */
7287   EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
7288     lra_reg_info[regno].restore_rtx = NULL_RTX;
7289   EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
7290     lra_reg_info[regno].restore_rtx = NULL_RTX;
7291   change_p = undo_optional_reloads () || change_p;
7292   return change_p;
7293 }
7294