xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/lra.c (revision cef8759bd76c1b621f8eab8faa6f208faabc2e15)
1 /* LRA (local register allocator) driver and LRA utilities.
2    Copyright (C) 2010-2017 Free Software Foundation, Inc.
3    Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.	If not see
19 <http://www.gnu.org/licenses/>.	 */
20 
21 
22 /* The Local Register Allocator (LRA) is a replacement of former
23    reload pass.	 It is focused to simplify code solving the reload
24    pass tasks, to make the code maintenance easier, and to implement new
25    perspective optimizations.
26 
27    The major LRA design solutions are:
     o division into small, manageable, separated sub-tasks
     o reflection of all transformations and decisions in RTL as much
       as possible
31      o insn constraints as a primary source of the info (minimizing
32        number of target-depended macros/hooks)
33 
34    In brief LRA works by iterative insn process with the final goal is
35    to satisfy all insn and address constraints:
36      o New reload insns (in brief reloads) and reload pseudos might be
37        generated;
38      o Some pseudos might be spilled to assign hard registers to
39        new reload pseudos;
40      o Recalculating spilled pseudo values (rematerialization);
41      o Changing spilled pseudos to stack memory or their equivalences;
42      o Allocation stack memory changes the address displacement and
43        new iteration is needed.
44 
45    Here is block diagram of LRA passes:
46 
47                                 ------------------------
48            ---------------     | Undo inheritance for   |     ---------------
49           | Memory-memory |    | spilled pseudos,       |    | New (and old) |
50           | move coalesce |<---| splits for pseudos got |<-- |   pseudos     |
51            ---------------     | the same hard regs,    |    |  assignment   |
52   Start           |            | and optional reloads   |     ---------------
53     |             |             ------------------------            ^
54     V             |              ----------------                   |
55  -----------      V             | Update virtual |                  |
56 |  Remove   |----> ------------>|    register    |                  |
57 | scratches |     ^             |  displacements |                  |
58  -----------      |              ----------------                   |
59                   |                      |                          |
60                   |                      V         New              |
61                   |                 ------------  pseudos   -------------------
62                   |                |Constraints:| or insns | Inheritance/split |
63                   |                |    RTL     |--------->|  transformations  |
64                   |                | transfor-  |          |    in EBB scope   |
65                   | substi-        |  mations   |           -------------------
66                   | tutions         ------------
67                   |                     | No change
68           ----------------              V
69          | Spilled pseudo |      -------------------
70          |    to memory   |<----| Rematerialization |
71          |  substitution  |      -------------------
72           ----------------
                  | No substitutions
74                   V
75       -------------------------
76      | Hard regs substitution, |
     |  devirtualization and   |------> Finish
78      | restoring scratches got |
79      |         memory          |
80       -------------------------
81 
82    To speed up the process:
83      o We process only insns affected by changes on previous
84        iterations;
85      o We don't use DFA-infrastructure because it results in much slower
86        compiler speed than a special IR described below does;
87      o We use a special insn representation for quick access to insn
88        info which is always *synchronized* with the current RTL;
89        o Insn IR is minimized by memory.  It is divided on three parts:
90 	 o one specific for each insn in RTL (only operand locations);
91 	 o one common for all insns in RTL with the same insn code
92 	   (different operand attributes from machine descriptions);
93 	 o one oriented for maintenance of live info (list of pseudos).
94        o Pseudo data:
95 	 o all insns where the pseudo is referenced;
96 	 o live info (conflicting hard regs, live ranges, # of
97 	   references etc);
98 	 o data used for assigning (preferred hard regs, costs etc).
99 
100    This file contains LRA driver, LRA utility functions and data, and
101    code for dealing with scratches.  */
102 
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "backend.h"
107 #include "target.h"
108 #include "rtl.h"
109 #include "tree.h"
110 #include "predict.h"
111 #include "df.h"
112 #include "memmodel.h"
113 #include "tm_p.h"
114 #include "optabs.h"
115 #include "regs.h"
116 #include "ira.h"
117 #include "recog.h"
118 #include "expr.h"
119 #include "cfgrtl.h"
120 #include "cfgbuild.h"
121 #include "lra.h"
122 #include "lra-int.h"
123 #include "print-rtl.h"
124 
125 /* Dump bitmap SET with TITLE and BB INDEX.  */
126 void
127 lra_dump_bitmap_with_title (const char *title, bitmap set, int index)
128 {
129   unsigned int i;
130   int count;
131   bitmap_iterator bi;
132   static const int max_nums_on_line = 10;
133 
134   if (bitmap_empty_p (set))
135     return;
136   fprintf (lra_dump_file, "  %s %d:", title, index);
137   fprintf (lra_dump_file, "\n");
138   count = max_nums_on_line + 1;
139   EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
140     {
141       if (count > max_nums_on_line)
142 	{
143 	  fprintf (lra_dump_file, "\n    ");
144 	  count = 0;
145 	}
146       fprintf (lra_dump_file, " %4u", i);
147       count++;
148     }
149   fprintf (lra_dump_file, "\n");
150 }
151 
/* Hard registers currently not available for allocation.  It can
   change after some hard registers become non-eliminable.  */
HARD_REG_SET lra_no_alloc_regs;

/* Forward declarations for helpers defined later in this file.  */
static int get_new_reg_value (void);
static void expand_reg_info (void);
static void invalidate_insn_recog_data (int);
static int get_insn_freq (rtx_insn *);
static void invalidate_insn_data_regno_info (lra_insn_recog_data_t,
					     rtx_insn *, int);
162 
163 /* Expand all regno related info needed for LRA.  */
164 static void
165 expand_reg_data (int old)
166 {
167   resize_reg_info ();
168   expand_reg_info ();
169   ira_expand_reg_equiv ();
170   for (int i = (int) max_reg_num () - 1; i >= old; i--)
171     lra_change_class (i, ALL_REGS, "      Set", true);
172 }
173 
174 /* Create and return a new reg of ORIGINAL mode.  If ORIGINAL is NULL
175    or of VOIDmode, use MD_MODE for the new reg.  Initialize its
176    register class to RCLASS.  Print message about assigning class
177    RCLASS containing new register name TITLE unless it is NULL.  Use
178    attributes of ORIGINAL if it is a register.  The created register
179    will have unique held value.  */
180 rtx
181 lra_create_new_reg_with_unique_value (machine_mode md_mode, rtx original,
182 				      enum reg_class rclass, const char *title)
183 {
184   machine_mode mode;
185   rtx new_reg;
186 
187   if (original == NULL_RTX || (mode = GET_MODE (original)) == VOIDmode)
188     mode = md_mode;
189   lra_assert (mode != VOIDmode);
190   new_reg = gen_reg_rtx (mode);
191   if (original == NULL_RTX || ! REG_P (original))
192     {
193       if (lra_dump_file != NULL)
194 	fprintf (lra_dump_file, "      Creating newreg=%i", REGNO (new_reg));
195     }
196   else
197     {
198       if (ORIGINAL_REGNO (original) >= FIRST_PSEUDO_REGISTER)
199 	ORIGINAL_REGNO (new_reg) = ORIGINAL_REGNO (original);
200       REG_USERVAR_P (new_reg) = REG_USERVAR_P (original);
201       REG_POINTER (new_reg) = REG_POINTER (original);
202       REG_ATTRS (new_reg) = REG_ATTRS (original);
203       if (lra_dump_file != NULL)
204 	fprintf (lra_dump_file, "      Creating newreg=%i from oldreg=%i",
205 		 REGNO (new_reg), REGNO (original));
206     }
207   if (lra_dump_file != NULL)
208     {
209       if (title != NULL)
210 	fprintf (lra_dump_file, ", assigning class %s to%s%s r%d",
211 		 reg_class_names[rclass], *title == '\0' ? "" : " ",
212 		 title, REGNO (new_reg));
213       fprintf (lra_dump_file, "\n");
214     }
215   expand_reg_data (max_reg_num ());
216   setup_reg_classes (REGNO (new_reg), rclass, NO_REGS, rclass);
217   return new_reg;
218 }
219 
220 /* Analogous to the previous function but also inherits value of
221    ORIGINAL.  */
222 rtx
223 lra_create_new_reg (machine_mode md_mode, rtx original,
224 		    enum reg_class rclass, const char *title)
225 {
226   rtx new_reg;
227 
228   new_reg
229     = lra_create_new_reg_with_unique_value (md_mode, original, rclass, title);
230   if (original != NULL_RTX && REG_P (original))
231     lra_assign_reg_val (REGNO (original), REGNO (new_reg));
232   return new_reg;
233 }
234 
235 /* Set up for REGNO unique hold value.	*/
236 void
237 lra_set_regno_unique_value (int regno)
238 {
239   lra_reg_info[regno].val = get_new_reg_value ();
240 }
241 
242 /* Invalidate INSN related info used by LRA.  The info should never be
243    used after that.  */
244 void
245 lra_invalidate_insn_data (rtx_insn *insn)
246 {
247   lra_invalidate_insn_regno_info (insn);
248   invalidate_insn_recog_data (INSN_UID (insn));
249 }
250 
251 /* Mark INSN deleted and invalidate the insn related info used by
252    LRA.	 */
253 void
254 lra_set_insn_deleted (rtx_insn *insn)
255 {
256   lra_invalidate_insn_data (insn);
257   SET_INSN_DELETED (insn);
258 }
259 
260 /* Delete an unneeded INSN and any previous insns who sole purpose is
261    loading data that is dead in INSN.  */
262 void
263 lra_delete_dead_insn (rtx_insn *insn)
264 {
265   rtx_insn *prev = prev_real_insn (insn);
266   rtx prev_dest;
267 
268   /* If the previous insn sets a register that dies in our insn,
269      delete it too.  */
270   if (prev && GET_CODE (PATTERN (prev)) == SET
271       && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
272       && reg_mentioned_p (prev_dest, PATTERN (insn))
273       && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
274       && ! side_effects_p (SET_SRC (PATTERN (prev))))
275     lra_delete_dead_insn (prev);
276 
277   lra_set_insn_deleted (insn);
278 }
279 
280 /* Emit insn x = y + z.  Return NULL if we failed to do it.
281    Otherwise, return the insn.  We don't use gen_add3_insn as it might
282    clobber CC.  */
283 static rtx_insn *
284 emit_add3_insn (rtx x, rtx y, rtx z)
285 {
286   rtx_insn *last;
287 
288   last = get_last_insn ();
289 
290   if (have_addptr3_insn (x, y, z))
291     {
292       rtx_insn *insn = gen_addptr3_insn (x, y, z);
293 
294       /* If the target provides an "addptr" pattern it hopefully does
295 	 for a reason.  So falling back to the normal add would be
296 	 a bug.  */
297       lra_assert (insn != NULL_RTX);
298       emit_insn (insn);
299       return insn;
300     }
301 
302   rtx_insn *insn = emit_insn (gen_rtx_SET (x, gen_rtx_PLUS (GET_MODE (y),
303 							    y, z)));
304   if (recog_memoized (insn) < 0)
305     {
306       delete_insns_since (last);
307       insn = NULL;
308     }
309   return insn;
310 }
311 
312 /* Emit insn x = x + y.  Return the insn.  We use gen_add2_insn as the
313    last resort.  */
314 static rtx_insn *
315 emit_add2_insn (rtx x, rtx y)
316 {
317   rtx_insn *insn = emit_add3_insn (x, x, y);
318   if (insn == NULL_RTX)
319     {
320       insn = gen_add2_insn (x, y);
321       if (insn != NULL_RTX)
322 	emit_insn (insn);
323     }
324   return insn;
325 }
326 
/* Target checks operands through operand predicates to recognize an
   insn.  We should have a special precaution to generate add insns
   which are frequent results of elimination.

   Emit insns for x = y + z.  X can be used to store intermediate
   values and should be not in Y and Z when we use X to store an
   intermediate value.  Y + Z should form [base] [+ index[ * scale]] [
   + disp] where base and index are registers, disp and scale are
   constants.  Y should contain base if it is present, Z should
   contain disp if any.  index[*scale] can be part of Y or Z.  */
void
lra_emit_add (rtx x, rtx y, rtx z)
{
  int old;
  rtx_insn *last;
  rtx a1, a2, base, index, disp, scale, index_scale;
  bool ok_p;

  /* First try a direct 3-operand add; if it is recognized we are
     done.  */
  rtx_insn *add3_insn = emit_add3_insn (x, y, z);
  old = max_reg_num ();
  if (add3_insn != NULL)
    ;
  else
    {
      /* Decompose Y + Z into address components: a1 [+ a2] [+ disp].
	 Per the contract above, a displacement can only come from Z
	 (or be absent).  */
      disp = a2 = NULL_RTX;
      if (GET_CODE (y) == PLUS)
	{
	  a1 = XEXP (y, 0);
	  a2 = XEXP (y, 1);
	  disp = z;
	}
      else
	{
	  a1 = y;
	  if (CONSTANT_P (z))
	    disp = z;
	  else
	    a2 = z;
	}
      /* Identify which of a1/a2 is index*scale (a MULT) and which is
	 the base register.  */
      index_scale = scale = NULL_RTX;
      if (GET_CODE (a1) == MULT)
	{
	  index_scale = a1;
	  index = XEXP (a1, 0);
	  scale = XEXP (a1, 1);
	  base = a2;
	}
      else if (a2 != NULL_RTX && GET_CODE (a2) == MULT)
	{
	  index_scale = a2;
	  index = XEXP (a2, 0);
	  scale = XEXP (a2, 1);
	  base = a1;
	}
      else
	{
	  base = a1;
	  index = a2;
	}
      /* If the decomposition does not look like a valid address
	 (base/index not registers or subregs, disp/scale not
	 constants), fall back to a plain move + 2-operand add.  */
      if ((base != NULL_RTX && ! (REG_P (base) || GET_CODE (base) == SUBREG))
	  || (index != NULL_RTX
	      && ! (REG_P (index) || GET_CODE (index) == SUBREG))
	  || (disp != NULL_RTX && ! CONSTANT_P (disp))
	  || (scale != NULL_RTX && ! CONSTANT_P (scale)))
	{
	  /* Probably we have no 3 op add.  Last chance is to use 2-op
	     add insn.  To succeed, don't move Z to X as an address
	     segment always comes in Y.  Otherwise, we might fail when
	     adding the address segment to register.  */
	  lra_assert (x != y && x != z);
	  emit_move_insn (x, y);
	  rtx_insn *insn = emit_add2_insn (x, z);
	  lra_assert (insn != NULL_RTX);
	}
      else
	{
	  if (index_scale == NULL_RTX)
	    index_scale = index;
	  if (disp == NULL_RTX)
	    {
	      /* Generate x = index_scale; x = x + base.  */
	      lra_assert (index_scale != NULL_RTX && base != NULL_RTX);
	      emit_move_insn (x, index_scale);
	      rtx_insn *insn = emit_add2_insn (x, base);
	      lra_assert (insn != NULL_RTX);
	    }
	  else if (scale == NULL_RTX)
	    {
	      /* Try x = base + disp.  */
	      lra_assert (base != NULL_RTX);
	      last = get_last_insn ();
	      rtx_insn *move_insn =
		emit_move_insn (x, gen_rtx_PLUS (GET_MODE (base), base, disp));
	      if (recog_memoized (move_insn) < 0)
		{
		  /* Not recognized: roll back and use the two-step
		     sequence instead.  */
		  delete_insns_since (last);
		  /* Generate x = disp; x = x + base.  */
		  emit_move_insn (x, disp);
		  rtx_insn *add2_insn = emit_add2_insn (x, base);
		  lra_assert (add2_insn != NULL_RTX);
		}
	      /* Generate x = x + index.  */
	      if (index != NULL_RTX)
		{
		  rtx_insn *insn = emit_add2_insn (x, index);
		  lra_assert (insn != NULL_RTX);
		}
	    }
	  else
	    {
	      /* Try x = index_scale; x = x + disp; x = x + base.  */
	      last = get_last_insn ();
	      rtx_insn *move_insn = emit_move_insn (x, index_scale);
	      ok_p = false;
	      if (recog_memoized (move_insn) >= 0)
		{
		  rtx_insn *insn = emit_add2_insn (x, disp);
		  if (insn != NULL_RTX)
		    {
		      if (base == NULL_RTX)
			ok_p = true;
		      else
			{
			  insn = emit_add2_insn (x, base);
			  if (insn != NULL_RTX)
			    ok_p = true;
			}
		    }
		}
	      if (! ok_p)
		{
		  /* The optimistic order failed: roll back and add
		     components in the opposite order.  */
		  rtx_insn *insn;

		  delete_insns_since (last);
		  /* Generate x = disp; x = x + base; x = x + index_scale.  */
		  emit_move_insn (x, disp);
		  if (base != NULL_RTX)
		    {
		      insn = emit_add2_insn (x, base);
		      lra_assert (insn != NULL_RTX);
		    }
		  insn = emit_add2_insn (x, index_scale);
		  lra_assert (insn != NULL_RTX);
		}
	    }
	}
    }
  /* Functions emit_... can create pseudos -- so expand the pseudo
     data.  */
  if (old != max_reg_num ())
    expand_reg_data (old);
}
479 
/* The number of emitted reload insns so far.  Used to stamp
   lra_reg_info[..].last_reload in lra_emit_move.  */
int lra_curr_reload_num;
482 
483 /* Emit x := y, processing special case when y = u + v or y = u + v *
484    scale + w through emit_add (Y can be an address which is base +
485    index reg * scale + displacement in general case).  X may be used
486    as intermediate result therefore it should be not in Y.  */
487 void
488 lra_emit_move (rtx x, rtx y)
489 {
490   int old;
491 
492   if (GET_CODE (y) != PLUS)
493     {
494       if (rtx_equal_p (x, y))
495 	return;
496       old = max_reg_num ();
497       emit_move_insn (x, y);
498       if (REG_P (x))
499 	lra_reg_info[ORIGINAL_REGNO (x)].last_reload = ++lra_curr_reload_num;
500       /* Function emit_move can create pseudos -- so expand the pseudo
501 	 data.	*/
502       if (old != max_reg_num ())
503 	expand_reg_data (old);
504       return;
505     }
506   lra_emit_add (x, XEXP (y, 0), XEXP (y, 1));
507 }
508 
509 /* Update insn operands which are duplication of operands whose
510    numbers are in array of NOPS (with end marker -1).  The insn is
511    represented by its LRA internal representation ID.  */
512 void
513 lra_update_dups (lra_insn_recog_data_t id, signed char *nops)
514 {
515   int i, j, nop;
516   struct lra_static_insn_data *static_id = id->insn_static_data;
517 
518   for (i = 0; i < static_id->n_dups; i++)
519     for (j = 0; (nop = nops[j]) >= 0; j++)
520       if (static_id->dup_num[i] == nop)
521 	*id->dup_loc[i] = *id->operand_loc[nop];
522 }
523 
524 
525 
526 /* This page contains code dealing with info about registers in the
527    insns.  */
528 
529 /* Pools for insn reg info.  */
530 object_allocator<lra_insn_reg> lra_insn_reg_pool ("insn regs");
531 
532 /* Create LRA insn related info about a reference to REGNO in INSN
533    with TYPE (in/out/inout), biggest reference mode MODE, flag that it
534    is reference through subreg (SUBREG_P), flag that is early
535    clobbered in the insn (EARLY_CLOBBER), and reference to the next
536    insn reg info (NEXT).  If REGNO can be early clobbered,
537    alternatives in which it can be early clobbered are given by
538    EARLY_CLOBBER_ALTS.  */
539 static struct lra_insn_reg *
540 new_insn_reg (rtx_insn *insn, int regno, enum op_type type,
541 	      machine_mode mode,
542 	      bool subreg_p, bool early_clobber,
543 	      alternative_mask early_clobber_alts,
544 	      struct lra_insn_reg *next)
545 {
546   lra_insn_reg *ir = lra_insn_reg_pool.allocate ();
547   ir->type = type;
548   ir->biggest_mode = mode;
549   if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (lra_reg_info[regno].biggest_mode)
550       && NONDEBUG_INSN_P (insn))
551     lra_reg_info[regno].biggest_mode = mode;
552   ir->subreg_p = subreg_p;
553   ir->early_clobber = early_clobber;
554   ir->early_clobber_alts = early_clobber_alts;
555   ir->regno = regno;
556   ir->next = next;
557   return ir;
558 }
559 
560 /* Free insn reg info list IR.	*/
561 static void
562 free_insn_regs (struct lra_insn_reg *ir)
563 {
564   struct lra_insn_reg *next_ir;
565 
566   for (; ir != NULL; ir = next_ir)
567     {
568       next_ir = ir->next;
569       lra_insn_reg_pool.remove (ir);
570     }
571 }
572 
573 /* Finish pool for insn reg info.  */
574 static void
575 finish_insn_regs (void)
576 {
577   lra_insn_reg_pool.release ();
578 }
579 
580 
581 
582 /* This page contains code dealing LRA insn info (or in other words
583    LRA internal insn representation).  */
584 
585 /* Map INSN_CODE -> the static insn data.  This info is valid during
586    all translation unit.  */
587 struct lra_static_insn_data *insn_code_data[NUM_INSN_CODES];
588 
/* Debug insns are represented as a special insn with one input
   operand which is RTL expression in var_location.  */

/* The following data are used as static insn operand data for all
   debug insns.	 If structure lra_operand_data is changed, the
   initializer should be changed too.  */
static struct lra_operand_data debug_operand_data =
  {
    NULL, /* alternative  */
    0, /* early_clobber_alts */
    VOIDmode, /* We are not interested in the operand mode.  */
    OP_IN,
    0, 0, 0, 0
  };

/* The following data are used as static insn data for all debug
   insns.  If structure lra_static_insn_data is changed, the
   initializer should be changed too.  */
static struct lra_static_insn_data debug_insn_static_data =
  {
    &debug_operand_data,
    0,	/* Duplication operands #.  */
    -1, /* Commutative operand #.  */
    1,	/* Operands #.	There is only one operand which is debug RTL
	   expression.	*/
    0,	/* Duplications #.  */
    0,	/* Alternatives #.  We are not interested in alternatives
	   because we do not process debug insns for reloads.  */
    NULL, /* Hard registers referenced in machine description.	*/
    NULL  /* Descriptions of operands in alternatives.	*/
  };
620 
621 /* Called once per compiler work to initialize some LRA data related
622    to insns.  */
623 static void
624 init_insn_code_data_once (void)
625 {
626   memset (insn_code_data, 0, sizeof (insn_code_data));
627 }
628 
629 /* Called once per compiler work to finalize some LRA data related to
630    insns.  */
631 static void
632 finish_insn_code_data_once (void)
633 {
634   for (unsigned int i = 0; i < NUM_INSN_CODES; i++)
635     {
636       if (insn_code_data[i] != NULL)
637 	free (insn_code_data[i]);
638     }
639 }
640 
/* Return static insn data, allocate and setup if necessary.  Although
   dup_num is static data (it depends only on icode), to set it up we
   need to extract insn first.	So recog_data should be valid for
   normal insn (ICODE >= 0) before the call.  */
static struct lra_static_insn_data *
get_static_insn_data (int icode, int nop, int ndup, int nalt)
{
  struct lra_static_insn_data *data;
  size_t n_bytes;

  lra_assert (icode < (int) NUM_INSN_CODES);
  /* For recognized insns the data is cached per insn code.  */
  if (icode >= 0 && (data = insn_code_data[icode]) != NULL)
    return data;
  lra_assert (nop >= 0 && ndup >= 0 && nalt >= 0);
  /* Single allocation laid out as: the struct itself, then NOP
     lra_operand_data records, then NDUP ints.  The pointer arithmetic
     below must match this layout.  */
  n_bytes = sizeof (struct lra_static_insn_data)
	    + sizeof (struct lra_operand_data) * nop
	    + sizeof (int) * ndup;
  data = XNEWVAR (struct lra_static_insn_data, n_bytes);
  data->operand_alternative = NULL;
  data->n_operands = nop;
  data->n_dups = ndup;
  data->n_alternatives = nalt;
  data->operand = ((struct lra_operand_data *)
		   ((char *) data + sizeof (struct lra_static_insn_data)));
  data->dup_num = ((int *) ((char *) data->operand
			    + sizeof (struct lra_operand_data) * nop));
  if (icode >= 0)
    {
      int i;

      /* Cache the data and fill operand info from the machine
	 description tables.  */
      insn_code_data[icode] = data;
      for (i = 0; i < nop; i++)
	{
	  data->operand[i].constraint
	    = insn_data[icode].operand[i].constraint;
	  data->operand[i].mode = insn_data[icode].operand[i].mode;
	  data->operand[i].strict_low = insn_data[icode].operand[i].strict_low;
	  data->operand[i].is_operator
	    = insn_data[icode].operand[i].is_operator;
	  /* '=' marks an output, '+' an in/out operand; everything
	     else is an input.  */
	  data->operand[i].type
	    = (data->operand[i].constraint[0] == '=' ? OP_OUT
	       : data->operand[i].constraint[0] == '+' ? OP_INOUT
	       : OP_IN);
	  data->operand[i].is_address = false;
	}
      /* dup_num comes from recog_data, hence the precondition that
	 the insn was extracted.  */
      for (i = 0; i < ndup; i++)
	data->dup_num[i] = recog_data.dup_num[i];
    }
  return data;
}
691 
/* The current length of the following array.  */
int lra_insn_recog_data_len;

/* Map INSN_UID -> the insn recog data (NULL if unknown).  Grown on
   demand by check_and_expand_insn_recog_data.  */
lra_insn_recog_data_t *lra_insn_recog_data;
697 
698 /* Initialize LRA data about insns.  */
699 static void
700 init_insn_recog_data (void)
701 {
702   lra_insn_recog_data_len = 0;
703   lra_insn_recog_data = NULL;
704 }
705 
706 /* Expand, if necessary, LRA data about insns.	*/
707 static void
708 check_and_expand_insn_recog_data (int index)
709 {
710   int i, old;
711 
712   if (lra_insn_recog_data_len > index)
713     return;
714   old = lra_insn_recog_data_len;
715   lra_insn_recog_data_len = index * 3 / 2 + 1;
716   lra_insn_recog_data = XRESIZEVEC (lra_insn_recog_data_t,
717 				    lra_insn_recog_data,
718 				    lra_insn_recog_data_len);
719   for (i = old; i < lra_insn_recog_data_len; i++)
720     lra_insn_recog_data[i] = NULL;
721 }
722 
723 /* Finish LRA DATA about insn.	*/
724 static void
725 free_insn_recog_data (lra_insn_recog_data_t data)
726 {
727   if (data->operand_loc != NULL)
728     free (data->operand_loc);
729   if (data->dup_loc != NULL)
730     free (data->dup_loc);
731   if (data->arg_hard_regs != NULL)
732     free (data->arg_hard_regs);
733   if (data->icode < 0 && NONDEBUG_INSN_P (data->insn))
734     {
735       if (data->insn_static_data->operand_alternative != NULL)
736 	free (const_cast <operand_alternative *>
737 	      (data->insn_static_data->operand_alternative));
738       free_insn_regs (data->insn_static_data->hard_regs);
739       free (data->insn_static_data);
740     }
741   free_insn_regs (data->regs);
742   data->regs = NULL;
743   free (data);
744 }
745 
/* Pool for lra_copy objects.  */
static object_allocator<lra_copy> lra_copy_pool ("lra copies");
748 
/* Finish LRA data about all insns: free every per-insn record and
   release the allocation pools.  */
static void
finish_insn_recog_data (void)
{
  int i;
  lra_insn_recog_data_t data;

  for (i = 0; i < lra_insn_recog_data_len; i++)
    if ((data = lra_insn_recog_data[i]) != NULL)
      free_insn_recog_data (data);
  finish_insn_regs ();
  lra_copy_pool.release ();
  /* NOTE(review): finish_insn_regs above already releases
     lra_insn_reg_pool, so this second release looks redundant --
     confirm object_allocator::release is safe to call twice.  */
  lra_insn_reg_pool.release ();
  free (lra_insn_recog_data);
}
764 
/* Setup info about operands in alternatives of LRA DATA of insn.
   OP_ALT points to a flat array of n_alternatives * n_operands
   operand_alternative records, laid out alternative-major (all
   operands of alternative 0, then alternative 1, ...) -- the nested
   loop below walks it in exactly that order.  */
static void
setup_operand_alternative (lra_insn_recog_data_t data,
			   const operand_alternative *op_alt)
{
  int i, j, nop, nalt;
  int icode = data->icode;
  struct lra_static_insn_data *static_data = data->insn_static_data;

  static_data->commutative = -1;
  nop = static_data->n_operands;
  nalt = static_data->n_alternatives;
  static_data->operand_alternative = op_alt;
  for (i = 0; i < nop; i++)
    {
      /* Start from a clean slate; the loop below accumulates
	 per-alternative flags.  */
      static_data->operand[i].early_clobber_alts = 0;
      static_data->operand[i].early_clobber = false;
      static_data->operand[i].is_address = false;
      if (static_data->operand[i].constraint[0] == '%')
	{
	  /* We currently only support one commutative pair of operands.  */
	  if (static_data->commutative < 0)
	    static_data->commutative = i;
	  else
	    lra_assert (icode < 0); /* Asm  */
	  /* The last operand should not be marked commutative.  */
	  lra_assert (i != nop - 1);
	}
    }
  /* OR together early-clobber and address info over all alternatives;
     OP_ALT advances one record per (alternative, operand) pair.  */
  for (j = 0; j < nalt; j++)
    for (i = 0; i < nop; i++, op_alt++)
      {
	static_data->operand[i].early_clobber |= op_alt->earlyclobber;
	if (op_alt->earlyclobber)
	  static_data->operand[i].early_clobber_alts |= (alternative_mask) 1 << j;
	static_data->operand[i].is_address |= op_alt->is_address;
      }
}
803 
/* Recursively process X and collect info about registers, which are
   not the insn operands, in X with TYPE (in/out/inout) and flag that
   it is early clobbered in the insn (EARLY_CLOBBER) and add the info
   to LIST.  X is a part of insn given by DATA.	 Return the result
   list.  */
static struct lra_insn_reg *
collect_non_operand_hard_regs (rtx *x, lra_insn_recog_data_t data,
			       struct lra_insn_reg *list,
			       enum op_type type, bool early_clobber)
{
  int i, j, regno, last;
  bool subreg_p;
  machine_mode mode;
  struct lra_insn_reg *curr;
  rtx op = *x;
  enum rtx_code code = GET_CODE (op);
  const char *fmt = GET_RTX_FORMAT (code);

  /* Operand and dup locations are handled elsewhere -- stop the
     recursion at them.  */
  for (i = 0; i < data->insn_static_data->n_operands; i++)
    if (x == data->operand_loc[i])
      /* It is an operand loc. Stop here.  */
      return list;
  for (i = 0; i < data->insn_static_data->n_dups; i++)
    if (x == data->dup_loc[i])
      /* It is a dup loc. Stop here.  */
      return list;
  mode = GET_MODE (op);
  subreg_p = false;
  if (code == SUBREG)
    {
      /* Look through the subreg; record the wider inner mode and
	 whether the reference is a partial (paradoxical-boundary)
	 one.  */
      op = SUBREG_REG (op);
      code = GET_CODE (op);
      if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (op)))
	{
	  mode = GET_MODE (op);
	  if (GET_MODE_SIZE (mode) > REGMODE_NATURAL_SIZE (mode))
	    subreg_p = true;
	}
    }
  if (REG_P (op))
    {
      /* Only hard registers are collected here; pseudos are insn
	 operands or handled elsewhere.  */
      if ((regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER)
	return list;
      /* Process all regs even unallocatable ones as we need info
	 about all regs for rematerialization pass.  */
      for (last = regno + hard_regno_nregs[regno][mode];
	   regno < last;
	   regno++)
	{
	  /* Try to merge with an existing entry for the same hard
	     reg/mode/subreg combination.  */
	  for (curr = list; curr != NULL; curr = curr->next)
	    if (curr->regno == regno && curr->subreg_p == subreg_p
		&& curr->biggest_mode == mode)
	      {
		if (curr->type != type)
		  curr->type = OP_INOUT;
		if (early_clobber)
		  {
		    curr->early_clobber = true;
		    curr->early_clobber_alts = ALL_ALTERNATIVES;
		  }
		break;
	      }
	  if (curr == NULL)
	    {
	      /* This is a new hard regno or the info can not be
		 integrated into the found structure.	 */
#ifdef STACK_REGS
	      early_clobber
		= (early_clobber
		   /* This clobber is to inform popping floating
		      point stack only.  */
		   && ! (FIRST_STACK_REG <= regno
			 && regno <= LAST_STACK_REG));
#endif
	      list = new_insn_reg (data->insn, regno, type, mode, subreg_p,
				   early_clobber,
				   early_clobber ? ALL_ALTERNATIVES : 0, list);
	    }
	}
      return list;
    }
  /* Not a register: recurse into sub-expressions, choosing the
     access type implied by the enclosing rtx code.  */
  switch (code)
    {
    case SET:
      list = collect_non_operand_hard_regs (&SET_DEST (op), data,
					    list, OP_OUT, false);
      list = collect_non_operand_hard_regs (&SET_SRC (op), data,
					    list, OP_IN, false);
      break;
    case CLOBBER:
      /* We treat clobber of non-operand hard registers as early
	 clobber (the behavior is expected from asm).  */
      list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
					    list, OP_OUT, true);
      break;
    case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
      /* Auto-inc/dec both reads and writes its address register.  */
      list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
					    list, OP_INOUT, false);
      break;
    case PRE_MODIFY: case POST_MODIFY:
      list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
					    list, OP_INOUT, false);
      list = collect_non_operand_hard_regs (&XEXP (op, 1), data,
					    list, OP_IN, false);
      break;
    default:
      /* Generic walk over all 'e' (expression) and 'E' (vector)
	 slots; everything else is treated as an input.  */
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    list = collect_non_operand_hard_regs (&XEXP (op, i), data,
						  list, OP_IN, false);
	  else if (fmt[i] == 'E')
	    for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	      list = collect_non_operand_hard_regs (&XVECEXP (op, i, j), data,
						    list, OP_IN, false);
	}
    }
  return list;
}
924 
925 /* Set up and return info about INSN.  Set up the info if it is not set up
926    yet.	 */
lra_insn_recog_data_t
lra_set_insn_recog_data (rtx_insn *insn)
{
  lra_insn_recog_data_t data;
  int i, n, icode;
  rtx **locs;
  unsigned int uid = INSN_UID (insn);
  struct lra_static_insn_data *insn_static_data;

  check_and_expand_insn_recog_data (uid);
  if (DEBUG_INSN_P (insn))
    icode = -1;
  else
    {
      icode = INSN_CODE (insn);
      if (icode < 0)
	/* It might be a new simple insn which is not recognized yet.  */
	INSN_CODE (insn) = icode = recog_memoized (insn);
    }
  data = XNEW (struct lra_insn_recog_data);
  lra_insn_recog_data[uid] = data;
  data->insn = insn;
  data->used_insn_alternative = LRA_UNKNOWN_ALT;
  data->icode = icode;
  data->regs = NULL;
  if (DEBUG_INSN_P (insn))
    {
      /* Debug insns share one static descriptor; their single operand
	 is the variable location expression.  */
      data->insn_static_data = &debug_insn_static_data;
      data->dup_loc = NULL;
      data->arg_hard_regs = NULL;
      data->preferred_alternatives = ALL_ALTERNATIVES;
      data->operand_loc = XNEWVEC (rtx *, 1);
      data->operand_loc[0] = &INSN_VAR_LOCATION_LOC (insn);
      return data;
    }
  if (icode < 0)
    {
      /* Unrecognized insn: either an asm or a special pattern like
	 USE/CLOBBER.  */
      int nop, nalt;
      machine_mode operand_mode[MAX_RECOG_OPERANDS];
      const char *constraints[MAX_RECOG_OPERANDS];

      nop = asm_noperands (PATTERN (insn));
      data->operand_loc = data->dup_loc = NULL;
      nalt = 1;
      if (nop < 0)
	{
	  /* It is a special insn like USE or CLOBBER.  We should
	     recognize any regular insn otherwise LRA can do nothing
	     with this insn.  */
	  gcc_assert (GET_CODE (PATTERN (insn)) == USE
		      || GET_CODE (PATTERN (insn)) == CLOBBER
		      || GET_CODE (PATTERN (insn)) == ASM_INPUT);
	  data->insn_static_data = insn_static_data
	    = get_static_insn_data (-1, 0, 0, nalt);
	}
      else
	{
	  /* expand_asm_operands makes sure there aren't too many
	     operands.	*/
	  lra_assert (nop <= MAX_RECOG_OPERANDS);
	  if (nop != 0)
	    data->operand_loc = XNEWVEC (rtx *, nop);
	  /* Now get the operand values and constraints out of the
	     insn.  */
	  decode_asm_operands (PATTERN (insn), NULL,
			       data->operand_loc,
			       constraints, operand_mode, NULL);
	  if (nop > 0)
	    {
	      /* Count alternatives: one more than the number of commas
		 in the first operand's constraint string.
		 NOTE(review): the initializer below is dead -- P is
		 immediately reassigned from CONSTRAINTS[0] in the
		 loop header.  */
	      const char *p =  recog_data.constraints[0];

	      for (p =	constraints[0]; *p; p++)
		nalt += *p == ',';
	    }
	  data->insn_static_data = insn_static_data
	    = get_static_insn_data (-1, nop, 0, nalt);
	  for (i = 0; i < nop; i++)
	    {
	      insn_static_data->operand[i].mode = operand_mode[i];
	      insn_static_data->operand[i].constraint = constraints[i];
	      insn_static_data->operand[i].strict_low = false;
	      insn_static_data->operand[i].is_operator = false;
	      insn_static_data->operand[i].is_address = false;
	    }
	}
      /* Derive operand direction from the leading constraint
	 character.  */
      for (i = 0; i < insn_static_data->n_operands; i++)
	insn_static_data->operand[i].type
	  = (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
	     : insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
	     : OP_IN);
      data->preferred_alternatives = ALL_ALTERNATIVES;
      if (nop > 0)
	{
	  operand_alternative *op_alt = XCNEWVEC (operand_alternative,
						  nalt * nop);
	  preprocess_constraints (nop, nalt, constraints, op_alt);
	  setup_operand_alternative (data, op_alt);
	}
    }
  else
    {
      /* Recognized insn: copy operand/dup locations from the global
	 recog_data filled by insn_extract.  */
      insn_extract (insn);
      data->insn_static_data = insn_static_data
	= get_static_insn_data (icode, insn_data[icode].n_operands,
				insn_data[icode].n_dups,
				insn_data[icode].n_alternatives);
      n = insn_static_data->n_operands;
      if (n == 0)
	locs = NULL;
      else
	{
	  locs = XNEWVEC (rtx *, n);
	  memcpy (locs, recog_data.operand_loc, n * sizeof (rtx *));
	}
      data->operand_loc = locs;
      n = insn_static_data->n_dups;
      if (n == 0)
	locs = NULL;
      else
	{
	  locs = XNEWVEC (rtx *, n);
	  memcpy (locs, recog_data.dup_loc, n * sizeof (rtx *));
	}
      data->dup_loc = locs;
      data->preferred_alternatives = get_preferred_alternatives (insn);
      const operand_alternative *op_alt = preprocess_insn_constraints (icode);
      if (!insn_static_data->operand_alternative)
	setup_operand_alternative (data, op_alt);
      else if (op_alt != insn_static_data->operand_alternative)
	insn_static_data->operand_alternative = op_alt;
    }
  if (GET_CODE (PATTERN (insn)) == CLOBBER || GET_CODE (PATTERN (insn)) == USE)
    insn_static_data->hard_regs = NULL;
  else
    insn_static_data->hard_regs
      = collect_non_operand_hard_regs (&PATTERN (insn), data,
				       NULL, OP_IN, false);
  data->arg_hard_regs = NULL;
  if (CALL_P (insn))
    {
      bool use_p;
      rtx link;
      int n_hard_regs, regno, arg_hard_regs[FIRST_PSEUDO_REGISTER];

      n_hard_regs = 0;
      /* Finding implicit hard register usage.	We believe it will be
	 not changed whatever transformations are used.	 Call insns
	 are such example.  */
      for (link = CALL_INSN_FUNCTION_USAGE (insn);
	   link != NULL_RTX;
	   link = XEXP (link, 1))
	if (((use_p = GET_CODE (XEXP (link, 0)) == USE)
	     || GET_CODE (XEXP (link, 0)) == CLOBBER)
	    && REG_P (XEXP (XEXP (link, 0), 0)))
	  {
	    regno = REGNO (XEXP (XEXP (link, 0), 0));
	    lra_assert (regno < FIRST_PSEUDO_REGISTER);
	    /* It is an argument register.  Clobbered registers are
	       encoded by biasing the regno with
	       FIRST_PSEUDO_REGISTER.  */
	    for (i = REG_NREGS (XEXP (XEXP (link, 0), 0)) - 1; i >= 0; i--)
	      arg_hard_regs[n_hard_regs++]
		= regno + i + (use_p ? 0 : FIRST_PSEUDO_REGISTER);
	  }
      if (n_hard_regs != 0)
	{
	  /* The array is terminated by -1.  */
	  arg_hard_regs[n_hard_regs++] = -1;
	  data->arg_hard_regs = XNEWVEC (int, n_hard_regs);
	  memcpy (data->arg_hard_regs, arg_hard_regs,
		  sizeof (int) * n_hard_regs);
	}
    }
  /* Some output operand can be recognized only from the context not
     from the constraints which are empty in this case.	 Call insn may
     contain a hard register in set destination with empty constraint
     and extract_insn treats them as an input.	*/
  for (i = 0; i < insn_static_data->n_operands; i++)
    {
      int j;
      rtx pat, set;
      struct lra_operand_data *operand = &insn_static_data->operand[i];

      /* ??? Should we treat 'X' the same way?	It looks to me that
	 'X' means anything and empty constraint means we do not
	 care.	*/
      if (operand->type != OP_IN || *operand->constraint != '\0'
	  || operand->is_operator)
	continue;
      pat = PATTERN (insn);
      if (GET_CODE (pat) == SET)
	{
	  if (data->operand_loc[i] != &SET_DEST (pat))
	    continue;
	}
      else if (GET_CODE (pat) == PARALLEL)
	{
	  /* Look for a SET inside the PARALLEL whose destination is
	     this operand.  */
	  for (j = XVECLEN (pat, 0) - 1; j >= 0; j--)
	    {
	      set = XVECEXP (PATTERN (insn), 0, j);
	      if (GET_CODE (set) == SET
		  && &SET_DEST (set) == data->operand_loc[i])
		break;
	    }
	  if (j < 0)
	    continue;
	}
      else
	continue;
      operand->type = OP_OUT;
    }
  return data;
}
1137 
/* Return info about insn given by UID.  The info should be already
   set up.  */
1140 static lra_insn_recog_data_t
1141 get_insn_recog_data_by_uid (int uid)
1142 {
1143   lra_insn_recog_data_t data;
1144 
1145   data = lra_insn_recog_data[uid];
1146   lra_assert (data != NULL);
1147   return data;
1148 }
1149 
1150 /* Invalidate all info about insn given by its UID.  */
1151 static void
1152 invalidate_insn_recog_data (int uid)
1153 {
1154   lra_insn_recog_data_t data;
1155 
1156   data = lra_insn_recog_data[uid];
1157   lra_assert (data != NULL);
1158   free_insn_recog_data (data);
1159   lra_insn_recog_data[uid] = NULL;
1160 }
1161 
1162 /* Update all the insn info about INSN.	 It is usually called when
1163    something in the insn was changed.  Return the updated info.	 */
lra_insn_recog_data_t
lra_update_insn_recog_data (rtx_insn *insn)
{
  lra_insn_recog_data_t data;
  int n;
  unsigned int uid = INSN_UID (insn);
  struct lra_static_insn_data *insn_static_data;
  HOST_WIDE_INT sp_offset = 0;

  check_and_expand_insn_recog_data (uid);
  if ((data = lra_insn_recog_data[uid]) != NULL
      && data->icode != INSN_CODE (insn))
    {
      /* The insn code changed, so the cached info is stale.  Preserve
	 the SP offset, drop the rest and rebuild from scratch.  */
      sp_offset = data->sp_offset;
      invalidate_insn_data_regno_info (data, insn, get_insn_freq (insn));
      invalidate_insn_recog_data (uid);
      data = NULL;
    }
  if (data == NULL)
    {
      data = lra_get_insn_recog_data (insn);
      /* Initiate or restore SP offset.  */
      data->sp_offset = sp_offset;
      return data;
    }
  insn_static_data = data->insn_static_data;
  data->used_insn_alternative = LRA_UNKNOWN_ALT;
  if (DEBUG_INSN_P (insn))
    return data;
  if (data->icode < 0)
    {
      /* Asm or special insn: re-decode and, when checking, verify the
	 cached static info still matches the pattern.  */
      int nop;
      machine_mode operand_mode[MAX_RECOG_OPERANDS];
      const char *constraints[MAX_RECOG_OPERANDS];

      nop = asm_noperands (PATTERN (insn));
      if (nop >= 0)
	{
	  lra_assert (nop == data->insn_static_data->n_operands);
	  /* Now get the operand values and constraints out of the
	     insn.  */
	  decode_asm_operands (PATTERN (insn), NULL,
			       data->operand_loc,
			       constraints, operand_mode, NULL);

	  if (flag_checking)
	    for (int i = 0; i < nop; i++)
	      lra_assert
		(insn_static_data->operand[i].mode == operand_mode[i]
		 && insn_static_data->operand[i].constraint == constraints[i]
		 && ! insn_static_data->operand[i].is_operator);
	}

      if (flag_checking)
	for (int i = 0; i < insn_static_data->n_operands; i++)
	  lra_assert
	    (insn_static_data->operand[i].type
	     == (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
		 : insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
		 : OP_IN));
    }
  else
    {
      /* Recognized insn: refresh the operand/dup locations from the
	 global recog_data.  */
      insn_extract (insn);
      n = insn_static_data->n_operands;
      if (n != 0)
	memcpy (data->operand_loc, recog_data.operand_loc, n * sizeof (rtx *));
      n = insn_static_data->n_dups;
      if (n != 0)
	memcpy (data->dup_loc, recog_data.dup_loc, n * sizeof (rtx *));
      lra_assert (check_bool_attrs (insn));
    }
  return data;
}
1238 
1239 /* Set up that INSN is using alternative ALT now.  */
1240 void
1241 lra_set_used_insn_alternative (rtx_insn *insn, int alt)
1242 {
1243   lra_insn_recog_data_t data;
1244 
1245   data = lra_get_insn_recog_data (insn);
1246   data->used_insn_alternative = alt;
1247 }
1248 
1249 /* Set up that insn with UID is using alternative ALT now.  The insn
1250    info should be already set up.  */
1251 void
1252 lra_set_used_insn_alternative_by_uid (int uid, int alt)
1253 {
1254   lra_insn_recog_data_t data;
1255 
1256   check_and_expand_insn_recog_data (uid);
1257   data = lra_insn_recog_data[uid];
1258   lra_assert (data != NULL);
1259   data->used_insn_alternative = alt;
1260 }
1261 
1262 
1263 
1264 /* This page contains code dealing with common register info and
1265    pseudo copies.  */
1266 
1267 /* The size of the following array.  */
1268 static int reg_info_size;
1269 /* Common info about each register.  */
1270 struct lra_reg *lra_reg_info;
1271 
1272 /* Last register value.	 */
1273 static int last_reg_value;
1274 
1275 /* Return new register value.  */
1276 static int
1277 get_new_reg_value (void)
1278 {
1279   return ++last_reg_value;
1280 }
1281 
/* Vec referring to pseudo copies; filled by lra_create_copy and
   released by lra_free_copies.  */
static vec<lra_copy_t> copy_vec;
1284 
1285 /* Initialize I-th element of lra_reg_info.  */
/* Initialize I-th element of lra_reg_info.  */
static inline void
initialize_lra_reg_info_element (int i)
{
  bitmap_initialize (&lra_reg_info[i].insn_bitmap, &reg_obstack);
#ifdef STACK_REGS
  lra_reg_info[i].no_stack_p = false;
#endif
  CLEAR_HARD_REG_SET (lra_reg_info[i].conflict_hard_regs);
  CLEAR_HARD_REG_SET (lra_reg_info[i].actual_call_used_reg_set);
  /* No preferred hard registers known yet.  */
  lra_reg_info[i].preferred_hard_regno1 = -1;
  lra_reg_info[i].preferred_hard_regno2 = -1;
  lra_reg_info[i].preferred_hard_regno_profit1 = 0;
  lra_reg_info[i].preferred_hard_regno_profit2 = 0;
  lra_reg_info[i].biggest_mode = VOIDmode;
  lra_reg_info[i].live_ranges = NULL;
  lra_reg_info[i].nrefs = lra_reg_info[i].freq = 0;
  lra_reg_info[i].last_reload = 0;
  lra_reg_info[i].restore_rtx = NULL_RTX;
  /* Each element gets a fresh unique value.  */
  lra_reg_info[i].val = get_new_reg_value ();
  lra_reg_info[i].offset = 0;
  lra_reg_info[i].copies = NULL;
}
1308 
1309 /* Initialize common reg info and copies.  */
1310 static void
1311 init_reg_info (void)
1312 {
1313   int i;
1314 
1315   last_reg_value = 0;
1316   reg_info_size = max_reg_num () * 3 / 2 + 1;
1317   lra_reg_info = XNEWVEC (struct lra_reg, reg_info_size);
1318   for (i = 0; i < reg_info_size; i++)
1319     initialize_lra_reg_info_element (i);
1320   copy_vec.truncate (0);
1321 }
1322 
1323 
1324 /* Finish common reg info and copies.  */
1325 static void
1326 finish_reg_info (void)
1327 {
1328   int i;
1329 
1330   for (i = 0; i < reg_info_size; i++)
1331     bitmap_clear (&lra_reg_info[i].insn_bitmap);
1332   free (lra_reg_info);
1333   reg_info_size = 0;
1334 }
1335 
1336 /* Expand common reg info if it is necessary.  */
1337 static void
1338 expand_reg_info (void)
1339 {
1340   int i, old = reg_info_size;
1341 
1342   if (reg_info_size > max_reg_num ())
1343     return;
1344   reg_info_size = max_reg_num () * 3 / 2 + 1;
1345   lra_reg_info = XRESIZEVEC (struct lra_reg, lra_reg_info, reg_info_size);
1346   for (i = old; i < reg_info_size; i++)
1347     initialize_lra_reg_info_element (i);
1348 }
1349 
1350 /* Free all copies.  */
1351 void
1352 lra_free_copies (void)
1353 {
1354   lra_copy_t cp;
1355 
1356   while (copy_vec.length () != 0)
1357     {
1358       cp = copy_vec.pop ();
1359       lra_reg_info[cp->regno1].copies = lra_reg_info[cp->regno2].copies = NULL;
1360       lra_copy_pool.remove (cp);
1361     }
1362 }
1363 
1364 /* Create copy of two pseudos REGNO1 and REGNO2.  The copy execution
1365    frequency is FREQ.  */
1366 void
1367 lra_create_copy (int regno1, int regno2, int freq)
1368 {
1369   bool regno1_dest_p;
1370   lra_copy_t cp;
1371 
1372   lra_assert (regno1 != regno2);
1373   regno1_dest_p = true;
1374   if (regno1 > regno2)
1375     {
1376       std::swap (regno1, regno2);
1377       regno1_dest_p = false;
1378     }
1379   cp = lra_copy_pool.allocate ();
1380   copy_vec.safe_push (cp);
1381   cp->regno1_dest_p = regno1_dest_p;
1382   cp->freq = freq;
1383   cp->regno1 = regno1;
1384   cp->regno2 = regno2;
1385   cp->regno1_next = lra_reg_info[regno1].copies;
1386   lra_reg_info[regno1].copies = cp;
1387   cp->regno2_next = lra_reg_info[regno2].copies;
1388   lra_reg_info[regno2].copies = cp;
1389   if (lra_dump_file != NULL)
1390     fprintf (lra_dump_file, "	   Creating copy r%d%sr%d@%d\n",
1391 	     regno1, regno1_dest_p ? "<-" : "->", regno2, freq);
1392 }
1393 
1394 /* Return N-th (0, 1, ...) copy.  If there is no copy, return
1395    NULL.  */
1396 lra_copy_t
1397 lra_get_copy (int n)
1398 {
1399   if (n >= (int) copy_vec.length ())
1400     return NULL;
1401   return copy_vec[n];
1402 }
1403 
1404 
1405 
1406 /* This page contains code dealing with info about registers in
1407    insns.  */
1408 
1409 /* Process X of insn UID recursively and add info (operand type is
1410    given by TYPE, flag of that it is early clobber is EARLY_CLOBBER)
1411    about registers in X to the insn DATA.  If X can be early clobbered,
1412    alternatives in which it can be early clobbered are given by
1413    EARLY_CLOBBER_ALTS.  */
static void
add_regs_to_insn_regno_info (lra_insn_recog_data_t data, rtx x, int uid,
			     enum op_type type, bool early_clobber,
			     alternative_mask early_clobber_alts)
{
  int i, j, regno;
  bool subreg_p;
  machine_mode mode;
  const char *fmt;
  enum rtx_code code;
  struct lra_insn_reg *curr;

  code = GET_CODE (x);
  mode = GET_MODE (x);
  subreg_p = false;
  if (GET_CODE (x) == SUBREG)
    {
      /* Look through the SUBREG; record the widest mode seen.  */
      x = SUBREG_REG (x);
      code = GET_CODE (x);
      if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	{
	  mode = GET_MODE (x);
	  if (GET_MODE_SIZE (mode) > REGMODE_NATURAL_SIZE (mode))
	    subreg_p = true;
	}
    }
  if (REG_P (x))
    {
      regno = REGNO (x);
      /* Process all regs even unallocatable ones as we need info about
	 all regs for rematerialization pass.  */
      expand_reg_info ();
      /* bitmap_set_bit returns true when the bit was not set before,
	 i.e. this is the first occurrence of REGNO in this insn.  */
      if (bitmap_set_bit (&lra_reg_info[regno].insn_bitmap, uid))
	{
	  data->regs = new_insn_reg (data->insn, regno, type, mode, subreg_p,
				     early_clobber, early_clobber_alts,
				     data->regs);
	  return;
	}
      else
	{
	  /* REGNO already occurs in this insn -- merge the new info
	     into the existing record if possible.  */
	  for (curr = data->regs; curr != NULL; curr = curr->next)
	    if (curr->regno == regno)
	      {
		if (curr->subreg_p != subreg_p || curr->biggest_mode != mode)
		  /* The info can not be integrated into the found
		     structure.  */
		  data->regs = new_insn_reg (data->insn, regno, type, mode,
					     subreg_p, early_clobber,
					     early_clobber_alts, data->regs);
		else
		  {
		    if (curr->type != type)
		      curr->type = OP_INOUT;
		    if (curr->early_clobber != early_clobber)
		      curr->early_clobber = true;
		    curr->early_clobber_alts |= early_clobber_alts;
		  }
		return;
	      }
	  gcc_unreachable ();
	}
    }

  /* Not a register: recurse into sub-expressions with the operand
     type appropriate to the rtx code.  */
  switch (code)
    {
    case SET:
      add_regs_to_insn_regno_info (data, SET_DEST (x), uid, OP_OUT, false, 0);
      add_regs_to_insn_regno_info (data, SET_SRC (x), uid, OP_IN, false, 0);
      break;
    case CLOBBER:
      /* We treat clobber of non-operand hard registers as early
	 clobber (the behavior is expected from asm).  */
      add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_OUT, true, ALL_ALTERNATIVES);
      break;
    case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
      add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_INOUT, false, 0);
      break;
    case PRE_MODIFY: case POST_MODIFY:
      add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_INOUT, false, 0);
      add_regs_to_insn_regno_info (data, XEXP (x, 1), uid, OP_IN, false, 0);
      break;
    default:
      if ((code != PARALLEL && code != EXPR_LIST) || type != OP_OUT)
	/* Some targets place small structures in registers for return
	   values of functions, and those registers are wrapped in
	   PARALLEL that we may see as the destination of a SET.  Here
	   is an example:

	   (call_insn 13 12 14 2 (set (parallel:BLK [
		(expr_list:REG_DEP_TRUE (reg:DI 0 ax)
		    (const_int 0 [0]))
		(expr_list:REG_DEP_TRUE (reg:DI 1 dx)
		    (const_int 8 [0x8]))
	       ])
	     (call (mem:QI (symbol_ref:DI (...	*/
	type = OP_IN;
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    add_regs_to_insn_regno_info (data, XEXP (x, i), uid, type, false, 0);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		add_regs_to_insn_regno_info (data, XVECEXP (x, i, j), uid,
					     type, false, 0);
	    }
	}
    }
}
1525 
1526 /* Return execution frequency of INSN.	*/
1527 static int
1528 get_insn_freq (rtx_insn *insn)
1529 {
1530   basic_block bb = BLOCK_FOR_INSN (insn);
1531 
1532   gcc_checking_assert (bb != NULL);
1533   return REG_FREQ_FROM_BB (bb);
1534 }
1535 
1536 /* Invalidate all reg info of INSN with DATA and execution frequency
1537    FREQ.  Update common info about the invalidated registers.  */
static void
invalidate_insn_data_regno_info (lra_insn_recog_data_t data, rtx_insn *insn,
				 int freq)
{
  int uid;
  bool debug_p;
  unsigned int i;
  struct lra_insn_reg *ir, *next_ir;

  uid = INSN_UID (insn);
  debug_p = DEBUG_INSN_P (insn);
  for (ir = data->regs; ir != NULL; ir = next_ir)
    {
      i = ir->regno;
      /* Save the link before returning IR to the pool.  */
      next_ir = ir->next;
      lra_insn_reg_pool.remove (ir);
      bitmap_clear_bit (&lra_reg_info[i].insn_bitmap, uid);
      /* Only pseudo refs from non-debug insns count towards nrefs and
	 freq (see setup_insn_reg_info).  */
      if (i >= FIRST_PSEUDO_REGISTER && ! debug_p)
	{
	  lra_reg_info[i].nrefs--;
	  lra_reg_info[i].freq -= freq;
	  lra_assert (lra_reg_info[i].nrefs >= 0 && lra_reg_info[i].freq >= 0);
	}
    }
  data->regs = NULL;
}
1564 
1565 /* Invalidate all reg info of INSN.  Update common info about the
1566    invalidated registers.  */
1567 void
1568 lra_invalidate_insn_regno_info (rtx_insn *insn)
1569 {
1570   invalidate_insn_data_regno_info (lra_get_insn_recog_data (insn), insn,
1571 				   get_insn_freq (insn));
1572 }
1573 
1574 /* Update common reg info from reg info of insn given by its DATA and
1575    execution frequency FREQ.  */
1576 static void
1577 setup_insn_reg_info (lra_insn_recog_data_t data, int freq)
1578 {
1579   unsigned int i;
1580   struct lra_insn_reg *ir;
1581 
1582   for (ir = data->regs; ir != NULL; ir = ir->next)
1583     if ((i = ir->regno) >= FIRST_PSEUDO_REGISTER)
1584       {
1585 	lra_reg_info[i].nrefs++;
1586 	lra_reg_info[i].freq += freq;
1587       }
1588 }
1589 
1590 /* Set up insn reg info of INSN.  Update common reg info from reg info
1591    of INSN.  */
void
lra_update_insn_regno_info (rtx_insn *insn)
{
  int i, uid, freq;
  lra_insn_recog_data_t data;
  struct lra_static_insn_data *static_data;
  enum rtx_code code;
  rtx link;

  if (! INSN_P (insn))
    return;
  data = lra_get_insn_recog_data (insn);
  static_data = data->insn_static_data;
  freq = get_insn_freq (insn);
  /* Drop the old reg info first, then rebuild it from the current
     operands.  */
  invalidate_insn_data_regno_info (data, insn, freq);
  uid = INSN_UID (insn);
  for (i = static_data->n_operands - 1; i >= 0; i--)
    add_regs_to_insn_regno_info (data, *data->operand_loc[i], uid,
				 static_data->operand[i].type,
				 static_data->operand[i].early_clobber,
				 static_data->operand[i].early_clobber_alts);
  if ((code = GET_CODE (PATTERN (insn))) == CLOBBER || code == USE)
    add_regs_to_insn_regno_info (data, XEXP (PATTERN (insn), 0), uid,
				 code == USE ? OP_IN : OP_OUT, false, 0);
  if (CALL_P (insn))
    /* On some targets call insns can refer to pseudos in memory in
       CALL_INSN_FUNCTION_USAGE list.  Process them in order to
       consider their occurrences in calls for different
       transformations (e.g. inheritance) with given pseudos.  */
    for (link = CALL_INSN_FUNCTION_USAGE (insn);
	 link != NULL_RTX;
	 link = XEXP (link, 1))
      if (((code = GET_CODE (XEXP (link, 0))) == USE || code == CLOBBER)
	  && MEM_P (XEXP (XEXP (link, 0), 0)))
	add_regs_to_insn_regno_info (data, XEXP (XEXP (link, 0), 0), uid,
				     code == USE ? OP_IN : OP_OUT, false, 0);
  /* Debug insns do not contribute to nrefs/freq (see
     invalidate_insn_data_regno_info).  */
  if (NONDEBUG_INSN_P (insn))
    setup_insn_reg_info (data, freq);
}
1631 
/* Return reg info of insn given by its UID.  */
1633 struct lra_insn_reg *
1634 lra_get_insn_regs (int uid)
1635 {
1636   lra_insn_recog_data_t data;
1637 
1638   data = get_insn_recog_data_by_uid (uid);
1639   return data->regs;
1640 }
1641 
1642 
1643 
1644 /* Recursive hash function for RTL X.  */
1645 hashval_t
1646 lra_rtx_hash (rtx x)
1647 {
1648   int i, j;
1649   enum rtx_code code;
1650   const char *fmt;
1651   hashval_t val = 0;
1652 
1653   if (x == 0)
1654     return val;
1655 
1656   code = GET_CODE (x);
1657   val += (int) code + 4095;
1658 
1659   /* Some RTL can be compared nonrecursively.  */
1660   switch (code)
1661     {
1662     case REG:
1663       return val + REGNO (x);
1664 
1665     case LABEL_REF:
1666       return iterative_hash_object (XEXP (x, 0), val);
1667 
1668     case SYMBOL_REF:
1669       return iterative_hash_object (XSTR (x, 0), val);
1670 
1671     case SCRATCH:
1672     case CONST_DOUBLE:
1673     case CONST_VECTOR:
1674       return val;
1675 
1676     case CONST_INT:
1677       return val + UINTVAL (x);
1678 
1679     default:
1680       break;
1681     }
1682 
1683   /* Hash the elements.  */
1684   fmt = GET_RTX_FORMAT (code);
1685   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1686     {
1687       switch (fmt[i])
1688 	{
1689 	case 'w':
1690 	  val += XWINT (x, i);
1691 	  break;
1692 
1693 	case 'n':
1694 	case 'i':
1695 	  val += XINT (x, i);
1696 	  break;
1697 
1698 	case 'V':
1699 	case 'E':
1700 	  val += XVECLEN (x, i);
1701 
1702 	  for (j = 0; j < XVECLEN (x, i); j++)
1703 	    val += lra_rtx_hash (XVECEXP (x, i, j));
1704 	  break;
1705 
1706 	case 'e':
1707 	  val += lra_rtx_hash (XEXP (x, i));
1708 	  break;
1709 
1710 	case 'S':
1711 	case 's':
1712 	  val += htab_hash_string (XSTR (x, i));
1713 	  break;
1714 
1715 	case 'u':
1716 	case '0':
1717 	case 't':
1718 	  break;
1719 
1720 	  /* It is believed that rtx's at this level will never
1721 	     contain anything but integers and other rtx's, except for
1722 	     within LABEL_REFs and SYMBOL_REFs.  */
1723 	default:
1724 	  abort ();
1725 	}
1726     }
1727   return val;
1728 }
1729 
1730 
1731 
1732 /* This page contains code dealing with stack of the insns which
1733    should be processed by the next constraint pass.  */
1734 
1735 /* Bitmap used to put an insn on the stack only in one exemplar.  */
1736 static sbitmap lra_constraint_insn_stack_bitmap;
1737 
1738 /* The stack itself.  */
1739 vec<rtx_insn *> lra_constraint_insn_stack;
1740 
1741 /* Put INSN on the stack.  If ALWAYS_UPDATE is true, always update the reg
1742    info for INSN, otherwise only update it if INSN is not already on the
1743    stack.  */
static inline void
lra_push_insn_1 (rtx_insn *insn, bool always_update)
{
  unsigned int uid = INSN_UID (insn);
  /* When ALWAYS_UPDATE, refresh the reg info even if the insn turns
     out to be on the stack already.  */
  if (always_update)
    lra_update_insn_regno_info (insn);
  /* Grow the membership bitmap if UID does not fit.  */
  if (uid >= SBITMAP_SIZE (lra_constraint_insn_stack_bitmap))
    lra_constraint_insn_stack_bitmap =
      sbitmap_resize (lra_constraint_insn_stack_bitmap, 3 * uid / 2, 0);
  /* Each insn appears on the stack at most once.  */
  if (bitmap_bit_p (lra_constraint_insn_stack_bitmap, uid))
    return;
  bitmap_set_bit (lra_constraint_insn_stack_bitmap, uid);
  if (! always_update)
    lra_update_insn_regno_info (insn);
  lra_constraint_insn_stack.safe_push (insn);
}
1760 
1761 /* Put INSN on the stack.  */
1762 void
1763 lra_push_insn (rtx_insn *insn)
1764 {
1765   lra_push_insn_1 (insn, false);
1766 }
1767 
1768 /* Put INSN on the stack and update its reg info.  */
1769 void
1770 lra_push_insn_and_update_insn_regno_info (rtx_insn *insn)
1771 {
1772   lra_push_insn_1 (insn, true);
1773 }
1774 
1775 /* Put insn with UID on the stack.  */
1776 void
1777 lra_push_insn_by_uid (unsigned int uid)
1778 {
1779   lra_push_insn (lra_insn_recog_data[uid]->insn);
1780 }
1781 
1782 /* Take the last-inserted insns off the stack and return it.  */
1783 rtx_insn *
1784 lra_pop_insn (void)
1785 {
1786   rtx_insn *insn = lra_constraint_insn_stack.pop ();
1787   bitmap_clear_bit (lra_constraint_insn_stack_bitmap, INSN_UID (insn));
1788   return insn;
1789 }
1790 
1791 /* Return the current size of the insn stack.  */
1792 unsigned int
1793 lra_insn_stack_length (void)
1794 {
1795   return lra_constraint_insn_stack.length ();
1796 }
1797 
1798 /* Push insns FROM to TO (excluding it) going in reverse order.	 */
1799 static void
1800 push_insns (rtx_insn *from, rtx_insn *to)
1801 {
1802   rtx_insn *insn;
1803 
1804   if (from == NULL_RTX)
1805     return;
1806   for (insn = from; insn != to; insn = PREV_INSN (insn))
1807     if (INSN_P (insn))
1808       lra_push_insn (insn);
1809 }
1810 
/* Set up sp offset for insns in range [FROM, LAST].  The offset is
   taken from the next BB insn after LAST, or zero if there is no such
   insn.  */
1814 static void
1815 setup_sp_offset (rtx_insn *from, rtx_insn *last)
1816 {
1817   rtx_insn *before = next_nonnote_insn_bb (last);
1818   HOST_WIDE_INT offset = (before == NULL_RTX || ! INSN_P (before)
1819 			  ? 0 : lra_get_insn_recog_data (before)->sp_offset);
1820 
1821   for (rtx_insn *insn = from; insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
1822     lra_get_insn_recog_data (insn)->sp_offset = offset;
1823 }
1824 
1825 /* Emit insns BEFORE before INSN and insns AFTER after INSN.  Put the
1826    insns onto the stack.  Print about emitting the insns with
1827    TITLE.  */
void
lra_process_new_insns (rtx_insn *insn, rtx_insn *before, rtx_insn *after,
		       const char *title)
{
  rtx_insn *last;

  if (before == NULL_RTX && after == NULL_RTX)
    return;
  if (lra_dump_file != NULL)
    {
      dump_insn_slim (lra_dump_file, insn);
      if (before != NULL_RTX)
	{
	  fprintf (lra_dump_file,"    %s before:\n", title);
	  dump_rtl_slim (lra_dump_file, before, NULL, -1, 0);
	}
      if (after != NULL_RTX)
	{
	  fprintf (lra_dump_file, "    %s after:\n", title);
	  dump_rtl_slim (lra_dump_file, after, NULL, -1, 0);
	}
      fprintf (lra_dump_file, "\n");
    }
  if (before != NULL_RTX)
    {
      /* Propagate EH region notes onto the new insns before emitting
	 them.  */
      if (cfun->can_throw_non_call_exceptions)
	copy_reg_eh_region_note_forward (insn, before, NULL);
      emit_insn_before (before, insn);
      /* Push the emitted insns (now directly preceding INSN) and give
	 them INSN's stack pointer offset.  */
      push_insns (PREV_INSN (insn), PREV_INSN (before));
      setup_sp_offset (before, PREV_INSN (insn));
    }
  if (after != NULL_RTX)
    {
      if (cfun->can_throw_non_call_exceptions)
	copy_reg_eh_region_note_forward (insn, after, NULL);
      /* Find the last insn of the AFTER sequence.  */
      for (last = after; NEXT_INSN (last) != NULL_RTX; last = NEXT_INSN (last))
	;
      emit_insn_after (after, insn);
      push_insns (last, insn);
      setup_sp_offset (after, last);
    }
  if (cfun->can_throw_non_call_exceptions)
    {
      /* Drop a now-stale EH note if INSN can no longer throw.  */
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (note && !insn_could_throw_p (insn))
	remove_note (insn, note);
    }
}
1876 
1877 
1878 /* Replace all references to register OLD_REGNO in *LOC with pseudo
1879    register NEW_REG.  Try to simplify subreg of constant if SUBREG_P.
1880    Return true if any change was made.  */
1881 bool
1882 lra_substitute_pseudo (rtx *loc, int old_regno, rtx new_reg, bool subreg_p)
1883 {
1884   rtx x = *loc;
1885   bool result = false;
1886   enum rtx_code code;
1887   const char *fmt;
1888   int i, j;
1889 
1890   if (x == NULL_RTX)
1891     return false;
1892 
1893   code = GET_CODE (x);
1894   if (code == SUBREG && subreg_p)
1895     {
1896       rtx subst, inner = SUBREG_REG (x);
1897       /* Transform subreg of constant while we still have inner mode
1898 	 of the subreg.  The subreg internal should not be an insn
1899 	 operand.  */
1900       if (REG_P (inner) && (int) REGNO (inner) == old_regno
1901 	  && CONSTANT_P (new_reg)
1902 	  && (subst = simplify_subreg (GET_MODE (x), new_reg, GET_MODE (inner),
1903 				       SUBREG_BYTE (x))) != NULL_RTX)
1904 	{
1905 	  *loc = subst;
1906 	  return true;
1907 	}
1908 
1909     }
1910   else if (code == REG && (int) REGNO (x) == old_regno)
1911     {
1912       machine_mode mode = GET_MODE (x);
1913       machine_mode inner_mode = GET_MODE (new_reg);
1914 
1915       if (mode != inner_mode
1916 	  && ! (CONST_INT_P (new_reg) && SCALAR_INT_MODE_P (mode)))
1917 	{
1918 	  if (GET_MODE_SIZE (mode) >= GET_MODE_SIZE (inner_mode)
1919 	      || ! SCALAR_INT_MODE_P (inner_mode))
1920 	    new_reg = gen_rtx_SUBREG (mode, new_reg, 0);
1921 	  else
1922 	    new_reg = gen_lowpart_SUBREG (mode, new_reg);
1923 	}
1924       *loc = new_reg;
1925       return true;
1926     }
1927 
1928   /* Scan all the operand sub-expressions.  */
1929   fmt = GET_RTX_FORMAT (code);
1930   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1931     {
1932       if (fmt[i] == 'e')
1933 	{
1934 	  if (lra_substitute_pseudo (&XEXP (x, i), old_regno,
1935 				     new_reg, subreg_p))
1936 	    result = true;
1937 	}
1938       else if (fmt[i] == 'E')
1939 	{
1940 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1941 	    if (lra_substitute_pseudo (&XVECEXP (x, i, j), old_regno,
1942 				       new_reg, subreg_p))
1943 	      result = true;
1944 	}
1945     }
1946   return result;
1947 }
1948 
1949 /* Call lra_substitute_pseudo within an insn.  Try to simplify subreg
1950    of constant if SUBREG_P.  This won't update the insn ptr, just the
1951    contents of the insn.  */
1952 bool
1953 lra_substitute_pseudo_within_insn (rtx_insn *insn, int old_regno,
1954 				   rtx new_reg, bool subreg_p)
1955 {
1956   rtx loc = insn;
1957   return lra_substitute_pseudo (&loc, old_regno, new_reg, subreg_p);
1958 }
1959 
1960 
1961 
1962 /* This page contains code dealing with scratches (changing them onto
1963    pseudos and restoring them from the pseudos).
1964 
1965    We change scratches into pseudos at the beginning of LRA to
1966    simplify dealing with them (conflicts, hard register assignments).
1967 
1968    If the pseudo denoting scratch was spilled it means that we do need
1969    a hard register for it.  Such pseudos are transformed back to
1970    scratches at the end of LRA.	 */
1971 
/* Description of location of a former scratch operand.	 */
struct sloc
{
  rtx_insn *insn; /* Insn where the scratch was.  */
  int nop;  /* Number of the operand which was a scratch.  */
};

typedef struct sloc *sloc_t;

/* Locations of the former scratches (filled by
   lra_register_new_scratch_op, drained by restore_scratches).  */
static vec<sloc_t> scratches;

/* Bitmap of regnos of pseudos created to replace SCRATCH operands.  */
static bitmap_head scratch_bitmap;

/* Bitmap of former scratch operands, indexed by
   INSN_UID (insn) * MAX_RECOG_OPERANDS + operand number.  */
static bitmap_head scratch_operand_bitmap;
1989 
1990 /* Return true if pseudo REGNO is made of SCRATCH.  */
1991 bool
1992 lra_former_scratch_p (int regno)
1993 {
1994   return bitmap_bit_p (&scratch_bitmap, regno);
1995 }
1996 
1997 /* Return true if the operand NOP of INSN is a former scratch.	*/
1998 bool
1999 lra_former_scratch_operand_p (rtx_insn *insn, int nop)
2000 {
2001   return bitmap_bit_p (&scratch_operand_bitmap,
2002 		       INSN_UID (insn) * MAX_RECOG_OPERANDS + nop) != 0;
2003 }
2004 
2005 /* Register operand NOP in INSN as a former scratch.  It will be
2006    changed to scratch back, if it is necessary, at the LRA end.  */
2007 void
2008 lra_register_new_scratch_op (rtx_insn *insn, int nop)
2009 {
2010   lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
2011   rtx op = *id->operand_loc[nop];
2012   sloc_t loc = XNEW (struct sloc);
2013   lra_assert (REG_P (op));
2014   loc->insn = insn;
2015   loc->nop = nop;
2016   scratches.safe_push (loc);
2017   bitmap_set_bit (&scratch_bitmap, REGNO (op));
2018   bitmap_set_bit (&scratch_operand_bitmap,
2019 		  INSN_UID (insn) * MAX_RECOG_OPERANDS + nop);
2020   add_reg_note (insn, REG_UNUSED, op);
2021 }
2022 
/* Change scratches onto pseudos and save their location.  */
static void
remove_scratches (void)
{
  int i;
  bool insn_changed_p;
  basic_block bb;
  rtx_insn *insn;
  rtx reg;
  lra_insn_recog_data_t id;
  struct lra_static_insn_data *static_id;

  scratches.create (get_max_uid ());
  bitmap_initialize (&scratch_bitmap, &reg_obstack);
  bitmap_initialize (&scratch_operand_bitmap, &reg_obstack);
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
	id = lra_get_insn_recog_data (insn);
	static_id = id->insn_static_data;
	insn_changed_p = false;
	/* Only scratches with a real mode need a register; VOIDmode
	   ones are left alone.  */
	for (i = 0; i < static_id->n_operands; i++)
	  if (GET_CODE (*id->operand_loc[i]) == SCRATCH
	      && GET_MODE (*id->operand_loc[i]) != VOIDmode)
	    {
	      insn_changed_p = true;
	      /* Replace the SCRATCH with a fresh pseudo and record its
		 location so restore_scratches can undo this at the end
		 of LRA.  */
	      *id->operand_loc[i] = reg
		= lra_create_new_reg (static_id->operand[i].mode,
				      *id->operand_loc[i], ALL_REGS, NULL);
	      lra_register_new_scratch_op (insn, i);
	      if (lra_dump_file != NULL)
		fprintf (lra_dump_file,
			 "Removing SCRATCH in insn #%u (nop %d)\n",
			 INSN_UID (insn), i);
	    }
	if (insn_changed_p)
	  /* Because we might use DF right after caller-saves sub-pass
	     we need to keep DF info up to date.  */
	  df_insn_rescan (insn);
      }
}
2065 
/* Changes pseudos created by function remove_scratches onto scratches.	 */
static void
restore_scratches (void)
{
  int regno;
  unsigned i;
  sloc_t loc;
  rtx_insn *last = NULL;
  lra_insn_recog_data_t id = NULL;

  for (i = 0; scratches.iterate (i, &loc); i++)
    {
      /* Ignore already deleted insns.  */
      if (NOTE_P (loc->insn)
	  && NOTE_KIND (loc->insn) == NOTE_INSN_DELETED)
	continue;
      /* Cache the recog data while consecutive entries refer to the
	 same insn.  */
      if (last != loc->insn)
	{
	  last = loc->insn;
	  id = lra_get_insn_recog_data (last);
	}
      /* Turn the operand back into a SCRATCH only if it is still a
	 pseudo that ended up without a hard register.  */
      if (REG_P (*id->operand_loc[loc->nop])
	  && ((regno = REGNO (*id->operand_loc[loc->nop]))
	      >= FIRST_PSEUDO_REGISTER)
	  && lra_get_regno_hard_regno (regno) < 0)
	{
	  /* This should be the only case: a scratch register with
	     chosen constraint 'X' that did not get memory or a hard
	     register.  */
	  lra_assert (lra_former_scratch_p (regno));
	  *id->operand_loc[loc->nop]
	    = gen_rtx_SCRATCH (GET_MODE (*id->operand_loc[loc->nop]));
	  lra_update_dup (id, loc->nop);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "Restoring SCRATCH in insn #%u(nop %d)\n",
		     INSN_UID (loc->insn), loc->nop);
	}
    }
  /* Release all bookkeeping created by remove_scratches.  */
  for (i = 0; scratches.iterate (i, &loc); i++)
    free (loc);
  scratches.release ();
  bitmap_clear (&scratch_bitmap);
  bitmap_clear (&scratch_operand_bitmap);
}
2109 
2110 
2111 
2112 /* Function checks RTL for correctness.	 If FINAL_P is true, it is
2113    done at the end of LRA and the check is more rigorous.  */
2114 static void
2115 check_rtl (bool final_p)
2116 {
2117   basic_block bb;
2118   rtx_insn *insn;
2119 
2120   lra_assert (! final_p || reload_completed);
2121   FOR_EACH_BB_FN (bb, cfun)
2122     FOR_BB_INSNS (bb, insn)
2123     if (NONDEBUG_INSN_P (insn)
2124 	&& GET_CODE (PATTERN (insn)) != USE
2125 	&& GET_CODE (PATTERN (insn)) != CLOBBER
2126 	&& GET_CODE (PATTERN (insn)) != ASM_INPUT)
2127       {
2128 	if (final_p)
2129 	  {
2130 	    extract_constrain_insn (insn);
2131 	    continue;
2132 	  }
2133 	/* LRA code is based on assumption that all addresses can be
2134 	   correctly decomposed.  LRA can generate reloads for
2135 	   decomposable addresses.  The decomposition code checks the
2136 	   correctness of the addresses.  So we don't need to check
2137 	   the addresses here.  Don't call insn_invalid_p here, it can
2138 	   change the code at this stage.  */
2139 	if (recog_memoized (insn) < 0 && asm_noperands (PATTERN (insn)) < 0)
2140 	  fatal_insn_not_found (insn);
2141       }
2142 }
2143 
2144 /* Determine if the current function has an exception receiver block
2145    that reaches the exit block via non-exceptional edges  */
2146 static bool
2147 has_nonexceptional_receiver (void)
2148 {
2149   edge e;
2150   edge_iterator ei;
2151   basic_block *tos, *worklist, bb;
2152 
2153   /* If we're not optimizing, then just err on the safe side.  */
2154   if (!optimize)
2155     return true;
2156 
2157   /* First determine which blocks can reach exit via normal paths.  */
2158   tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
2159 
2160   FOR_EACH_BB_FN (bb, cfun)
2161     bb->flags &= ~BB_REACHABLE;
2162 
2163   /* Place the exit block on our worklist.  */
2164   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
2165   *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
2166 
2167   /* Iterate: find everything reachable from what we've already seen.  */
2168   while (tos != worklist)
2169     {
2170       bb = *--tos;
2171 
2172       FOR_EACH_EDGE (e, ei, bb->preds)
2173 	if (e->flags & EDGE_ABNORMAL)
2174 	  {
2175 	    free (worklist);
2176 	    return true;
2177 	  }
2178 	else
2179 	  {
2180 	    basic_block src = e->src;
2181 
2182 	    if (!(src->flags & BB_REACHABLE))
2183 	      {
2184 		src->flags |= BB_REACHABLE;
2185 		*tos++ = src;
2186 	      }
2187 	  }
2188     }
2189   free (worklist);
2190   /* No exceptional block reached exit unexceptionally.	 */
2191   return false;
2192 }
2193 
2194 
2195 /* Process recursively X of INSN and add REG_INC notes if necessary.  */
2196 static void
2197 add_auto_inc_notes (rtx_insn *insn, rtx x)
2198 {
2199   enum rtx_code code = GET_CODE (x);
2200   const char *fmt;
2201   int i, j;
2202 
2203   if (code == MEM && auto_inc_p (XEXP (x, 0)))
2204     {
2205       add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
2206       return;
2207     }
2208 
2209   /* Scan all X sub-expressions.  */
2210   fmt = GET_RTX_FORMAT (code);
2211   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2212     {
2213       if (fmt[i] == 'e')
2214 	add_auto_inc_notes (insn, XEXP (x, i));
2215       else if (fmt[i] == 'E')
2216 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2217 	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
2218     }
2219 }
2220 
2221 
2222 /* Remove all REG_DEAD and REG_UNUSED notes and regenerate REG_INC.
2223    We change pseudos by hard registers without notification of DF and
2224    that can make the notes obsolete.  DF-infrastructure does not deal
2225    with REG_INC notes -- so we should regenerate them here.  */
2226 static void
2227 update_inc_notes (void)
2228 {
2229   rtx *pnote;
2230   basic_block bb;
2231   rtx_insn *insn;
2232 
2233   FOR_EACH_BB_FN (bb, cfun)
2234     FOR_BB_INSNS (bb, insn)
2235     if (NONDEBUG_INSN_P (insn))
2236       {
2237 	pnote = &REG_NOTES (insn);
2238 	while (*pnote != 0)
2239 	  {
2240 	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
2241                 || REG_NOTE_KIND (*pnote) == REG_UNUSED
2242                 || REG_NOTE_KIND (*pnote) == REG_INC)
2243 	      *pnote = XEXP (*pnote, 1);
2244 	    else
2245 	      pnote = &XEXP (*pnote, 1);
2246 	  }
2247 
2248 	if (AUTO_INC_DEC)
2249 	  add_auto_inc_notes (insn, PATTERN (insn));
2250       }
2251 }
2252 
/* Set to 1 while in lra.  */
int lra_in_progress;

/* Start of pseudo regnos before the LRA.  */
int lra_new_regno_start;

/* Start of reload pseudo regnos before the new spill pass.  */
int lra_constraint_new_regno_start;

/* Avoid spilling pseudos with regno more than the following value if
   it is possible.  */
int lra_bad_spill_regno_start;

/* Inheritance pseudo regnos before the new spill pass.  */
bitmap_head lra_inheritance_pseudos;

/* Split regnos before the new spill pass.  */
bitmap_head lra_split_regs;

/* Reload pseudo regnos before the new assignment pass which still can
   be spilled after the assignment pass as memory is also accepted in
   insns for the reload pseudos.  */
bitmap_head lra_optional_reload_pseudos;

/* Pseudo regnos used for subreg reloads before the new assignment
   pass.  Such pseudos still can be spilled after the assignment
   pass.  */
bitmap_head lra_subreg_reload_pseudos;

/* File used for output of LRA debug information.  */
FILE *lra_dump_file;

/* True if we should try to spill into registers of different classes
   instead of memory.  */
bool lra_reg_spill_p;
2288 
2289 /* Set up value LRA_REG_SPILL_P.  */
2290 static void
2291 setup_reg_spill_flag (void)
2292 {
2293   int cl, mode;
2294 
2295   if (targetm.spill_class != NULL)
2296     for (cl = 0; cl < (int) LIM_REG_CLASSES; cl++)
2297       for (mode = 0; mode < MAX_MACHINE_MODE; mode++)
2298 	if (targetm.spill_class ((enum reg_class) cl,
2299 				 (machine_mode) mode) != NO_REGS)
2300 	  {
2301 	    lra_reg_spill_p = true;
2302 	    return;
2303 	  }
2304   lra_reg_spill_p = false;
2305 }
2306 
/* True if the current function is too big to use regular algorithms
   in LRA.  In other words, we should use simpler and faster algorithms
   in LRA.  It also means we should not worry about generating code
   for caller saves.  The value is set up in IRA.  */
bool lra_simple_p;
2312 
/* Major LRA entry function.  F is a file that should be used to dump
   LRA debug info.  */
void
lra (FILE *f)
{
  int i;
  bool live_p, inserted_p;

  lra_dump_file = f;

  timevar_push (TV_LRA);

  /* Make sure that the last insn is a note.  Some subsequent passes
     need it.  */
  emit_note (NOTE_INSN_DELETED);

  COPY_HARD_REG_SET (lra_no_alloc_regs, ira_no_alloc_regs);

  init_reg_info ();
  expand_reg_info ();

  init_insn_recog_data ();

  /* Some quick check on RTL generated by previous passes.  */
  if (flag_checking)
    check_rtl (false);

  lra_in_progress = 1;

  /* Reset the iteration counters of all the LRA sub-passes.  */
  lra_live_range_iter = lra_coalesce_iter = lra_constraint_iter = 0;
  lra_assignment_iter = lra_assignment_iter_after_spill = 0;
  lra_inheritance_iter = lra_undo_inheritance_iter = 0;
  lra_rematerialization_iter = 0;

  setup_reg_spill_flag ();

  /* Function remove_scratches can create new pseudos for clobbers --
     so set up lra_constraint_new_regno_start before its call to
     permit changing reg classes for pseudos created by this
     simplification.  */
  lra_constraint_new_regno_start = lra_new_regno_start = max_reg_num ();
  lra_bad_spill_regno_start = INT_MAX;
  remove_scratches ();

  /* A function that has a non-local label that can reach the exit
     block via non-exceptional paths must save all call-saved
     registers.	 */
  if (cfun->has_nonlocal_label && has_nonexceptional_receiver ())
    crtl->saves_all_registers = 1;

  if (crtl->saves_all_registers)
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
	df_set_regs_ever_live (i, true);

  /* We don't use DF from now on, and avoid using it, because it is
     too expensive when a lot of RTL changes are made.  */
  df_set_flags (DF_NO_INSN_RESCAN);
  lra_constraint_insn_stack.create (get_max_uid ());
  lra_constraint_insn_stack_bitmap = sbitmap_alloc (get_max_uid ());
  bitmap_clear (lra_constraint_insn_stack_bitmap);
  lra_live_ranges_init ();
  lra_constraints_init ();
  lra_curr_reload_num = 0;
  push_insns (get_last_insn (), NULL);
  /* It is needed for the 1st coalescing.  */
  bitmap_initialize (&lra_inheritance_pseudos, &reg_obstack);
  bitmap_initialize (&lra_split_regs, &reg_obstack);
  bitmap_initialize (&lra_optional_reload_pseudos, &reg_obstack);
  bitmap_initialize (&lra_subreg_reload_pseudos, &reg_obstack);
  live_p = false;
  if (get_frame_size () != 0 && crtl->stack_alignment_needed)
    /* If we have a stack frame, we must align it now.  The stack size
       may be a part of the offset computation for register
       elimination.  */
    assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
  lra_init_equiv ();
  for (;;)
    {
      for (;;)
	{
	  bool reloads_p = lra_constraints (lra_constraint_iter == 0);
	  /* Constraint transformations may result in that eliminable
	     hard regs become uneliminable and pseudos which use them
	     should be spilled.	 It is better to do it before pseudo
	     assignments.

	     For example, rs6000 can make
	     RS6000_PIC_OFFSET_TABLE_REGNUM uneliminable if we started
	     to use a constant pool.  */
	  lra_eliminate (false, false);
	  /* We should try to assign hard registers to scratches even
	     if there were no RTL transformations in lra_constraints.
	     Also we should check IRA assignments on the first
	     iteration as they can be wrong because of early clobbers
	     operands which are ignored in IRA.  */
	  if (! reloads_p && lra_constraint_iter > 1)
	    {
	      /* Stack is not empty here only when there are changes
		 during the elimination sub-pass.  */
	      if (bitmap_empty_p (lra_constraint_insn_stack_bitmap))
		break;
	      else
		/* If there are no reloads but there are changes due
		   to elimination, restart the constraint sub-pass
		   first.  */
		continue;
	    }
	  /* Do inheritance only for regular algorithms.  */
	  if (! lra_simple_p)
	    {
	      if (flag_ipa_ra)
		{
		  if (live_p)
		    lra_clear_live_ranges ();
		  /* As a side-effect of lra_create_live_ranges, we calculate
		     actual_call_used_reg_set,  which is needed during
		     lra_inheritance.  */
		  lra_create_live_ranges (true, true);
		  live_p = true;
		}
	      lra_inheritance ();
	    }
	  if (live_p)
	    lra_clear_live_ranges ();
	  /* We need live ranges for lra_assign -- so build them.  But
	     don't remove dead insns or change global live info as we
	     can undo inheritance transformations after inheritance
	     pseudo assigning.  */
	  lra_create_live_ranges (true, false);
	  live_p = true;
	  /* If we don't spill non-reload and non-inheritance pseudos,
	     there is no sense to run memory-memory move coalescing.
	     If inheritance pseudos were spilled, the memory-memory
	     moves involving them will be removed by pass undoing
	     inheritance.  */
	  if (lra_simple_p)
	    lra_assign ();
	  else
	    {
	      bool spill_p = !lra_assign ();

	      if (lra_undo_inheritance ())
		live_p = false;
	      if (spill_p)
		{
		  if (! live_p)
		    {
		      lra_create_live_ranges (true, true);
		      live_p = true;
		    }
		  if (lra_coalesce ())
		    live_p = false;
		}
	      if (! live_p)
		lra_clear_live_ranges ();
	    }
	}
      /* Don't clear optional reloads bitmap until all constraints are
	 satisfied as we need to differ them from regular reloads.  */
      bitmap_clear (&lra_optional_reload_pseudos);
      bitmap_clear (&lra_subreg_reload_pseudos);
      bitmap_clear (&lra_inheritance_pseudos);
      bitmap_clear (&lra_split_regs);
      if (! live_p)
	{
	  /* We need full live info for spilling pseudos into
	     registers instead of memory.  */
	  lra_create_live_ranges (lra_reg_spill_p, true);
	  live_p = true;
	}
      /* We should check necessity for spilling here as the above live
	 range pass can remove spilled pseudos.  */
      if (! lra_need_for_spills_p ())
	break;
      /* Now we know what pseudos should be spilled.  Try to
	 rematerialize them first.  */
      if (lra_remat ())
	{
	  /* We need full live info -- see the comment above.  */
	  lra_create_live_ranges (lra_reg_spill_p, true);
	  live_p = true;
	  if (! lra_need_for_spills_p ())
	    break;
	}
      lra_spill ();
      /* Assignment of stack slots changes elimination offsets for
	 some eliminations.  So update the offsets here.  */
      lra_eliminate (false, false);
      lra_constraint_new_regno_start = max_reg_num ();
      if (lra_bad_spill_regno_start == INT_MAX
	  && lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES
	  && lra_rematerialization_iter > LRA_MAX_REMATERIALIZATION_PASSES)
	/* After switching off inheritance and rematerialization
	   passes, avoid spilling reload pseudos that will be created,
	   to prevent LRA cycling in some complicated cases.  */
	lra_bad_spill_regno_start = lra_constraint_new_regno_start;
      lra_assignment_iter_after_spill = 0;
    }
  restore_scratches ();
  lra_eliminate (true, false);
  lra_final_code_change ();
  lra_in_progress = 0;
  if (live_p)
    lra_clear_live_ranges ();
  lra_live_ranges_finish ();
  lra_constraints_finish ();
  finish_reg_info ();
  sbitmap_free (lra_constraint_insn_stack_bitmap);
  lra_constraint_insn_stack.release ();
  finish_insn_recog_data ();
  regstat_free_n_sets_and_refs ();
  regstat_free_ri ();
  reload_completed = 1;
  update_inc_notes ();

  inserted_p = fixup_abnormal_edges ();

  /* We've possibly turned single trapping insn into multiple ones.  */
  if (cfun->can_throw_non_call_exceptions)
    {
      auto_sbitmap blocks (last_basic_block_for_fn (cfun));
      bitmap_ones (blocks);
      find_many_sub_basic_blocks (blocks);
    }

  if (inserted_p)
    commit_edge_insertions ();

  /* Replacing pseudos with their memory equivalents might have
     created shared rtx.  Subsequent passes would get confused
     by this, so unshare everything here.  */
  unshare_all_rtl_again (get_insns ());

  if (flag_checking)
    check_rtl (true);

  timevar_pop (TV_LRA);
}
2552 
/* One-time initialization of LRA data.  Called exactly once per
   compiler run.  */
void
lra_init_once (void)
{
  init_insn_code_data_once ();
}
2559 
/* Tear down the LRA data set up by lra_init_once.  Called exactly
   once per compiler run.  */
void
lra_finish_once (void)
{
  finish_insn_code_data_once ();
}
2567