xref: /openbsd-src/gnu/gcc/gcc/global.c (revision 404b540a9034ac75a6199ad1a32d1bbc7a0d4210)
1 /* Allocate registers for pseudo-registers that span basic blocks.
2    Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
3    1999, 2000, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA.  */
21 
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "reload.h"
37 #include "output.h"
38 #include "toplev.h"
39 #include "tree-pass.h"
40 #include "timevar.h"
41 #include "vecprim.h"
42 
43 /* This pass of the compiler performs global register allocation.
44    It assigns hard register numbers to all the pseudo registers
45    that were not handled in local_alloc.  Assignments are recorded
46    in the vector reg_renumber, not by changing the rtl code.
47    (Such changes are made by final).  The entry point is
48    the function global_alloc.
49 
50    After allocation is complete, the reload pass is run as a subroutine
51    of this pass, so that when a pseudo reg loses its hard reg due to
52    spilling it is possible to make a second attempt to find a hard
53    reg for it.  The reload pass is independent in other respects
54    and it is run even when stupid register allocation is in use.
55 
56    1. Assign allocation-numbers (allocnos) to the pseudo-registers
57    still needing allocations and to the pseudo-registers currently
58    allocated by local-alloc which may be spilled by reload.
59    Set up tables reg_allocno and allocno_reg to map
60    reg numbers to allocnos and vice versa.
61    max_allocno gets the number of allocnos in use.
62 
63    2. Allocate a max_allocno by max_allocno conflict bit matrix and clear it.
64    Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix
65    for conflicts between allocnos and explicit hard register use
66    (which includes use of pseudo-registers allocated by local_alloc).
67 
68    3. For each basic block
69     walk forward through the block, recording which
70     pseudo-registers and which hardware registers are live.
71     Build the conflict matrix between the pseudo-registers
72     and another of pseudo-registers versus hardware registers.
73     Also record the preferred hardware registers
74     for each pseudo-register.
75 
76    4. Sort a table of the allocnos into order of
77    desirability of the variables.
78 
79    5. Allocate the variables in that order; each if possible into
80    a preferred register, else into another register.  */
81 
82 /* Number of pseudo-registers which are candidates for allocation.  */
83 
84 static int max_allocno;
85 
86 /* Indexed by (pseudo) reg number, gives the allocno, or -1
87    for pseudo registers which are not to be allocated.  */
88 
89 static int *reg_allocno;
90 
/* Per-allocno bookkeeping record.  One of these exists for each pseudo
   register that is a candidate for global allocation; statistics from all
   pseudos sharing the allocno (via reg_may_share) are accumulated here.  */
struct allocno
{
  /* The (pseudo) register number this allocno stands for.  */
  int reg;
  /* Gives the number of consecutive hard registers needed by that
     pseudo reg.  */
  int size;

  /* Number of calls crossed by each allocno.  */
  int calls_crossed;

  /* Number of calls that might throw crossed by each allocno.  */
  int throwing_calls_crossed;

  /* Number of refs to each allocno.  */
  int n_refs;

  /* Frequency of uses of each allocno.  */
  int freq;

  /* Guess at live length of each allocno.
     This is actually the max of the live lengths of the regs.  */
  int live_length;

  /* Set of hard regs conflicting with allocno N.  */

  HARD_REG_SET hard_reg_conflicts;

  /* Set of hard regs preferred by allocno N.
     This is used to make allocnos go into regs that are copied to or from them,
     when possible, to reduce register shuffling.  */

  HARD_REG_SET hard_reg_preferences;

  /* Similar, but just counts register preferences made in simple copy
     operations, rather than arithmetic.  These are given priority because
     we can always eliminate an insn by using these, but using a register
     in the above list won't always eliminate an insn.  */

  HARD_REG_SET hard_reg_copy_preferences;

  /* Similar to hard_reg_preferences, but includes bits for subsequent
     registers when an allocno is multi-word.  The above variable is used for
     allocation while this is used to build reg_someone_prefers, below.  */

  HARD_REG_SET hard_reg_full_preferences;

  /* Set of hard registers that some later allocno has a preference for.  */

  HARD_REG_SET regs_someone_prefers;

#ifdef STACK_REGS
  /* Set to true if allocno can't be allocated in the stack register.  */
  bool no_stack_reg;
#endif
};
146 
147 static struct allocno *allocno;
148 
149 /* A vector of the integers from 0 to max_allocno-1,
150    sorted in the order of first-to-be-allocated first.  */
151 
152 static int *allocno_order;
153 
154 /* Indexed by (pseudo) reg number, gives the number of another
155    lower-numbered pseudo reg which can share a hard reg with this pseudo
156    *even if the two pseudos would otherwise appear to conflict*.  */
157 
158 static int *reg_may_share;
159 
160 /* Define the number of bits in each element of `conflicts' and what
161    type that element has.  We use the largest integer format on the
162    host machine.  */
163 
164 #define INT_BITS HOST_BITS_PER_WIDE_INT
165 #define INT_TYPE HOST_WIDE_INT
166 
167 /* max_allocno by max_allocno array of bits,
168    recording whether two allocno's conflict (can't go in the same
169    hardware register).
170 
171    `conflicts' is symmetric after the call to mirror_conflicts.  */
172 
173 static INT_TYPE *conflicts;
174 
175 /* Number of ints required to hold max_allocno bits.
176    This is the length of a row in `conflicts'.  */
177 
178 static int allocno_row_words;
179 
180 /* Two macros to test or store 1 in an element of `conflicts'.  */
181 
/* Nonzero iff allocno I conflicts with allocno J; indexes row I of the
   `conflicts' bit matrix.  Symmetric only after mirror_conflicts runs.  */
#define CONFLICTP(I, J) \
 (conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS]	\
  & ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))

/* For any allocno set in ALLOCNO_SET, set ALLOCNO to that allocno,
   and execute CODE.  ALLOCNO_SET is one row of allocno_row_words
   INT_TYPE words.  The trailing-underscore locals avoid capturing
   names that CODE may use; the do/while(0) wrapper makes the macro
   behave as a single statement.  */
#define EXECUTE_IF_SET_IN_ALLOCNO_SET(ALLOCNO_SET, ALLOCNO, CODE)	\
do {									\
  int i_;								\
  int allocno_;								\
  INT_TYPE *p_ = (ALLOCNO_SET);						\
									\
  for (i_ = allocno_row_words - 1, allocno_ = 0; i_ >= 0;		\
       i_--, allocno_ += INT_BITS)					\
    {									\
      unsigned INT_TYPE word_ = (unsigned INT_TYPE) *p_++;		\
									\
      for ((ALLOCNO) = allocno_; word_; word_ >>= 1, (ALLOCNO)++)	\
	{								\
	  if (word_ & 1)						\
	    {CODE;}							\
	}								\
    }									\
} while (0)
206 
207 /* This doesn't work for non-GNU C due to the way CODE is macro expanded.  */
208 #if 0
209 /* For any allocno that conflicts with IN_ALLOCNO, set OUT_ALLOCNO to
210    the conflicting allocno, and execute CODE.  This macro assumes that
211    mirror_conflicts has been run.  */
212 #define EXECUTE_IF_CONFLICT(IN_ALLOCNO, OUT_ALLOCNO, CODE)\
213   EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + (IN_ALLOCNO) * allocno_row_words,\
214 				 OUT_ALLOCNO, (CODE))
215 #endif
216 
217 /* Set of hard regs currently live (during scan of all insns).  */
218 
219 static HARD_REG_SET hard_regs_live;
220 
221 /* Set of registers that global-alloc isn't supposed to use.  */
222 
223 static HARD_REG_SET no_global_alloc_regs;
224 
225 /* Set of registers used so far.  */
226 
227 static HARD_REG_SET regs_used_so_far;
228 
229 /* Number of refs to each hard reg, as used by local alloc.
230    It is zero for a reg that contains global pseudos or is explicitly used.  */
231 
232 static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];
233 
234 /* Frequency of uses of given hard reg.  */
235 static int local_reg_freq[FIRST_PSEUDO_REGISTER];
236 
237 /* Guess at live length of each hard reg, as used by local alloc.
238    This is actually the sum of the live lengths of the specific regs.  */
239 
240 static int local_reg_live_length[FIRST_PSEUDO_REGISTER];
241 
242 /* Set to 1 a bit in a vector TABLE of HARD_REG_SETs, for vector
243    element I, and hard register number J.  */
244 
245 #define SET_REGBIT(TABLE, I, J)  SET_HARD_REG_BIT (allocno[I].TABLE, J)
246 
247 /* Bit mask for allocnos live at current point in the scan.  */
248 
249 static INT_TYPE *allocnos_live;
250 
251 /* Test, set or clear bit number I in allocnos_live,
252    a bit vector indexed by allocno.  */
253 
/* Mark allocno I live at the current point of the insn scan.  */
#define SET_ALLOCNO_LIVE(I)				\
  (allocnos_live[(unsigned) (I) / INT_BITS]		\
     |= ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))

/* Mark allocno I no longer live at the current point of the scan.  */
#define CLEAR_ALLOCNO_LIVE(I)				\
  (allocnos_live[(unsigned) (I) / INT_BITS]		\
     &= ~((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
261 
262 /* This is turned off because it doesn't work right for DImode.
263    (And it is only used for DImode, so the other cases are worthless.)
264    The problem is that it isn't true that there is NO possibility of conflict;
265    only that there is no conflict if the two pseudos get the exact same regs.
266    If they were allocated with a partial overlap, there would be a conflict.
267    We can't safely turn off the conflict unless we have another way to
268    prevent the partial overlap.
269 
270    Idea: change hard_reg_conflicts so that instead of recording which
271    hard regs the allocno may not overlap, it records where the allocno
272    may not start.  Change both where it is used and where it is updated.
273    Then there is a way to record that (reg:DI 108) may start at 10
274    but not at 9 or 11.  There is still the question of how to record
275    this semi-conflict between two pseudos.  */
276 #if 0
277 /* Reg pairs for which conflict after the current insn
278    is inhibited by a REG_NO_CONFLICT note.
279    If the table gets full, we ignore any other notes--that is conservative.  */
280 #define NUM_NO_CONFLICT_PAIRS 4
281 /* Number of pairs in use in this insn.  */
282 int n_no_conflict_pairs;
283 static struct { int allocno1, allocno2;}
284   no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
285 #endif /* 0 */
286 
287 /* Record all regs that are set in any one insn.
288    Communication from mark_reg_{store,clobber} and global_conflicts.  */
289 
290 static rtx *regs_set;
291 static int n_regs_set;
292 
293 /* All registers that can be eliminated.  */
294 
295 static HARD_REG_SET eliminable_regset;
296 
297 static int allocno_compare (const void *, const void *);
298 static void global_conflicts (void);
299 static void mirror_conflicts (void);
300 static void expand_preferences (void);
301 static void prune_preferences (void);
302 static void find_reg (int, HARD_REG_SET, int, int, int);
303 static void record_one_conflict (int);
304 static void record_conflicts (int *, int);
305 static void mark_reg_store (rtx, rtx, void *);
306 static void mark_reg_clobber (rtx, rtx, void *);
307 static void mark_reg_conflicts (rtx);
308 static void mark_reg_death (rtx);
309 static void mark_reg_live_nc (int, enum machine_mode);
310 static void set_preference (rtx, rtx);
311 static void dump_conflicts (FILE *);
312 static void reg_becomes_live (rtx, rtx, void *);
313 static void reg_dies (int, enum machine_mode, struct insn_chain *);
314 
315 static void allocate_bb_info (void);
316 static void free_bb_info (void);
317 static bool check_earlyclobber (rtx);
318 static void mark_reg_use_for_earlyclobber_1 (rtx *, void *);
319 static int mark_reg_use_for_earlyclobber (rtx *, void *);
320 static void calculate_local_reg_bb_info (void);
321 static void set_up_bb_rts_numbers (void);
322 static int rpost_cmp (const void *, const void *);
323 static void calculate_reg_pav (void);
324 static void modify_reg_pav (void);
325 static void make_accurate_live_analysis (void);
326 
327 
328 
329 /* Perform allocation of pseudo-registers not allocated by local_alloc.
330 
331    Return value is nonzero if reload failed
332    and we must not do any more for this function.  */
333 
static int
global_alloc (void)
{
  int retval;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  /* Nonzero if this function cannot do without a frame pointer, so the
     frame pointer must be kept out of global allocation.  */
  int need_fp
    = (! flag_omit_frame_pointer
       || (current_function_calls_alloca && EXIT_IGNORE_STACK)
       || FRAME_POINTER_REQUIRED);

  size_t i;
  rtx x;

  make_accurate_live_analysis ();

  max_allocno = 0;

  /* A machine may have certain hard registers that
     are safe to use only within a basic block.  */

  CLEAR_HARD_REG_SET (no_global_alloc_regs);

  /* Build the regset of all eliminable registers and show we can't use those
     that we already know won't be eliminated.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < ARRAY_SIZE (eliminables); i++)
    {
      bool cannot_elim
	= (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
	   || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));

      if (!regs_asm_clobbered[eliminables[i].from])
	{
	  SET_HARD_REG_BIT (eliminable_regset, eliminables[i].from);

	  if (cannot_elim)
	    SET_HARD_REG_BIT (no_global_alloc_regs, eliminables[i].from);
	}
      else if (cannot_elim)
	/* An asm clobbers a register we cannot eliminate: hard error.  */
	error ("%s cannot be used in asm here",
	       reg_names[eliminables[i].from]);
      else
	regs_ever_live[eliminables[i].from] = 1;
    }
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (!regs_asm_clobbered[HARD_FRAME_POINTER_REGNUM])
    {
      SET_HARD_REG_BIT (eliminable_regset, HARD_FRAME_POINTER_REGNUM);
      if (need_fp)
	SET_HARD_REG_BIT (no_global_alloc_regs, HARD_FRAME_POINTER_REGNUM);
    }
  else if (need_fp)
    error ("%s cannot be used in asm here",
	   reg_names[HARD_FRAME_POINTER_REGNUM]);
  else
    regs_ever_live[HARD_FRAME_POINTER_REGNUM] = 1;
#endif

#else
  if (!regs_asm_clobbered[FRAME_POINTER_REGNUM])
    {
      SET_HARD_REG_BIT (eliminable_regset, FRAME_POINTER_REGNUM);
      if (need_fp)
	SET_HARD_REG_BIT (no_global_alloc_regs, FRAME_POINTER_REGNUM);
    }
  else if (need_fp)
    error ("%s cannot be used in asm here", reg_names[FRAME_POINTER_REGNUM]);
  else
    regs_ever_live[FRAME_POINTER_REGNUM] = 1;
#endif

  /* Track which registers have already been used.  Start with registers
     explicitly in the rtl, then registers allocated by local register
     allocation.  */

  CLEAR_HARD_REG_SET (regs_used_so_far);
#ifdef LEAF_REGISTERS
  /* If we are doing the leaf function optimization, and this is a leaf
     function, it means that the registers that take work to save are those
     that need a register window.  So prefer the ones that can be used in
     a leaf function.  */
  {
    const char *cheap_regs;
    const char *const leaf_regs = LEAF_REGISTERS;

    if (only_leaf_regs_used () && leaf_function_p ())
      cheap_regs = leaf_regs;
    else
      cheap_regs = call_used_regs;
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (regs_ever_live[i] || cheap_regs[i])
	SET_HARD_REG_BIT (regs_used_so_far, i);
  }
#else
  /* We consider registers that do not have to be saved over calls as if
     they were already used since there is no cost in using them.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i] || call_used_regs[i])
      SET_HARD_REG_BIT (regs_used_so_far, i);
#endif

  /* Hard regs already chosen by local-alloc count as used as well.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_renumber[i] >= 0)
      SET_HARD_REG_BIT (regs_used_so_far, reg_renumber[i]);

  /* Establish mappings from register number to allocation number
     and vice versa.  In the process, count the allocnos.  */

  reg_allocno = XNEWVEC (int, max_regno);

  /* Hard registers never get an allocno.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    reg_allocno[i] = -1;

  /* Initialize the shared-hard-reg mapping
     from the list of pairs that may share.
     Each pair is canonicalized: the higher regno maps to the lower.  */
  reg_may_share = XCNEWVEC (int, max_regno);
  for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
    {
      int r1 = REGNO (XEXP (x, 0));
      int r2 = REGNO (XEXP (XEXP (x, 1), 0));
      if (r1 > r2)
	reg_may_share[r1] = r2;
      else
	reg_may_share[r2] = r1;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    /* Note that reg_live_length[i] < 0 indicates a "constant" reg
       that we are supposed to refrain from putting in a hard reg.
       -2 means do make an allocno but don't allocate it.  */
    if (REG_N_REFS (i) != 0 && REG_LIVE_LENGTH (i) != -1
	/* Don't allocate pseudos that cross calls,
	   if this function receives a nonlocal goto.  */
	&& (! current_function_has_nonlocal_label
	    || REG_N_CALLS_CROSSED (i) == 0))
      {
	/* A pseudo whose sharing partner already has an allocno
	   reuses that allocno instead of getting a fresh one.  */
	if (reg_renumber[i] < 0
	    && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
	  reg_allocno[i] = reg_allocno[reg_may_share[i]];
	else
	  reg_allocno[i] = max_allocno++;
	gcc_assert (REG_LIVE_LENGTH (i));
      }
    else
      reg_allocno[i] = -1;

  allocno = XCNEWVEC (struct allocno, max_allocno);

  /* Accumulate per-allocno statistics; several pseudos can map to the
     same allocno via reg_may_share, hence the "+=" accumulation.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_allocno[i] >= 0)
      {
	int num = reg_allocno[i];
	allocno[num].reg = i;
	allocno[num].size = PSEUDO_REGNO_SIZE (i);
	allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
	allocno[num].throwing_calls_crossed
	  += REG_N_THROWING_CALLS_CROSSED (i);
	allocno[num].n_refs += REG_N_REFS (i);
	allocno[num].freq += REG_FREQ (i);
	if (allocno[num].live_length < REG_LIVE_LENGTH (i))
	  allocno[num].live_length = REG_LIVE_LENGTH (i);
      }

  /* Calculate amount of usage of each hard reg by pseudos
     allocated by local-alloc.  This is to see if we want to
     override it.  */
  memset (local_reg_live_length, 0, sizeof local_reg_live_length);
  memset (local_reg_n_refs, 0, sizeof local_reg_n_refs);
  memset (local_reg_freq, 0, sizeof local_reg_freq);
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_renumber[i] >= 0)
      {
	int regno = reg_renumber[i];
	int endregno = regno + hard_regno_nregs[regno][PSEUDO_REGNO_MODE (i)];
	int j;

	/* A multi-word pseudo charges every hard reg it occupies.  */
	for (j = regno; j < endregno; j++)
	  {
	    local_reg_n_refs[j] += REG_N_REFS (i);
	    local_reg_freq[j] += REG_FREQ (i);
	    local_reg_live_length[j] += REG_LIVE_LENGTH (i);
	  }
      }

  /* We can't override local-alloc for a reg used not just by local-alloc.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i])
      local_reg_n_refs[i] = 0, local_reg_freq[i] = 0;

  allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  Some examples were > 2Mb in size.  */
  conflicts = XCNEWVEC (INT_TYPE, max_allocno * allocno_row_words);

  allocnos_live = XNEWVEC (INT_TYPE, allocno_row_words);

  /* If there is work to be done (at least one reg to allocate),
     perform global conflict analysis and allocate the regs.  */

  if (max_allocno > 0)
    {
      /* Scan all the insns and compute the conflicts among allocnos
	 and between allocnos and hard regs.  */

      global_conflicts ();

      mirror_conflicts ();

      /* Eliminate conflicts between pseudos and eliminable registers.  If
	 the register is not eliminated, the pseudo won't really be able to
	 live in the eliminable register, so the conflict doesn't matter.
	 If we do eliminate the register, the conflict will no longer exist.
	 So in either case, we can ignore the conflict.  Likewise for
	 preferences.  */

      for (i = 0; i < (size_t) max_allocno; i++)
	{
	  AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_conflicts,
				  eliminable_regset);
	  AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_copy_preferences,
				  eliminable_regset);
	  AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_preferences,
				  eliminable_regset);
	}

      /* Try to expand the preferences by merging them between allocnos.  */

      expand_preferences ();

      /* Determine the order to allocate the remaining pseudo registers.  */

      allocno_order = XNEWVEC (int, max_allocno);
      for (i = 0; i < (size_t) max_allocno; i++)
	allocno_order[i] = i;

      /* Default the size to 1, since allocno_compare uses it to divide by.
	 Also convert allocno_live_length of zero to -1.  A length of zero
	 can occur when all the registers for that allocno have reg_live_length
	 equal to -2.  In this case, we want to make an allocno, but not
	 allocate it.  So avoid the divide-by-zero and set it to a low
	 priority.  */

      for (i = 0; i < (size_t) max_allocno; i++)
	{
	  if (allocno[i].size == 0)
	    allocno[i].size = 1;
	  if (allocno[i].live_length == 0)
	    allocno[i].live_length = -1;
	}

      qsort (allocno_order, max_allocno, sizeof (int), allocno_compare);

      prune_preferences ();

      if (dump_file)
	dump_conflicts (dump_file);

      /* Try allocating them, one by one, in that order,
	 except for parameters marked with reg_live_length[regno] == -2.  */

      for (i = 0; i < (size_t) max_allocno; i++)
	if (reg_renumber[allocno[allocno_order[i]].reg] < 0
	    && REG_LIVE_LENGTH (allocno[allocno_order[i]].reg) >= 0)
	  {
	    /* If we have more than one register class,
	       first try allocating in the class that is cheapest
	       for this pseudo-reg.  If that fails, try any reg.  */
	    if (N_REG_CLASSES > 1)
	      {
		find_reg (allocno_order[i], 0, 0, 0, 0);
		if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
		  continue;
	      }
	    if (reg_alternate_class (allocno[allocno_order[i]].reg) != NO_REGS)
	      find_reg (allocno_order[i], 0, 1, 0, 0);
	  }

      free (allocno_order);
    }

  /* Do the reloads now while the allocno data still exists, so that we can
     try to assign new hard regs to any pseudo regs that are spilled.  */

#if 0 /* We need to eliminate regs even if there is no rtl code,
	 for the sake of debugging information.  */
  if (n_basic_blocks > NUM_FIXED_BLOCKS)
#endif
    {
      /* NOTE(review): retval is assigned only inside this block.  If the
	 "#if 0" guard above were ever re-enabled, retval could be
	 returned uninitialized -- confirm before enabling.  */
      build_insn_chain (get_insns ());
      retval = reload (get_insns (), 1);
    }

  /* Clean up.  */
  free (reg_allocno);
  free (reg_may_share);
  free (allocno);
  free (conflicts);
  free (allocnos_live);

  return retval;
}
639 
640 /* Sort predicate for ordering the allocnos.
641    Returns -1 (1) if *v1 should be allocated before (after) *v2.  */
642 
643 static int
allocno_compare(const void * v1p,const void * v2p)644 allocno_compare (const void *v1p, const void *v2p)
645 {
646   int v1 = *(const int *)v1p, v2 = *(const int *)v2p;
647   /* Note that the quotient will never be bigger than
648      the value of floor_log2 times the maximum number of
649      times a register can occur in one insn (surely less than 100)
650      weighted by the frequency (maximally REG_FREQ_MAX).
651      Multiplying this by 10000/REG_FREQ_MAX can't overflow.  */
652   int pri1
653     = (((double) (floor_log2 (allocno[v1].n_refs) * allocno[v1].freq)
654 	/ allocno[v1].live_length)
655        * (10000 / REG_FREQ_MAX) * allocno[v1].size);
656   int pri2
657     = (((double) (floor_log2 (allocno[v2].n_refs) * allocno[v2].freq)
658 	/ allocno[v2].live_length)
659        * (10000 / REG_FREQ_MAX) * allocno[v2].size);
660   if (pri2 - pri1)
661     return pri2 - pri1;
662 
663   /* If regs are equally good, sort by allocno,
664      so that the results of qsort leave nothing to chance.  */
665   return v1 - v2;
666 }
667 
668 /* Scan the rtl code and record all conflicts and register preferences in the
669    conflict matrices and preference tables.  */
670 
static void
global_conflicts (void)
{
  unsigned i;
  basic_block b;
  rtx insn;
  int *block_start_allocnos;

  /* Make a vector that mark_reg_{store,clobber} will store in.  */
  regs_set = XNEWVEC (rtx, max_parallel * 2);

  /* Scratch array holding the allocnos live at the start of each block.  */
  block_start_allocnos = XNEWVEC (int, max_allocno);

  FOR_EACH_BB (b)
    {
      /* No allocnos are live until proven so for this block.  */
      memset (allocnos_live, 0, allocno_row_words * sizeof (INT_TYPE));

      /* Initialize table of registers currently live
	 to the state at the beginning of this basic block.
	 This also marks the conflicts among hard registers
	 and any allocnos that are live.

	 For pseudo-regs, there is only one bit for each one
	 no matter how many hard regs it occupies.
	 This is ok; we know the size from PSEUDO_REGNO_SIZE.
	 For explicit hard regs, we cannot know the size that way
	 since one hard reg can be used with various sizes.
	 Therefore, we must require that all the hard regs
	 implicitly live as part of a multi-word hard reg
	 be explicitly marked in basic_block_live_at_start.  */

      {
	regset old = b->il.rtl->global_live_at_start;
	int ax = 0;
	reg_set_iterator rsi;

	REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
	EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i, rsi)
	  {
	    int a = reg_allocno[i];
	    if (a >= 0)
	      {
		SET_ALLOCNO_LIVE (a);
		block_start_allocnos[ax++] = a;
	      }
	    else if ((a = reg_renumber[i]) >= 0)
	      /* A live pseudo already assigned by local-alloc: mark its
		 hard regs live instead.  */
	      mark_reg_live_nc (a, PSEUDO_REGNO_MODE (i));
	  }

	/* Record that each allocno now live conflicts with each hard reg
	   now live.

	   It is not necessary to mark any conflicts between pseudos at
	   this point, even for pseudos which are live at the start of
	   the basic block.

	     Given two pseudos X and Y and any point in the CFG P.

	     On any path to point P where X and Y are live one of the
	     following conditions must be true:

		1. X is live at some instruction on the path that
		   evaluates Y.

		2. Y is live at some instruction on the path that
		   evaluates X.

		3. Either X or Y is not evaluated on the path to P
		   (i.e. it is used uninitialized) and thus the
		   conflict can be ignored.

	    In cases #1 and #2 the conflict will be recorded when we
	    scan the instruction that makes either X or Y become live.  */
	record_conflicts (block_start_allocnos, ax);

#ifdef EH_RETURN_DATA_REGNO
	/* The EH landing-pad data registers are live on entry to a block
	   reached by an EH edge; make everything live conflict with them.  */
	if (bb_has_eh_pred (b))
	  {
	    unsigned int i;

	    for (i = 0; ; ++i)
	      {
		unsigned int regno = EH_RETURN_DATA_REGNO (i);
		if (regno == INVALID_REGNUM)
		  break;
		record_one_conflict (regno);
	      }
	  }
#endif

	/* Pseudos can't go in stack regs at the start of a basic block that
	   is reached by an abnormal edge. Likewise for call clobbered regs,
	   because caller-save, fixup_abnormal_edges and possibly the table
	   driven EH machinery are not quite ready to handle such regs live
	   across such edges.  */
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, b->preds)
	    if (e->flags & EDGE_ABNORMAL)
	      break;

	  /* E is non-null only if the loop above broke out early on an
	     abnormal predecessor edge.  */
	  if (e != NULL)
	    {
#ifdef STACK_REGS
	      /* AX is reused here as the macro's output variable.  */
	      EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, ax,
					     {
					       allocno[ax].no_stack_reg = 1;
					     });
	      for (ax = FIRST_STACK_REG; ax <= LAST_STACK_REG; ax++)
		record_one_conflict (ax);
#endif

	      /* No need to record conflicts for call clobbered regs if we have
		 nonlocal labels around, as we don't ever try to allocate such
		 regs in this case.  */
	      if (! current_function_has_nonlocal_label)
		for (ax = 0; ax < FIRST_PSEUDO_REGISTER; ax++)
		  if (call_used_regs [ax])
		    record_one_conflict (ax);
	    }
	}
      }

      insn = BB_HEAD (b);

      /* Scan the code of this basic block, noting which allocnos
	 and hard regs are born or die.  When one is born,
	 record a conflict with all others currently live.  */

      while (1)
	{
	  RTX_CODE code = GET_CODE (insn);
	  rtx link;

	  /* Make regs_set an empty set.  */

	  n_regs_set = 0;

	  if (code == INSN || code == CALL_INSN || code == JUMP_INSN)
	    {

#if 0
	      int i = 0;
	      for (link = REG_NOTES (insn);
		   link && i < NUM_NO_CONFLICT_PAIRS;
		   link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_NO_CONFLICT)
		  {
		    no_conflict_pairs[i].allocno1
		      = reg_allocno[REGNO (SET_DEST (PATTERN (insn)))];
		    no_conflict_pairs[i].allocno2
		      = reg_allocno[REGNO (XEXP (link, 0))];
		    i++;
		  }
#endif /* 0 */

	      /* Mark any registers clobbered by INSN as live,
		 so they conflict with the inputs.  */

	      note_stores (PATTERN (insn), mark_reg_clobber, NULL);

	      /* Mark any registers dead after INSN as dead now.  */

	      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD)
		  mark_reg_death (XEXP (link, 0));

	      /* Mark any registers set in INSN as live,
		 and mark them as conflicting with all other live regs.
		 Clobbers are processed again, so they conflict with
		 the registers that are set.  */

	      note_stores (PATTERN (insn), mark_reg_store, NULL);

#ifdef AUTO_INC_DEC
	      /* Registers autoincremented/decremented by INSN are set by
		 it as well.  */
	      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_INC)
		  mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
#endif

	      /* If INSN has multiple outputs, then any reg that dies here
		 and is used inside of an output
		 must conflict with the other outputs.

		 It is unsafe to use !single_set here since it will ignore an
		 unused output.  Just because an output is unused does not mean
		 the compiler can assume the side effect will not occur.
		 Consider if REG appears in the address of an output and we
		 reload the output.  If we allocate REG to the same hard
		 register as an unused output we could set the hard register
		 before the output reload insn.  */
	      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
		for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		  if (REG_NOTE_KIND (link) == REG_DEAD)
		    {
		      int used_in_output = 0;
		      int i;
		      rtx reg = XEXP (link, 0);

		      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
			{
			  rtx set = XVECEXP (PATTERN (insn), 0, i);
			  if (GET_CODE (set) == SET
			      && !REG_P (SET_DEST (set))
			      && !rtx_equal_p (reg, SET_DEST (set))
			      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
			    used_in_output = 1;
			}
		      if (used_in_output)
			mark_reg_conflicts (reg);
		    }

	      /* Mark any registers set in INSN and then never used.  */

	      while (n_regs_set-- > 0)
		{
		  rtx note = find_regno_note (insn, REG_UNUSED,
					      REGNO (regs_set[n_regs_set]));
		  if (note)
		    mark_reg_death (XEXP (note, 0));
		}
	    }

	  /* Stop after processing the block's last insn.  */
	  if (insn == BB_END (b))
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Clean up.  */
  free (block_start_allocnos);
  free (regs_set);
}
906 /* Expand the preference information by looking for cases where one allocno
907    dies in an insn that sets an allocno.  If those two allocnos don't conflict,
908    merge any preferences between those allocnos.  */
909 
910 static void
expand_preferences(void)911 expand_preferences (void)
912 {
913   rtx insn;
914   rtx link;
915   rtx set;
916 
917   /* We only try to handle the most common cases here.  Most of the cases
918      where this wins are reg-reg copies.  */
919 
920   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
921     if (INSN_P (insn)
922 	&& (set = single_set (insn)) != 0
923 	&& REG_P (SET_DEST (set))
924 	&& reg_allocno[REGNO (SET_DEST (set))] >= 0)
925       for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
926 	if (REG_NOTE_KIND (link) == REG_DEAD
927 	    && REG_P (XEXP (link, 0))
928 	    && reg_allocno[REGNO (XEXP (link, 0))] >= 0
929 	    && ! CONFLICTP (reg_allocno[REGNO (SET_DEST (set))],
930 			    reg_allocno[REGNO (XEXP (link, 0))]))
931 	  {
932 	    int a1 = reg_allocno[REGNO (SET_DEST (set))];
933 	    int a2 = reg_allocno[REGNO (XEXP (link, 0))];
934 
935 	    if (XEXP (link, 0) == SET_SRC (set))
936 	      {
937 		IOR_HARD_REG_SET (allocno[a1].hard_reg_copy_preferences,
938 				  allocno[a2].hard_reg_copy_preferences);
939 		IOR_HARD_REG_SET (allocno[a2].hard_reg_copy_preferences,
940 				  allocno[a1].hard_reg_copy_preferences);
941 	      }
942 
943 	    IOR_HARD_REG_SET (allocno[a1].hard_reg_preferences,
944 			      allocno[a2].hard_reg_preferences);
945 	    IOR_HARD_REG_SET (allocno[a2].hard_reg_preferences,
946 			      allocno[a1].hard_reg_preferences);
947 	    IOR_HARD_REG_SET (allocno[a1].hard_reg_full_preferences,
948 			      allocno[a2].hard_reg_full_preferences);
949 	    IOR_HARD_REG_SET (allocno[a2].hard_reg_full_preferences,
950 			      allocno[a1].hard_reg_full_preferences);
951 	  }
952 }
953 
954 /* Prune the preferences for global registers to exclude registers that cannot
955    be used.
956 
957    Compute `regs_someone_prefers', which is a bitmask of the hard registers
958    that are preferred by conflicting registers of lower priority.  If possible,
959    we will avoid using these registers.  */
960 
961 static void
prune_preferences(void)962 prune_preferences (void)
963 {
964   int i;
965   int num;
966   int *allocno_to_order = XNEWVEC (int, max_allocno);
967 
968   /* Scan least most important to most important.
969      For each allocno, remove from preferences registers that cannot be used,
970      either because of conflicts or register type.  Then compute all registers
971      preferred by each lower-priority register that conflicts.  */
972 
973   for (i = max_allocno - 1; i >= 0; i--)
974     {
975       HARD_REG_SET temp;
976 
977       num = allocno_order[i];
978       allocno_to_order[num] = i;
979       COPY_HARD_REG_SET (temp, allocno[num].hard_reg_conflicts);
980 
981       if (allocno[num].calls_crossed == 0)
982 	IOR_HARD_REG_SET (temp, fixed_reg_set);
983       else
984 	IOR_HARD_REG_SET (temp,	call_used_reg_set);
985 
986       IOR_COMPL_HARD_REG_SET
987 	(temp,
988 	 reg_class_contents[(int) reg_preferred_class (allocno[num].reg)]);
989 
990       AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, temp);
991       AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, temp);
992       AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_full_preferences, temp);
993     }
994 
995   for (i = max_allocno - 1; i >= 0; i--)
996     {
997       /* Merge in the preferences of lower-priority registers (they have
998 	 already been pruned).  If we also prefer some of those registers,
999 	 don't exclude them unless we are of a smaller size (in which case
1000 	 we want to give the lower-priority allocno the first chance for
1001 	 these registers).  */
1002       HARD_REG_SET temp, temp2;
1003       int allocno2;
1004 
1005       num = allocno_order[i];
1006 
1007       CLEAR_HARD_REG_SET (temp);
1008       CLEAR_HARD_REG_SET (temp2);
1009 
1010       EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + num * allocno_row_words,
1011 				     allocno2,
1012 	{
1013 	  if (allocno_to_order[allocno2] > i)
1014 	    {
1015 	      if (allocno[allocno2].size <= allocno[num].size)
1016 		IOR_HARD_REG_SET (temp,
1017 				  allocno[allocno2].hard_reg_full_preferences);
1018 	      else
1019 		IOR_HARD_REG_SET (temp2,
1020 				  allocno[allocno2].hard_reg_full_preferences);
1021 	    }
1022 	});
1023 
1024       AND_COMPL_HARD_REG_SET (temp, allocno[num].hard_reg_full_preferences);
1025       IOR_HARD_REG_SET (temp, temp2);
1026       COPY_HARD_REG_SET (allocno[num].regs_someone_prefers, temp);
1027     }
1028   free (allocno_to_order);
1029 }
1030 
1031 /* Assign a hard register to allocno NUM; look for one that is the beginning
1032    of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
1033    The registers marked in PREFREGS are tried first.
1034 
1035    LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
1036    be used for this allocation.
1037 
1038    If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
1039    Otherwise ignore that preferred class and use the alternate class.
1040 
1041    If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
1042    will have to be saved and restored at calls.
1043 
1044    RETRYING is nonzero if this is called from retry_global_alloc.
1045 
1046    If we find one, record it in reg_renumber.
1047    If not, do nothing.  */
1048 
static void
find_reg (int num, HARD_REG_SET losers, int alt_regs_p, int accept_call_clobbered, int retrying)
{
  int i, best_reg, pass;
  /* USED1: registers this allocno can never get (wrong class, fixed,
     losers, conflicts).  USED2: the same but without the conflicts --
     used later when considering evicting local-alloc assignments.
     USED: the working exclusion set for the current pass.  */
  HARD_REG_SET used, used1, used2;

  enum reg_class class = (alt_regs_p
			  ? reg_alternate_class (allocno[num].reg)
			  : reg_preferred_class (allocno[num].reg));
  enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);

  if (accept_call_clobbered)
    COPY_HARD_REG_SET (used1, call_fixed_reg_set);
  else if (allocno[num].calls_crossed == 0)
    COPY_HARD_REG_SET (used1, fixed_reg_set);
  else
    COPY_HARD_REG_SET (used1, call_used_reg_set);

  /* Some registers should not be allocated in global-alloc.  */
  IOR_HARD_REG_SET (used1, no_global_alloc_regs);
  if (losers)
    IOR_HARD_REG_SET (used1, losers);

  /* Exclude everything outside the class we may use.  */
  IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);
  COPY_HARD_REG_SET (used2, used1);

  IOR_HARD_REG_SET (used1, allocno[num].hard_reg_conflicts);

#ifdef CANNOT_CHANGE_MODE_CLASS
  cannot_change_mode_set_regs (&used1, mode, allocno[num].reg);
#endif

  /* Try each hard reg to see if it fits.  Do this in two passes.
     In the first pass, skip registers that are preferred by some other pseudo
     to give it a better chance of getting one of those registers.  Only if
     we can't get a register when excluding those do we take one of them.
     However, we never allocate a register for the first time in pass 0.  */

  COPY_HARD_REG_SET (used, used1);
  IOR_COMPL_HARD_REG_SET (used, regs_used_so_far);
  IOR_HARD_REG_SET (used, allocno[num].regs_someone_prefers);

  best_reg = -1;
  /* The inner loop leaves I < FIRST_PSEUDO_REGISTER only when it broke
     out with a winner, which also ends the outer loop.  */
  for (i = FIRST_PSEUDO_REGISTER, pass = 0;
       pass <= 1 && i >= FIRST_PSEUDO_REGISTER;
       pass++)
    {
      if (pass == 1)
	COPY_HARD_REG_SET (used, used1);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	{
#ifdef REG_ALLOC_ORDER
	  int regno = reg_alloc_order[i];
#else
	  int regno = i;
#endif
	  if (! TEST_HARD_REG_BIT (used, regno)
	      && HARD_REGNO_MODE_OK (regno, mode)
	      && (allocno[num].calls_crossed == 0
		  || accept_call_clobbered
		  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
	    {
	      /* Check that the whole run of hard regs needed for MODE
		 starting at REGNO is free.  */
	      int j;
	      int lim = regno + hard_regno_nregs[regno][mode];
	      for (j = regno + 1;
		   (j < lim
		    && ! TEST_HARD_REG_BIT (used, j));
		   j++);
	      if (j == lim)
		{
		  best_reg = regno;
		  break;
		}
#ifndef REG_ALLOC_ORDER
	      i = j;			/* Skip starting points we know will lose */
#endif
	    }
	  }
      }

  /* See if there is a preferred register with the same class as the register
     we allocated above.  Making this restriction prevents register
     preferencing from creating worse register allocation.

     Remove from the preferred registers and conflicting registers.  Note that
     additional conflicts may have been added after `prune_preferences' was
     called.

     First do this for those register with copy preferences, then all
     preferred registers.  */

  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, used);
  GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_copy_preferences,
			 reg_class_contents[(int) NO_REGS], no_copy_prefs);

  if (best_reg >= 0)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allocno[num].hard_reg_copy_preferences, i)
	    && HARD_REGNO_MODE_OK (i, mode)
	    && (allocno[num].calls_crossed == 0
		|| accept_call_clobbered
		|| ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
	    && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
		|| reg_class_subset_p (REGNO_REG_CLASS (i),
				       REGNO_REG_CLASS (best_reg))
		|| reg_class_subset_p (REGNO_REG_CLASS (best_reg),
				       REGNO_REG_CLASS (i))))
	    {
	      /* Each following hard reg of the run must be free and of a
		 class compatible with the corresponding reg of BEST_REG's
		 run.  */
	      int j;
	      int lim = i + hard_regno_nregs[i][mode];
	      for (j = i + 1;
		   (j < lim
		    && ! TEST_HARD_REG_BIT (used, j)
		    && (REGNO_REG_CLASS (j)
			== REGNO_REG_CLASS (best_reg + (j - i))
			|| reg_class_subset_p (REGNO_REG_CLASS (j),
					       REGNO_REG_CLASS (best_reg + (j - i)))
			|| reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
					       REGNO_REG_CLASS (j))));
		   j++);
	      if (j == lim)
		{
		  best_reg = i;
		  goto no_prefs;
		}
	    }
    }
 no_copy_prefs:

  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, used);
  GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_preferences,
			 reg_class_contents[(int) NO_REGS], no_prefs);

  if (best_reg >= 0)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allocno[num].hard_reg_preferences, i)
	    && HARD_REGNO_MODE_OK (i, mode)
	    && (allocno[num].calls_crossed == 0
		|| accept_call_clobbered
		|| ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
	    && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
		|| reg_class_subset_p (REGNO_REG_CLASS (i),
				       REGNO_REG_CLASS (best_reg))
		|| reg_class_subset_p (REGNO_REG_CLASS (best_reg),
				       REGNO_REG_CLASS (i))))
	    {
	      int j;
	      int lim = i + hard_regno_nregs[i][mode];
	      for (j = i + 1;
		   (j < lim
		    && ! TEST_HARD_REG_BIT (used, j)
		    && (REGNO_REG_CLASS (j)
			== REGNO_REG_CLASS (best_reg + (j - i))
			|| reg_class_subset_p (REGNO_REG_CLASS (j),
					       REGNO_REG_CLASS (best_reg + (j - i)))
			|| reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
					       REGNO_REG_CLASS (j))));
		   j++);
	      if (j == lim)
		{
		  best_reg = i;
		  break;
		}
	    }
    }
 no_prefs:

  /* If we haven't succeeded yet, try with caller-saves.
     We need not check to see if the current function has nonlocal
     labels because we don't put any pseudos that are live over calls in
     registers in that case.  */

  if (flag_caller_saves && best_reg < 0)
    {
      /* Did not find a register.  If it would be profitable to
	 allocate a call-clobbered register and save and restore it
	 around calls, do that.  Don't do this if it crosses any calls
	 that might throw.  */
      if (! accept_call_clobbered
	  && allocno[num].calls_crossed != 0
	  && allocno[num].throwing_calls_crossed == 0
	  && CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
				     allocno[num].calls_crossed))
	{
	  HARD_REG_SET new_losers;
	  if (! losers)
	    CLEAR_HARD_REG_SET (new_losers);
	  else
	    COPY_HARD_REG_SET (new_losers, losers);

	  IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
	  /* Recurse, this time accepting call-clobbered registers.  */
	  find_reg (num, new_losers, alt_regs_p, 1, retrying);
	  if (reg_renumber[allocno[num].reg] >= 0)
	    {
	      caller_save_needed = 1;
	      return;
	    }
	}
    }

  /* If we haven't succeeded yet,
     see if some hard reg that conflicts with us
     was utilized poorly by local-alloc.
     If so, kick out the regs that were put there by local-alloc
     so we can use it instead.  */
  if (best_reg < 0 && !retrying
      /* Let's not bother with multi-reg allocnos.  */
      && allocno[num].size == 1
      && REG_BASIC_BLOCK (allocno[num].reg) == REG_BLOCK_GLOBAL)
    {
      /* Count from the end, to find the least-used ones first.  */
      for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	{
#ifdef REG_ALLOC_ORDER
	  int regno = reg_alloc_order[i];
#else
	  int regno = i;
#endif

	  if (local_reg_n_refs[regno] != 0
	      /* Don't use a reg no good for this pseudo.  */
	      && ! TEST_HARD_REG_BIT (used2, regno)
	      && HARD_REGNO_MODE_OK (regno, mode)
	      /* The code below assumes that we need only a single
		 register, but the check of allocno[num].size above
		 was not enough.  Sometimes we need more than one
		 register for a single-word value.  */
	      && hard_regno_nregs[regno][mode] == 1
	      && (allocno[num].calls_crossed == 0
		  || accept_call_clobbered
		  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
#ifdef CANNOT_CHANGE_MODE_CLASS
	      && ! invalid_mode_change_p (regno, REGNO_REG_CLASS (regno),
					  mode)
#endif
#ifdef STACK_REGS
	     && (!allocno[num].no_stack_reg
		 || regno < FIRST_STACK_REG || regno > LAST_STACK_REG)
#endif
	      )
	    {
	      /* We explicitly evaluate the divide results into temporary
		 variables so as to avoid excess precision problems that occur
		 on an i386-unknown-sysv4.2 (unixware) host.  */

	      double tmp1 = ((double) local_reg_freq[regno] * local_reg_n_refs[regno]
			    / local_reg_live_length[regno]);
	      double tmp2 = ((double) allocno[num].freq * allocno[num].n_refs
			     / allocno[num].live_length);

	      if (tmp1 < tmp2)
		{
		  /* Hard reg REGNO was used less in total by local regs
		     than it would be used by this one allocno!  */
		  int k;
		  if (dump_file)
		    {
		      fprintf (dump_file, "Regno %d better for global %d, ",
		      	       regno, allocno[num].reg);
		      fprintf (dump_file, "fr:%d, ll:%d, nr:%d ",
			       allocno[num].freq, allocno[num].live_length,
			       allocno[num].n_refs);
		      fprintf (dump_file, "(was: fr:%d, ll:%d, nr:%d)\n",
			       local_reg_freq[regno],
			       local_reg_live_length[regno],
			       local_reg_n_refs[regno]);
		    }

		  /* Evict every local pseudo whose assignment overlaps
		     REGNO; they go back on the stack.  */
		  for (k = 0; k < max_regno; k++)
		    if (reg_renumber[k] >= 0)
		      {
			int r = reg_renumber[k];
			int endregno
			  = r + hard_regno_nregs[r][PSEUDO_REGNO_MODE (k)];

			if (regno >= r && regno < endregno)
			  {
			    if (dump_file)
			      fprintf (dump_file,
				       "Local Reg %d now on stack\n", k);
			    reg_renumber[k] = -1;
			  }
		      }

		  best_reg = regno;
		  break;
		}
	    }
	}
    }

  /* Did we find a register?  */

  if (best_reg >= 0)
    {
      int lim, j;
      HARD_REG_SET this_reg;

      /* Yes.  Record it as the hard register of this pseudo-reg.  */
      reg_renumber[allocno[num].reg] = best_reg;
      /* Also of any pseudo-regs that share with it.  */
      if (reg_may_share[allocno[num].reg])
	for (j = FIRST_PSEUDO_REGISTER; j < max_regno; j++)
	  if (reg_allocno[j] == num)
	    reg_renumber[j] = best_reg;

      /* Make a set of the hard regs being allocated.  */
      CLEAR_HARD_REG_SET (this_reg);
      lim = best_reg + hard_regno_nregs[best_reg][mode];
      for (j = best_reg; j < lim; j++)
	{
	  SET_HARD_REG_BIT (this_reg, j);
	  SET_HARD_REG_BIT (regs_used_so_far, j);
	  /* This is no longer a reg used just by local regs.  */
	  local_reg_n_refs[j] = 0;
	  local_reg_freq[j] = 0;
	}
      /* For each other pseudo-reg conflicting with this one,
	 mark it as conflicting with the hard regs this one occupies.  */
      lim = num;
      EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + lim * allocno_row_words, j,
	{
	  IOR_HARD_REG_SET (allocno[j].hard_reg_conflicts, this_reg);
	});
    }
}
1377 
1378 /* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
1379    Perhaps it had previously seemed not worth a hard reg,
1380    or perhaps its old hard reg has been commandeered for reloads.
1381    FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
1382    they do not appear to be allocated.
1383    If FORBIDDEN_REGS is zero, no regs are forbidden.  */
1384 
1385 void
retry_global_alloc(int regno,HARD_REG_SET forbidden_regs)1386 retry_global_alloc (int regno, HARD_REG_SET forbidden_regs)
1387 {
1388   int alloc_no = reg_allocno[regno];
1389   if (alloc_no >= 0)
1390     {
1391       /* If we have more than one register class,
1392 	 first try allocating in the class that is cheapest
1393 	 for this pseudo-reg.  If that fails, try any reg.  */
1394       if (N_REG_CLASSES > 1)
1395 	find_reg (alloc_no, forbidden_regs, 0, 0, 1);
1396       if (reg_renumber[regno] < 0
1397 	  && reg_alternate_class (regno) != NO_REGS)
1398 	find_reg (alloc_no, forbidden_regs, 1, 0, 1);
1399 
1400       /* If we found a register, modify the RTL for the register to
1401 	 show the hard register, and mark that register live.  */
1402       if (reg_renumber[regno] >= 0)
1403 	{
1404 	  REGNO (regno_reg_rtx[regno]) = reg_renumber[regno];
1405 	  mark_home_live (regno);
1406 	}
1407     }
1408 }
1409 
1410 /* Record a conflict between register REGNO
1411    and everything currently live.
1412    REGNO must not be a pseudo reg that was allocated
1413    by local_alloc; such numbers must be translated through
1414    reg_renumber before calling here.  */
1415 
1416 static void
record_one_conflict(int regno)1417 record_one_conflict (int regno)
1418 {
1419   int j;
1420 
1421   if (regno < FIRST_PSEUDO_REGISTER)
1422     /* When a hard register becomes live,
1423        record conflicts with live pseudo regs.  */
1424     EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, j,
1425       {
1426 	SET_HARD_REG_BIT (allocno[j].hard_reg_conflicts, regno);
1427       });
1428   else
1429     /* When a pseudo-register becomes live,
1430        record conflicts first with hard regs,
1431        then with other pseudo regs.  */
1432     {
1433       int ialloc = reg_allocno[regno];
1434       int ialloc_prod = ialloc * allocno_row_words;
1435 
1436       IOR_HARD_REG_SET (allocno[ialloc].hard_reg_conflicts, hard_regs_live);
1437       for (j = allocno_row_words - 1; j >= 0; j--)
1438 	conflicts[ialloc_prod + j] |= allocnos_live[j];
1439     }
1440 }
1441 
1442 /* Record all allocnos currently live as conflicting
1443    with all hard regs currently live.
1444 
1445    ALLOCNO_VEC is a vector of LEN allocnos, all allocnos that
1446    are currently live.  Their bits are also flagged in allocnos_live.  */
1447 
1448 static void
record_conflicts(int * allocno_vec,int len)1449 record_conflicts (int *allocno_vec, int len)
1450 {
1451   while (--len >= 0)
1452     IOR_HARD_REG_SET (allocno[allocno_vec[len]].hard_reg_conflicts,
1453                       hard_regs_live);
1454 }
1455 
/* If CONFLICTP (i, j) is true, make sure CONFLICTP (j, i) is also true.

   The conflicts matrix is max_allocno rows of allocno_row_words words
   each; conflicts may have been recorded in one direction only, so OR
   the matrix with its transpose, one set bit at a time.  */
static void
mirror_conflicts (void)
{
  int i, j;
  int rw = allocno_row_words;
  /* Words spanned by the rows of the INT_BITS allocnos covered by one
     word of a row.  */
  int rwb = rw * INT_BITS;
  INT_TYPE *p = conflicts;	/* Walks every word of the matrix.  */
  INT_TYPE *q0 = conflicts, *q1, *q2;
  unsigned INT_TYPE mask;

  /* I only counts iterations; MASK and Q0 together give the bit
     position, within a row, of the allocno whose row P is scanning
     (allocnos are visited in increasing order as MASK shifts up).  */
  for (i = max_allocno - 1, mask = 1; i >= 0; i--, mask <<= 1)
    {
      /* MASK shifted out of the word: advance to the next word.  */
      if (! mask)
	{
	  mask = 1;
	  q0++;
	}
      /* Q1 is the word, at offset Q0 within its row, of the first
	 allocno named by the current word of this row.  */
      for (j = allocno_row_words - 1, q1 = q0; j >= 0; j--, q1 += rwb)
	{
	  unsigned INT_TYPE word;

	  /* Each set bit of WORD names an allocno this one conflicts
	     with; Q2 steps one row per bit to reach that allocno's row,
	     where the mirrored bit is set via MASK.  */
	  for (word = (unsigned INT_TYPE) *p++, q2 = q1; word;
	       word >>= 1, q2 += rw)
	    {
	      if (word & 1)
		*q2 |= mask;
	    }
	}
    }
}
1487 
1488 /* Handle the case where REG is set by the insn being scanned,
1489    during the forward scan to accumulate conflicts.
1490    Store a 1 in regs_live or allocnos_live for this register, record how many
1491    consecutive hardware registers it actually needs,
1492    and record a conflict with all other registers already live.
1493 
1494    Note that even if REG does not remain alive after this insn,
1495    we must mark it here as live, to ensure a conflict between
1496    REG and any other regs set in this insn that really do live.
1497    This is because those other regs could be considered after this.
1498 
1499    REG might actually be something other than a register;
1500    if so, we do nothing.
1501 
1502    SETTER is 0 if this register was modified by an auto-increment (i.e.,
1503    a REG_INC note was found for it).  */
1504 
1505 static void
mark_reg_store(rtx reg,rtx setter,void * data ATTRIBUTE_UNUSED)1506 mark_reg_store (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
1507 {
1508   int regno;
1509 
1510   if (GET_CODE (reg) == SUBREG)
1511     reg = SUBREG_REG (reg);
1512 
1513   if (!REG_P (reg))
1514     return;
1515 
1516   regs_set[n_regs_set++] = reg;
1517 
1518   if (setter && GET_CODE (setter) != CLOBBER)
1519     set_preference (reg, SET_SRC (setter));
1520 
1521   regno = REGNO (reg);
1522 
1523   /* Either this is one of the max_allocno pseudo regs not allocated,
1524      or it is or has a hardware reg.  First handle the pseudo-regs.  */
1525   if (regno >= FIRST_PSEUDO_REGISTER)
1526     {
1527       if (reg_allocno[regno] >= 0)
1528 	{
1529 	  SET_ALLOCNO_LIVE (reg_allocno[regno]);
1530 	  record_one_conflict (regno);
1531 	}
1532     }
1533 
1534   if (reg_renumber[regno] >= 0)
1535     regno = reg_renumber[regno];
1536 
1537   /* Handle hardware regs (and pseudos allocated to hard regs).  */
1538   if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1539     {
1540       int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1541       while (regno < last)
1542 	{
1543 	  record_one_conflict (regno);
1544 	  SET_HARD_REG_BIT (hard_regs_live, regno);
1545 	  regno++;
1546 	}
1547     }
1548 }
1549 
1550 /* Like mark_reg_store except notice just CLOBBERs; ignore SETs.  */
1551 
1552 static void
mark_reg_clobber(rtx reg,rtx setter,void * data)1553 mark_reg_clobber (rtx reg, rtx setter, void *data)
1554 {
1555   if (GET_CODE (setter) == CLOBBER)
1556     mark_reg_store (reg, setter, data);
1557 }
1558 
1559 /* Record that REG has conflicts with all the regs currently live.
1560    Do not mark REG itself as live.  */
1561 
1562 static void
mark_reg_conflicts(rtx reg)1563 mark_reg_conflicts (rtx reg)
1564 {
1565   int regno;
1566 
1567   if (GET_CODE (reg) == SUBREG)
1568     reg = SUBREG_REG (reg);
1569 
1570   if (!REG_P (reg))
1571     return;
1572 
1573   regno = REGNO (reg);
1574 
1575   /* Either this is one of the max_allocno pseudo regs not allocated,
1576      or it is or has a hardware reg.  First handle the pseudo-regs.  */
1577   if (regno >= FIRST_PSEUDO_REGISTER)
1578     {
1579       if (reg_allocno[regno] >= 0)
1580 	record_one_conflict (regno);
1581     }
1582 
1583   if (reg_renumber[regno] >= 0)
1584     regno = reg_renumber[regno];
1585 
1586   /* Handle hardware regs (and pseudos allocated to hard regs).  */
1587   if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1588     {
1589       int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1590       while (regno < last)
1591 	{
1592 	  record_one_conflict (regno);
1593 	  regno++;
1594 	}
1595     }
1596 }
1597 
1598 /* Mark REG as being dead (following the insn being scanned now).
1599    Store a 0 in regs_live or allocnos_live for this register.  */
1600 
1601 static void
mark_reg_death(rtx reg)1602 mark_reg_death (rtx reg)
1603 {
1604   int regno = REGNO (reg);
1605 
1606   /* Either this is one of the max_allocno pseudo regs not allocated,
1607      or it is a hardware reg.  First handle the pseudo-regs.  */
1608   if (regno >= FIRST_PSEUDO_REGISTER)
1609     {
1610       if (reg_allocno[regno] >= 0)
1611 	CLEAR_ALLOCNO_LIVE (reg_allocno[regno]);
1612     }
1613 
1614   /* For pseudo reg, see if it has been assigned a hardware reg.  */
1615   if (reg_renumber[regno] >= 0)
1616     regno = reg_renumber[regno];
1617 
1618   /* Handle hardware regs (and pseudos allocated to hard regs).  */
1619   if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1620     {
1621       /* Pseudo regs already assigned hardware regs are treated
1622 	 almost the same as explicit hardware regs.  */
1623       int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1624       while (regno < last)
1625 	{
1626 	  CLEAR_HARD_REG_BIT (hard_regs_live, regno);
1627 	  regno++;
1628 	}
1629     }
1630 }
1631 
1632 /* Mark hard reg REGNO as currently live, assuming machine mode MODE
1633    for the value stored in it.  MODE determines how many consecutive
1634    registers are actually in use.  Do not record conflicts;
1635    it is assumed that the caller will do that.  */
1636 
1637 static void
mark_reg_live_nc(int regno,enum machine_mode mode)1638 mark_reg_live_nc (int regno, enum machine_mode mode)
1639 {
1640   int last = regno + hard_regno_nregs[regno][mode];
1641   while (regno < last)
1642     {
1643       SET_HARD_REG_BIT (hard_regs_live, regno);
1644       regno++;
1645     }
1646 }
1647 
1648 /* Try to set a preference for an allocno to a hard register.
1649    We are passed DEST and SRC which are the operands of a SET.  It is known
1650    that SRC is a register.  If SRC or the first operand of SRC is a register,
1651    try to set a preference.  If one of the two is a hard register and the other
1652    is a pseudo-register, mark the preference.
1653 
1654    Note that we are not as aggressive as local-alloc in trying to tie a
1655    pseudo-register to a hard register.  */
1656 
static void
set_preference (rtx dest, rtx src)
{
  unsigned int src_regno, dest_regno;
  /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
     to compensate for subregs in SRC or DEST.  */
  int offset = 0;
  unsigned int i;
  int copy = 1;

  /* If SRC is an operation (its first operand slot is an expression),
     look at that first operand instead; the insn is then not a plain
     copy, so only the non-copy preference will be recorded.  */
  if (GET_RTX_FORMAT (GET_CODE (src))[0] == 'e')
    src = XEXP (src, 0), copy = 0;

  /* Get the reg number for both SRC and DEST.
     If neither is a reg, give up.  */

  if (REG_P (src))
    src_regno = REGNO (src);
  else if (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src)))
    {
      src_regno = REGNO (SUBREG_REG (src));

      /* For a hard reg the exact subword offset is computable; for a
	 pseudo, approximate it from the subreg byte offset.  */
      if (REGNO (SUBREG_REG (src)) < FIRST_PSEUDO_REGISTER)
	offset += subreg_regno_offset (REGNO (SUBREG_REG (src)),
				       GET_MODE (SUBREG_REG (src)),
				       SUBREG_BYTE (src),
				       GET_MODE (src));
      else
	offset += (SUBREG_BYTE (src)
		   / REGMODE_NATURAL_SIZE (GET_MODE (src)));
    }
  else
    return;

  if (REG_P (dest))
    dest_regno = REGNO (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      dest_regno = REGNO (SUBREG_REG (dest));

      if (REGNO (SUBREG_REG (dest)) < FIRST_PSEUDO_REGISTER)
	offset -= subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				       GET_MODE (SUBREG_REG (dest)),
				       SUBREG_BYTE (dest),
				       GET_MODE (dest));
      else
	offset -= (SUBREG_BYTE (dest)
		   / REGMODE_NATURAL_SIZE (GET_MODE (dest)));
    }
  else
    return;

  /* Convert either or both to hard reg numbers.  */

  if (reg_renumber[src_regno] >= 0)
    src_regno = reg_renumber[src_regno];

  if (reg_renumber[dest_regno] >= 0)
    dest_regno = reg_renumber[dest_regno];

  /* Now if one is a hard reg and the other is a global pseudo
     then give the other a preference.  */

  if (dest_regno < FIRST_PSEUDO_REGISTER && src_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[src_regno] >= 0)
    {
      /* Adjusting by OFFSET may take the regno out of the hard-reg
	 range; DEST_REGNO is unsigned, so a wrap below zero is also
	 caught by the range check.  */
      dest_regno -= offset;
      if (dest_regno < FIRST_PSEUDO_REGISTER)
	{
	  if (copy)
	    SET_REGBIT (hard_reg_copy_preferences,
			reg_allocno[src_regno], dest_regno);

	  SET_REGBIT (hard_reg_preferences,
		      reg_allocno[src_regno], dest_regno);
	  /* Record every hard reg the value occupies in the full set.  */
	  for (i = dest_regno;
	       i < dest_regno + hard_regno_nregs[dest_regno][GET_MODE (dest)];
	       i++)
	    SET_REGBIT (hard_reg_full_preferences, reg_allocno[src_regno], i);
	}
    }

  if (src_regno < FIRST_PSEUDO_REGISTER && dest_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[dest_regno] >= 0)
    {
      /* Same adjustment in the other direction.  */
      src_regno += offset;
      if (src_regno < FIRST_PSEUDO_REGISTER)
	{
	  if (copy)
	    SET_REGBIT (hard_reg_copy_preferences,
			reg_allocno[dest_regno], src_regno);

	  SET_REGBIT (hard_reg_preferences,
		      reg_allocno[dest_regno], src_regno);
	  for (i = src_regno;
	       i < src_regno + hard_regno_nregs[src_regno][GET_MODE (src)];
	       i++)
	    SET_REGBIT (hard_reg_full_preferences, reg_allocno[dest_regno], i);
	}
    }
}
1758 
1759 /* Indicate that hard register number FROM was eliminated and replaced with
1760    an offset from hard register number TO.  The status of hard registers live
1761    at the start of a basic block is updated by replacing a use of FROM with
1762    a use of TO.  */
1763 
1764 void
mark_elimination(int from,int to)1765 mark_elimination (int from, int to)
1766 {
1767   basic_block bb;
1768 
1769   FOR_EACH_BB (bb)
1770     {
1771       regset r = bb->il.rtl->global_live_at_start;
1772       if (REGNO_REG_SET_P (r, from))
1773 	{
1774 	  CLEAR_REGNO_REG_SET (r, from);
1775 	  SET_REGNO_REG_SET (r, to);
1776 	}
1777     }
1778 }
1779 
/* Used for communication between the following functions.  Holds the
   current life information while build_insn_chain walks the insn
   stream (updated by reg_becomes_live and reg_dies).  */
static regset live_relevant_regs;
1783 
1784 /* Record in live_relevant_regs and REGS_SET that register REG became live.
1785    This is called via note_stores.  */
1786 static void
reg_becomes_live(rtx reg,rtx setter ATTRIBUTE_UNUSED,void * regs_set)1787 reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *regs_set)
1788 {
1789   int regno;
1790 
1791   if (GET_CODE (reg) == SUBREG)
1792     reg = SUBREG_REG (reg);
1793 
1794   if (!REG_P (reg))
1795     return;
1796 
1797   regno = REGNO (reg);
1798   if (regno < FIRST_PSEUDO_REGISTER)
1799     {
1800       int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
1801       while (nregs-- > 0)
1802 	{
1803 	  SET_REGNO_REG_SET (live_relevant_regs, regno);
1804 	  if (! fixed_regs[regno])
1805 	    SET_REGNO_REG_SET ((regset) regs_set, regno);
1806 	  regno++;
1807 	}
1808     }
1809   else if (reg_renumber[regno] >= 0)
1810     {
1811       SET_REGNO_REG_SET (live_relevant_regs, regno);
1812       SET_REGNO_REG_SET ((regset) regs_set, regno);
1813     }
1814 }
1815 
1816 /* Record in live_relevant_regs that register REGNO died.  */
1817 static void
reg_dies(int regno,enum machine_mode mode,struct insn_chain * chain)1818 reg_dies (int regno, enum machine_mode mode, struct insn_chain *chain)
1819 {
1820   if (regno < FIRST_PSEUDO_REGISTER)
1821     {
1822       int nregs = hard_regno_nregs[regno][mode];
1823       while (nregs-- > 0)
1824 	{
1825 	  CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
1826 	  if (! fixed_regs[regno])
1827 	    SET_REGNO_REG_SET (&chain->dead_or_set, regno);
1828 	  regno++;
1829 	}
1830     }
1831   else
1832     {
1833       CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
1834       if (reg_renumber[regno] >= 0)
1835 	SET_REGNO_REG_SET (&chain->dead_or_set, regno);
1836     }
1837 }
1838 
/* Walk the insns of the current function and build reload_insn_chain,
   and record register life information.  FIRST is the first insn of
   the stream.  A chain element is created for every insn except notes
   and barriers; each element records the insn, its block, the set of
   registers live throughout it, and the registers it sets or kills.  */
void
build_insn_chain (rtx first)
{
  struct insn_chain **p = &reload_insn_chain;
  struct insn_chain *prev = 0;
  basic_block b = ENTRY_BLOCK_PTR->next_bb;

  live_relevant_regs = ALLOC_REG_SET (&reg_obstack);

  for (; first; first = NEXT_INSN (first))
    {
      struct insn_chain *c;

      if (first == BB_HEAD (b))
	{
	  unsigned i;
	  bitmap_iterator bi;

	  /* At the head of each block, restart the live set from the
	     block's global_live_at_start, keeping only hard regs that
	     are not eliminable and pseudos that received a hard reg.  */
	  CLEAR_REG_SET (live_relevant_regs);

	  EXECUTE_IF_SET_IN_BITMAP (b->il.rtl->global_live_at_start, 0, i, bi)
	    {
	      if (i < FIRST_PSEUDO_REGISTER
		  ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
		  : reg_renumber[i] >= 0)
		SET_REGNO_REG_SET (live_relevant_regs, i);
	    }
	}

      /* Notes and barriers get no chain element.  */
      if (!NOTE_P (first) && !BARRIER_P (first))
	{
	  c = new_insn_chain ();
	  c->prev = prev;
	  prev = c;
	  *p = c;
	  p = &c->next;
	  c->insn = first;
	  c->block = b->index;

	  if (INSN_P (first))
	    {
	      rtx link;

	      /* Mark the death of everything that dies in this instruction.  */

	      for (link = REG_NOTES (first); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0)))
		  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
			    c);

	      /* Snapshot the live set after deaths but before births.  */
	      COPY_REG_SET (&c->live_throughout, live_relevant_regs);

	      /* Mark everything born in this instruction as live.  */

	      note_stores (PATTERN (first), reg_becomes_live,
			   &c->dead_or_set);
	    }
	  else
	    COPY_REG_SET (&c->live_throughout, live_relevant_regs);

	  if (INSN_P (first))
	    {
	      rtx link;

	      /* Mark anything that is set in this insn and then unused as dying.  */

	      for (link = REG_NOTES (first); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0)))
		  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
			    c);
	    }
	}

      if (first == BB_END (b))
	b = b->next_bb;

      /* Stop after we pass the end of the last basic block.  Verify that
	 no real insns are after the end of the last basic block.

	 We may want to reorganize the loop somewhat since this test should
	 always be the right exit test.  Allow an ADDR_VEC or ADDR_DIF_VEC if
	 the previous real insn is a JUMP_INSN.  */
      if (b == EXIT_BLOCK_PTR)
	{
#ifdef ENABLE_CHECKING
	  for (first = NEXT_INSN (first); first; first = NEXT_INSN (first))
	    gcc_assert (!INSN_P (first)
			|| GET_CODE (PATTERN (first)) == USE
			|| ((GET_CODE (PATTERN (first)) == ADDR_VEC
			     || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
			    && prev_real_insn (first) != 0
			    && JUMP_P (prev_real_insn (first))));
#endif
	  break;
	}
    }
  FREE_REG_SET (live_relevant_regs);
  *p = 0;
}
1942 
1943 /* Print debugging trace information if -dg switch is given,
1944    showing the information on which the allocation decisions are based.  */
1945 
1946 static void
dump_conflicts(FILE * file)1947 dump_conflicts (FILE *file)
1948 {
1949   int i;
1950   int has_preferences;
1951   int nregs;
1952   nregs = 0;
1953   for (i = 0; i < max_allocno; i++)
1954     {
1955       if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1956 	continue;
1957       nregs++;
1958     }
1959   fprintf (file, ";; %d regs to allocate:", nregs);
1960   for (i = 0; i < max_allocno; i++)
1961     {
1962       int j;
1963       if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1964 	continue;
1965       fprintf (file, " %d", allocno[allocno_order[i]].reg);
1966       for (j = 0; j < max_regno; j++)
1967 	if (reg_allocno[j] == allocno_order[i]
1968 	    && j != allocno[allocno_order[i]].reg)
1969 	  fprintf (file, "+%d", j);
1970       if (allocno[allocno_order[i]].size != 1)
1971 	fprintf (file, " (%d)", allocno[allocno_order[i]].size);
1972     }
1973   fprintf (file, "\n");
1974 
1975   for (i = 0; i < max_allocno; i++)
1976     {
1977       int j;
1978       fprintf (file, ";; %d conflicts:", allocno[i].reg);
1979       for (j = 0; j < max_allocno; j++)
1980 	if (CONFLICTP (j, i))
1981 	  fprintf (file, " %d", allocno[j].reg);
1982       for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1983 	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j))
1984 	  fprintf (file, " %d", j);
1985       fprintf (file, "\n");
1986 
1987       has_preferences = 0;
1988       for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1989 	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1990 	  has_preferences = 1;
1991 
1992       if (! has_preferences)
1993 	continue;
1994       fprintf (file, ";; %d preferences:", allocno[i].reg);
1995       for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1996 	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1997 	  fprintf (file, " %d", j);
1998       fprintf (file, "\n");
1999     }
2000   fprintf (file, "\n");
2001 }
2002 
2003 void
dump_global_regs(FILE * file)2004 dump_global_regs (FILE *file)
2005 {
2006   int i, j;
2007 
2008   fprintf (file, ";; Register dispositions:\n");
2009   for (i = FIRST_PSEUDO_REGISTER, j = 0; i < max_regno; i++)
2010     if (reg_renumber[i] >= 0)
2011       {
2012 	fprintf (file, "%d in %d  ", i, reg_renumber[i]);
2013 	if (++j % 6 == 0)
2014 	  fprintf (file, "\n");
2015       }
2016 
2017   fprintf (file, "\n\n;; Hard regs used: ");
2018   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2019     if (regs_ever_live[i])
2020       fprintf (file, " %d", i);
2021   fprintf (file, "\n\n");
2022 }
2023 
2024 
2025 
2026 /* This page contains code to make live information more accurate.
2027    The accurate register liveness at program point P means:
2028      o there is a path from P to usage of the register and the
2029        register is not redefined or killed on the path.
2030      o register at P is partially available, i.e. there is a path from
2031        a register definition to the point P and the register is not
2032        killed (clobbered) on the path
2033 
2034    The standard GCC live information means only the first condition.
   Without the partial availability, there will be more register
   conflicts and consequently worse register allocation.  A typical
   example where the two notions differ is a register that is first
   initialized inside a loop: it is not partially available in the
   basic block preceding the loop in the CFG.  */
2040 
/* The following structure contains basic block data flow information
   used to calculate partial availability of registers.  One instance
   is attached to each basic block through bb->aux (see BB_INFO and
   allocate_bb_info).  */

struct bb_info
{
  /* The basic block reverse post-order number.  */
  int rts_number;
  /* Registers used uninitialized in an insn in which there is an
     early clobbered register might get the same hard register.  */
  bitmap earlyclobber;
  /* Registers correspondingly killed (clobbered) and defined but not
     killed afterward in the basic block.  */
  bitmap killed, avloc;
  /* Registers partially available and living (in other words whose
     values were calculated and used) correspondingly at the start
     and end of the basic block.  */
  bitmap live_pavin, live_pavout;
};
2059 
/* Macros for accessing data flow information of basic blocks.  */

#define BB_INFO(BB) ((struct bb_info *) (BB)->aux)
#define BB_INFO_BY_INDEX(N) BB_INFO (BASIC_BLOCK(N))

/* Obstack on which all the per-block bitmaps in struct bb_info are
   allocated (see allocate_bb_info / free_bb_info).  */
static struct bitmap_obstack greg_obstack;
2066 /* The function allocates the info structures of each basic block.  It
2067    also initialized LIVE_PAVIN and LIVE_PAVOUT as if all hard
2068    registers were partially available.  */
2069 
2070 static void
allocate_bb_info(void)2071 allocate_bb_info (void)
2072 {
2073   int i;
2074   basic_block bb;
2075   struct bb_info *bb_info;
2076   bitmap init;
2077 
2078   alloc_aux_for_blocks (sizeof (struct bb_info));
2079   init = BITMAP_ALLOC (NULL);
2080   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2081     bitmap_set_bit (init, i);
2082   bitmap_obstack_initialize (&greg_obstack);
2083   FOR_EACH_BB (bb)
2084     {
2085       bb_info = bb->aux;
2086       bb_info->earlyclobber = BITMAP_ALLOC (&greg_obstack);
2087       bb_info->avloc = BITMAP_ALLOC (&greg_obstack);
2088       bb_info->killed = BITMAP_ALLOC (&greg_obstack);
2089       bb_info->live_pavin = BITMAP_ALLOC (&greg_obstack);
2090       bb_info->live_pavout = BITMAP_ALLOC (&greg_obstack);
2091       bitmap_copy (bb_info->live_pavin, init);
2092       bitmap_copy (bb_info->live_pavout, init);
2093     }
2094   BITMAP_FREE (init);
2095 }
2096 
/* The function frees the allocated info of all basic blocks.  The
   per-block bitmaps all live on greg_obstack, so releasing the
   obstack frees them in one go.  */

static void
free_bb_info (void)
{
  bitmap_obstack_release (&greg_obstack);
  free_aux_for_blocks ();
}
2105 
2106 /* The function modifies local info for register REG being changed in
2107    SETTER.  DATA is used to pass the current basic block info.  */
2108 
2109 static void
mark_reg_change(rtx reg,rtx setter,void * data)2110 mark_reg_change (rtx reg, rtx setter, void *data)
2111 {
2112   int regno;
2113   basic_block bb = data;
2114   struct bb_info *bb_info = BB_INFO (bb);
2115 
2116   if (GET_CODE (reg) == SUBREG)
2117     reg = SUBREG_REG (reg);
2118 
2119   if (!REG_P (reg))
2120     return;
2121 
2122   regno = REGNO (reg);
2123   bitmap_set_bit (bb_info->killed, regno);
2124 
2125   if (GET_CODE (setter) != CLOBBER)
2126     bitmap_set_bit (bb_info->avloc, regno);
2127   else
2128     bitmap_clear_bit (bb_info->avloc, regno);
2129 }
2130 
/* Classes of registers which could be early clobbered in the current
   insn.  Filled by check_earlyclobber; duplicates are not pushed, so
   it never holds more than N_REG_CLASSES elements.  */

static VEC(int,heap) *earlyclobber_regclass;
2135 
/* This function finds and stores register classes that could be early
   clobbered in INSN.  If any earlyclobber classes are found, the function
   returns TRUE, in all other cases it returns FALSE.

   The constraint strings of all operands are scanned alternative by
   alternative; when an alternative that contained '&' ends, the last
   register class it named is recorded in earlyclobber_regclass.  */

static bool
check_earlyclobber (rtx insn)
{
  int opno;
  bool found = false;

  extract_insn (insn);

  VEC_truncate (int, earlyclobber_regclass, 0);
  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      char c;
      bool amp_p;		/* '&' seen in the current alternative.  */
      int i;
      enum reg_class class;	/* Last register class named in it.  */
      const char *p = recog_data.constraints[opno];

      class = NO_REGS;
      amp_p = false;
      for (;;)
	{
	  c = *p;
	  switch (c)
	    {
	    case '=':  case '+':  case '?':
	    case '#':  case '!':
	    case '*':  case '%':
	    case 'm':  case '<':  case '>':  case 'V':  case 'o':
	    case 'E':  case 'F':  case 'G':  case 'H':
	    case 's':  case 'i':  case 'n':
	    case 'I':  case 'J':  case 'K':  case 'L':
	    case 'M':  case 'N':  case 'O':  case 'P':
	    case 'X':
	    case '0': case '1':  case '2':  case '3':  case '4':
	    case '5': case '6':  case '7':  case '8':  case '9':
	      /* These don't say anything we care about.  */
	      break;

	    case '&':
	      amp_p = true;
	      break;
	    case '\0':
	    case ',':
	      /* End of an alternative (or of the whole string): if it
		 was early-clobbered and named a class, record that
		 class once.  */
	      if (amp_p && class != NO_REGS)
		{
		  int rc;

		  found = true;
		  for (i = 0;
		       VEC_iterate (int, earlyclobber_regclass, i, rc);
		       i++)
		    {
		      if (rc == (int) class)
			goto found_rc;
		    }

		  /* We use VEC_quick_push here because
		     earlyclobber_regclass holds no more than
		     N_REG_CLASSES elements. */
		  VEC_quick_push (int, earlyclobber_regclass, (int) class);
		found_rc:
		  ;
		}

	      /* Reset the per-alternative state.  */
	      amp_p = false;
	      class = NO_REGS;
	      break;

	    case 'r':
	      class = GENERAL_REGS;
	      break;

	    default:
	      class = REG_CLASS_FROM_CONSTRAINT (c, p);
	      break;
	    }
	  if (c == '\0')
	    break;
	  p += CONSTRAINT_LEN (c, p);
	}
    }

  return found;
}
2224 
2225 /* The function checks that pseudo-register *X has a class
2226    intersecting with the class of pseudo-register could be early
2227    clobbered in the same insn.
2228    This function is a no-op if earlyclobber_regclass is empty.  */
2229 
2230 static int
mark_reg_use_for_earlyclobber(rtx * x,void * data ATTRIBUTE_UNUSED)2231 mark_reg_use_for_earlyclobber (rtx *x, void *data ATTRIBUTE_UNUSED)
2232 {
2233   enum reg_class pref_class, alt_class;
2234   int i, regno;
2235   basic_block bb = data;
2236   struct bb_info *bb_info = BB_INFO (bb);
2237 
2238   if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
2239     {
2240       int rc;
2241 
2242       regno = REGNO (*x);
2243       if (bitmap_bit_p (bb_info->killed, regno)
2244 	  || bitmap_bit_p (bb_info->avloc, regno))
2245 	return 0;
2246       pref_class = reg_preferred_class (regno);
2247       alt_class = reg_alternate_class (regno);
2248       for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
2249 	{
2250 	  if (reg_classes_intersect_p (rc, pref_class)
2251 	      || (rc != NO_REGS
2252 		  && reg_classes_intersect_p (rc, alt_class)))
2253 	    {
2254 	      bitmap_set_bit (bb_info->earlyclobber, regno);
2255 	      break;
2256 	    }
2257 	}
2258     }
2259   return 0;
2260 }
2261 
/* The function processes all pseudo-registers in *X with the aid of
   previous function.  DATA is the basic block being scanned; it is
   passed through unchanged to mark_reg_use_for_earlyclobber.  Called
   via note_uses.  */

static void
mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
{
  for_each_rtx (x, mark_reg_use_for_earlyclobber, data);
}
2270 
2271 /* The function calculates local info for each basic block.  */
2272 
2273 static void
calculate_local_reg_bb_info(void)2274 calculate_local_reg_bb_info (void)
2275 {
2276   basic_block bb;
2277   rtx insn, bound;
2278 
2279   /* We know that earlyclobber_regclass holds no more than
2280     N_REG_CLASSES elements.  See check_earlyclobber.  */
2281   earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
2282   FOR_EACH_BB (bb)
2283     {
2284       bound = NEXT_INSN (BB_END (bb));
2285       for (insn = BB_HEAD (bb); insn != bound; insn = NEXT_INSN (insn))
2286 	if (INSN_P (insn))
2287 	  {
2288 	    note_stores (PATTERN (insn), mark_reg_change, bb);
2289 	    if (check_earlyclobber (insn))
2290 	      note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
2291 	  }
2292     }
2293   VEC_free (int, heap, earlyclobber_regclass);
2294 }
2295 
2296 /* The function sets up reverse post-order number of each basic
2297    block.  */
2298 
2299 static void
set_up_bb_rts_numbers(void)2300 set_up_bb_rts_numbers (void)
2301 {
2302   int i;
2303   int *rts_order;
2304 
2305   rts_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
2306   post_order_compute (rts_order, false);
2307   for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
2308     BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
2309   free (rts_order);
2310 }
2311 
2312 /* Compare function for sorting blocks in reverse postorder.  */
2313 
2314 static int
rpost_cmp(const void * bb1,const void * bb2)2315 rpost_cmp (const void *bb1, const void *bb2)
2316 {
2317   basic_block b1 = *(basic_block *) bb1, b2 = *(basic_block *) bb2;
2318 
2319   return BB_INFO (b2)->rts_number - BB_INFO (b1)->rts_number;
2320 }
2321 
/* Temporary bitmap used for live_pavin, live_pavout calculation.
   Allocated and freed in calculate_reg_pav.  */
static bitmap temp_bitmap;
2324 
/* The function calculates partial register availability according to
   the following equations:

     bb.live_pavin
       = empty for entry block
         | union (live_pavout of predecessors) & global_live_at_start
     bb.live_pavout = union (bb.live_pavin - bb.killed, bb.avloc)
                      & global_live_at_end

   Solved iteratively with a worklist until a fixed point is reached.  */

static void
calculate_reg_pav (void)
{
  basic_block bb, succ;
  edge e;
  int i, nel;
  VEC(basic_block,heap) *bbs, *new_bbs, *temp;
  basic_block *bb_array;
  sbitmap wset;

  /* BBS is the current worklist, NEW_BBS collects blocks whose input
     may have changed and must be reprocessed in the next round.  */
  bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
  new_bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
  temp_bitmap = BITMAP_ALLOC (NULL);
  FOR_EACH_BB (bb)
    {
      VEC_quick_push (basic_block, bbs, bb);
    }
  wset = sbitmap_alloc (n_basic_blocks + 1);
  while (VEC_length (basic_block, bbs))
    {
      /* Process the worklist in reverse post-order to converge in
	 fewer iterations.  */
      bb_array = VEC_address (basic_block, bbs);
      nel = VEC_length (basic_block, bbs);
      qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
      sbitmap_zero (wset);
      for (i = 0; i < nel; i++)
	{
	  edge_iterator ei;
	  struct bb_info *bb_info;
	  bitmap bb_live_pavin, bb_live_pavout;

	  bb = bb_array [i];
	  bb_info = BB_INFO (bb);
	  bb_live_pavin = bb_info->live_pavin;
	  bb_live_pavout = bb_info->live_pavout;
	  /* live_pavin |= union of the predecessors' live_pavout ...  */
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      basic_block pred = e->src;

	      if (pred->index != ENTRY_BLOCK)
		bitmap_ior_into (bb_live_pavin, BB_INFO (pred)->live_pavout);
	    }
	  /* ... restricted to registers live at the block start.  */
	  bitmap_and_into (bb_live_pavin, bb->il.rtl->global_live_at_start);
	  /* live_pavout = (avloc | (live_pavin - killed))
	     & global_live_at_end, computed into temp_bitmap.  */
	  bitmap_ior_and_compl (temp_bitmap, bb_info->avloc,
				bb_live_pavin, bb_info->killed);
	  bitmap_and_into (temp_bitmap, bb->il.rtl->global_live_at_end);
	  if (! bitmap_equal_p (temp_bitmap, bb_live_pavout))
	    {
	      /* Output changed: requeue the successors.  WSET guards
		 against pushing the same block twice per round.  */
	      bitmap_copy (bb_live_pavout, temp_bitmap);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		{
		  succ = e->dest;
		  if (succ->index != EXIT_BLOCK
		      && !TEST_BIT (wset, succ->index))
		    {
		      SET_BIT (wset, succ->index);
		      VEC_quick_push (basic_block, new_bbs, succ);
		    }
		}
	    }
	}
      /* Swap the worklists for the next round.  */
      temp = bbs;
      bbs = new_bbs;
      new_bbs = temp;
      VEC_truncate (basic_block, new_bbs, 0);
    }
  sbitmap_free (wset);
  BITMAP_FREE (temp_bitmap);
  VEC_free (basic_block, heap, new_bbs);
  VEC_free (basic_block, heap, bbs);
}
2404 
/* The function modifies partial availability information for two
   special cases to prevent incorrect work of the subsequent passes
   with the accurate live information based on the partial
   availability.  */

static void
modify_reg_pav (void)
{
  basic_block bb;
  struct bb_info *bb_info;
#ifdef STACK_REGS
  int i;
  HARD_REG_SET zero, stack_hard_regs, used;
  bitmap stack_regs;

  /* Collect the pseudos whose preferred or alternate class overlaps
     the stack registers (FIRST_STACK_REG .. LAST_STACK_REG).  */
  CLEAR_HARD_REG_SET (zero);
  CLEAR_HARD_REG_SET (stack_hard_regs);
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT(stack_hard_regs, i);
  stack_regs = BITMAP_ALLOC (&greg_obstack);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
      AND_HARD_REG_SET (used, stack_hard_regs);
      /* GO_IF_HARD_REG_EQUAL jumps to `skip' when the intersection is
	 empty, i.e. pseudo I can never live in a stack register.  */
      GO_IF_HARD_REG_EQUAL(used, zero, skip);
      bitmap_set_bit (stack_regs, i);
    skip:
      ;
    }
#endif
  FOR_EACH_BB (bb)
    {
      bb_info = BB_INFO (bb);

      /* Reload can assign the same hard register to uninitialized
	 pseudo-register and early clobbered pseudo-register in an
	 insn if the pseudo-register is used first time in given BB
	 and not lived at the BB start.  To prevent this we don't
	 change life information for such pseudo-registers.  */
      bitmap_ior_into (bb_info->live_pavin, bb_info->earlyclobber);
#ifdef STACK_REGS
      /* We can not use the same stack register for uninitialized
	 pseudo-register and another living pseudo-register because if the
	 uninitialized pseudo-register dies, subsequent pass reg-stack
	 will be confused (it will believe that the other register
	 dies).  */
      bitmap_ior_into (bb_info->live_pavin, stack_regs);
#endif
    }
#ifdef STACK_REGS
  BITMAP_FREE (stack_regs);
#endif
}
2459 
2460 /* The following function makes live information more accurate by
2461    modifying global_live_at_start and global_live_at_end of basic
2462    blocks.
2463 
2464    The standard GCC life analysis permits registers to live
2465    uninitialized, for example:
2466 
2467        R is never used
2468        .....
2469        Loop:
2470          R is defined
2471        ...
2472        R is used.
2473 
2474    With normal life_analysis, R would be live before "Loop:".
2475    The result is that R causes many interferences that do not
2476    serve any purpose.
2477 
2478    After the function call a register lives at a program point
2479    only if it is initialized on a path from CFG entry to the
2480    program point.  */
2481 
2482 static void
make_accurate_live_analysis(void)2483 make_accurate_live_analysis (void)
2484 {
2485   basic_block bb;
2486   struct bb_info *bb_info;
2487 
2488   max_regno = max_reg_num ();
2489   compact_blocks ();
2490   allocate_bb_info ();
2491   calculate_local_reg_bb_info ();
2492   set_up_bb_rts_numbers ();
2493   calculate_reg_pav ();
2494   modify_reg_pav ();
2495   FOR_EACH_BB (bb)
2496     {
2497       bb_info = BB_INFO (bb);
2498 
2499       bitmap_and_into (bb->il.rtl->global_live_at_start, bb_info->live_pavin);
2500       bitmap_and_into (bb->il.rtl->global_live_at_end, bb_info->live_pavout);
2501     }
2502   free_bb_info ();
2503 }
2504 /* Run old register allocator.  Return TRUE if we must exit
2505    rest_of_compilation upon return.  */
2506 static unsigned int
rest_of_handle_global_alloc(void)2507 rest_of_handle_global_alloc (void)
2508 {
2509   bool failure;
2510 
2511   /* If optimizing, allocate remaining pseudo-regs.  Do the reload
2512      pass fixing up any insns that are invalid.  */
2513 
2514   if (optimize)
2515     failure = global_alloc ();
2516   else
2517     {
2518       build_insn_chain (get_insns ());
2519       failure = reload (get_insns (), 0);
2520     }
2521 
2522   if (dump_enabled_p (pass_global_alloc.static_pass_number))
2523     {
2524       timevar_push (TV_DUMP);
2525       dump_global_regs (dump_file);
2526       timevar_pop (TV_DUMP);
2527     }
2528 
2529   gcc_assert (reload_completed || failure);
2530   reload_completed = !failure;
2531   return 0;
2532 }
2533 
/* Descriptor for the global register allocation pass ("greg").  */
struct tree_opt_pass pass_global_alloc =
{
  "greg",                               /* name */
  NULL,                                 /* gate */
  rest_of_handle_global_alloc,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_GLOBAL_ALLOC,                      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'g'                                   /* letter */
};
2551 
2552