1 /* Allocate registers within a basic block, for GNU compiler.
2 Copyright (C) 1987, 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4 Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23 /* Allocation of hard register numbers to pseudo registers is done in
24 two passes. In this pass we consider only regs that are born and
25 die once within one basic block. We do this one basic block at a
26 time. Then the next pass allocates the registers that remain.
27 Two passes are used because this pass uses methods that work only
28 on linear code, but that do a better job than the general methods
29 used in global_alloc, and more quickly too.
30
31 The assignments made are recorded in the vector reg_renumber
32 whose space is allocated here. The rtl code itself is not altered.
33
34 We assign each instruction in the basic block a number
35 which is its order from the beginning of the block.
36 Then we can represent the lifetime of a pseudo register with
37 a pair of numbers, and check for conflicts easily.
38 We can record the availability of hard registers with a
39 HARD_REG_SET for each instruction. The HARD_REG_SET
40 contains 0 or 1 for each hard reg.
41
42 To avoid register shuffling, we tie registers together when one
43 dies by being copied into another, or dies in an instruction that
44 does arithmetic to produce another. The tied registers are
45 allocated as one. Registers with different reg class preferences
46 can never be tied unless the class preferred by one is a subclass
47 of the one preferred by the other.
48
49 Tying is represented with "quantity numbers".
50 A non-tied register is given a new quantity number.
51 Tied registers have the same quantity number.
52
53 There is provision to exempt registers that, even though they are
54 contained within the block, can be tied to others that are not contained
55 in it, so that global_alloc could process them both and tie them then.
56 But this is currently disabled since tying in global_alloc is not
57 yet implemented. */
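/* Illustrative example of tying (hypothetical pseudo numbers):

     insn 1:  (set (reg 101) (reg 100))                       ; reg 100 dies here
     insn 2:  (set (reg 102) (plus (reg 101) (const_int 4)))  ; reg 101 dies here

   Reg 100 dies by being copied into reg 101, and reg 101 dies in an insn
   doing arithmetic to produce reg 102, so all three pseudos are tied into
   one quantity and can share a single hard register; the copy in insn 1
   can then be eliminated.  */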
58
59 /* Pseudos allocated here can be reallocated by global.c if the hard register
60 is used as a spill register. Currently we don't allocate such pseudos
61 here if their preferred class is likely to be used by spills. */
62
63 #include "config.h"
64 #include "system.h"
65 #include "coretypes.h"
66 #include "tm.h"
67 #include "hard-reg-set.h"
68 #include "rtl.h"
69 #include "tm_p.h"
70 #include "flags.h"
71 #include "regs.h"
72 #include "function.h"
73 #include "insn-config.h"
74 #include "insn-attr.h"
75 #include "recog.h"
76 #include "output.h"
77 #include "toplev.h"
78 #include "except.h"
79 #include "integrate.h"
80 #include "reload.h"
81 #include "ggc.h"
82 #include "timevar.h"
83 #include "tree-pass.h"
84
85 /* Next quantity number available for allocation. */
86
87 static int next_qty;
88
89 /* Information we maintain about each quantity. */
90 struct qty
91 {
92 /* The number of refs to quantity Q. */
93
94 int n_refs;
95
96 /* The frequency of uses of quantity Q. */
97
98 int freq;
99
100 /* Insn number (counting from head of basic block)
101 where quantity Q was born. -1 if birth has not been recorded. */
102
103 int birth;
104
105 /* Insn number (counting from head of basic block)
106 where given quantity died. Due to the way tying is done,
107 and the fact that we consider in this pass only regs that die but once,
108 a quantity can die only once. Each quantity's life span
109 is a set of consecutive insns. -1 if death has not been recorded. */
110
111 int death;
112
113 /* Number of words needed to hold the data in given quantity.
114 This depends on its machine mode. It is used for these purposes:
115 1. It is used in computing the relative importance of qtys,
116 which determines the order in which we look for regs for them.
117 2. It is used in rules that prevent tying several registers of
118 different sizes in a way that is geometrically impossible
119 (see combine_regs). */
120
121 int size;
122
123 /* Number of times a reg tied to given qty lives across a CALL_INSN. */
124
125 int n_calls_crossed;
126
127 /* Number of times a reg tied to given qty lives across a CALL_INSN
128 that might throw. */
129
130 int n_throwing_calls_crossed;
131
132 /* The register number of one pseudo register whose reg_qty value is Q.
133 This register should be the head of the chain
134 maintained in reg_next_in_qty. */
135
136 int first_reg;
137
138 /* Reg class contained in (smaller than) the preferred classes of all
139 the pseudo regs that are tied in given quantity.
140 This is the preferred class for allocating that quantity. */
141
142 enum reg_class min_class;
143
144 /* Register class within which we allocate given qty if we can't get
145 its preferred class. */
146
147 enum reg_class alternate_class;
148
149 /* This holds the mode of the registers that are tied to given qty,
150 or VOIDmode if registers with differing modes are tied together. */
151
152 enum machine_mode mode;
153
154 /* The hard reg number chosen for the given quantity,
155 or -1 if none was found. */
156
157 short phys_reg;
158 };
159
160 static struct qty *qty;
161
162 /* These fields are kept separately to speed up their clearing. */
163
164 /* We maintain two hard register sets that indicate suggested hard registers
165 for each quantity. The first, phys_copy_sugg, contains hard registers
166 that are tied to the quantity by a simple copy. The second contains all
167 hard registers that are tied to the quantity via an arithmetic operation.
168
169 The former register set is given priority for allocation. This tends to
170 eliminate copy insns. */
171
172 /* Element Q is a set of hard registers that are suggested for quantity Q by
173 copy insns. */
174
175 static HARD_REG_SET *qty_phys_copy_sugg;
176
177 /* Element Q is a set of hard registers that are suggested for quantity Q by
178 arithmetic insns. */
179
180 static HARD_REG_SET *qty_phys_sugg;
181
182 /* Element Q is the number of suggested registers in qty_phys_copy_sugg. */
183
184 static short *qty_phys_num_copy_sugg;
185
186 /* Element Q is the number of suggested registers in qty_phys_sugg. */
187
188 static short *qty_phys_num_sugg;
189
190 /* If (REG N) has been assigned a quantity number, element N is the register
191 number of another register assigned the same quantity number, or -1 for the
192 end of the chain. qty->first_reg points to the head of this chain. */
193
194 static int *reg_next_in_qty;
195
196 /* reg_qty[N] (where N is a pseudo reg number) is the qty number of that reg
197 if it is >= 0,
198 or -1 if this register cannot be allocated by local-alloc,
199 or -2 if not known yet.
200
201 Note that if we see a use or death of pseudo register N with
202 reg_qty[N] == -2, register N must be local to the current block. If
203 it were used in more than one block, we would have reg_qty[N] == -1.
204 This relies on the fact that if reg_basic_block[N] is >= 0, register N
205 will not appear in any other block. We save a considerable number of
206 tests by exploiting this.
207
208 If N is < FIRST_PSEUDO_REGISTER, reg_qty[N] is undefined and should not
209 be referenced. */
210
211 static int *reg_qty;
212
213 /* The offset (in words) of register N within its quantity.
214 This can be nonzero if register N is SImode, and has been tied
215 to a subreg of a DImode register. */
216
217 static char *reg_offset;
218
219 /* Vector of substitutions of register numbers,
220 used to map pseudo regs into hardware regs.
221 This is set up as a result of register allocation.
222 Element N is the hard reg assigned to pseudo reg N,
223 or is -1 if no hard reg was assigned.
224 If N is a hard reg number, element N is N. */
225
226 short *reg_renumber;
227
228 /* Set of hard registers live at the current point in the scan
229 of the instructions in a basic block. */
230
231 static HARD_REG_SET regs_live;
232
233 /* Each set of hard registers indicates registers live at a particular
234 point in the basic block. For N even, regs_live_at[N] says which
235 hard registers are needed *after* insn N/2 (i.e., they may not
236 conflict with the outputs of insn N/2 or the inputs of insn N/2 + 1).
237
238 If an object is to conflict with the inputs of insn J but not the
239 outputs of insn J - 1, we say it is born at index J*2 - 1. Similarly,
240 if it is to conflict with the outputs of insn J but not the inputs of
241 insn J + 1, it is said to die at index J*2 + 1. */
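/* Worked example of this indexing (illustrative only): with insns numbered
   from the head of the block, regs_live_at[6] and regs_live_at[7] both
   describe the point just after insn 3.  An object that must conflict with
   the inputs of insn 3 but not the outputs of insn 2 is given birth index
   2*3 - 1 = 5, and one that must conflict with the outputs of insn 3 but
   not the inputs of insn 4 is given death index 2*3 + 1 = 7.  */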
242
243 static HARD_REG_SET *regs_live_at;
244
245 /* Communicate local vars `insn_number' and `insn'
246 from `block_alloc' to `reg_is_set', `wipe_dead_reg', and `alloc_qty'. */
247 static int this_insn_number;
248 static rtx this_insn;
249
250 struct equivalence
251 {
252 /* Set when an attempt should be made to replace a register
253 with the associated src_p entry. */
254
255 char replace;
256
257 /* Set when a REG_EQUIV note is found or created. Used to
258 keep track of what memory accesses might be created later,
259 e.g. by reload. */
260
261 rtx replacement;
262
263 rtx *src_p;
264
265 /* Loop depth is used to recognize equivalences which appear
266 to be present within the same loop (or in an inner loop). */
267
268 int loop_depth;
269
270 /* The list of the instructions that initialize this register. */
271
272 rtx init_insns;
273
274 /* Nonzero if this had a preexisting REG_EQUIV note. */
275
276 int is_arg_equivalence;
277 };
278
279 /* reg_equiv[N] (where N is a pseudo reg number) is the equivalence
280 structure for that register. */
281
282 static struct equivalence *reg_equiv;
283
284 /* Nonzero if we recorded an equivalence for a LABEL_REF. */
285 static int recorded_label_ref;
286
287 static void alloc_qty (int, enum machine_mode, int, int);
288 static void validate_equiv_mem_from_store (rtx, rtx, void *);
289 static int validate_equiv_mem (rtx, rtx, rtx);
290 static int equiv_init_varies_p (rtx);
291 static int equiv_init_movable_p (rtx, int);
292 static int contains_replace_regs (rtx);
293 static int memref_referenced_p (rtx, rtx);
294 static int memref_used_between_p (rtx, rtx, rtx);
295 static void update_equiv_regs (void);
296 static void no_equiv (rtx, rtx, void *);
297 static void block_alloc (int);
298 static int qty_sugg_compare (int, int);
299 static int qty_sugg_compare_1 (const void *, const void *);
300 static int qty_compare (int, int);
301 static int qty_compare_1 (const void *, const void *);
302 static int combine_regs (rtx, rtx, int, int, rtx, int);
303 static int reg_meets_class_p (int, enum reg_class);
304 static void update_qty_class (int, int);
305 static void reg_is_set (rtx, rtx, void *);
306 static void reg_is_born (rtx, int);
307 static void wipe_dead_reg (rtx, int);
308 static int find_free_reg (enum reg_class, enum machine_mode, int, int, int,
309 int, int);
310 static void mark_life (int, enum machine_mode, int);
311 static void post_mark_life (int, enum machine_mode, int, int, int);
312 static int no_conflict_p (rtx, rtx, rtx);
313 static int requires_inout (const char *);
314
315 /* Allocate a new quantity (new within current basic block)
316 for register number REGNO which is born at index BIRTH
317 within the block. MODE and SIZE are info on reg REGNO. */
318
319 static void
320 alloc_qty (int regno, enum machine_mode mode, int size, int birth)
321 {
322 int qtyno = next_qty++;
323
324 reg_qty[regno] = qtyno;
325 reg_offset[regno] = 0;
326 reg_next_in_qty[regno] = -1;
327
328 qty[qtyno].first_reg = regno;
329 qty[qtyno].size = size;
330 qty[qtyno].mode = mode;
331 qty[qtyno].birth = birth;
332 qty[qtyno].n_calls_crossed = REG_N_CALLS_CROSSED (regno);
333 qty[qtyno].n_throwing_calls_crossed = REG_N_THROWING_CALLS_CROSSED (regno);
334 qty[qtyno].min_class = reg_preferred_class (regno);
335 qty[qtyno].alternate_class = reg_alternate_class (regno);
336 qty[qtyno].n_refs = REG_N_REFS (regno);
337 qty[qtyno].freq = REG_FREQ (regno);
338 }
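/* A sketch of how a quantity might be created during the scan in
   block_alloc (hypothetical fragment, not the actual body of any function
   here): a pseudo REGNO that is local to the block (reg_qty[regno] == -2)
   and first set by the insn being scanned could be handled roughly as

     if (reg_qty[regno] == -2)
       alloc_qty (regno, GET_MODE (reg), PSEUDO_REGNO_SIZE (regno),
                  2 * this_insn_number);

   assuming the usual PSEUDO_REGNO_SIZE macro from regs.h; a birth index of
   twice the insn number matches the regs_live_at numbering described
   above.  */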
339
340 /* Main entry point of this file. */
341
342 static int
343 local_alloc (void)
344 {
345 int i;
346 int max_qty;
347 basic_block b;
348
349 /* We need to keep track of whether or not we recorded a LABEL_REF so
350 that we know if the jump optimizer needs to be rerun. */
351 recorded_label_ref = 0;
352
353 /* Leaf functions and non-leaf functions have different needs.
354 If defined, let the machine say what kind of ordering we
355 should use. */
356 #ifdef ORDER_REGS_FOR_LOCAL_ALLOC
357 ORDER_REGS_FOR_LOCAL_ALLOC;
358 #endif
359
360 /* Promote REG_EQUAL notes to REG_EQUIV notes and adjust status of affected
361 registers. */
362 update_equiv_regs ();
363
364 /* This sets the maximum number of quantities we can have. Quantity
365 numbers start at zero and we can have one for each pseudo. */
366 max_qty = (max_regno - FIRST_PSEUDO_REGISTER);
367
368 /* Allocate vectors of temporary data.
369 See the declarations of these variables, above,
370 for what they mean. */
371
372 qty = XNEWVEC (struct qty, max_qty);
373 qty_phys_copy_sugg = XNEWVEC (HARD_REG_SET, max_qty);
374 qty_phys_num_copy_sugg = XNEWVEC (short, max_qty);
375 qty_phys_sugg = XNEWVEC (HARD_REG_SET, max_qty);
376 qty_phys_num_sugg = XNEWVEC (short, max_qty);
377
378 reg_qty = XNEWVEC (int, max_regno);
379 reg_offset = XNEWVEC (char, max_regno);
380 reg_next_in_qty = XNEWVEC (int, max_regno);
381
382 /* Determine which pseudo-registers can be allocated by local-alloc.
383 In general, these are the registers used only in a single block and
384 which only die once.
385
386 We need not be concerned with which block actually uses the register
387 since we will never see it outside that block. */
388
389 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
390 {
391 if (REG_BASIC_BLOCK (i) >= 0 && REG_N_DEATHS (i) == 1)
392 reg_qty[i] = -2;
393 else
394 reg_qty[i] = -1;
395 }
396
397 /* Force loop below to initialize entire quantity array. */
398 next_qty = max_qty;
399
400 /* Allocate each block's local registers, block by block. */
401
402 FOR_EACH_BB (b)
403 {
404 /* NEXT_QTY indicates which elements of the `qty_...'
405 vectors might need to be initialized because they were used
406 for the previous block; it is set to the entire array before
407 block 0. Initialize those, with an explicit loop if there are few,
408 else with memset. Do not initialize vectors that are
409 explicitly set by `alloc_qty'. */
410
411 if (next_qty < 6)
412 {
413 for (i = 0; i < next_qty; i++)
414 {
415 CLEAR_HARD_REG_SET (qty_phys_copy_sugg[i]);
416 qty_phys_num_copy_sugg[i] = 0;
417 CLEAR_HARD_REG_SET (qty_phys_sugg[i]);
418 qty_phys_num_sugg[i] = 0;
419 }
420 }
421 else
422 {
423 #define CLEAR(vector) \
424 memset ((vector), 0, (sizeof (*(vector))) * next_qty);
425
426 CLEAR (qty_phys_copy_sugg);
427 CLEAR (qty_phys_num_copy_sugg);
428 CLEAR (qty_phys_sugg);
429 CLEAR (qty_phys_num_sugg);
430 }
431
432 next_qty = 0;
433
434 block_alloc (b->index);
435 }
436
437 free (qty);
438 free (qty_phys_copy_sugg);
439 free (qty_phys_num_copy_sugg);
440 free (qty_phys_sugg);
441 free (qty_phys_num_sugg);
442
443 free (reg_qty);
444 free (reg_offset);
445 free (reg_next_in_qty);
446
447 return recorded_label_ref;
448 }
449
450 /* Used for communication between the following two functions: contains
451 a MEM that we wish to ensure remains unchanged. */
452 static rtx equiv_mem;
453
454 /* Set nonzero if EQUIV_MEM is modified. */
455 static int equiv_mem_modified;
456
457 /* If EQUIV_MEM is modified by modifying DEST, indicate that it is modified.
458 Called via note_stores. */
459
460 static void
461 validate_equiv_mem_from_store (rtx dest, rtx set ATTRIBUTE_UNUSED,
462 void *data ATTRIBUTE_UNUSED)
463 {
464 if ((REG_P (dest)
465 && reg_overlap_mentioned_p (dest, equiv_mem))
466 || (MEM_P (dest)
467 && true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
468 equiv_mem_modified = 1;
469 }
470
471 /* Verify that no store between START and the death of REG invalidates
472 MEMREF. MEMREF is invalidated by modifying a register used in MEMREF,
473 by storing into an overlapping memory location, or with a non-const
474 CALL_INSN.
475
476 Return 1 if MEMREF remains valid. */
477
478 static int
479 validate_equiv_mem (rtx start, rtx reg, rtx memref)
480 {
481 rtx insn;
482 rtx note;
483
484 equiv_mem = memref;
485 equiv_mem_modified = 0;
486
487 /* If the memory reference has side effects or is volatile, it isn't a
488 valid equivalence. */
489 if (side_effects_p (memref))
490 return 0;
491
492 for (insn = start; insn && ! equiv_mem_modified; insn = NEXT_INSN (insn))
493 {
494 if (! INSN_P (insn))
495 continue;
496
497 if (find_reg_note (insn, REG_DEAD, reg))
498 return 1;
499
500 if (CALL_P (insn) && ! MEM_READONLY_P (memref)
501 && ! CONST_OR_PURE_CALL_P (insn))
502 return 0;
503
504 note_stores (PATTERN (insn), validate_equiv_mem_from_store, NULL);
505
506 /* If a register mentioned in MEMREF is modified via an
507 auto-increment, we lose the equivalence. Do the same if one
508 dies; although we could extend the life, it doesn't seem worth
509 the trouble. */
510
511 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
512 if ((REG_NOTE_KIND (note) == REG_INC
513 || REG_NOTE_KIND (note) == REG_DEAD)
514 && REG_P (XEXP (note, 0))
515 && reg_overlap_mentioned_p (XEXP (note, 0), memref))
516 return 0;
517 }
518
519 return 0;
520 }
521
522 /* Returns zero if X is known to be invariant. */
523
524 static int
525 equiv_init_varies_p (rtx x)
526 {
527 RTX_CODE code = GET_CODE (x);
528 int i;
529 const char *fmt;
530
531 switch (code)
532 {
533 case MEM:
534 return !MEM_READONLY_P (x) || equiv_init_varies_p (XEXP (x, 0));
535
536 case CONST:
537 case CONST_INT:
538 case CONST_DOUBLE:
539 case CONST_VECTOR:
540 case SYMBOL_REF:
541 case LABEL_REF:
542 return 0;
543
544 case REG:
545 return reg_equiv[REGNO (x)].replace == 0 && rtx_varies_p (x, 0);
546
547 case ASM_OPERANDS:
548 if (MEM_VOLATILE_P (x))
549 return 1;
550
551 /* Fall through. */
552
553 default:
554 break;
555 }
556
557 fmt = GET_RTX_FORMAT (code);
558 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
559 if (fmt[i] == 'e')
560 {
561 if (equiv_init_varies_p (XEXP (x, i)))
562 return 1;
563 }
564 else if (fmt[i] == 'E')
565 {
566 int j;
567 for (j = 0; j < XVECLEN (x, i); j++)
568 if (equiv_init_varies_p (XVECEXP (x, i, j)))
569 return 1;
570 }
571
572 return 0;
573 }
574
575 /* Returns nonzero if X (used to initialize register REGNO) is movable.
576 X is only movable if the registers it uses have equivalent initializations
577 which appear to be within the same loop (or in an inner loop) and are
578 themselves movable, or if they are not candidates for local_alloc and don't vary. */
579
580 static int
581 equiv_init_movable_p (rtx x, int regno)
582 {
583 int i, j;
584 const char *fmt;
585 enum rtx_code code = GET_CODE (x);
586
587 switch (code)
588 {
589 case SET:
590 return equiv_init_movable_p (SET_SRC (x), regno);
591
592 case CC0:
593 case CLOBBER:
594 return 0;
595
596 case PRE_INC:
597 case PRE_DEC:
598 case POST_INC:
599 case POST_DEC:
600 case PRE_MODIFY:
601 case POST_MODIFY:
602 return 0;
603
604 case REG:
605 return (reg_equiv[REGNO (x)].loop_depth >= reg_equiv[regno].loop_depth
606 && reg_equiv[REGNO (x)].replace)
607 || (REG_BASIC_BLOCK (REGNO (x)) < 0 && ! rtx_varies_p (x, 0));
608
609 case UNSPEC_VOLATILE:
610 return 0;
611
612 case ASM_OPERANDS:
613 if (MEM_VOLATILE_P (x))
614 return 0;
615
616 /* Fall through. */
617
618 default:
619 break;
620 }
621
622 fmt = GET_RTX_FORMAT (code);
623 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
624 switch (fmt[i])
625 {
626 case 'e':
627 if (! equiv_init_movable_p (XEXP (x, i), regno))
628 return 0;
629 break;
630 case 'E':
631 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
632 if (! equiv_init_movable_p (XVECEXP (x, i, j), regno))
633 return 0;
634 break;
635 }
636
637 return 1;
638 }
639
640 /* TRUE if X uses any registers for which reg_equiv[REGNO].replace is true. */
641
642 static int
643 contains_replace_regs (rtx x)
644 {
645 int i, j;
646 const char *fmt;
647 enum rtx_code code = GET_CODE (x);
648
649 switch (code)
650 {
651 case CONST_INT:
652 case CONST:
653 case LABEL_REF:
654 case SYMBOL_REF:
655 case CONST_DOUBLE:
656 case CONST_VECTOR:
657 case PC:
658 case CC0:
659 case HIGH:
660 return 0;
661
662 case REG:
663 return reg_equiv[REGNO (x)].replace;
664
665 default:
666 break;
667 }
668
669 fmt = GET_RTX_FORMAT (code);
670 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
671 switch (fmt[i])
672 {
673 case 'e':
674 if (contains_replace_regs (XEXP (x, i)))
675 return 1;
676 break;
677 case 'E':
678 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
679 if (contains_replace_regs (XVECEXP (x, i, j)))
680 return 1;
681 break;
682 }
683
684 return 0;
685 }
686
687 /* TRUE if X references a memory location that would be affected by a store
688 to MEMREF. */
689
690 static int
691 memref_referenced_p (rtx memref, rtx x)
692 {
693 int i, j;
694 const char *fmt;
695 enum rtx_code code = GET_CODE (x);
696
697 switch (code)
698 {
699 case CONST_INT:
700 case CONST:
701 case LABEL_REF:
702 case SYMBOL_REF:
703 case CONST_DOUBLE:
704 case CONST_VECTOR:
705 case PC:
706 case CC0:
707 case HIGH:
708 case LO_SUM:
709 return 0;
710
711 case REG:
712 return (reg_equiv[REGNO (x)].replacement
713 && memref_referenced_p (memref,
714 reg_equiv[REGNO (x)].replacement));
715
716 case MEM:
717 if (true_dependence (memref, VOIDmode, x, rtx_varies_p))
718 return 1;
719 break;
720
721 case SET:
722 /* If we are setting a MEM, it doesn't count (its address does), but any
723 other SET_DEST that has a MEM in it is referencing the MEM. */
724 if (MEM_P (SET_DEST (x)))
725 {
726 if (memref_referenced_p (memref, XEXP (SET_DEST (x), 0)))
727 return 1;
728 }
729 else if (memref_referenced_p (memref, SET_DEST (x)))
730 return 1;
731
732 return memref_referenced_p (memref, SET_SRC (x));
733
734 default:
735 break;
736 }
737
738 fmt = GET_RTX_FORMAT (code);
739 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
740 switch (fmt[i])
741 {
742 case 'e':
743 if (memref_referenced_p (memref, XEXP (x, i)))
744 return 1;
745 break;
746 case 'E':
747 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
748 if (memref_referenced_p (memref, XVECEXP (x, i, j)))
749 return 1;
750 break;
751 }
752
753 return 0;
754 }
755
756 /* TRUE if some insn in the range (START, END] references a memory location
757 that would be affected by a store to MEMREF. */
758
759 static int
760 memref_used_between_p (rtx memref, rtx start, rtx end)
761 {
762 rtx insn;
763
764 for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
765 insn = NEXT_INSN (insn))
766 {
767 if (!INSN_P (insn))
768 continue;
769
770 if (memref_referenced_p (memref, PATTERN (insn)))
771 return 1;
772
773 /* Nonconst functions may access memory. */
774 if (CALL_P (insn)
775 && (! CONST_OR_PURE_CALL_P (insn)
776 || pure_call_p (insn)))
777 return 1;
778 }
779
780 return 0;
781 }
782
783 /* Find registers that are equivalent to a single value throughout the
784 compilation (either because they can be referenced in memory or are set once
785 from a single constant). Lower their priority for a register.
786
787 If such a register is only referenced once, try substituting its value
788 into the using insn. If it succeeds, we can eliminate the register
789 completely.
790
791 Initialize the REG_EQUIV_INIT array of initializing insns. */
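/* Illustrative example of such an equivalence (hypothetical pseudo number):
   if (reg 110) is set exactly once by

     (set (reg 110) (mem (symbol_ref ("x"))))

   and the MEM provably stays unchanged for the life of reg 110, the insn is
   given a REG_EQUIV note for the MEM.  If no hard register turns out to be
   available, reload can then substitute the equivalent memory reference for
   uses of reg 110.  */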
792
793 static void
794 update_equiv_regs (void)
795 {
796 rtx insn;
797 basic_block bb;
798 int loop_depth;
799 regset_head cleared_regs;
800 int clear_regnos = 0;
801
802 reg_equiv = XCNEWVEC (struct equivalence, max_regno);
803 INIT_REG_SET (&cleared_regs);
804 reg_equiv_init = ggc_alloc_cleared (max_regno * sizeof (rtx));
805 reg_equiv_init_size = max_regno;
806
807 init_alias_analysis ();
808
809 /* Scan the insns and find which registers have equivalences. Do this
810 in a separate scan of the insns because (due to -fcse-follow-jumps)
811 a register can be set below its use. */
812 FOR_EACH_BB (bb)
813 {
814 loop_depth = bb->loop_depth;
815
816 for (insn = BB_HEAD (bb);
817 insn != NEXT_INSN (BB_END (bb));
818 insn = NEXT_INSN (insn))
819 {
820 rtx note;
821 rtx set;
822 rtx dest, src;
823 int regno;
824
825 if (! INSN_P (insn))
826 continue;
827
828 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
829 if (REG_NOTE_KIND (note) == REG_INC)
830 no_equiv (XEXP (note, 0), note, NULL);
831
832 set = single_set (insn);
833
834 /* If this insn contains more (or less) than a single SET,
835 only mark all destinations as having no known equivalence. */
836 if (set == 0)
837 {
838 note_stores (PATTERN (insn), no_equiv, NULL);
839 continue;
840 }
841 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
842 {
843 int i;
844
845 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
846 {
847 rtx part = XVECEXP (PATTERN (insn), 0, i);
848 if (part != set)
849 note_stores (part, no_equiv, NULL);
850 }
851 }
852
853 dest = SET_DEST (set);
854 src = SET_SRC (set);
855
856 /* See if this is setting up the equivalence between an argument
857 register and its stack slot. */
858 note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
859 if (note)
860 {
861 gcc_assert (REG_P (dest));
862 regno = REGNO (dest);
863
864 /* Note that we don't want to clear reg_equiv_init even if there
865 are multiple sets of this register. */
866 reg_equiv[regno].is_arg_equivalence = 1;
867
868 /* Record for reload that this is an equivalencing insn. */
869 if (rtx_equal_p (src, XEXP (note, 0)))
870 reg_equiv_init[regno]
871 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[regno]);
872
873 /* Continue normally in case this is a candidate for
874 replacements. */
875 }
876
877 if (!optimize)
878 continue;
879
880 /* We only handle the case of a pseudo register being set
881 once, or always to the same value. */
882 /* ??? The mn10200 port breaks if we add equivalences for
883 values that need an ADDRESS_REGS register and set them equivalent
884 to a MEM of a pseudo. The actual problem is in the over-conservative
885 handling of INPADDR_ADDRESS / INPUT_ADDRESS / INPUT triples in
886 calculate_needs, but we traditionally work around this problem
887 here by rejecting equivalences when the destination is in a register
888 that's likely spilled. This is fragile, of course, since the
889 preferred class of a pseudo depends on all instructions that set
890 or use it. */
891
892 if (!REG_P (dest)
893 || (regno = REGNO (dest)) < FIRST_PSEUDO_REGISTER
894 || reg_equiv[regno].init_insns == const0_rtx
895 || (CLASS_LIKELY_SPILLED_P (reg_preferred_class (regno))
896 && MEM_P (src) && ! reg_equiv[regno].is_arg_equivalence))
897 {
898 /* This might be setting a SUBREG of a pseudo, a pseudo that is
899 also set somewhere else to a constant. */
900 note_stores (set, no_equiv, NULL);
901 continue;
902 }
903
904 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
905
906 /* cse sometimes generates function invariants, but doesn't put a
907 REG_EQUAL note on the insn. Since this note would be redundant,
908 there's no point creating it earlier than here. */
909 if (! note && ! rtx_varies_p (src, 0))
910 note = set_unique_reg_note (insn, REG_EQUAL, src);
911
912 /* Don't bother considering a REG_EQUAL note containing an EXPR_LIST
913 since it represents a function call. */
914 if (note && GET_CODE (XEXP (note, 0)) == EXPR_LIST)
915 note = NULL_RTX;
916
917 if (REG_N_SETS (regno) != 1
918 && (! note
919 || rtx_varies_p (XEXP (note, 0), 0)
920 || (reg_equiv[regno].replacement
921 && ! rtx_equal_p (XEXP (note, 0),
922 reg_equiv[regno].replacement))))
923 {
924 no_equiv (dest, set, NULL);
925 continue;
926 }
927 /* Record this insn as initializing this register. */
928 reg_equiv[regno].init_insns
929 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv[regno].init_insns);
930
931 /* If this register is known to be equal to a constant, record that
932 it is always equivalent to the constant. */
933 if (note && ! rtx_varies_p (XEXP (note, 0), 0))
934 PUT_MODE (note, (enum machine_mode) REG_EQUIV);
935
936 /* If this insn introduces a "constant" register, decrease the priority
937 of that register. Record this insn if the register is only used once
938 more and the equivalence value is the same as our source.
939
940 The latter condition is checked for two reasons: First, it is an
941 indication that it may be more efficient to actually emit the insn
942 as written (if no registers are available, reload will substitute
943 the equivalence). Secondly, it avoids problems with any registers
944 dying in this insn whose death notes would be missed.
945
946 If we don't have a REG_EQUIV note, see if this insn is loading
947 a register used only in one basic block from a MEM. If so, and the
948 MEM remains unchanged for the life of the register, add a REG_EQUIV
949 note. */
950
951 note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
952
953 if (note == 0 && REG_BASIC_BLOCK (regno) >= 0
954 && MEM_P (SET_SRC (set))
955 && validate_equiv_mem (insn, dest, SET_SRC (set)))
956 REG_NOTES (insn) = note = gen_rtx_EXPR_LIST (REG_EQUIV, SET_SRC (set),
957 REG_NOTES (insn));
958
959 if (note)
960 {
961 int regno = REGNO (dest);
962 rtx x = XEXP (note, 0);
963
964 /* If we haven't done so, record for reload that this is an
965 equivalencing insn. */
966 if (!reg_equiv[regno].is_arg_equivalence)
967 reg_equiv_init[regno]
968 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[regno]);
969
970 /* Record whether or not we created a REG_EQUIV note for a LABEL_REF.
971 We might end up substituting the LABEL_REF for uses of the
972 pseudo here or later. That kind of transformation may turn an
973 indirect jump into a direct jump, in which case we must rerun the
974 jump optimizer to ensure that the JUMP_LABEL fields are valid. */
975 if (GET_CODE (x) == LABEL_REF
976 || (GET_CODE (x) == CONST
977 && GET_CODE (XEXP (x, 0)) == PLUS
978 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)))
979 recorded_label_ref = 1;
980
981 reg_equiv[regno].replacement = x;
982 reg_equiv[regno].src_p = &SET_SRC (set);
983 reg_equiv[regno].loop_depth = loop_depth;
984
985 /* Don't mess with things live during setjmp. */
986 if (REG_LIVE_LENGTH (regno) >= 0 && optimize)
987 {
988 /* Note that the statement below does not affect the priority
989 in local-alloc! */
990 REG_LIVE_LENGTH (regno) *= 2;
991
992 /* If the register is referenced exactly twice, meaning it is
993 set once and used once, indicate that the reference may be
994 replaced by the equivalence we computed above. Do this
995 even if the register is only used in one block so that
996 dependencies can be handled where the last register is
997 used in a different block (i.e. HIGH / LO_SUM sequences)
998 and to reduce the number of registers alive across
999 calls. */
1000
1001 if (REG_N_REFS (regno) == 2
1002 && (rtx_equal_p (x, src)
1003 || ! equiv_init_varies_p (src))
1004 && NONJUMP_INSN_P (insn)
1005 && equiv_init_movable_p (PATTERN (insn), regno))
1006 reg_equiv[regno].replace = 1;
1007 }
1008 }
1009 }
1010 }
1011
1012 if (!optimize)
1013 goto out;
1014
1015 /* A second pass, to gather additional equivalences with memory. This needs
1016 to be done after we know which registers we are going to replace. */
1017
1018 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1019 {
1020 rtx set, src, dest;
1021 unsigned regno;
1022
1023 if (! INSN_P (insn))
1024 continue;
1025
1026 set = single_set (insn);
1027 if (! set)
1028 continue;
1029
1030 dest = SET_DEST (set);
1031 src = SET_SRC (set);
1032
1033 /* If this sets a MEM to the contents of a REG that is only used
1034 in a single basic block, see if the register is always equivalent
1035 to that memory location and if moving the store from INSN to the
1036 insn that set REG is safe. If so, put a REG_EQUIV note on the
1037 initializing insn.
1038
1039 Don't add a REG_EQUIV note if the insn already has one. The existing
1040 REG_EQUIV is likely more useful than the one we are adding.
1041
1042 If one of the regs in the address has reg_equiv[REGNO].replace set,
1043 then we can't add this REG_EQUIV note. The reg_equiv[REGNO].replace
1044 optimization may move the set of this register immediately before
1045 insn, which puts it after reg_equiv[REGNO].init_insns, and hence
1046 the mention in the REG_EQUIV note would be to an uninitialized
1047 pseudo. */
1048
1049 if (MEM_P (dest) && REG_P (src)
1050 && (regno = REGNO (src)) >= FIRST_PSEUDO_REGISTER
1051 && REG_BASIC_BLOCK (regno) >= 0
1052 && REG_N_SETS (regno) == 1
1053 && reg_equiv[regno].init_insns != 0
1054 && reg_equiv[regno].init_insns != const0_rtx
1055 && ! find_reg_note (XEXP (reg_equiv[regno].init_insns, 0),
1056 REG_EQUIV, NULL_RTX)
1057 && ! contains_replace_regs (XEXP (dest, 0)))
1058 {
1059 rtx init_insn = XEXP (reg_equiv[regno].init_insns, 0);
1060 if (validate_equiv_mem (init_insn, src, dest)
1061 && ! memref_used_between_p (dest, init_insn, insn))
1062 {
1063 REG_NOTES (init_insn)
1064 = gen_rtx_EXPR_LIST (REG_EQUIV, dest,
1065 REG_NOTES (init_insn));
1066 /* This insn makes the equivalence, not the one initializing
1067 the register. */
1068 reg_equiv_init[regno]
1069 = gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
1070 }
1071 }
1072 }
1073
1074 /* Now scan all regs killed in an insn to see if any of them are
1075 registers only used that once. If so, see if we can replace the
1076 reference with the equivalent form. If we can, delete the
1077 initializing reference and this register will go away. If we
1078 can't replace the reference, and the initializing reference is
1079 within the same loop (or in an inner loop), then move the register
1080 initialization just before the use, so that they are in the same
1081 basic block. */
1082 FOR_EACH_BB_REVERSE (bb)
1083 {
1084 loop_depth = bb->loop_depth;
1085 for (insn = BB_END (bb);
1086 insn != PREV_INSN (BB_HEAD (bb));
1087 insn = PREV_INSN (insn))
1088 {
1089 rtx link;
1090
1091 if (! INSN_P (insn))
1092 continue;
1093
1094 /* Don't substitute into a non-local goto, this confuses CFG. */
1095 if (JUMP_P (insn)
1096 && find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1097 continue;
1098
1099 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1100 {
1101 if (REG_NOTE_KIND (link) == REG_DEAD
1102 /* Make sure this insn still refers to the register. */
1103 && reg_mentioned_p (XEXP (link, 0), PATTERN (insn)))
1104 {
1105 int regno = REGNO (XEXP (link, 0));
1106 rtx equiv_insn;
1107
1108 if (! reg_equiv[regno].replace
1109 || reg_equiv[regno].loop_depth < loop_depth)
1110 continue;
1111
1112 /* reg_equiv[REGNO].replace gets set only when
1113 REG_N_REFS[REGNO] is 2, i.e. the register is set
1114 once and used once. (If it were only set, but not used,
1115 flow would have deleted the setting insns.) Hence
1116 there can only be one insn in reg_equiv[REGNO].init_insns. */
1117 gcc_assert (reg_equiv[regno].init_insns
1118 && !XEXP (reg_equiv[regno].init_insns, 1));
1119 equiv_insn = XEXP (reg_equiv[regno].init_insns, 0);
1120
1121 /* We may not move instructions that can throw, since
1122 that changes basic block boundaries and we are not
1123 prepared to adjust the CFG to match. */
1124 if (can_throw_internal (equiv_insn))
1125 continue;
1126
1127 if (asm_noperands (PATTERN (equiv_insn)) < 0
1128 && validate_replace_rtx (regno_reg_rtx[regno],
1129 *(reg_equiv[regno].src_p), insn))
1130 {
1131 rtx equiv_link;
1132 rtx last_link;
1133 rtx note;
1134
1135 /* Find the last note. */
1136 for (last_link = link; XEXP (last_link, 1);
1137 last_link = XEXP (last_link, 1))
1138 ;
1139
1140 /* Append the REG_DEAD notes from equiv_insn. */
1141 equiv_link = REG_NOTES (equiv_insn);
1142 while (equiv_link)
1143 {
1144 note = equiv_link;
1145 equiv_link = XEXP (equiv_link, 1);
1146 if (REG_NOTE_KIND (note) == REG_DEAD)
1147 {
1148 remove_note (equiv_insn, note);
1149 XEXP (last_link, 1) = note;
1150 XEXP (note, 1) = NULL_RTX;
1151 last_link = note;
1152 }
1153 }
1154
1155 remove_death (regno, insn);
1156 REG_N_REFS (regno) = 0;
1157 REG_FREQ (regno) = 0;
1158 delete_insn (equiv_insn);
1159
1160 reg_equiv[regno].init_insns
1161 = XEXP (reg_equiv[regno].init_insns, 1);
1162
1163 /* Remember to clear REGNO from all basic blocks' live
1164 info. */
1165 SET_REGNO_REG_SET (&cleared_regs, regno);
1166 clear_regnos++;
1167 reg_equiv_init[regno] = NULL_RTX;
1168 }
1169 /* Move the initialization of the register to just before
1170 INSN. Update the flow information. */
1171 else if (PREV_INSN (insn) != equiv_insn)
1172 {
1173 rtx new_insn;
1174
1175 new_insn = emit_insn_before (PATTERN (equiv_insn), insn);
1176 REG_NOTES (new_insn) = REG_NOTES (equiv_insn);
1177 REG_NOTES (equiv_insn) = 0;
1178
1179 /* Make sure this insn is recognized before
1180 reload begins, otherwise
1181 eliminate_regs_in_insn will die. */
1182 INSN_CODE (new_insn) = INSN_CODE (equiv_insn);
1183
1184 delete_insn (equiv_insn);
1185
1186 XEXP (reg_equiv[regno].init_insns, 0) = new_insn;
1187
1188 REG_BASIC_BLOCK (regno) = bb->index;
1189 REG_N_CALLS_CROSSED (regno) = 0;
1190 REG_N_THROWING_CALLS_CROSSED (regno) = 0;
1191 REG_LIVE_LENGTH (regno) = 2;
1192
1193 if (insn == BB_HEAD (bb))
1194 BB_HEAD (bb) = PREV_INSN (insn);
1195
1196 /* Remember to clear REGNO from all basic blocks' live
1197 info. */
1198 SET_REGNO_REG_SET (&cleared_regs, regno);
1199 clear_regnos++;
1200 reg_equiv_init[regno]
1201 = gen_rtx_INSN_LIST (VOIDmode, new_insn, NULL_RTX);
1202 }
1203 }
1204 }
1205 }
1206 }
1207
1208 /* Clear all dead REGNOs from all basic blocks' live info. */
1209 if (clear_regnos)
1210 {
1211 unsigned j;
1212
1213 if (clear_regnos > 8)
1214 {
1215 FOR_EACH_BB (bb)
1216 {
1217 AND_COMPL_REG_SET (bb->il.rtl->global_live_at_start,
1218 &cleared_regs);
1219 AND_COMPL_REG_SET (bb->il.rtl->global_live_at_end,
1220 &cleared_regs);
1221 }
1222 }
1223 else
1224 {
1225 reg_set_iterator rsi;
1226 EXECUTE_IF_SET_IN_REG_SET (&cleared_regs, 0, j, rsi)
1227 {
1228 FOR_EACH_BB (bb)
1229 {
1230 CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_start, j);
1231 CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_end, j);
1232 }
1233 }
1234 }
1235 }
1236
1237 out:
1238 /* Clean up. */
1239 end_alias_analysis ();
1240 CLEAR_REG_SET (&cleared_regs);
1241 free (reg_equiv);
1242 }
1243
1244 /* Mark REG as having no known equivalence.
1245 Some instructions might have been processed before and furnished
1246 with REG_EQUIV notes for this register; these notes will have to be
1247 removed.
1248 STORE is the piece of RTL that does the non-constant / conflicting
1249 assignment - a SET, CLOBBER or REG_INC note. It is currently not used,
1250 but needs to be there because this function is called from note_stores. */
1251 static void
1252 no_equiv (rtx reg, rtx store ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
1253 {
1254 int regno;
1255 rtx list;
1256
1257 if (!REG_P (reg))
1258 return;
1259 regno = REGNO (reg);
1260 list = reg_equiv[regno].init_insns;
1261 if (list == const0_rtx)
1262 return;
1263 reg_equiv[regno].init_insns = const0_rtx;
1264 reg_equiv[regno].replacement = NULL_RTX;
1265 /* This doesn't matter for equivalences made for argument registers; we
1266 should keep their initialization insns. */
1267 if (reg_equiv[regno].is_arg_equivalence)
1268 return;
1269 reg_equiv_init[regno] = NULL_RTX;
1270 for (; list; list = XEXP (list, 1))
1271 {
1272 rtx insn = XEXP (list, 0);
1273 remove_note (insn, find_reg_note (insn, REG_EQUIV, NULL_RTX));
1274 }
1275 }
1276
1277 /* Allocate hard regs to the pseudo regs used only within block number B.
1278 Only the pseudos that die but once can be handled. */
1279
1280 static void
1281 block_alloc (int b)
1282 {
1283 int i, q;
1284 rtx insn;
1285 rtx note, hard_reg;
1286 int insn_number = 0;
1287 int insn_count = 0;
1288 int max_uid = get_max_uid ();
1289 int *qty_order;
1290 int no_conflict_combined_regno = -1;
1291
1292 /* Count the instructions in the basic block. */
1293
1294 insn = BB_END (BASIC_BLOCK (b));
1295 while (1)
1296 {
1297 if (!NOTE_P (insn))
1298 {
1299 ++insn_count;
1300 gcc_assert (insn_count <= max_uid);
1301 }
1302 if (insn == BB_HEAD (BASIC_BLOCK (b)))
1303 break;
1304 insn = PREV_INSN (insn);
1305 }
1306
1307 /* +2 to leave room for a post_mark_life at the last insn and for
1308 the birth of a CLOBBER in the first insn. */
1309 regs_live_at = XCNEWVEC (HARD_REG_SET, 2 * insn_count + 2);
1310
1311 /* Initialize table of hardware registers currently live. */
1312
1313 REG_SET_TO_HARD_REG_SET (regs_live,
1314 BASIC_BLOCK (b)->il.rtl->global_live_at_start);
1315
1316 /* This loop scans the instructions of the basic block
1317 and assigns quantities to registers.
1318 It computes which registers to tie. */
1319
1320 insn = BB_HEAD (BASIC_BLOCK (b));
1321 while (1)
1322 {
1323 if (!NOTE_P (insn))
1324 insn_number++;
1325
1326 if (INSN_P (insn))
1327 {
1328 rtx link, set;
1329 int win = 0;
1330 rtx r0, r1 = NULL_RTX;
1331 int combined_regno = -1;
1332 int i;
1333
1334 this_insn_number = insn_number;
1335 this_insn = insn;
1336
1337 extract_insn (insn);
1338 which_alternative = -1;
1339
1340 /* Is this insn suitable for tying two registers?
1341 If so, try doing that.
1342 Suitable insns are those with at least two operands and where
1343 operand 0 is an output that is a register that is not
1344 earlyclobber.
1345
1346 We can tie operand 0 with some operand that dies in this insn.
1347 First look for operands that are required to be in the same
1348 register as operand 0. If we find such, only try tying that
1349 operand or one that can be put into that operand if the
1350 operation is commutative. If we don't find an operand
1351 that is required to be in the same register as operand 0,
1352 we can tie with any operand.
1353
1354 Subregs in place of regs are also ok.
1355
1356 If tying is done, WIN is set nonzero. */
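/* Example of the matching-constraint case (hypothetical constraints):
   for a three-operand add whose constraints are "=r", "0", "r" in every
   alternative, operand 1 must match operand 0, so only operand 1 is tried
   for tying; if operand 1's constraint were instead "%0", operand 2 could
   be tried as well by commutativity.  */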
1357
1358 if (optimize
1359 && recog_data.n_operands > 1
1360 && recog_data.constraints[0][0] == '='
1361 && recog_data.constraints[0][1] != '&')
1362 {
1363 /* If non-negative, is an operand that must match operand 0. */
1364 int must_match_0 = -1;
1365 /* Counts number of alternatives that require a match with
1366 operand 0. */
1367 int n_matching_alts = 0;
1368
1369 for (i = 1; i < recog_data.n_operands; i++)
1370 {
1371 const char *p = recog_data.constraints[i];
1372 int this_match = requires_inout (p);
1373
1374 n_matching_alts += this_match;
1375 if (this_match == recog_data.n_alternatives)
1376 must_match_0 = i;
1377 }
1378
1379 r0 = recog_data.operand[0];
1380 for (i = 1; i < recog_data.n_operands; i++)
1381 {
1382 /* Skip this operand if we found an operand that
1383 must match operand 0 and this operand isn't it
1384 and can't be made to be it by commutativity. */
1385
1386 if (must_match_0 >= 0 && i != must_match_0
1387 && ! (i == must_match_0 + 1
1388 && recog_data.constraints[i-1][0] == '%')
1389 && ! (i == must_match_0 - 1
1390 && recog_data.constraints[i][0] == '%'))
1391 continue;
1392
1393 /* Likewise if each alternative has some operand that
1394 must match operand zero. In that case, skip any
1395 operand that doesn't list operand 0 since we know that
1396 the operand always conflicts with operand 0. We
1397 ignore commutativity in this case to keep things simple. */
1398 if (n_matching_alts == recog_data.n_alternatives
1399 && 0 == requires_inout (recog_data.constraints[i]))
1400 continue;
1401
1402 r1 = recog_data.operand[i];
1403
1404 /* If the operand is an address, find a register in it.
1405 There may be more than one register, but we only try one
1406 of them. */
1407 if (recog_data.constraints[i][0] == 'p'
1408 || EXTRA_ADDRESS_CONSTRAINT (recog_data.constraints[i][0],
1409 recog_data.constraints[i]))
1410 while (GET_CODE (r1) == PLUS || GET_CODE (r1) == MULT)
1411 r1 = XEXP (r1, 0);
1412
1413 /* Avoid making a call-saved register unnecessarily
1414 clobbered. */
1415 hard_reg = get_hard_reg_initial_reg (cfun, r1);
1416 if (hard_reg != NULL_RTX)
1417 {
1418 if (REG_P (hard_reg)
1419 && REGNO (hard_reg) < FIRST_PSEUDO_REGISTER
1420 && !call_used_regs[REGNO (hard_reg)])
1421 continue;
1422 }
1423
1424 if (REG_P (r0) || GET_CODE (r0) == SUBREG)
1425 {
1426 /* We have two priorities for hard register preferences.
1427 If we have a move insn or an insn whose first input
1428 can only be in the same register as the output, give
1429 priority to an equivalence found from that insn. */
1430 int may_save_copy
1431 = (r1 == recog_data.operand[i] && must_match_0 >= 0);
1432
1433 if (REG_P (r1) || GET_CODE (r1) == SUBREG)
1434 win = combine_regs (r1, r0, may_save_copy,
1435 insn_number, insn, 0);
1436 }
1437 if (win)
1438 break;
1439 }
1440 }
1441
1442 /* Recognize an insn sequence with an ultimate result
1443 which can safely overlap one of the inputs.
1444 The sequence begins with a CLOBBER of its result,
1445 and ends with an insn that copies the result to itself
1446 and has a REG_EQUAL note for an equivalent formula.
1447 That note indicates what the inputs are.
1448 The result and the input can overlap if each insn in
1449 the sequence either doesn't mention the input
1450 or has a REG_NO_CONFLICT note to inhibit the conflict.
1451
1452 We do the combining test at the CLOBBER so that the
1453 destination register won't have had a quantity number
1454 assigned, since that would prevent combining. */
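/* Illustrative shape of such a sequence (hypothetical pseudo numbers):

     (clobber (reg 100))            ; REG_LIBCALL note pointing at the last insn
       ... insns computing into reg 100 that either don't mention reg 101
           or carry a REG_NO_CONFLICT note for it ...
     (set (reg 100) (reg 100))      ; REG_RETVAL note pointing at the CLOBBER,
                                    ; REG_EQUAL (plus (reg 101) (reg 102))

   Here reg 100 may be tied to reg 101 (or, since PLUS is commutative, to
   reg 102) even though those inputs are live across the intermediate
   insns.  */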
1455
1456 if (optimize
1457 && GET_CODE (PATTERN (insn)) == CLOBBER
1458 && (r0 = XEXP (PATTERN (insn), 0),
1459 REG_P (r0))
1460 && (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
1461 && XEXP (link, 0) != 0
1462 && NONJUMP_INSN_P (XEXP (link, 0))
1463 && (set = single_set (XEXP (link, 0))) != 0
1464 && SET_DEST (set) == r0 && SET_SRC (set) == r0
1465 && (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
1466 NULL_RTX)) != 0)
1467 {
1468 if (r1 = XEXP (note, 0), REG_P (r1)
1469 /* Check that we have such a sequence. */
1470 && no_conflict_p (insn, r0, r1))
1471 win = combine_regs (r1, r0, 1, insn_number, insn, 1);
1472 else if (GET_RTX_FORMAT (GET_CODE (XEXP (note, 0)))[0] == 'e'
1473 && (r1 = XEXP (XEXP (note, 0), 0),
1474 REG_P (r1) || GET_CODE (r1) == SUBREG)
1475 && no_conflict_p (insn, r0, r1))
1476 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1477
1478 /* Here we care if the operation to be computed is
1479 commutative. */
1480 else if (COMMUTATIVE_P (XEXP (note, 0))
1481 && (r1 = XEXP (XEXP (note, 0), 1),
1482 (REG_P (r1) || GET_CODE (r1) == SUBREG))
1483 && no_conflict_p (insn, r0, r1))
1484 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1485
1486 /* If we did combine something, show the register number
1487 in question so that we know to ignore its death. */
1488 if (win)
1489 no_conflict_combined_regno = REGNO (r1);
1490 }
1491
1492 /* If registers were just tied, set COMBINED_REGNO
1493 to the number of the register used in this insn
1494 that was tied to the register set in this insn.
1495 This register's qty should not be "killed". */
1496
1497 if (win)
1498 {
1499 while (GET_CODE (r1) == SUBREG)
1500 r1 = SUBREG_REG (r1);
1501 combined_regno = REGNO (r1);
1502 }
1503
1504 /* Mark the death of everything that dies in this instruction,
1505 except for anything that was just combined. */
1506
1507 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1508 if (REG_NOTE_KIND (link) == REG_DEAD
1509 && REG_P (XEXP (link, 0))
1510 && combined_regno != (int) REGNO (XEXP (link, 0))
1511 && (no_conflict_combined_regno != (int) REGNO (XEXP (link, 0))
1512 || ! find_reg_note (insn, REG_NO_CONFLICT,
1513 XEXP (link, 0))))
1514 wipe_dead_reg (XEXP (link, 0), 0);
1515
1516 /* Allocate qty numbers for all registers local to this block
1517 that are born (set) in this instruction.
1518 A pseudo that already has a qty is not changed. */
1519
1520 note_stores (PATTERN (insn), reg_is_set, NULL);
1521
1522 /* If anything is set in this insn and then unused, mark it as dying
1523 after this insn, so it will conflict with our outputs. This
1524 can't match with something that combined, and it doesn't matter
1525 if it did. Do this after the calls to reg_is_set since these
1526 die after, not during, the current insn. */
1527
1528 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1529 if (REG_NOTE_KIND (link) == REG_UNUSED
1530 && REG_P (XEXP (link, 0)))
1531 wipe_dead_reg (XEXP (link, 0), 1);
1532
1533 /* If this is an insn that has a REG_RETVAL note pointing at a
1534 CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
1535 block, so clear any register number that combined within it. */
1536 if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
1537 && NONJUMP_INSN_P (XEXP (note, 0))
1538 && GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
1539 no_conflict_combined_regno = -1;
1540 }
1541
1542 /* Set the registers live after INSN_NUMBER. Note that we never
1543 record the registers live before the block's first insn, since no
1544 pseudos we care about are live before that insn. */
1545
1546 IOR_HARD_REG_SET (regs_live_at[2 * insn_number], regs_live);
1547 IOR_HARD_REG_SET (regs_live_at[2 * insn_number + 1], regs_live);
1548
1549 if (insn == BB_END (BASIC_BLOCK (b)))
1550 break;
1551
1552 insn = NEXT_INSN (insn);
1553 }
1554
1555 /* Now every register that is local to this basic block
1556 should have been given a quantity, or else -1 meaning ignore it.
1557 Every quantity should have a known birth and death.
1558
1559 Order the qtys so we assign them registers in order of the
1560 number of suggested registers they need so we allocate those with
1561 the most restrictive needs first. */
1562
1563 qty_order = XNEWVEC (int, next_qty);
1564 for (i = 0; i < next_qty; i++)
1565 qty_order[i] = i;
1566
1567 #define EXCHANGE(I1, I2) \
1568 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1569
1570 switch (next_qty)
1571 {
1572 case 3:
1573 /* Make qty_order[2] be the one to allocate last. */
1574 if (qty_sugg_compare (0, 1) > 0)
1575 EXCHANGE (0, 1);
1576 if (qty_sugg_compare (1, 2) > 0)
1577 EXCHANGE (2, 1);
1578
1579 /* ... Fall through ... */
1580 case 2:
1581 /* Put the best one to allocate in qty_order[0]. */
1582 if (qty_sugg_compare (0, 1) > 0)
1583 EXCHANGE (0, 1);
1584
1585 /* ... Fall through ... */
1586
1587 case 1:
1588 case 0:
1589 /* Nothing to do here. */
1590 break;
1591
1592 default:
1593 qsort (qty_order, next_qty, sizeof (int), qty_sugg_compare_1);
1594 }
1595
1596 /* Try to put each quantity in a suggested physical register, if it has one.
1597 This may cause registers to be allocated that otherwise wouldn't be, but
1598 this seems acceptable in local allocation (unlike global allocation). */
1599 for (i = 0; i < next_qty; i++)
1600 {
1601 q = qty_order[i];
1602 if (qty_phys_num_sugg[q] != 0 || qty_phys_num_copy_sugg[q] != 0)
1603 qty[q].phys_reg = find_free_reg (qty[q].min_class, qty[q].mode, q,
1604 0, 1, qty[q].birth, qty[q].death);
1605 else
1606 qty[q].phys_reg = -1;
1607 }
1608
1609 /* Order the qtys so we assign them registers in order of
1610 decreasing length of life. Normally call qsort, but if we
1611 have only a very small number of quantities, sort them ourselves. */
1612
1613 for (i = 0; i < next_qty; i++)
1614 qty_order[i] = i;
1615
1616 #define EXCHANGE(I1, I2) \
1617 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1618
1619 switch (next_qty)
1620 {
1621 case 3:
1622 /* Make qty_order[2] be the one to allocate last. */
1623 if (qty_compare (0, 1) > 0)
1624 EXCHANGE (0, 1);
1625 if (qty_compare (1, 2) > 0)
1626 EXCHANGE (2, 1);
1627
1628 /* ... Fall through ... */
1629 case 2:
1630 /* Put the best one to allocate in qty_order[0]. */
1631 if (qty_compare (0, 1) > 0)
1632 EXCHANGE (0, 1);
1633
1634 /* ... Fall through ... */
1635
1636 case 1:
1637 case 0:
1638 /* Nothing to do here. */
1639 break;
1640
1641 default:
1642 qsort (qty_order, next_qty, sizeof (int), qty_compare_1);
1643 }
1644
1645 /* Now for each qty that is not a hardware register,
1646 look for a hardware register to put it in.
1647 First try the register class that is cheapest for this qty,
1648 if there is more than one class. */
1649
1650 for (i = 0; i < next_qty; i++)
1651 {
1652 q = qty_order[i];
1653 if (qty[q].phys_reg < 0)
1654 {
1655 #ifdef INSN_SCHEDULING
1656 /* These values represent the adjusted lifetime of a qty so
1657 that it conflicts with qtys which appear near the start/end
1658 of this qty's lifetime.
1659
1660 The purpose behind extending the lifetime of this qty is to
1661 discourage the register allocator from creating false
1662 dependencies.
1663
1664 The adjustment value is chosen to indicate that this qty
1665 conflicts with all the qtys in the instructions immediately
1666 before and after the lifetime of this qty.
1667
1668 Experiments have shown that higher values tend to hurt
1669 overall code performance.
1670
1671 If allocation using the extended lifetime fails we will try
1672 again with the qty's unadjusted lifetime. */
1673 int fake_birth = MAX (0, qty[q].birth - 2 + qty[q].birth % 2);
1674 int fake_death = MIN (insn_number * 2 + 1,
1675 qty[q].death + 2 - qty[q].death % 2);
1676 #endif
1677
1678 if (N_REG_CLASSES > 1)
1679 {
1680 #ifdef INSN_SCHEDULING
1681 /* We try to avoid using hard registers allocated to qtys which
1682 are born immediately after this qty or die immediately before
1683 this qty.
1684
1685 This optimization is only appropriate when we will run
1686 a scheduling pass after reload and we are not optimizing
1687 for code size. */
1688 if (flag_schedule_insns_after_reload
1689 && !optimize_size
1690 && !SMALL_REGISTER_CLASSES)
1691 {
1692 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1693 qty[q].mode, q, 0, 0,
1694 fake_birth, fake_death);
1695 if (qty[q].phys_reg >= 0)
1696 continue;
1697 }
1698 #endif
1699 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1700 qty[q].mode, q, 0, 0,
1701 qty[q].birth, qty[q].death);
1702 if (qty[q].phys_reg >= 0)
1703 continue;
1704 }
1705
1706 #ifdef INSN_SCHEDULING
1707 /* Similarly, avoid false dependencies. */
1708 if (flag_schedule_insns_after_reload
1709 && !optimize_size
1710 && !SMALL_REGISTER_CLASSES
1711 && qty[q].alternate_class != NO_REGS)
1712 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1713 qty[q].mode, q, 0, 0,
1714 fake_birth, fake_death);
1715 #endif
1716 if (qty[q].alternate_class != NO_REGS)
1717 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1718 qty[q].mode, q, 0, 0,
1719 qty[q].birth, qty[q].death);
1720 }
1721 }
1722
1723 /* Now propagate the register assignments
1724 to the pseudo regs belonging to the qtys. */
1725
1726 for (q = 0; q < next_qty; q++)
1727 if (qty[q].phys_reg >= 0)
1728 {
1729 for (i = qty[q].first_reg; i >= 0; i = reg_next_in_qty[i])
1730 reg_renumber[i] = qty[q].phys_reg + reg_offset[i];
1731 }
1732
1733 /* Clean up. */
1734 free (regs_live_at);
1735 free (qty_order);
1736 }
1737
1738 /* Compare two quantities' priority for getting real registers.
1739 We give shorter-lived quantities higher priority.
1740 Quantities with more references are also preferred, as are quantities that
1741 require multiple registers. This is the identical prioritization as
1742 done by global-alloc.
1743
1744 We used to give preference to registers with *longer* lives, but using
1745 the same algorithm in both local- and global-alloc can speed up execution
1746 of some programs by as much as a factor of three! */
1747
1748 /* Note that the quotient will never be bigger than
1749 the value of floor_log2 times the maximum number of
1750 times a register can occur in one insn (surely less than 100)
1751 weighted by frequency (max REG_FREQ_MAX).
1752 Multiplying this by 10000/REG_FREQ_MAX can't overflow.
1753 QTY_CMP_PRI is also used by qty_sugg_compare. */
1754
1755 #define QTY_CMP_PRI(q) \
1756 ((int) (((double) (floor_log2 (qty[q].n_refs) * qty[q].freq * qty[q].size) \
1757 / (qty[q].death - qty[q].birth)) * (10000 / REG_FREQ_MAX)))
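/* Worked example (illustrative numbers only): a qty with 4 references,
   frequency 1000 and size 1 that is born at index 6 and dies at index 14
   gets priority floor_log2 (4) * 1000 * 1 / (14 - 6) = 250, times
   10000 / REG_FREQ_MAX; a qty with the same counts but twice the lifetime
   gets half that, so shorter-lived quantities are preferred.  */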
1758
1759 static int
1760 qty_compare (int q1, int q2)
1761 {
1762 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1763 }
1764
1765 static int
1766 qty_compare_1 (const void *q1p, const void *q2p)
1767 {
1768 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1769 int tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1770
1771 if (tem != 0)
1772 return tem;
1773
1774 /* If qtys are equally good, sort by qty number,
1775 so that the results of qsort leave nothing to chance. */
1776 return q1 - q2;
1777 }
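
/* Usage sketch (an illustrative addition, not a quotation of the real
   call site): qty_compare_1 is a qsort-style comparator, so the qty
   numbers can be ranked with something like

     qsort (qty_order, next_qty, sizeof (int), qty_compare_1);

   after which qty_order[0] is the highest-priority quantity.  */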
1778
1779 /* Compare two quantities' priority for getting real registers. This version
1780 is called for quantities that have suggested hard registers. First priority
1781 goes to quantities that have copy preferences, then to those that have
1782 normal preferences. Within those groups, quantities with fewer
1783 preferences have higher priority. Of those, we use the same
1784 algorithm as above. */
1785
1786 #define QTY_CMP_SUGG(q) \
1787 (qty_phys_num_copy_sugg[q] \
1788 ? qty_phys_num_copy_sugg[q] \
1789 : qty_phys_num_sugg[q] * FIRST_PSEUDO_REGISTER)
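
/* Illustration of the weighting above (hypothetical numbers): with
   FIRST_PSEUDO_REGISTER == 53, a qty with two copy suggestions keys as
   2, while a qty with a single arithmetic suggestion keys as 53, so it
   sorts later in qty_sugg_compare, matching the ordering described in
   the comment above; within each group, fewer suggestions sort
   first.  */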
1790
1791 static int
1792 qty_sugg_compare (int q1, int q2)
1793 {
1794 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1795
1796 if (tem != 0)
1797 return tem;
1798
1799 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1800 }
1801
1802 static int
1803 qty_sugg_compare_1 (const void *q1p, const void *q2p)
1804 {
1805 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1806 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1807
1808 if (tem != 0)
1809 return tem;
1810
1811 tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1812 if (tem != 0)
1813 return tem;
1814
1815 /* If qtys are equally good, sort by qty number,
1816 so that the results of qsort leave nothing to chance. */
1817 return q1 - q2;
1818 }
1819
1820 #undef QTY_CMP_SUGG
1821 #undef QTY_CMP_PRI
1822
1823 /* Attempt to combine the two registers (rtx's) USEDREG and SETREG.
1824 Returns 1 if it has done so, or 0 if it cannot.
1825
1826 Combining registers means marking them as having the same quantity
1827 and adjusting the offsets within the quantity if either of
1828 them is a SUBREG.
1829
1830 We don't actually combine a hard reg with a pseudo; instead
1831 we just record the hard reg as the suggestion for the pseudo's quantity.
1832 If we really combined them, we could lose if the pseudo lives
1833 across an insn that clobbers the hard reg (eg, movmem).
1834
1835 ALREADY_DEAD is nonzero if USEDREG is known to be dead even though
1836 there is no REG_DEAD note on INSN. This occurs during the processing
1837 of REG_NO_CONFLICT blocks.
1838
1839 MAY_SAVE_COPY is nonzero if this insn is simply copying USEDREG to
1840 SETREG or if the input and output must share a register.
1841 In that case, we record a hard reg suggestion in QTY_PHYS_COPY_SUGG.
1842
1843 There are elaborate checks for the validity of combining. */
1844
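/* An illustrative case (the pseudo numbers are hypothetical): for a
   copy insn such as

     (set (reg:SI 105) (reg:SI 101))

   in which pseudo 101 dies, combine_regs puts 105 into 101's quantity,
   so both later receive the same hard register and the copy becomes a
   no-op.  If the source were instead a hard register, we would only
   record it in qty_phys_copy_sugg as a suggestion for 105's quantity.  */
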
1845 static int
1846 combine_regs (rtx usedreg, rtx setreg, int may_save_copy, int insn_number,
1847 rtx insn, int already_dead)
1848 {
1849 int ureg, sreg;
1850 int offset = 0;
1851 int usize, ssize;
1852 int sqty;
1853
1854 /* Determine the numbers and sizes of registers being used. If a subreg
1855 is present that does not change the entire register, don't consider
1856 this a copy insn. */
1857
1858 while (GET_CODE (usedreg) == SUBREG)
1859 {
1860 rtx subreg = SUBREG_REG (usedreg);
1861
1862 if (REG_P (subreg))
1863 {
1864 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1865 may_save_copy = 0;
1866
1867 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1868 offset += subreg_regno_offset (REGNO (subreg),
1869 GET_MODE (subreg),
1870 SUBREG_BYTE (usedreg),
1871 GET_MODE (usedreg));
1872 else
1873 offset += (SUBREG_BYTE (usedreg)
1874 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1875 }
1876
1877 usedreg = subreg;
1878 }
1879
1880 if (!REG_P (usedreg))
1881 return 0;
1882
1883 ureg = REGNO (usedreg);
1884 if (ureg < FIRST_PSEUDO_REGISTER)
1885 usize = hard_regno_nregs[ureg][GET_MODE (usedreg)];
1886 else
1887 usize = ((GET_MODE_SIZE (GET_MODE (usedreg))
1888 + (REGMODE_NATURAL_SIZE (GET_MODE (usedreg)) - 1))
1889 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
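
/* For example (illustrative, assuming a 32-bit-word target where
   REGMODE_NATURAL_SIZE is 4): scanning (subreg:SI (reg:DI 100) 4)
   leaves USEDREG as pseudo 100 with offset == 1 and usize == 2, i.e.
   the use names the second of the two words making up the DImode
   pseudo.  */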
1890
1891 while (GET_CODE (setreg) == SUBREG)
1892 {
1893 rtx subreg = SUBREG_REG (setreg);
1894
1895 if (REG_P (subreg))
1896 {
1897 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1898 may_save_copy = 0;
1899
1900 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1901 offset -= subreg_regno_offset (REGNO (subreg),
1902 GET_MODE (subreg),
1903 SUBREG_BYTE (setreg),
1904 GET_MODE (setreg));
1905 else
1906 offset -= (SUBREG_BYTE (setreg)
1907 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1908 }
1909
1910 setreg = subreg;
1911 }
1912
1913 if (!REG_P (setreg))
1914 return 0;
1915
1916 sreg = REGNO (setreg);
1917 if (sreg < FIRST_PSEUDO_REGISTER)
1918 ssize = hard_regno_nregs[sreg][GET_MODE (setreg)];
1919 else
1920 ssize = ((GET_MODE_SIZE (GET_MODE (setreg))
1921 + (REGMODE_NATURAL_SIZE (GET_MODE (setreg)) - 1))
1922 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1923
1924 /* If UREG is a pseudo-register that hasn't already been assigned a
1925 quantity number, it means that it is not local to this block or dies
1926 more than once. In either event, we can't do anything with it. */
1927 if ((ureg >= FIRST_PSEUDO_REGISTER && reg_qty[ureg] < 0)
1928 /* Do not combine registers unless one fits within the other. */
1929 || (offset > 0 && usize + offset > ssize)
1930 || (offset < 0 && usize + offset < ssize)
1931 /* Do not combine with a smaller already-assigned object
1932 if that smaller object is already combined with something bigger. */
1933 || (ssize > usize && ureg >= FIRST_PSEUDO_REGISTER
1934 && usize < qty[reg_qty[ureg]].size)
1935 /* Can't combine if SREG is not a register we can allocate. */
1936 || (sreg >= FIRST_PSEUDO_REGISTER && reg_qty[sreg] == -1)
1937 /* Don't combine with a pseudo mentioned in a REG_NO_CONFLICT note.
1938 These have already been taken care of. This probably wouldn't
1939 combine anyway, but don't take any chances. */
1940 || (ureg >= FIRST_PSEUDO_REGISTER
1941 && find_reg_note (insn, REG_NO_CONFLICT, usedreg))
1942 /* Don't tie something to itself. In most cases it would make no
1943 difference, but it would screw up if the reg being tied to itself
1944 also dies in this insn. */
1945 || ureg == sreg
1946 /* Don't try to connect two different hardware registers. */
1947 || (ureg < FIRST_PSEUDO_REGISTER && sreg < FIRST_PSEUDO_REGISTER)
1948 /* Don't connect two different machine modes if they have different
1949 implications as to which registers may be used. */
1950 || !MODES_TIEABLE_P (GET_MODE (usedreg), GET_MODE (setreg)))
1951 return 0;
1952
1953 /* Now, if UREG is a hard reg and SREG is a pseudo, record the hard reg in
1954 qty_phys_sugg for the pseudo instead of tying them.
1955
1956 Return "failure" so that the lifespan of UREG is terminated here;
1957 that way the two lifespans will be disjoint and nothing will prevent
1958 the pseudo reg from being given this hard reg. */
1959
1960 if (ureg < FIRST_PSEUDO_REGISTER)
1961 {
1962 /* Allocate a quantity number so we have a place to put our
1963 suggestions. */
1964 if (reg_qty[sreg] == -2)
1965 reg_is_born (setreg, 2 * insn_number);
1966
1967 if (reg_qty[sreg] >= 0)
1968 {
1969 if (may_save_copy
1970 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg))
1971 {
1972 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg);
1973 qty_phys_num_copy_sugg[reg_qty[sreg]]++;
1974 }
1975 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg))
1976 {
1977 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg);
1978 qty_phys_num_sugg[reg_qty[sreg]]++;
1979 }
1980 }
1981 return 0;
1982 }
1983
1984 /* Similarly for SREG a hard register and UREG a pseudo register. */
1985
1986 if (sreg < FIRST_PSEUDO_REGISTER)
1987 {
1988 if (may_save_copy
1989 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg))
1990 {
1991 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg);
1992 qty_phys_num_copy_sugg[reg_qty[ureg]]++;
1993 }
1994 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg))
1995 {
1996 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg);
1997 qty_phys_num_sugg[reg_qty[ureg]]++;
1998 }
1999 return 0;
2000 }
2001
2002 /* At this point we know that SREG and UREG are both pseudos.
2003 Do nothing if SREG already has a quantity or is a register that we
2004 don't allocate. */
2005 if (reg_qty[sreg] >= -1
2006 /* If we are not going to let any regs live across calls,
2007 don't tie a call-crossing reg to a non-call-crossing reg. */
2008 || (current_function_has_nonlocal_label
2009 && ((REG_N_CALLS_CROSSED (ureg) > 0)
2010 != (REG_N_CALLS_CROSSED (sreg) > 0))))
2011 return 0;
2012
2013 /* We don't already know about SREG, so tie it to UREG
2014 if this is the last use of UREG, provided the classes they want
2015 are compatible. */
2016
2017 if ((already_dead || find_regno_note (insn, REG_DEAD, ureg))
2018 && reg_meets_class_p (sreg, qty[reg_qty[ureg]].min_class))
2019 {
2020 /* Add SREG to UREG's quantity. */
2021 sqty = reg_qty[ureg];
2022 reg_qty[sreg] = sqty;
2023 reg_offset[sreg] = reg_offset[ureg] + offset;
2024 reg_next_in_qty[sreg] = qty[sqty].first_reg;
2025 qty[sqty].first_reg = sreg;
2026
2027 /* If SREG's reg class is smaller, set qty[SQTY].min_class. */
2028 update_qty_class (sqty, sreg);
2029
2030 /* Update info about quantity SQTY. */
2031 qty[sqty].n_calls_crossed += REG_N_CALLS_CROSSED (sreg);
2032 qty[sqty].n_throwing_calls_crossed
2033 += REG_N_THROWING_CALLS_CROSSED (sreg);
2034 qty[sqty].n_refs += REG_N_REFS (sreg);
2035 qty[sqty].freq += REG_FREQ (sreg);
2036 if (usize < ssize)
2037 {
2038 int i;
2039
2040 for (i = qty[sqty].first_reg; i >= 0; i = reg_next_in_qty[i])
2041 reg_offset[i] -= offset;
2042
2043 qty[sqty].size = ssize;
2044 qty[sqty].mode = GET_MODE (setreg);
2045 }
2046 }
2047 else
2048 return 0;
2049
2050 return 1;
2051 }
2052
2053 /* Return 1 if the preferred class of REG allows it to be tied
2054 to a quantity or register whose class is CLASS.
2055 True if REG's reg class either contains or is contained in CLASS. */
2056
2057 static int
2058 reg_meets_class_p (int reg, enum reg_class class)
2059 {
2060 enum reg_class rclass = reg_preferred_class (reg);
2061 return (reg_class_subset_p (rclass, class)
2062 || reg_class_subset_p (class, rclass));
2063 }
2064
2065 /* Update the class of QTYNO assuming that REG is being tied to it. */
2066
2067 static void
2068 update_qty_class (int qtyno, int reg)
2069 {
2070 enum reg_class rclass = reg_preferred_class (reg);
2071 if (reg_class_subset_p (rclass, qty[qtyno].min_class))
2072 qty[qtyno].min_class = rclass;
2073
2074 rclass = reg_alternate_class (reg);
2075 if (reg_class_subset_p (rclass, qty[qtyno].alternate_class))
2076 qty[qtyno].alternate_class = rclass;
2077 }
2078
2079 /* Handle something which alters the value of an rtx REG.
2080
2081 REG is whatever is set or clobbered. SETTER is the rtx that
2082 is modifying the register.
2083
2084 If it is not really a register, we do nothing.
2085 The file-global variables `this_insn' and `this_insn_number'
2086 carry info from `block_alloc'. */
2087
2088 static void
2089 reg_is_set (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
2090 {
2091 /* Note that note_stores will only pass us a SUBREG if it is a SUBREG of
2092 a hard register. These may actually not exist any more. */
2093
2094 if (GET_CODE (reg) != SUBREG
2095 && !REG_P (reg))
2096 return;
2097
2098 /* Mark this register as being born. If it is used in a CLOBBER, mark
2099 it as being born halfway between the previous insn and this insn so that
2100 it conflicts with our inputs but not the outputs of the previous insn. */
2101
2102 reg_is_born (reg, 2 * this_insn_number - (GET_CODE (setter) == CLOBBER));
2103 }
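
/* The indexing above, with hypothetical numbers: if this_insn_number
   is 7, a register set by a plain SET is born at index 14, while one
   set by a CLOBBER is born at index 13, halfway between insn 6 and
   insn 7, so it conflicts with insn 7's inputs but not with insn 6's
   outputs.  */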
2104
2105 /* Handle beginning of the life of register REG.
2106 BIRTH is the index at which this is happening. */
2107
2108 static void
2109 reg_is_born (rtx reg, int birth)
2110 {
2111 int regno;
2112
2113 if (GET_CODE (reg) == SUBREG)
2114 {
2115 regno = REGNO (SUBREG_REG (reg));
2116 if (regno < FIRST_PSEUDO_REGISTER)
2117 regno = subreg_regno (reg);
2118 }
2119 else
2120 regno = REGNO (reg);
2121
2122 if (regno < FIRST_PSEUDO_REGISTER)
2123 {
2124 mark_life (regno, GET_MODE (reg), 1);
2125
2126 /* If the register was to have been born earlier than the present
2127 insn, mark it as live where it is actually born. */
2128 if (birth < 2 * this_insn_number)
2129 post_mark_life (regno, GET_MODE (reg), 1, birth, 2 * this_insn_number);
2130 }
2131 else
2132 {
2133 if (reg_qty[regno] == -2)
2134 alloc_qty (regno, GET_MODE (reg), PSEUDO_REGNO_SIZE (regno), birth);
2135
2136 /* If this register has a quantity number, show that it isn't dead. */
2137 if (reg_qty[regno] >= 0)
2138 qty[reg_qty[regno]].death = -1;
2139 }
2140 }
2141
2142 /* Record the death of REG in the current insn. If OUTPUT_P is nonzero,
2143 REG is an output that is dying (i.e., it is never used), otherwise it
2144 is an input (the normal case).
2145 If OUTPUT_P is 1, then we extend the life past the end of this insn. */
2146
2147 static void
2148 wipe_dead_reg (rtx reg, int output_p)
2149 {
2150 int regno = REGNO (reg);
2151
2152 /* If this insn has multiple results,
2153 and the dead reg is used in one of the results,
2154 extend its life to after this insn,
2155 so it won't get allocated together with any other result of this insn.
2156
2157 It is unsafe to use !single_set here since it will ignore an unused
2158 output. Just because an output is unused does not mean the compiler
2159 can assume the side effect will not occur. Consider if REG appears
2160 in the address of an output and we reload the output. If we allocate
2161 REG to the same hard register as an unused output we could set the hard
2162 register before the output reload insn. */
2163 if (GET_CODE (PATTERN (this_insn)) == PARALLEL
2164 && multiple_sets (this_insn))
2165 {
2166 int i;
2167 for (i = XVECLEN (PATTERN (this_insn), 0) - 1; i >= 0; i--)
2168 {
2169 rtx set = XVECEXP (PATTERN (this_insn), 0, i);
2170 if (GET_CODE (set) == SET
2171 && !REG_P (SET_DEST (set))
2172 && !rtx_equal_p (reg, SET_DEST (set))
2173 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
2174 output_p = 1;
2175 }
2176 }
2177
2178 /* If this register is used in an auto-increment address, then extend its
2179 life to after this insn, so that it won't get allocated together with
2180 the result of this insn. */
2181 if (! output_p && find_regno_note (this_insn, REG_INC, regno))
2182 output_p = 1;
2183
2184 if (regno < FIRST_PSEUDO_REGISTER)
2185 {
2186 mark_life (regno, GET_MODE (reg), 0);
2187
2188 /* If a hard register is dying as an output, mark it as in use at
2189 the beginning of this insn (the above statement would cause this
2190 not to happen). */
2191 if (output_p)
2192 post_mark_life (regno, GET_MODE (reg), 1,
2193 2 * this_insn_number, 2 * this_insn_number + 1);
2194 }
2195
2196 else if (reg_qty[regno] >= 0)
2197 qty[reg_qty[regno]].death = 2 * this_insn_number + output_p;
2198 }
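
/* Using the same indexing (illustrative): an input pseudo dying in
   insn 7 records death == 14, while a dying output (OUTPUT_P == 1)
   records death == 15, extending its life just past the insn so it is
   not allocated together with the insn's other results.  */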
2199
2200 /* Find a block of SIZE words of hard regs in reg_class CLASS
2201 that can hold something of machine-mode MODE
2202 (but actually we test only the first of the block for holding MODE)
2203 and still free between insn BORN_INDEX and insn DEAD_INDEX,
2204 and return the number of the first of them.
2205 Return -1 if such a block cannot be found.
2206 If QTYNO crosses calls, insist on a register preserved by calls,
2207 unless ACCEPT_CALL_CLOBBERED is nonzero.
2208
2209 If JUST_TRY_SUGGESTED is nonzero, only try to see if the suggested
2210 register is available. If not, return -1. */
2211
2212 static int
2213 find_free_reg (enum reg_class class, enum machine_mode mode, int qtyno,
2214 int accept_call_clobbered, int just_try_suggested,
2215 int born_index, int dead_index)
2216 {
2217 int i, ins;
2218 HARD_REG_SET first_used, used;
2219 #ifdef ELIMINABLE_REGS
2220 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
2221 #endif
2222
2223 /* Validate our parameters. */
2224 gcc_assert (born_index >= 0 && born_index <= dead_index);
2225
2226 /* Don't let a pseudo live in a reg across a function call
2227 if we might get a nonlocal goto. */
2228 if (current_function_has_nonlocal_label
2229 && qty[qtyno].n_calls_crossed > 0)
2230 return -1;
2231
2232 if (accept_call_clobbered)
2233 COPY_HARD_REG_SET (used, call_fixed_reg_set);
2234 else if (qty[qtyno].n_calls_crossed == 0)
2235 COPY_HARD_REG_SET (used, fixed_reg_set);
2236 else
2237 COPY_HARD_REG_SET (used, call_used_reg_set);
2238
2239 if (accept_call_clobbered)
2240 IOR_HARD_REG_SET (used, losing_caller_save_reg_set);
2241
2242 for (ins = born_index; ins < dead_index; ins++)
2243 IOR_HARD_REG_SET (used, regs_live_at[ins]);
2244
2245 IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);
2246
2247 /* Don't use the frame pointer reg in local-alloc even if
2248 we may omit the frame pointer, because if we do that and then we
2249 need a frame pointer, reload won't know how to move the pseudo
2250 to another hard reg. It can move only regs made by global-alloc.
2251
2252 This is true of any register that can be eliminated. */
2253 #ifdef ELIMINABLE_REGS
2254 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2255 SET_HARD_REG_BIT (used, eliminables[i].from);
2256 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2257 /* If FRAME_POINTER_REGNUM is not a real register, then protect the one
2258 that it might be eliminated into. */
2259 SET_HARD_REG_BIT (used, HARD_FRAME_POINTER_REGNUM);
2260 #endif
2261 #else
2262 SET_HARD_REG_BIT (used, FRAME_POINTER_REGNUM);
2263 #endif
2264
2265 #ifdef CANNOT_CHANGE_MODE_CLASS
2266 cannot_change_mode_set_regs (&used, mode, qty[qtyno].first_reg);
2267 #endif
2268
2269 /* Normally, the registers that can be used for the first register in
2270 a multi-register quantity are the same as those that can be used for
2271 subsequent registers. However, if just trying suggested registers,
2272 restrict our consideration to them. If there are copy-suggested
2273 registers, try them. Otherwise, try the arithmetic-suggested
2274 registers. */
2275 COPY_HARD_REG_SET (first_used, used);
2276
2277 if (just_try_suggested)
2278 {
2279 if (qty_phys_num_copy_sugg[qtyno] != 0)
2280 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_copy_sugg[qtyno]);
2281 else
2282 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_sugg[qtyno]);
2283 }
2284
2285 /* If all registers are excluded, we can't do anything. */
2286 GO_IF_HARD_REG_SUBSET (reg_class_contents[(int) ALL_REGS], first_used, fail);
2287
2288 /* If at least one would be suitable, test each hard reg. */
2289
2290 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2291 {
2292 #ifdef REG_ALLOC_ORDER
2293 int regno = reg_alloc_order[i];
2294 #else
2295 int regno = i;
2296 #endif
2297 if (! TEST_HARD_REG_BIT (first_used, regno)
2298 && HARD_REGNO_MODE_OK (regno, mode)
2299 && (qty[qtyno].n_calls_crossed == 0
2300 || accept_call_clobbered
2301 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
2302 {
2303 int j;
2304 int size1 = hard_regno_nregs[regno][mode];
2305 for (j = 1; j < size1 && ! TEST_HARD_REG_BIT (used, regno + j); j++);
2306 if (j == size1)
2307 {
2308 /* Mark that this register is in use between its birth and death
2309 insns. */
2310 post_mark_life (regno, mode, 1, born_index, dead_index);
2311 return regno;
2312 }
2313 #ifndef REG_ALLOC_ORDER
2314 /* Skip starting points we know will lose. */
2315 i += j;
2316 #endif
2317 }
2318 }
2319
2320 fail:
2321 /* If we are just trying suggested registers, we have just tried copy-
2322 suggested registers, and there are arithmetic-suggested registers,
2323 try them. */
2324
2325 /* If it would be profitable to allocate a call-clobbered register
2326 and save and restore it around calls, do that. */
2327 if (just_try_suggested && qty_phys_num_copy_sugg[qtyno] != 0
2328 && qty_phys_num_sugg[qtyno] != 0)
2329 {
2330 /* Don't try the copy-suggested regs again. */
2331 qty_phys_num_copy_sugg[qtyno] = 0;
2332 return find_free_reg (class, mode, qtyno, accept_call_clobbered, 1,
2333 born_index, dead_index);
2334 }
2335
2336 /* We need not check to see if the current function has nonlocal
2337 labels because we don't put any pseudos that are live over calls in
2338 registers in that case. Avoid putting pseudos crossing calls that
2339 might throw into call used registers. */
2340
2341 if (! accept_call_clobbered
2342 && flag_caller_saves
2343 && ! just_try_suggested
2344 && qty[qtyno].n_calls_crossed != 0
2345 && qty[qtyno].n_throwing_calls_crossed == 0
2346 && CALLER_SAVE_PROFITABLE (qty[qtyno].n_refs,
2347 qty[qtyno].n_calls_crossed))
2348 {
2349 i = find_free_reg (class, mode, qtyno, 1, 0, born_index, dead_index);
2350 if (i >= 0)
2351 caller_save_needed = 1;
2352 return i;
2353 }
2354 return -1;
2355 }
2356
2357 /* Mark that REGNO with machine-mode MODE is live starting from the current
2358 insn (if LIFE is nonzero) or dead starting at the current insn (if LIFE
2359 is zero). */
2360
2361 static void
2362 mark_life (int regno, enum machine_mode mode, int life)
2363 {
2364 int j = hard_regno_nregs[regno][mode];
2365 if (life)
2366 while (--j >= 0)
2367 SET_HARD_REG_BIT (regs_live, regno + j);
2368 else
2369 while (--j >= 0)
2370 CLEAR_HARD_REG_BIT (regs_live, regno + j);
2371 }
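
/* For instance (on a hypothetical target where DImode needs two hard
   registers), mark_life (3, DImode, 1) sets bits 3 and 4 of regs_live,
   and mark_life (3, DImode, 0) clears them again.  */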
2372
2373 /* Mark register number REGNO (with machine-mode MODE) as live (if LIFE
2374 is nonzero) or dead (if LIFE is zero) from insn number BIRTH (inclusive)
2375 to insn number DEATH (exclusive). */
2376
2377 static void
2378 post_mark_life (int regno, enum machine_mode mode, int life, int birth,
2379 int death)
2380 {
2381 int j = hard_regno_nregs[regno][mode];
2382 HARD_REG_SET this_reg;
2383
2384 CLEAR_HARD_REG_SET (this_reg);
2385 while (--j >= 0)
2386 SET_HARD_REG_BIT (this_reg, regno + j);
2387
2388 if (life)
2389 while (birth < death)
2390 {
2391 IOR_HARD_REG_SET (regs_live_at[birth], this_reg);
2392 birth++;
2393 }
2394 else
2395 while (birth < death)
2396 {
2397 AND_COMPL_HARD_REG_SET (regs_live_at[birth], this_reg);
2398 birth++;
2399 }
2400 }
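
/* For instance (hypothetical values), post_mark_life (3, SImode, 1, 20,
   24) ORs hard register 3 (and any additional words SImode needs) into
   regs_live_at[20] through regs_live_at[23]; with LIFE == 0 the same
   call would clear those bits instead.  */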
2401
2402 /* INSN is the CLOBBER insn that starts a REG_NO_CONFLICT block, R0
2403 is the register being clobbered, and R1 is a register being used in
2404 the equivalent expression.
2405
2406 If R1 dies in the block and has a REG_NO_CONFLICT note on every insn
2407 in which it is used, return 1.
2408
2409 Otherwise, return 0. */
2410
2411 static int
2412 no_conflict_p (rtx insn, rtx r0 ATTRIBUTE_UNUSED, rtx r1)
2413 {
2414 int ok = 0;
2415 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
2416 rtx p, last;
2417
2418 /* If R1 is a hard register, return 0 since we handle this case
2419 when we scan the insns that actually use it. */
2420
2421 if (note == 0
2422 || (REG_P (r1) && REGNO (r1) < FIRST_PSEUDO_REGISTER)
2423 || (GET_CODE (r1) == SUBREG && REG_P (SUBREG_REG (r1))
2424 && REGNO (SUBREG_REG (r1)) < FIRST_PSEUDO_REGISTER))
2425 return 0;
2426
2427 last = XEXP (note, 0);
2428
2429 for (p = NEXT_INSN (insn); p && p != last; p = NEXT_INSN (p))
2430 if (INSN_P (p))
2431 {
2432 if (find_reg_note (p, REG_DEAD, r1))
2433 ok = 1;
2434
2435 /* There must be a REG_NO_CONFLICT note on every insn, otherwise
2436 some earlier optimization pass has inserted instructions into
2437 the sequence, and it is not safe to perform this optimization.
2438 Note that emit_no_conflict_block always ensures that this is
2439 true when these sequences are created. */
2440 if (! find_reg_note (p, REG_NO_CONFLICT, r1))
2441 return 0;
2442 }
2443
2444 return ok;
2445 }
2446
2447 /* Return the number of alternatives for which the constraint string P
2448 indicates that the operand must be equal to operand 0 and that no register
2449 is acceptable. */
2450
2451 static int
2452 requires_inout (const char *p)
2453 {
2454 char c;
2455 int found_zero = 0;
2456 int reg_allowed = 0;
2457 int num_matching_alts = 0;
2458 int len;
2459
2460 for ( ; (c = *p); p += len)
2461 {
2462 len = CONSTRAINT_LEN (c, p);
2463 switch (c)
2464 {
2465 case '=': case '+': case '?':
2466 case '#': case '&': case '!':
2467 case '*': case '%':
2468 case 'm': case '<': case '>': case 'V': case 'o':
2469 case 'E': case 'F': case 'G': case 'H':
2470 case 's': case 'i': case 'n':
2471 case 'I': case 'J': case 'K': case 'L':
2472 case 'M': case 'N': case 'O': case 'P':
2473 case 'X':
2474 /* These don't say anything we care about. */
2475 break;
2476
2477 case ',':
2478 if (found_zero && ! reg_allowed)
2479 num_matching_alts++;
2480
2481 found_zero = reg_allowed = 0;
2482 break;
2483
2484 case '0':
2485 found_zero = 1;
2486 break;
2487
2488 case '1': case '2': case '3': case '4': case '5':
2489 case '6': case '7': case '8': case '9':
2490 /* Skip the balance of the matching constraint. */
2491 do
2492 p++;
2493 while (ISDIGIT (*p));
2494 len = 0;
2495 break;
2496
2497 default:
2498 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS
2499 && !EXTRA_ADDRESS_CONSTRAINT (c, p))
2500 break;
2501 /* Fall through. */
2502 case 'p':
2503 case 'g': case 'r':
2504 reg_allowed = 1;
2505 break;
2506 }
2507 }
2508
2509 if (found_zero && ! reg_allowed)
2510 num_matching_alts++;
2511
2512 return num_matching_alts;
2513 }
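
/* A worked example (the constraint strings are hypothetical): for
   "=r,0" the first alternative allows a register, so it does not
   count, while the second matches operand 0 with no register class
   allowed, so requires_inout returns 1.  For "r" or "0r" it returns
   0, because a register is acceptable in the only alternative.  */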
2514
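/* Dump, to FILE, the hard register assigned to each pseudo by local
   allocation.  For example (hypothetical numbers), a pseudo 73 given
   hard register 3 is printed as ";; Register 73 in 3.".  */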
2515 void
2516 dump_local_alloc (FILE *file)
2517 {
2518 int i;
2519 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2520 if (reg_renumber[i] != -1)
2521 fprintf (file, ";; Register %d in %d.\n", i, reg_renumber[i]);
2522 }
2523
2524 /* Run the old register allocator as an RTL pass. Currently this
2525 always returns zero. */
2526 static unsigned int
2527 rest_of_handle_local_alloc (void)
2528 {
2529 int rebuild_notes;
2530
2531 /* Determine if the current function is a leaf before running reload
2532 since this can impact optimizations done by the prologue and
2533 epilogue, thus changing register elimination offsets. */
2534 current_function_is_leaf = leaf_function_p ();
2535
2536 /* Allocate the reg_renumber array. */
2537 allocate_reg_info (max_regno, FALSE, TRUE);
2538
2539 /* And the reg_equiv_memory_loc array. */
2540 VEC_safe_grow (rtx, gc, reg_equiv_memory_loc_vec, max_regno);
2541 memset (VEC_address (rtx, reg_equiv_memory_loc_vec), 0,
2542 sizeof (rtx) * max_regno);
2543 reg_equiv_memory_loc = VEC_address (rtx, reg_equiv_memory_loc_vec);
2544
2545 allocate_initial_values (reg_equiv_memory_loc);
2546
2547 regclass (get_insns (), max_reg_num ());
2548 rebuild_notes = local_alloc ();
2549
2550 /* Local allocation may have turned an indirect jump into a direct
2551 jump. If so, we must rebuild the JUMP_LABEL fields of jumping
2552 instructions. */
2553 if (rebuild_notes)
2554 {
2555 timevar_push (TV_JUMP);
2556
2557 rebuild_jump_labels (get_insns ());
2558 purge_all_dead_edges ();
2559 delete_unreachable_blocks ();
2560
2561 timevar_pop (TV_JUMP);
2562 }
2563
2564 if (dump_file && (dump_flags & TDF_DETAILS))
2565 {
2566 timevar_push (TV_DUMP);
2567 dump_flow_info (dump_file, dump_flags);
2568 dump_local_alloc (dump_file);
2569 timevar_pop (TV_DUMP);
2570 }
2571 return 0;
2572 }
2573
2574 struct tree_opt_pass pass_local_alloc =
2575 {
2576 "lreg", /* name */
2577 NULL, /* gate */
2578 rest_of_handle_local_alloc, /* execute */
2579 NULL, /* sub */
2580 NULL, /* next */
2581 0, /* static_pass_number */
2582 TV_LOCAL_ALLOC, /* tv_id */
2583 0, /* properties_required */
2584 0, /* properties_provided */
2585 0, /* properties_destroyed */
2586 0, /* todo_flags_start */
2587 TODO_dump_func |
2588 TODO_ggc_collect, /* todo_flags_finish */
2589 'l' /* letter */
2590 };
2591
2592