/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
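
/* Illustrative note (not part of the original source): the first index
   is the value (0, 1 or 2) and the second is the mode, so once this
   file's initialization code has run one would expect, e.g.,
   const_tiny_rtx[0][(int) SImode] == const0_rtx, while
   const_tiny_rtx[1][(int) DFmode] is the CONST_DOUBLE for 1.0.  */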

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconst3;
REAL_VALUE_TYPE dconst10;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstthird;
REAL_VALUE_TYPE dconstpi;
REAL_VALUE_TYPE dconste;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (which is most of them),
   these are the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
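
/* Usage sketch (illustrative, not part of the original source): small
   CONST_INTs are shared through this array, so equal values yield the
   same rtx and may be compared as pointers:

     rtx a = GEN_INT (2);
     rtx b = GEN_INT (2);
     gcc_assert (a == b);

   Both point at const_int_rtx[2 + MAX_SAVED_CONST_INT].  */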

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_location (cfun->emit->x_last_location)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx find_line_note (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void unshare_all_decls (tree);
static void reset_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
				 enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_DOUBLE) is the same as that represented by Y (which is also
   really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  rtx a = (rtx)x, b = (rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
	       unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
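
/* Illustrative sketch (not part of the original source, compiled out):
   get_mem_attrs hash-conses its result, so identical attribute sets
   share one GC-allocated structure.  The alias set value 1 below is
   arbitrary, and the function name is for exposition only.  */
#if 0
static void
example_mem_attrs_sharing (void)
{
  mem_attrs *a = get_mem_attrs (1, NULL_TREE, NULL_RTX, NULL_RTX,
				BITS_PER_UNIT, SImode);
  mem_attrs *b = get_mem_attrs (1, NULL_TREE, NULL_RTX, NULL_RTX,
				BITS_PER_UNIT, SImode);
  gcc_assert (a == b);	/* Same slot in mem_attrs_htab.  */
}
#endif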

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG with decl DECL and offset OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
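
/* Usage note (illustrative, not part of the original source): values
   outside [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] are interned in
   const_int_htab instead, so pointer equality still holds for equal
   values: GEN_INT (100000) == GEN_INT (100000).  */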

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
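
/* Usage sketch (illustrative, not part of the original source):
   trunc_int_for_mode sign-extends C from the width of MODE, so the
   resulting CONST_INT is in canonical form for that mode, e.g.:

     gen_int_mode (0xff, QImode)   yields (const_int -1)
     gen_int_mode (0x7f, QImode)   yields (const_int 127)
*/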

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	of copies of the sign bit, and the signs of i0 and i1 are the same),
	then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
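
/* Worked example (illustrative, not part of the original source), for a
   host where HOST_BITS_PER_WIDE_INT == 64 and TImode is 128 bits wide:

     immed_double_const (5, 0, DImode)    case 1: gen_int_mode, (const_int 5)
     immed_double_const (-7, -1, TImode)  case 2: i1 is all copies of the
					  sign bit of i0, so (const_int -7)
     immed_double_const (0, 1, TImode)    case 3: a VOIDmode CONST_DOUBLE
					  with low word 0 and high word 1
*/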

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
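
/* Usage sketch (illustrative, not part of the original source): asking
   for a well-known pointer register in Pmode returns the shared global
   object instead of a fresh REG, e.g.

     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) == stack_pointer_rtx

   whereas most other (mode, regno) pairs get a new rtx from
   gen_raw_REG.  */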

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!current_function_calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
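
/* Illustrative examples (not part of the original source), assuming a
   target with 32-bit SImode words and 64-bit DImode and DFmode:

     (subreg:SI (reg:DI) 0)  valid: lowpart of a multi-word integer
     (subreg:SI (reg:DI) 2)  invalid: offset not a multiple of osize
     (subreg:DI (reg:DF) 0)  valid: float subreg that keeps the size
     (subreg:SI (reg:DF) 0)  fails the float size rule, but is accepted
			     because SImode is word_mode on such a target
			     (the escape hatch at the top)
*/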

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec (int n, ...)
{
  int i, save_n;
  rtx *vector;
  va_list p;

  va_start (p, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  vector = alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  va_end (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...			*/

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
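
/* Usage sketch (illustrative, not part of the original source): building
   the vector for a two-element PARALLEL, where set0 and set1 are
   previously constructed SET rtxs:

     rtvec v = gen_rtvec (2, set0, set1);
     rtx p = gen_rtx_PARALLEL (VOIDmode, v);
*/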

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  gcc_assert (!no_new_pseudos);

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
			  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
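
/* Usage sketch (illustrative, not part of the original source): with
   generating_concat_p set, a complex-mode request becomes a CONCAT of
   two scalar pseudos, e.g.

     rtx c = gen_reg_rtx (DCmode);

   yields (concat:DC (reg:DF N) (reg:DF N+1)) rather than a single
   DCmode pseudo.  */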

/* Generate a register with the same attributes as REG, but offset by
   OFFSET.  Do the big endian correction if needed.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
{
  rtx new = gen_rtx_REG (mode, regno);
  tree decl;
  HOST_WIDE_INT var_size;

  /* PR middle-end/14084
     The problem appears when a variable is stored in a larger register
     and later it is used in the original mode or some mode in between,
     or some part of the variable is accessed.

     On little endian machines there is no problem because
     the REG_OFFSET of the start of the variable is the same when
     accessed in any mode (it is 0).

     However, this is not true on big endian machines.
     The offset of the start of the variable is different when accessed
     in different modes.
     When we are taking a part of the REG we have to change the OFFSET
     from an offset WRT the size of the mode of REG to an offset WRT the
     size of the variable.

     If we did not do the big endian correction the resulting REG_OFFSET
     would be larger than the size of the DECL.

     Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:

     REG.mode  MODE  DECL size  old offset  new offset  description
     DI        SI    4          4           0           int32 in SImode
     DI        SI    1          4           0           char in SImode
     DI        QI    1          7           0           char in QImode
     DI        QI    4          5           1           1st element in QImode
                                                        of char[4]
     DI        HI    4          6           2           1st element in HImode
                                                        of int16[2]

     If the size of DECL is equal to or greater than the size of REG
     we can't do this correction because the register holds the
     whole variable or a part of the variable and thus the REG_OFFSET
     is already correct.  */

  decl = REG_EXPR (reg);
  if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
      && decl != NULL
      && offset > 0
      && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
      && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
	  && var_size < GET_MODE_SIZE (GET_MODE (reg))))
    {
      int offset_le;

      /* Convert machine endian to little endian WRT size of mode of REG.  */
      if (WORDS_BIG_ENDIAN)
	offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
		     / UNITS_PER_WORD) * UNITS_PER_WORD;
      else
	offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;

      if (BYTES_BIG_ENDIAN)
	offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
		      % UNITS_PER_WORD);
      else
	offset_le += offset % UNITS_PER_WORD;

      if (offset_le >= var_size)
	{
	  /* MODE is wider than the variable so the new reg will cover
	     the whole variable so the resulting OFFSET should be 0.  */
	  offset = 0;
	}
      else
	{
	  /* Convert little endian to machine endian WRT size of variable.  */
	  if (WORDS_BIG_ENDIAN)
	    offset = ((var_size - 1 - offset_le)
		      / UNITS_PER_WORD) * UNITS_PER_WORD;
	  else
	    offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;

	  if (BYTES_BIG_ENDIAN)
	    offset += ((var_size - 1 - offset_le)
		       % UNITS_PER_WORD);
	  else
	    offset += offset_le % UNITS_PER_WORD;
	}
    }

  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
				   REG_OFFSET (reg) + offset);
  return new;
}

/* Set the register attributes of REG from the decl and offset recorded
   in the memory attributes of MEM.  */

void
set_reg_attrs_from_mem (rtx reg, rtx mem)
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
978*404b540aSrobert 
979*404b540aSrobert /* Assign the RTX X to parameter declaration T.  */
980*404b540aSrobert void
set_decl_incoming_rtl(tree t,rtx x)981*404b540aSrobert set_decl_incoming_rtl (tree t, rtx x)
982*404b540aSrobert {
983*404b540aSrobert   DECL_INCOMING_RTL (t) = x;
984*404b540aSrobert 
985*404b540aSrobert   if (!x)
986*404b540aSrobert     return;
987*404b540aSrobert   /* For register, we maintain the reverse information too.  */
988*404b540aSrobert   if (REG_P (x))
989*404b540aSrobert     REG_ATTRS (x) = get_reg_attrs (t, 0);
990*404b540aSrobert   else if (GET_CODE (x) == SUBREG)
991*404b540aSrobert     REG_ATTRS (SUBREG_REG (x))
992*404b540aSrobert       = get_reg_attrs (t, -SUBREG_BYTE (x));
993*404b540aSrobert   if (GET_CODE (x) == CONCAT)
994*404b540aSrobert     {
995*404b540aSrobert       if (REG_P (XEXP (x, 0)))
996*404b540aSrobert         REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
997*404b540aSrobert       if (REG_P (XEXP (x, 1)))
998*404b540aSrobert 	REG_ATTRS (XEXP (x, 1))
999*404b540aSrobert 	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1000*404b540aSrobert     }
1001*404b540aSrobert   if (GET_CODE (x) == PARALLEL)
1002*404b540aSrobert     {
1003*404b540aSrobert       int i, start;
1004*404b540aSrobert 
1005*404b540aSrobert       /* Check for a NULL entry, used to indicate that the parameter goes
1006*404b540aSrobert 	 both on the stack and in registers.  */
1007*404b540aSrobert       if (XEXP (XVECEXP (x, 0, 0), 0))
1008*404b540aSrobert 	start = 0;
1009*404b540aSrobert       else
1010*404b540aSrobert 	start = 1;
1011*404b540aSrobert 
1012*404b540aSrobert       for (i = start; i < XVECLEN (x, 0); i++)
1013*404b540aSrobert 	{
1014*404b540aSrobert 	  rtx y = XVECEXP (x, 0, i);
1015*404b540aSrobert 	  if (REG_P (XEXP (y, 0)))
1016*404b540aSrobert 	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1017*404b540aSrobert 	}
1018*404b540aSrobert     }
1019*404b540aSrobert }
1020*404b540aSrobert 
1021*404b540aSrobert /* Identify REG (which may be a CONCAT) as a user register.  */
1022*404b540aSrobert 
1023*404b540aSrobert void
mark_user_reg(rtx reg)1024*404b540aSrobert mark_user_reg (rtx reg)
1025*404b540aSrobert {
1026*404b540aSrobert   if (GET_CODE (reg) == CONCAT)
1027*404b540aSrobert     {
1028*404b540aSrobert       REG_USERVAR_P (XEXP (reg, 0)) = 1;
1029*404b540aSrobert       REG_USERVAR_P (XEXP (reg, 1)) = 1;
1030*404b540aSrobert     }
1031*404b540aSrobert   else
1032*404b540aSrobert     {
1033*404b540aSrobert       gcc_assert (REG_P (reg));
1034*404b540aSrobert       REG_USERVAR_P (reg) = 1;
1035*404b540aSrobert     }
1036*404b540aSrobert }
1037*404b540aSrobert 
1038*404b540aSrobert /* Identify REG as a probable pointer register and show its alignment
1039*404b540aSrobert    as ALIGN, if nonzero.  */
1040*404b540aSrobert 
1041*404b540aSrobert void
mark_reg_pointer(rtx reg,int align)1042*404b540aSrobert mark_reg_pointer (rtx reg, int align)
1043*404b540aSrobert {
1044*404b540aSrobert   if (! REG_POINTER (reg))
1045*404b540aSrobert     {
1046*404b540aSrobert       REG_POINTER (reg) = 1;
1047*404b540aSrobert 
1048*404b540aSrobert       if (align)
1049*404b540aSrobert 	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1050*404b540aSrobert     }
1051*404b540aSrobert   else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1052*404b540aSrobert     /* We can no-longer be sure just how aligned this pointer is.  */
1053*404b540aSrobert     REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1054*404b540aSrobert }
1055*404b540aSrobert 
1056*404b540aSrobert /* Return 1 plus largest pseudo reg number used in the current function.  */
1057*404b540aSrobert 
1058*404b540aSrobert int
max_reg_num(void)1059*404b540aSrobert max_reg_num (void)
1060*404b540aSrobert {
1061*404b540aSrobert   return reg_rtx_no;
1062*404b540aSrobert }
1063*404b540aSrobert 
1064*404b540aSrobert /* Return 1 + the largest label number used so far in the current function.  */
1065*404b540aSrobert 
1066*404b540aSrobert int
max_label_num(void)1067*404b540aSrobert max_label_num (void)
1068*404b540aSrobert {
1069*404b540aSrobert   return label_num;
1070*404b540aSrobert }
1071*404b540aSrobert 
1072*404b540aSrobert /* Return first label number used in this function (if any were used).  */
1073*404b540aSrobert 
1074*404b540aSrobert int
get_first_label_num(void)1075*404b540aSrobert get_first_label_num (void)
1076*404b540aSrobert {
1077*404b540aSrobert   return first_label_num;
1078*404b540aSrobert }
1079*404b540aSrobert 
1080*404b540aSrobert /* If the rtx for label was created during the expansion of a nested
1081*404b540aSrobert    function, then first_label_num won't include this label number.
1082*404b540aSrobert    Fix this now so that array indicies work later.  */
1083*404b540aSrobert 
1084*404b540aSrobert void
1085*404b540aSrobert maybe_set_first_label_num (rtx x)
1086*404b540aSrobert {
1087*404b540aSrobert   if (CODE_LABEL_NUMBER (x) < first_label_num)
1088*404b540aSrobert     first_label_num = CODE_LABEL_NUMBER (x);
1089*404b540aSrobert }
1090*404b540aSrobert 
1091*404b540aSrobert /* Return a value representing some low-order bits of X, where the number
1092*404b540aSrobert    of low-order bits is given by MODE.  Note that no conversion is done
1093*404b540aSrobert    between floating-point and fixed-point values, rather, the bit
1094*404b540aSrobert    representation is returned.
1095*404b540aSrobert 
1096*404b540aSrobert    This function handles the cases in common between gen_lowpart, below,
1097*404b540aSrobert    and two variants in cse.c and combine.c.  These are the cases that can
1098*404b540aSrobert    be safely handled at all points in the compilation.
1099*404b540aSrobert 
1100*404b540aSrobert    If this is not a case we can handle, return 0.  */
1101*404b540aSrobert 
1102*404b540aSrobert rtx
1103*404b540aSrobert gen_lowpart_common (enum machine_mode mode, rtx x)
1104*404b540aSrobert {
1105*404b540aSrobert   int msize = GET_MODE_SIZE (mode);
1106*404b540aSrobert   int xsize;
1107*404b540aSrobert   int offset = 0;
1108*404b540aSrobert   enum machine_mode innermode;
1109*404b540aSrobert 
1110*404b540aSrobert   /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1111*404b540aSrobert      so we have to make one up.  Yuk.  */
1112*404b540aSrobert   innermode = GET_MODE (x);
1113*404b540aSrobert   if (GET_CODE (x) == CONST_INT
1114*404b540aSrobert       && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1115*404b540aSrobert     innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1116*404b540aSrobert   else if (innermode == VOIDmode)
1117*404b540aSrobert     innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1118*404b540aSrobert 
1119*404b540aSrobert   xsize = GET_MODE_SIZE (innermode);
1120*404b540aSrobert 
1121*404b540aSrobert   gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1122*404b540aSrobert 
1123*404b540aSrobert   if (innermode == mode)
1124*404b540aSrobert     return x;
1125*404b540aSrobert 
1126*404b540aSrobert   /* MODE must occupy no more words than the mode of X.  */
1127*404b540aSrobert   if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1128*404b540aSrobert       > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1129*404b540aSrobert     return 0;
1130*404b540aSrobert 
1131*404b540aSrobert   /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
1132*404b540aSrobert   if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1133*404b540aSrobert     return 0;
1134*404b540aSrobert 
1135*404b540aSrobert   offset = subreg_lowpart_offset (mode, innermode);
1136*404b540aSrobert 
1137*404b540aSrobert   if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1138*404b540aSrobert       && (GET_MODE_CLASS (mode) == MODE_INT
1139*404b540aSrobert 	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1140*404b540aSrobert     {
1141*404b540aSrobert       /* If we are getting the low-order part of something that has been
1142*404b540aSrobert 	 sign- or zero-extended, we can either just use the object being
1143*404b540aSrobert 	 extended or make a narrower extension.  If we want an even smaller
1144*404b540aSrobert 	 piece than the size of the object being extended, call ourselves
1145*404b540aSrobert 	 recursively.
1146*404b540aSrobert 
1147*404b540aSrobert 	 This case is used mostly by combine and cse.  */
1148*404b540aSrobert 
1149*404b540aSrobert       if (GET_MODE (XEXP (x, 0)) == mode)
1150*404b540aSrobert 	return XEXP (x, 0);
1151*404b540aSrobert       else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1152*404b540aSrobert 	return gen_lowpart_common (mode, XEXP (x, 0));
1153*404b540aSrobert       else if (msize < xsize)
1154*404b540aSrobert 	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1155*404b540aSrobert     }
1156*404b540aSrobert   else if (GET_CODE (x) == SUBREG || REG_P (x)
1157*404b540aSrobert 	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1158*404b540aSrobert 	   || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1159*404b540aSrobert     return simplify_gen_subreg (mode, x, innermode, offset);
1160*404b540aSrobert 
1161*404b540aSrobert   /* Otherwise, we can't do this.  */
1162*404b540aSrobert   return 0;
1163*404b540aSrobert }
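
/* A sketch of the expected calling convention (hypothetical helper,
   guarded out of the build): callers must be prepared for a zero
   return and fall back to a more forceful routine such as
   gen_lowpart, which may emit a copy.  The DImode/SImode pairing is
   only an example.  */
#if 0
static rtx
example_low_si_part (rtx x)
{
  rtx lo = gen_lowpart_common (SImode, x);

  if (lo == 0)
    lo = gen_lowpart (SImode, x);	/* May copy X to a register.  */
  return lo;
}
#endif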
1164*404b540aSrobert 
1165*404b540aSrobert rtx
1166*404b540aSrobert gen_highpart (enum machine_mode mode, rtx x)
1167*404b540aSrobert {
1168*404b540aSrobert   unsigned int msize = GET_MODE_SIZE (mode);
1169*404b540aSrobert   rtx result;
1170*404b540aSrobert 
1171*404b540aSrobert   /* This case loses if X is a subreg.  To catch bugs early,
1172*404b540aSrobert      complain if an invalid MODE is used even in other cases.  */
1173*404b540aSrobert   gcc_assert (msize <= UNITS_PER_WORD
1174*404b540aSrobert 	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1175*404b540aSrobert 
1176*404b540aSrobert   result = simplify_gen_subreg (mode, x, GET_MODE (x),
1177*404b540aSrobert 				subreg_highpart_offset (mode, GET_MODE (x)));
1178*404b540aSrobert   gcc_assert (result);
1179*404b540aSrobert 
1180*404b540aSrobert   /* simplify_gen_subreg is not guaranteed to return a valid operand for
1181*404b540aSrobert      the target if we have a MEM.  gen_highpart must return a valid operand,
1182*404b540aSrobert      emitting code if necessary to do so.  */
1183*404b540aSrobert   if (MEM_P (result))
1184*404b540aSrobert     {
1185*404b540aSrobert       result = validize_mem (result);
1186*404b540aSrobert       gcc_assert (result);
1187*404b540aSrobert     }
1188*404b540aSrobert 
1189*404b540aSrobert   return result;
1190*404b540aSrobert }
1191*404b540aSrobert 
1192*404b540aSrobert /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1193*404b540aSrobert    be VOIDmode constant.  */
1194*404b540aSrobert rtx
1195*404b540aSrobert gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1196*404b540aSrobert {
1197*404b540aSrobert   if (GET_MODE (exp) != VOIDmode)
1198*404b540aSrobert     {
1199*404b540aSrobert       gcc_assert (GET_MODE (exp) == innermode);
1200*404b540aSrobert       return gen_highpart (outermode, exp);
1201*404b540aSrobert     }
1202*404b540aSrobert   return simplify_gen_subreg (outermode, exp, innermode,
1203*404b540aSrobert 			      subreg_highpart_offset (outermode, innermode));
1204*404b540aSrobert }
1205*404b540aSrobert 
1206*404b540aSrobert /* Return offset in bytes to get OUTERMODE low part
1207*404b540aSrobert    of the value in mode INNERMODE stored in memory in target format.  */
1208*404b540aSrobert 
1209*404b540aSrobert unsigned int
1210*404b540aSrobert subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1211*404b540aSrobert {
1212*404b540aSrobert   unsigned int offset = 0;
1213*404b540aSrobert   int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1214*404b540aSrobert 
1215*404b540aSrobert   if (difference > 0)
1216*404b540aSrobert     {
1217*404b540aSrobert       if (WORDS_BIG_ENDIAN)
1218*404b540aSrobert 	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1219*404b540aSrobert       if (BYTES_BIG_ENDIAN)
1220*404b540aSrobert 	offset += difference % UNITS_PER_WORD;
1221*404b540aSrobert     }
1222*404b540aSrobert 
1223*404b540aSrobert   return offset;
1224*404b540aSrobert }
1225*404b540aSrobert 
1226*404b540aSrobert /* Return offset in bytes to get OUTERMODE high part
1227*404b540aSrobert    of the value in mode INNERMODE stored in memory in target format.  */
1228*404b540aSrobert unsigned int
1229*404b540aSrobert subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1230*404b540aSrobert {
1231*404b540aSrobert   unsigned int offset = 0;
1232*404b540aSrobert   int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1233*404b540aSrobert 
1234*404b540aSrobert   gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1235*404b540aSrobert 
1236*404b540aSrobert   if (difference > 0)
1237*404b540aSrobert     {
1238*404b540aSrobert       if (! WORDS_BIG_ENDIAN)
1239*404b540aSrobert 	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1240*404b540aSrobert       if (! BYTES_BIG_ENDIAN)
1241*404b540aSrobert 	offset += difference % UNITS_PER_WORD;
1242*404b540aSrobert     }
1243*404b540aSrobert 
1244*404b540aSrobert   return offset;
1245*404b540aSrobert }
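
/* A worked example for the two offset routines (hypothetical helper,
   guarded out of the build), assuming DImode is twice the size of
   SImode: on a little-endian target the SImode low part of a DImode
   value sits at byte 0 and the high part at byte 4; on a big-endian
   target the two are swapped.  In every case the two offsets sum to
   the size difference.  */
#if 0
static void
example_subreg_offsets (void)
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);

  /* Little endian: lo == 0, hi == 4.  Big endian: lo == 4, hi == 0.  */
  gcc_assert (lo + hi
	      == GET_MODE_SIZE (DImode) - GET_MODE_SIZE (SImode));
}
#endif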
1246*404b540aSrobert 
1247*404b540aSrobert /* Return 1 iff X, assumed to be a SUBREG,
1248*404b540aSrobert    refers to the least significant part of its containing reg.
1249*404b540aSrobert    If X is not a SUBREG, always return 1 (it is its own low part!).  */
1250*404b540aSrobert 
1251*404b540aSrobert int
1252*404b540aSrobert subreg_lowpart_p (rtx x)
1253*404b540aSrobert {
1254*404b540aSrobert   if (GET_CODE (x) != SUBREG)
1255*404b540aSrobert     return 1;
1256*404b540aSrobert   else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1257*404b540aSrobert     return 0;
1258*404b540aSrobert 
1259*404b540aSrobert   return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1260*404b540aSrobert 	  == SUBREG_BYTE (x));
1261*404b540aSrobert }
1262*404b540aSrobert 
1263*404b540aSrobert /* Return subword OFFSET of operand OP.
1264*404b540aSrobert    The word number, OFFSET, is interpreted as the word number starting
1265*404b540aSrobert    at the low-order address.  OFFSET 0 is the low-order word if not
1266*404b540aSrobert    WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1267*404b540aSrobert 
1268*404b540aSrobert    If we cannot extract the required word, we return zero.  Otherwise,
1269*404b540aSrobert    an rtx corresponding to the requested word will be returned.
1270*404b540aSrobert 
1271*404b540aSrobert    VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
1272*404b540aSrobert    reload has completed, a valid address will always be returned.  After
1273*404b540aSrobert    reload, if a valid address cannot be returned, we return zero.
1274*404b540aSrobert 
1275*404b540aSrobert    If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1276*404b540aSrobert    it is the responsibility of the caller.
1277*404b540aSrobert 
1278*404b540aSrobert    MODE is the mode of OP in case it is a CONST_INT.
1279*404b540aSrobert 
1280*404b540aSrobert    ??? This is still rather broken for some cases.  The problem for the
1281*404b540aSrobert    moment is that all callers of this thing provide no 'goal mode' to
1282*404b540aSrobert    tell us to work with.  This exists because all callers were written
1283*404b540aSrobert    in a word-based SUBREG world.
1284*404b540aSrobert    Most uses of this function can now be replaced by simplify_subreg.  */
1287*404b540aSrobert 
1288*404b540aSrobert rtx
1289*404b540aSrobert operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1290*404b540aSrobert {
1291*404b540aSrobert   if (mode == VOIDmode)
1292*404b540aSrobert     mode = GET_MODE (op);
1293*404b540aSrobert 
1294*404b540aSrobert   gcc_assert (mode != VOIDmode);
1295*404b540aSrobert 
1296*404b540aSrobert   /* If OP is narrower than a word, fail.  */
1297*404b540aSrobert   if (mode != BLKmode
1298*404b540aSrobert       && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1299*404b540aSrobert     return 0;
1300*404b540aSrobert 
1301*404b540aSrobert   /* If we want a word outside OP, return zero.  */
1302*404b540aSrobert   if (mode != BLKmode
1303*404b540aSrobert       && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1304*404b540aSrobert     return const0_rtx;
1305*404b540aSrobert 
1306*404b540aSrobert   /* Form a new MEM at the requested address.  */
1307*404b540aSrobert   if (MEM_P (op))
1308*404b540aSrobert     {
1309*404b540aSrobert       rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1310*404b540aSrobert 
1311*404b540aSrobert       if (! validate_address)
1312*404b540aSrobert 	return new;
1313*404b540aSrobert 
1314*404b540aSrobert       else if (reload_completed)
1315*404b540aSrobert 	{
1316*404b540aSrobert 	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1317*404b540aSrobert 	    return 0;
1318*404b540aSrobert 	}
1319*404b540aSrobert       else
1320*404b540aSrobert 	return replace_equiv_address (new, XEXP (new, 0));
1321*404b540aSrobert     }
1322*404b540aSrobert 
1323*404b540aSrobert   /* Rest can be handled by simplify_subreg.  */
1324*404b540aSrobert   return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1325*404b540aSrobert }
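
/* The classical use of this routine, sketched as a hypothetical
   helper (guarded out of the build): decompose a multiword operand
   word by word, giving up if any word cannot be extracted.  */
#if 0
static int
example_extract_words (rtx op, enum machine_mode mode, rtx *words)
{
  unsigned int i, nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;

  for (i = 0; i < nwords; i++)
    {
      words[i] = operand_subword (op, i, 1, mode);
      if (words[i] == 0)
	/* The caller should fall back, e.g. to operand_subword_force
	   below.  */
	return 0;
    }
  return 1;
}
#endif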
1326*404b540aSrobert 
1327*404b540aSrobert /* Similar to `operand_subword', but never return 0.  If we can't
1328*404b540aSrobert    extract the required subword, put OP into a register and try again.
1329*404b540aSrobert    The second attempt must succeed.  We always validate the address in
1330*404b540aSrobert    this case.
1331*404b540aSrobert 
1332*404b540aSrobert    MODE is the mode of OP, in case it is CONST_INT.  */
1333*404b540aSrobert 
1334*404b540aSrobert rtx
1335*404b540aSrobert operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1336*404b540aSrobert {
1337*404b540aSrobert   rtx result = operand_subword (op, offset, 1, mode);
1338*404b540aSrobert 
1339*404b540aSrobert   if (result)
1340*404b540aSrobert     return result;
1341*404b540aSrobert 
1342*404b540aSrobert   if (mode != BLKmode && mode != VOIDmode)
1343*404b540aSrobert     {
1344*404b540aSrobert       /* If this is a register which can not be accessed by words, copy it
1345*404b540aSrobert 	 to a pseudo register.  */
1346*404b540aSrobert       if (REG_P (op))
1347*404b540aSrobert 	op = copy_to_reg (op);
1348*404b540aSrobert       else
1349*404b540aSrobert 	op = force_reg (mode, op);
1350*404b540aSrobert     }
1351*404b540aSrobert 
1352*404b540aSrobert   result = operand_subword (op, offset, 1, mode);
1353*404b540aSrobert   gcc_assert (result);
1354*404b540aSrobert 
1355*404b540aSrobert   return result;
1356*404b540aSrobert }
1357*404b540aSrobert 
1358*404b540aSrobert /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1359*404b540aSrobert    or (2) a component ref of something variable.  Represent the latter with
1360*404b540aSrobert    a NULL expression.  */
1361*404b540aSrobert 
1362*404b540aSrobert static tree
1363*404b540aSrobert component_ref_for_mem_expr (tree ref)
1364*404b540aSrobert {
1365*404b540aSrobert   tree inner = TREE_OPERAND (ref, 0);
1366*404b540aSrobert 
1367*404b540aSrobert   if (TREE_CODE (inner) == COMPONENT_REF)
1368*404b540aSrobert     inner = component_ref_for_mem_expr (inner);
1369*404b540aSrobert   else
1370*404b540aSrobert     {
1371*404b540aSrobert       /* Now remove any conversions: they don't change what the underlying
1372*404b540aSrobert 	 object is.  Likewise for SAVE_EXPR.  */
1373*404b540aSrobert       while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1374*404b540aSrobert 	     || TREE_CODE (inner) == NON_LVALUE_EXPR
1375*404b540aSrobert 	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1376*404b540aSrobert 	     || TREE_CODE (inner) == SAVE_EXPR)
1377*404b540aSrobert 	inner = TREE_OPERAND (inner, 0);
1378*404b540aSrobert 
1379*404b540aSrobert       if (! DECL_P (inner))
1380*404b540aSrobert 	inner = NULL_TREE;
1381*404b540aSrobert     }
1382*404b540aSrobert 
1383*404b540aSrobert   if (inner == TREE_OPERAND (ref, 0))
1384*404b540aSrobert     return ref;
1385*404b540aSrobert   else
1386*404b540aSrobert     return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
1387*404b540aSrobert 		   TREE_OPERAND (ref, 1), NULL_TREE);
1388*404b540aSrobert }
1389*404b540aSrobert 
1390*404b540aSrobert /* Returns 1 if the two MEM_EXPRs, EXPR1 and EXPR2, can be considered
1391*404b540aSrobert    equal, and 0 otherwise.  */
1392*404b540aSrobert 
1393*404b540aSrobert int
1394*404b540aSrobert mem_expr_equal_p (tree expr1, tree expr2)
1395*404b540aSrobert {
1396*404b540aSrobert   if (expr1 == expr2)
1397*404b540aSrobert     return 1;
1398*404b540aSrobert 
1399*404b540aSrobert   if (! expr1 || ! expr2)
1400*404b540aSrobert     return 0;
1401*404b540aSrobert 
1402*404b540aSrobert   if (TREE_CODE (expr1) != TREE_CODE (expr2))
1403*404b540aSrobert     return 0;
1404*404b540aSrobert 
1405*404b540aSrobert   if (TREE_CODE (expr1) == COMPONENT_REF)
1406*404b540aSrobert     return
1407*404b540aSrobert       mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1408*404b540aSrobert 			TREE_OPERAND (expr2, 0))
1409*404b540aSrobert       && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
1410*404b540aSrobert 			   TREE_OPERAND (expr2, 1));
1411*404b540aSrobert 
1412*404b540aSrobert   if (INDIRECT_REF_P (expr1))
1413*404b540aSrobert     return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1414*404b540aSrobert 			     TREE_OPERAND (expr2, 0));
1415*404b540aSrobert 
1416*404b540aSrobert   /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
1417*404b540aSrobert      have been resolved here.  */
1418*404b540aSrobert   gcc_assert (DECL_P (expr1));
1419*404b540aSrobert 
1420*404b540aSrobert   /* Decls with different pointers can't be equal.  */
1421*404b540aSrobert   return 0;
1422*404b540aSrobert }
1423*404b540aSrobert 
1424*404b540aSrobert /* Given REF, a MEM, and T, either the type of X or the expression
1425*404b540aSrobert    corresponding to REF, set the memory attributes.  OBJECTP is nonzero
1426*404b540aSrobert    if we are making a new object of this type.  BITPOS is nonzero if
1427*404b540aSrobert    there is an offset outstanding on T that will be applied later.  */
1428*404b540aSrobert 
1429*404b540aSrobert void
1430*404b540aSrobert set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1431*404b540aSrobert 				 HOST_WIDE_INT bitpos)
1432*404b540aSrobert {
1433*404b540aSrobert   HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1434*404b540aSrobert   tree expr = MEM_EXPR (ref);
1435*404b540aSrobert   rtx offset = MEM_OFFSET (ref);
1436*404b540aSrobert   rtx size = MEM_SIZE (ref);
1437*404b540aSrobert   unsigned int align = MEM_ALIGN (ref);
1438*404b540aSrobert   HOST_WIDE_INT apply_bitpos = 0;
1439*404b540aSrobert   tree type;
1440*404b540aSrobert 
1441*404b540aSrobert   /* It can happen that type_for_mode was given a mode for which there
1442*404b540aSrobert      is no language-level type.  In that case it returns NULL, which
1443*404b540aSrobert      we can see here.  */
1444*404b540aSrobert   if (t == NULL_TREE)
1445*404b540aSrobert     return;
1446*404b540aSrobert 
1447*404b540aSrobert   type = TYPE_P (t) ? t : TREE_TYPE (t);
1448*404b540aSrobert   if (type == error_mark_node)
1449*404b540aSrobert     return;
1450*404b540aSrobert 
1451*404b540aSrobert   /* If we have already set DECL_RTL = ref, get_alias_set will get the
1452*404b540aSrobert      wrong answer, as it assumes that DECL_RTL already has the right alias
1453*404b540aSrobert      info.  Callers should not set DECL_RTL until after the call to
1454*404b540aSrobert      set_mem_attributes.  */
1455*404b540aSrobert   gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1456*404b540aSrobert 
1457*404b540aSrobert   /* Get the alias set from the expression or type (perhaps using a
1458*404b540aSrobert      front-end routine) and use it.  */
1459*404b540aSrobert   alias = get_alias_set (t);
1460*404b540aSrobert 
1461*404b540aSrobert   MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1462*404b540aSrobert   MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1463*404b540aSrobert   MEM_POINTER (ref) = POINTER_TYPE_P (type);
1464*404b540aSrobert 
1465*404b540aSrobert   /* If we are making an object of this type, or if this is a DECL, we know
1466*404b540aSrobert      that it is a scalar if the type is not an aggregate.  */
1467*404b540aSrobert   if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1468*404b540aSrobert     MEM_SCALAR_P (ref) = 1;
1469*404b540aSrobert 
1470*404b540aSrobert   /* We can set the alignment from the type if we are making an object,
1471*404b540aSrobert      this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
1472*404b540aSrobert   if (objectp || TREE_CODE (t) == INDIRECT_REF
1473*404b540aSrobert       || TREE_CODE (t) == ALIGN_INDIRECT_REF
1474*404b540aSrobert       || TYPE_ALIGN_OK (type))
1475*404b540aSrobert     align = MAX (align, TYPE_ALIGN (type));
1476*404b540aSrobert   else
1477*404b540aSrobert     if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1478*404b540aSrobert       {
1479*404b540aSrobert 	if (integer_zerop (TREE_OPERAND (t, 1)))
1480*404b540aSrobert 	  /* We don't know anything about the alignment.  */
1481*404b540aSrobert 	  align = BITS_PER_UNIT;
1482*404b540aSrobert 	else
1483*404b540aSrobert 	  align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1484*404b540aSrobert       }
1485*404b540aSrobert 
1486*404b540aSrobert   /* If the size is known, we can set that.  */
1487*404b540aSrobert   if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1488*404b540aSrobert     size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1489*404b540aSrobert 
1490*404b540aSrobert   /* If T is not a type, we may be able to deduce some more information about
1491*404b540aSrobert      the expression.  */
1492*404b540aSrobert   if (! TYPE_P (t))
1493*404b540aSrobert     {
1494*404b540aSrobert       tree base;
1495*404b540aSrobert 
1496*404b540aSrobert       if (TREE_THIS_VOLATILE (t))
1497*404b540aSrobert 	MEM_VOLATILE_P (ref) = 1;
1498*404b540aSrobert 
1499*404b540aSrobert       /* Now remove any conversions: they don't change what the underlying
1500*404b540aSrobert 	 object is.  Likewise for SAVE_EXPR.  */
1501*404b540aSrobert       while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1502*404b540aSrobert 	     || TREE_CODE (t) == NON_LVALUE_EXPR
1503*404b540aSrobert 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
1504*404b540aSrobert 	     || TREE_CODE (t) == SAVE_EXPR)
1505*404b540aSrobert 	t = TREE_OPERAND (t, 0);
1506*404b540aSrobert 
1507*404b540aSrobert       /* We may look through structure-like accesses for the purposes of
1508*404b540aSrobert 	 examining TREE_THIS_NOTRAP, but not array-like accesses.  */
1509*404b540aSrobert       base = t;
1510*404b540aSrobert       while (TREE_CODE (base) == COMPONENT_REF
1511*404b540aSrobert 	     || TREE_CODE (base) == REALPART_EXPR
1512*404b540aSrobert 	     || TREE_CODE (base) == IMAGPART_EXPR
1513*404b540aSrobert 	     || TREE_CODE (base) == BIT_FIELD_REF)
1514*404b540aSrobert 	base = TREE_OPERAND (base, 0);
1515*404b540aSrobert 
1516*404b540aSrobert       if (DECL_P (base))
1517*404b540aSrobert 	{
1518*404b540aSrobert 	  if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1519*404b540aSrobert 	    MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1520*404b540aSrobert 	  else
1521*404b540aSrobert 	    MEM_NOTRAP_P (ref) = 1;
1522*404b540aSrobert 	}
1523*404b540aSrobert       else
1524*404b540aSrobert 	MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1525*404b540aSrobert 
1526*404b540aSrobert       base = get_base_address (base);
1527*404b540aSrobert       if (base && DECL_P (base)
1528*404b540aSrobert 	  && TREE_READONLY (base)
1529*404b540aSrobert 	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1530*404b540aSrobert 	{
1531*404b540aSrobert 	  tree base_type = TREE_TYPE (base);
1532*404b540aSrobert 	  gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1533*404b540aSrobert 		      || DECL_ARTIFICIAL (base));
1534*404b540aSrobert 	  MEM_READONLY_P (ref) = 1;
1535*404b540aSrobert 	}
1536*404b540aSrobert 
1537*404b540aSrobert       /* If this expression uses its parent's alias set, mark it such
1538*404b540aSrobert 	 that we won't change it.  */
1539*404b540aSrobert       if (component_uses_parent_alias_set (t))
1540*404b540aSrobert 	MEM_KEEP_ALIAS_SET_P (ref) = 1;
1541*404b540aSrobert 
1542*404b540aSrobert       /* If this is a decl, set the attributes of the MEM from it.  */
1543*404b540aSrobert       if (DECL_P (t))
1544*404b540aSrobert 	{
1545*404b540aSrobert 	  expr = t;
1546*404b540aSrobert 	  offset = const0_rtx;
1547*404b540aSrobert 	  apply_bitpos = bitpos;
1548*404b540aSrobert 	  size = (DECL_SIZE_UNIT (t)
1549*404b540aSrobert 		  && host_integerp (DECL_SIZE_UNIT (t), 1)
1550*404b540aSrobert 		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1551*404b540aSrobert 	  align = DECL_ALIGN (t);
1552*404b540aSrobert 	}
1553*404b540aSrobert 
1554*404b540aSrobert       /* If this is a constant, we know the alignment.  */
1555*404b540aSrobert       else if (CONSTANT_CLASS_P (t))
1556*404b540aSrobert 	{
1557*404b540aSrobert 	  align = TYPE_ALIGN (type);
1558*404b540aSrobert #ifdef CONSTANT_ALIGNMENT
1559*404b540aSrobert 	  align = CONSTANT_ALIGNMENT (t, align);
1560*404b540aSrobert #endif
1561*404b540aSrobert 	}
1562*404b540aSrobert 
1563*404b540aSrobert       /* If this is a field reference and not a bit-field, record it.  */
1564*404b540aSrobert       /* ??? There is some information that can be gleaned from bit-fields,
1565*404b540aSrobert 	 such as the word offset in the structure that might be modified.
1566*404b540aSrobert 	 But skip it for now.  */
1567*404b540aSrobert       else if (TREE_CODE (t) == COMPONENT_REF
1568*404b540aSrobert 	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1569*404b540aSrobert 	{
1570*404b540aSrobert 	  expr = component_ref_for_mem_expr (t);
1571*404b540aSrobert 	  offset = const0_rtx;
1572*404b540aSrobert 	  apply_bitpos = bitpos;
1573*404b540aSrobert 	  /* ??? Any reason the field size would be different than
1574*404b540aSrobert 	     the size we got from the type?  */
1575*404b540aSrobert 	}
1576*404b540aSrobert 
1577*404b540aSrobert       /* If this is an array reference, look for an outer field reference.  */
1578*404b540aSrobert       else if (TREE_CODE (t) == ARRAY_REF)
1579*404b540aSrobert 	{
1580*404b540aSrobert 	  tree off_tree = size_zero_node;
1581*404b540aSrobert 	  /* We can't modify t, because we use it at the end of the
1582*404b540aSrobert 	     function.  */
1583*404b540aSrobert 	  tree t2 = t;
1584*404b540aSrobert 
1585*404b540aSrobert 	  do
1586*404b540aSrobert 	    {
1587*404b540aSrobert 	      tree index = TREE_OPERAND (t2, 1);
1588*404b540aSrobert 	      tree low_bound = array_ref_low_bound (t2);
1589*404b540aSrobert 	      tree unit_size = array_ref_element_size (t2);
1590*404b540aSrobert 
1591*404b540aSrobert 	      /* We assume all arrays have sizes that are a multiple of a byte.
1592*404b540aSrobert 		 First subtract the lower bound, if any, in the type of the
1593*404b540aSrobert 		 index, then convert to sizetype and multiply by the size of
1594*404b540aSrobert 		 the array element.  */
1595*404b540aSrobert 	      if (! integer_zerop (low_bound))
1596*404b540aSrobert 		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1597*404b540aSrobert 				     index, low_bound);
1598*404b540aSrobert 
1599*404b540aSrobert 	      off_tree = size_binop (PLUS_EXPR,
1600*404b540aSrobert 				     size_binop (MULT_EXPR,
1601*404b540aSrobert 						 fold_convert (sizetype,
1602*404b540aSrobert 							       index),
1603*404b540aSrobert 						 unit_size),
1604*404b540aSrobert 				     off_tree);
1605*404b540aSrobert 	      t2 = TREE_OPERAND (t2, 0);
1606*404b540aSrobert 	    }
1607*404b540aSrobert 	  while (TREE_CODE (t2) == ARRAY_REF);
1608*404b540aSrobert 
1609*404b540aSrobert 	  if (DECL_P (t2))
1610*404b540aSrobert 	    {
1611*404b540aSrobert 	      expr = t2;
1612*404b540aSrobert 	      offset = NULL;
1613*404b540aSrobert 	      if (host_integerp (off_tree, 1))
1614*404b540aSrobert 		{
1615*404b540aSrobert 		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1616*404b540aSrobert 		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1617*404b540aSrobert 		  align = DECL_ALIGN (t2);
1618*404b540aSrobert 		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1619*404b540aSrobert 	            align = aoff;
1620*404b540aSrobert 		  offset = GEN_INT (ioff);
1621*404b540aSrobert 		  apply_bitpos = bitpos;
1622*404b540aSrobert 		}
1623*404b540aSrobert 	    }
1624*404b540aSrobert 	  else if (TREE_CODE (t2) == COMPONENT_REF)
1625*404b540aSrobert 	    {
1626*404b540aSrobert 	      expr = component_ref_for_mem_expr (t2);
1627*404b540aSrobert 	      if (host_integerp (off_tree, 1))
1628*404b540aSrobert 		{
1629*404b540aSrobert 		  offset = GEN_INT (tree_low_cst (off_tree, 1));
1630*404b540aSrobert 		  apply_bitpos = bitpos;
1631*404b540aSrobert 		}
1632*404b540aSrobert 	      /* ??? Any reason the field size would be different than
1633*404b540aSrobert 		 the size we got from the type?  */
1634*404b540aSrobert 	    }
1635*404b540aSrobert 	  else if (flag_argument_noalias > 1
1636*404b540aSrobert 		   && (INDIRECT_REF_P (t2))
1637*404b540aSrobert 		   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1638*404b540aSrobert 	    {
1639*404b540aSrobert 	      expr = t2;
1640*404b540aSrobert 	      offset = NULL;
1641*404b540aSrobert 	    }
1642*404b540aSrobert 	}
1643*404b540aSrobert 
1644*404b540aSrobert       /* If this is a Fortran indirect argument reference, record the
1645*404b540aSrobert 	 parameter decl.  */
1646*404b540aSrobert       else if (flag_argument_noalias > 1
1647*404b540aSrobert 	       && (INDIRECT_REF_P (t))
1648*404b540aSrobert 	       && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1649*404b540aSrobert 	{
1650*404b540aSrobert 	  expr = t;
1651*404b540aSrobert 	  offset = NULL;
1652*404b540aSrobert 	}
1653*404b540aSrobert     }
1654*404b540aSrobert 
1655*404b540aSrobert   /* If we modified OFFSET based on T, then subtract the outstanding
1656*404b540aSrobert      bit position offset.  Similarly, increase the size of the accessed
1657*404b540aSrobert      object to contain the negative offset.  */
1658*404b540aSrobert   if (apply_bitpos)
1659*404b540aSrobert     {
1660*404b540aSrobert       offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1661*404b540aSrobert       if (size)
1662*404b540aSrobert 	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1663*404b540aSrobert     }
1664*404b540aSrobert 
1665*404b540aSrobert   if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1666*404b540aSrobert     {
1667*404b540aSrobert       /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1668*404b540aSrobert 	 we're overlapping.  */
1669*404b540aSrobert       offset = NULL;
1670*404b540aSrobert       expr = NULL;
1671*404b540aSrobert     }
1672*404b540aSrobert 
1673*404b540aSrobert   /* Now set the attributes we computed above.  */
1674*404b540aSrobert   MEM_ATTRS (ref)
1675*404b540aSrobert     = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1676*404b540aSrobert 
1677*404b540aSrobert   /* If this is already known to be a scalar or aggregate, we are done.  */
1678*404b540aSrobert   if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1679*404b540aSrobert     return;
1680*404b540aSrobert 
1681*404b540aSrobert   /* If it is a reference into an aggregate, this is part of an aggregate.
1682*404b540aSrobert      Otherwise we don't know.  */
1683*404b540aSrobert   else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1684*404b540aSrobert 	   || TREE_CODE (t) == ARRAY_RANGE_REF
1685*404b540aSrobert 	   || TREE_CODE (t) == BIT_FIELD_REF)
1686*404b540aSrobert     MEM_IN_STRUCT_P (ref) = 1;
1687*404b540aSrobert }
1688*404b540aSrobert 
1689*404b540aSrobert void
1690*404b540aSrobert set_mem_attributes (rtx ref, tree t, int objectp)
1691*404b540aSrobert {
1692*404b540aSrobert   set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1693*404b540aSrobert }
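
/* Typical use, sketched as a hypothetical helper (guarded out of the
   build): build a MEM for a declaration and let the attribute
   machinery record its type, alias set, size and alignment.  Per the
   assertion above, this must happen before the MEM is installed with
   SET_DECL_RTL.  */
#if 0
static rtx
example_mem_for_decl (tree decl, rtx addr)
{
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);

  set_mem_attributes (mem, decl, 1);
  return mem;
}
#endif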
1694*404b540aSrobert 
1695*404b540aSrobert /* Set the decl for MEM to DECL.  */
1696*404b540aSrobert 
1697*404b540aSrobert void
1698*404b540aSrobert set_mem_attrs_from_reg (rtx mem, rtx reg)
1699*404b540aSrobert {
1700*404b540aSrobert   MEM_ATTRS (mem)
1701*404b540aSrobert     = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1702*404b540aSrobert 		     GEN_INT (REG_OFFSET (reg)),
1703*404b540aSrobert 		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1704*404b540aSrobert }
1705*404b540aSrobert 
1706*404b540aSrobert /* Set the alias set of MEM to SET.  */
1707*404b540aSrobert 
1708*404b540aSrobert void
1709*404b540aSrobert set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1710*404b540aSrobert {
1711*404b540aSrobert #ifdef ENABLE_CHECKING
1712*404b540aSrobert   /* If the new and old alias sets don't conflict, something is wrong.  */
1713*404b540aSrobert   gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1714*404b540aSrobert #endif
1715*404b540aSrobert 
1716*404b540aSrobert   MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1717*404b540aSrobert 				   MEM_SIZE (mem), MEM_ALIGN (mem),
1718*404b540aSrobert 				   GET_MODE (mem));
1719*404b540aSrobert }
1720*404b540aSrobert 
1721*404b540aSrobert /* Set the alignment of MEM to ALIGN bits.  */
1722*404b540aSrobert 
1723*404b540aSrobert void
1724*404b540aSrobert set_mem_align (rtx mem, unsigned int align)
1725*404b540aSrobert {
1726*404b540aSrobert   MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1727*404b540aSrobert 				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
1728*404b540aSrobert 				   GET_MODE (mem));
1729*404b540aSrobert }
1730*404b540aSrobert 
1731*404b540aSrobert /* Set the expr for MEM to EXPR.  */
1732*404b540aSrobert 
1733*404b540aSrobert void
1734*404b540aSrobert set_mem_expr (rtx mem, tree expr)
1735*404b540aSrobert {
1736*404b540aSrobert   MEM_ATTRS (mem)
1737*404b540aSrobert     = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1738*404b540aSrobert 		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1739*404b540aSrobert }
1740*404b540aSrobert 
1741*404b540aSrobert /* Set the offset of MEM to OFFSET.  */
1742*404b540aSrobert 
1743*404b540aSrobert void
1744*404b540aSrobert set_mem_offset (rtx mem, rtx offset)
1745*404b540aSrobert {
1746*404b540aSrobert   MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1747*404b540aSrobert 				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1748*404b540aSrobert 				   GET_MODE (mem));
1749*404b540aSrobert }
1750*404b540aSrobert 
1751*404b540aSrobert /* Set the size of MEM to SIZE.  */
1752*404b540aSrobert 
1753*404b540aSrobert void
1754*404b540aSrobert set_mem_size (rtx mem, rtx size)
1755*404b540aSrobert {
1756*404b540aSrobert   MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1757*404b540aSrobert 				   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1758*404b540aSrobert 				   GET_MODE (mem));
1759*404b540aSrobert }
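
/* The setters above all funnel through get_mem_attrs, replacing one
   field and keeping the rest.  A sketch of stacking several of them
   on one MEM (hypothetical helper, guarded out of the build; the
   particular values are arbitrary):  */
#if 0
static void
example_mem_setters (rtx mem)
{
  set_mem_align (mem, 8 * BITS_PER_UNIT);	/* Alignment, in bits.  */
  set_mem_size (mem, GEN_INT (16));		/* Size, in bytes.  */
  set_mem_offset (mem, const0_rtx);		/* Offset within MEM_EXPR.  */
}
#endif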
1760*404b540aSrobert 
1761*404b540aSrobert /* Return a memory reference like MEMREF, but with its mode changed to MODE
1762*404b540aSrobert    and its address changed to ADDR.  (VOIDmode means don't change the mode.
1763*404b540aSrobert    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
1764*404b540aSrobert    returned memory location is required to be valid.  The memory
1765*404b540aSrobert    attributes are not changed.  */
1766*404b540aSrobert 
1767*404b540aSrobert static rtx
1768*404b540aSrobert change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1769*404b540aSrobert {
1770*404b540aSrobert   rtx new;
1771*404b540aSrobert 
1772*404b540aSrobert   gcc_assert (MEM_P (memref));
1773*404b540aSrobert   if (mode == VOIDmode)
1774*404b540aSrobert     mode = GET_MODE (memref);
1775*404b540aSrobert   if (addr == 0)
1776*404b540aSrobert     addr = XEXP (memref, 0);
1777*404b540aSrobert   if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1778*404b540aSrobert       && (!validate || memory_address_p (mode, addr)))
1779*404b540aSrobert     return memref;
1780*404b540aSrobert 
1781*404b540aSrobert   if (validate)
1782*404b540aSrobert     {
1783*404b540aSrobert       if (reload_in_progress || reload_completed)
1784*404b540aSrobert 	gcc_assert (memory_address_p (mode, addr));
1785*404b540aSrobert       else
1786*404b540aSrobert 	addr = memory_address (mode, addr);
1787*404b540aSrobert     }
1788*404b540aSrobert 
1789*404b540aSrobert   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1790*404b540aSrobert     return memref;
1791*404b540aSrobert 
1792*404b540aSrobert   new = gen_rtx_MEM (mode, addr);
1793*404b540aSrobert   MEM_COPY_ATTRIBUTES (new, memref);
1794*404b540aSrobert   return new;
1795*404b540aSrobert }
1796*404b540aSrobert 
1797*404b540aSrobert /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1798*404b540aSrobert    way we are changing MEMREF, so we only preserve the alias set.  */
1799*404b540aSrobert 
1800*404b540aSrobert rtx
1801*404b540aSrobert change_address (rtx memref, enum machine_mode mode, rtx addr)
1802*404b540aSrobert {
1803*404b540aSrobert   rtx new = change_address_1 (memref, mode, addr, 1), size;
1804*404b540aSrobert   enum machine_mode mmode = GET_MODE (new);
1805*404b540aSrobert   unsigned int align;
1806*404b540aSrobert 
1807*404b540aSrobert   size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1808*404b540aSrobert   align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1809*404b540aSrobert 
1810*404b540aSrobert   /* If there are no changes, just return the original memory reference.  */
1811*404b540aSrobert   if (new == memref)
1812*404b540aSrobert     {
1813*404b540aSrobert       if (MEM_ATTRS (memref) == 0
1814*404b540aSrobert 	  || (MEM_EXPR (memref) == NULL
1815*404b540aSrobert 	      && MEM_OFFSET (memref) == NULL
1816*404b540aSrobert 	      && MEM_SIZE (memref) == size
1817*404b540aSrobert 	      && MEM_ALIGN (memref) == align))
1818*404b540aSrobert 	return new;
1819*404b540aSrobert 
1820*404b540aSrobert       new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1821*404b540aSrobert       MEM_COPY_ATTRIBUTES (new, memref);
1822*404b540aSrobert     }
1823*404b540aSrobert 
1824*404b540aSrobert   MEM_ATTRS (new)
1825*404b540aSrobert     = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1826*404b540aSrobert 
1827*404b540aSrobert   return new;
1828*404b540aSrobert }
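
/* A sketch (hypothetical helper, guarded out of the build):
   retargeting a BLKmode MEM, such as one produced for a block move,
   to a word-sized access through an address already in a register.
   Only the alias set survives, per the comment above.  */
#if 0
static rtx
example_word_view (rtx blk_mem, rtx addr_reg)
{
  return change_address (blk_mem, word_mode, addr_reg);
}
#endif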
1829*404b540aSrobert 
1830*404b540aSrobert /* Return a memory reference like MEMREF, but with its mode changed
1831*404b540aSrobert    to MODE and its address offset by OFFSET bytes.  If VALIDATE is
1832*404b540aSrobert    nonzero, the memory address is forced to be valid.
1833*404b540aSrobert    If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1834*404b540aSrobert    and caller is responsible for adjusting MEMREF base register.  */
1835*404b540aSrobert 
1836*404b540aSrobert rtx
1837*404b540aSrobert adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1838*404b540aSrobert 		  int validate, int adjust)
1839*404b540aSrobert {
1840*404b540aSrobert   rtx addr = XEXP (memref, 0);
1841*404b540aSrobert   rtx new;
1842*404b540aSrobert   rtx memoffset = MEM_OFFSET (memref);
1843*404b540aSrobert   rtx size = 0;
1844*404b540aSrobert   unsigned int memalign = MEM_ALIGN (memref);
1845*404b540aSrobert 
1846*404b540aSrobert   /* If there are no changes, just return the original memory reference.  */
1847*404b540aSrobert   if (mode == GET_MODE (memref) && !offset
1848*404b540aSrobert       && (!validate || memory_address_p (mode, addr)))
1849*404b540aSrobert     return memref;
1850*404b540aSrobert 
1851*404b540aSrobert   /* ??? Prefer to create garbage instead of creating shared rtl.
1852*404b540aSrobert      This may happen even if offset is nonzero -- consider
1853*404b540aSrobert      (plus (plus reg reg) const_int) -- so do this always.  */
1854*404b540aSrobert   addr = copy_rtx (addr);
1855*404b540aSrobert 
1856*404b540aSrobert   if (adjust)
1857*404b540aSrobert     {
1858*404b540aSrobert       /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1859*404b540aSrobert 	 object, we can merge it into the LO_SUM.  */
1860*404b540aSrobert       if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1861*404b540aSrobert 	  && offset >= 0
1862*404b540aSrobert 	  && (unsigned HOST_WIDE_INT) offset
1863*404b540aSrobert 	      < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1864*404b540aSrobert 	addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1865*404b540aSrobert 			       plus_constant (XEXP (addr, 1), offset));
1866*404b540aSrobert       else
1867*404b540aSrobert 	addr = plus_constant (addr, offset);
1868*404b540aSrobert     }
1869*404b540aSrobert 
1870*404b540aSrobert   new = change_address_1 (memref, mode, addr, validate);
1871*404b540aSrobert 
1872*404b540aSrobert   /* Compute the new values of the memory attributes due to this adjustment.
1873*404b540aSrobert      We add the offsets and update the alignment.  */
1874*404b540aSrobert   if (memoffset)
1875*404b540aSrobert     memoffset = GEN_INT (offset + INTVAL (memoffset));
1876*404b540aSrobert 
1877*404b540aSrobert   /* Compute the new alignment by taking the MIN of the alignment and the
1878*404b540aSrobert      lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1879*404b540aSrobert      is zero.  */
1880*404b540aSrobert   if (offset != 0)
1881*404b540aSrobert     memalign
1882*404b540aSrobert       = MIN (memalign,
1883*404b540aSrobert 	     (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1884*404b540aSrobert 
1885*404b540aSrobert   /* We can compute the size in a number of ways.  */
1886*404b540aSrobert   if (GET_MODE (new) != BLKmode)
1887*404b540aSrobert     size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1888*404b540aSrobert   else if (MEM_SIZE (memref))
1889*404b540aSrobert     size = plus_constant (MEM_SIZE (memref), -offset);
1890*404b540aSrobert 
1891*404b540aSrobert   MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1892*404b540aSrobert 				   memoffset, size, memalign, GET_MODE (new));
1893*404b540aSrobert 
1894*404b540aSrobert   /* At some point, we should validate that this offset is within the object,
1895*404b540aSrobert      if all the appropriate values are known.  */
1896*404b540aSrobert   return new;
1897*404b540aSrobert }
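
/* Most callers reach this through the adjust_address and
   adjust_address_nv macros in expr.h.  A sketch (hypothetical helper,
   guarded out of the build) of splitting a DImode MEM into its two
   SImode halves; the 0/4 offsets assume a little-endian layout, and a
   careful caller would use subreg_lowpart_offset and
   subreg_highpart_offset instead.  */
#if 0
static void
example_split_di_mem (rtx di_mem, rtx *lo, rtx *hi)
{
  *lo = adjust_address (di_mem, SImode, 0);
  *hi = adjust_address (di_mem, SImode, 4);
}
#endif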
1898*404b540aSrobert 
1899*404b540aSrobert /* Return a memory reference like MEMREF, but with its mode changed
1900*404b540aSrobert    to MODE and its address changed to ADDR, which is assumed to be
1901*404b540aSrobert    MEMREF offset by OFFSET bytes.  If VALIDATE is
1902*404b540aSrobert    nonzero, the memory address is forced to be valid.  */
1903*404b540aSrobert 
1904*404b540aSrobert rtx
1905*404b540aSrobert adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1906*404b540aSrobert 			     HOST_WIDE_INT offset, int validate)
1907*404b540aSrobert {
1908*404b540aSrobert   memref = change_address_1 (memref, VOIDmode, addr, validate);
1909*404b540aSrobert   return adjust_address_1 (memref, mode, offset, validate, 0);
1910*404b540aSrobert }
1911*404b540aSrobert 
1912*404b540aSrobert /* Return a memory reference like MEMREF, but whose address is changed by
1913*404b540aSrobert    adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
1914*404b540aSrobert    known to be in OFFSET (possibly 1).  */
1915*404b540aSrobert 
1916*404b540aSrobert rtx
1917*404b540aSrobert offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1918*404b540aSrobert {
1919*404b540aSrobert   rtx new, addr = XEXP (memref, 0);
1920*404b540aSrobert 
1921*404b540aSrobert   new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1922*404b540aSrobert 
1923*404b540aSrobert   /* At this point we don't know _why_ the address is invalid.  It
1924*404b540aSrobert      could have secondary memory references, multiplies or anything.
1925*404b540aSrobert 
1926*404b540aSrobert      However, if we did go and rearrange things, we can wind up not
1927*404b540aSrobert      being able to recognize the magic around pic_offset_table_rtx.
1928*404b540aSrobert      This stuff is fragile, and is yet another example of why it is
1929*404b540aSrobert      bad to expose PIC machinery too early.  */
1930*404b540aSrobert   if (! memory_address_p (GET_MODE (memref), new)
1931*404b540aSrobert       && GET_CODE (addr) == PLUS
1932*404b540aSrobert       && XEXP (addr, 0) == pic_offset_table_rtx)
1933*404b540aSrobert     {
1934*404b540aSrobert       addr = force_reg (GET_MODE (addr), addr);
1935*404b540aSrobert       new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1936*404b540aSrobert     }
1937*404b540aSrobert 
1938*404b540aSrobert   update_temp_slot_address (XEXP (memref, 0), new);
1939*404b540aSrobert   new = change_address_1 (memref, VOIDmode, new, 1);
1940*404b540aSrobert 
1941*404b540aSrobert   /* If there are no changes, just return the original memory reference.  */
1942*404b540aSrobert   if (new == memref)
1943*404b540aSrobert     return new;
1944*404b540aSrobert 
1945*404b540aSrobert   /* Update the alignment to reflect the offset.  Reset the offset, which
1946*404b540aSrobert      we don't know.  */
1947*404b540aSrobert   MEM_ATTRS (new)
1948*404b540aSrobert     = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1949*404b540aSrobert 		     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
1950*404b540aSrobert 		     GET_MODE (new));
1951*404b540aSrobert   return new;
1952*404b540aSrobert }
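
/* A sketch (hypothetical helper, guarded out of the build): indexing
   into a MEM with a run-time offset held in a register.  Passing 4
   for POW2 records that the offset is known to be a multiple of 4
   bytes, so the alignment attribute degrades to at most 32 bits
   rather than to BITS_PER_UNIT.  */
#if 0
static rtx
example_indexed_mem (rtx mem, rtx index_reg)
{
  return offset_address (mem, index_reg, 4);
}
#endif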
1953*404b540aSrobert 
1954*404b540aSrobert /* Return a memory reference like MEMREF, but with its address changed to
1955*404b540aSrobert    ADDR.  The caller is asserting that the actual piece of memory pointed
1956*404b540aSrobert    to is the same, just the form of the address is being changed, such as
1957*404b540aSrobert    by putting something into a register.  */
1958*404b540aSrobert 
1959*404b540aSrobert rtx
1960*404b540aSrobert replace_equiv_address (rtx memref, rtx addr)
1961*404b540aSrobert {
1962*404b540aSrobert   /* change_address_1 copies the memory attribute structure without change
1963*404b540aSrobert      and that's exactly what we want here.  */
1964*404b540aSrobert   update_temp_slot_address (XEXP (memref, 0), addr);
1965*404b540aSrobert   return change_address_1 (memref, VOIDmode, addr, 1);
1966*404b540aSrobert }
1967*404b540aSrobert 
1968*404b540aSrobert /* Likewise, but the reference is not required to be valid.  */
1969*404b540aSrobert 
1970*404b540aSrobert rtx
1971*404b540aSrobert replace_equiv_address_nv (rtx memref, rtx addr)
1972*404b540aSrobert {
1973*404b540aSrobert   return change_address_1 (memref, VOIDmode, addr, 0);
1974*404b540aSrobert }
1975*404b540aSrobert 
1976*404b540aSrobert /* Return a memory reference like MEMREF, but with its mode widened to
1977*404b540aSrobert    MODE and offset by OFFSET.  This would be used by targets that e.g.
1978*404b540aSrobert    cannot issue QImode memory operations and have to use SImode memory
1979*404b540aSrobert    operations plus masking logic.  */
1980*404b540aSrobert 
1981*404b540aSrobert rtx
1982*404b540aSrobert widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
1983*404b540aSrobert {
1984*404b540aSrobert   rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
1985*404b540aSrobert   tree expr = MEM_EXPR (new);
1986*404b540aSrobert   rtx memoffset = MEM_OFFSET (new);
1987*404b540aSrobert   unsigned int size = GET_MODE_SIZE (mode);
1988*404b540aSrobert 
1989*404b540aSrobert   /* If there are no changes, just return the original memory reference.  */
1990*404b540aSrobert   if (new == memref)
1991*404b540aSrobert     return new;
1992*404b540aSrobert 
1993*404b540aSrobert   /* If we don't know what offset we were at within the expression, then
1994*404b540aSrobert      we can't know if we've overstepped the bounds.  */
1995*404b540aSrobert   if (! memoffset)
1996*404b540aSrobert     expr = NULL_TREE;
1997*404b540aSrobert 
1998*404b540aSrobert   while (expr)
1999*404b540aSrobert     {
2000*404b540aSrobert       if (TREE_CODE (expr) == COMPONENT_REF)
2001*404b540aSrobert 	{
2002*404b540aSrobert 	  tree field = TREE_OPERAND (expr, 1);
2003*404b540aSrobert 	  tree offset = component_ref_field_offset (expr);
2004*404b540aSrobert 
2005*404b540aSrobert 	  if (! DECL_SIZE_UNIT (field))
2006*404b540aSrobert 	    {
2007*404b540aSrobert 	      expr = NULL_TREE;
2008*404b540aSrobert 	      break;
2009*404b540aSrobert 	    }
2010*404b540aSrobert 
2011*404b540aSrobert 	  /* Is the field at least as large as the access?  If so, ok,
2012*404b540aSrobert 	     otherwise strip back to the containing structure.  */
2013*404b540aSrobert 	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2014*404b540aSrobert 	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2015*404b540aSrobert 	      && INTVAL (memoffset) >= 0)
2016*404b540aSrobert 	    break;
2017*404b540aSrobert 
2018*404b540aSrobert 	  if (! host_integerp (offset, 1))
2019*404b540aSrobert 	    {
2020*404b540aSrobert 	      expr = NULL_TREE;
2021*404b540aSrobert 	      break;
2022*404b540aSrobert 	    }
2023*404b540aSrobert 
2024*404b540aSrobert 	  expr = TREE_OPERAND (expr, 0);
2025*404b540aSrobert 	  memoffset
2026*404b540aSrobert 	    = (GEN_INT (INTVAL (memoffset)
2027*404b540aSrobert 			+ tree_low_cst (offset, 1)
2028*404b540aSrobert 			+ (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2029*404b540aSrobert 			   / BITS_PER_UNIT)));
2030*404b540aSrobert 	}
2031*404b540aSrobert       /* Similarly for the decl.  */
2032*404b540aSrobert       else if (DECL_P (expr)
2033*404b540aSrobert 	       && DECL_SIZE_UNIT (expr)
2034*404b540aSrobert 	       && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2035*404b540aSrobert 	       && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2036*404b540aSrobert 	       && (! memoffset || INTVAL (memoffset) >= 0))
2037*404b540aSrobert 	break;
2038*404b540aSrobert       else
2039*404b540aSrobert 	{
2040*404b540aSrobert 	  /* The widened memory access overflows the expression, which means
2041*404b540aSrobert 	     that it could alias another expression.  Zap it.  */
2042*404b540aSrobert 	  expr = NULL_TREE;
2043*404b540aSrobert 	  break;
2044*404b540aSrobert 	}
2045*404b540aSrobert     }
2046*404b540aSrobert 
2047*404b540aSrobert   if (! expr)
2048*404b540aSrobert     memoffset = NULL_RTX;
2049*404b540aSrobert 
2050*404b540aSrobert   /* The widened memory may alias other stuff, so zap the alias set.  */
2051*404b540aSrobert   /* ??? Maybe use get_alias_set on any remaining expression.  */
2052*404b540aSrobert 
2053*404b540aSrobert   MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2054*404b540aSrobert 				   MEM_ALIGN (new), mode);
2055*404b540aSrobert 
2056*404b540aSrobert   return new;
2057*404b540aSrobert }
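
/* A sketch of the masking idiom described above (hypothetical helper,
   guarded out of the build): widen a QImode MEM to the containing
   SImode word so a target without byte loads can operate on it.  The
   zero offset assumes the byte is the first of its word; a real
   caller would compute the offset of the containing word.  */
#if 0
static rtx
example_widen_byte (rtx byte_mem)
{
  return widen_memory_access (byte_mem, SImode, 0);
}
#endif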
2058*404b540aSrobert 
2059*404b540aSrobert /* Return a newly created CODE_LABEL rtx with a unique label number.  */
2060*404b540aSrobert 
2061*404b540aSrobert rtx
2062*404b540aSrobert gen_label_rtx (void)
2063*404b540aSrobert {
2064*404b540aSrobert   return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2065*404b540aSrobert 			     NULL, label_num++, NULL);
2066*404b540aSrobert }
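
/* The usual pairing, sketched as a hypothetical helper (guarded out
   of the build): gen_label_rtx creates the CODE_LABEL up front so it
   can be branched to, and emit_label later places it in the insn
   stream at the branch target.  */
#if 0
static void
example_forward_branch (void)
{
  rtx label = gen_label_rtx ();

  emit_jump_insn (gen_jump (label));
  emit_barrier ();
  /* ... insns that are branched over ...  */
  emit_label (label);
}
#endif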
2067*404b540aSrobert 
2068*404b540aSrobert /* For procedure integration.  */
2069*404b540aSrobert 
2070*404b540aSrobert /* Install new pointers to the first and last insns in the chain.
2071*404b540aSrobert    Also, set cur_insn_uid to one higher than the last in use.
2072*404b540aSrobert    Used for an inline-procedure after copying the insn chain.  */
2073*404b540aSrobert 
2074*404b540aSrobert void
2075*404b540aSrobert set_new_first_and_last_insn (rtx first, rtx last)
2076*404b540aSrobert {
2077*404b540aSrobert   rtx insn;
2078*404b540aSrobert 
2079*404b540aSrobert   first_insn = first;
2080*404b540aSrobert   last_insn = last;
2081*404b540aSrobert   cur_insn_uid = 0;
2082*404b540aSrobert 
2083*404b540aSrobert   for (insn = first; insn; insn = NEXT_INSN (insn))
2084*404b540aSrobert     cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2085*404b540aSrobert 
2086*404b540aSrobert   cur_insn_uid++;
2087*404b540aSrobert }
2088*404b540aSrobert 
2089*404b540aSrobert /* Go through all the RTL insn bodies and copy any invalid shared
2090*404b540aSrobert    structure.  This routine should only be called once.  */
2091*404b540aSrobert 
2092*404b540aSrobert static void
2093*404b540aSrobert unshare_all_rtl_1 (tree fndecl, rtx insn)
2094*404b540aSrobert {
2095*404b540aSrobert   tree decl;
2096*404b540aSrobert 
2097*404b540aSrobert   /* Make sure that virtual parameters are not shared.  */
2098*404b540aSrobert   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2099*404b540aSrobert     SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2100*404b540aSrobert 
2101*404b540aSrobert   /* Make sure that virtual stack slots are not shared.  */
2102*404b540aSrobert   unshare_all_decls (DECL_INITIAL (fndecl));
2103*404b540aSrobert 
2104*404b540aSrobert   /* Unshare just about everything else.  */
2105*404b540aSrobert   unshare_all_rtl_in_chain (insn);
2106*404b540aSrobert 
2107*404b540aSrobert   /* Make sure the addresses of stack slots found outside the insn chain
2108*404b540aSrobert      (such as, in DECL_RTL of a variable) are not shared
2109*404b540aSrobert      with the insn chain.
2110*404b540aSrobert 
2111*404b540aSrobert      This special care is necessary when the stack slot MEM does not
2112*404b540aSrobert      actually appear in the insn chain.  If it does appear, its address
2113*404b540aSrobert      is unshared from all else at that point.  */
2114*404b540aSrobert   stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2115*404b540aSrobert }
2116*404b540aSrobert 
2117*404b540aSrobert /* Go through all the RTL insn bodies and copy any invalid shared
2118*404b540aSrobert    structure, again.  This is a fairly expensive thing to do, so it
2119*404b540aSrobert    should be done sparingly.  */
2120*404b540aSrobert 
2121*404b540aSrobert void
2122*404b540aSrobert unshare_all_rtl_again (rtx insn)
2123*404b540aSrobert {
2124*404b540aSrobert   rtx p;
2125*404b540aSrobert   tree decl;
2126*404b540aSrobert 
2127*404b540aSrobert   for (p = insn; p; p = NEXT_INSN (p))
2128*404b540aSrobert     if (INSN_P (p))
2129*404b540aSrobert       {
2130*404b540aSrobert 	reset_used_flags (PATTERN (p));
2131*404b540aSrobert 	reset_used_flags (REG_NOTES (p));
2132*404b540aSrobert 	reset_used_flags (LOG_LINKS (p));
2133*404b540aSrobert       }
2134*404b540aSrobert 
2135*404b540aSrobert   /* Make sure that virtual stack slots are not shared.  */
2136*404b540aSrobert   reset_used_decls (DECL_INITIAL (cfun->decl));
2137*404b540aSrobert 
2138*404b540aSrobert   /* Make sure that virtual parameters are not shared.  */
2139*404b540aSrobert   for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2140*404b540aSrobert     reset_used_flags (DECL_RTL (decl));
2141*404b540aSrobert 
2142*404b540aSrobert   reset_used_flags (stack_slot_list);
2143*404b540aSrobert 
2144*404b540aSrobert   unshare_all_rtl_1 (cfun->decl, insn);
2145*404b540aSrobert }
2146*404b540aSrobert 
2147*404b540aSrobert unsigned int
2148*404b540aSrobert unshare_all_rtl (void)
2149*404b540aSrobert {
2150*404b540aSrobert   unshare_all_rtl_1 (current_function_decl, get_insns ());
2151*404b540aSrobert   return 0;
2152*404b540aSrobert }
2153*404b540aSrobert 
2154*404b540aSrobert struct tree_opt_pass pass_unshare_all_rtl =
2155*404b540aSrobert {
2156*404b540aSrobert   "unshare",                            /* name */
2157*404b540aSrobert   NULL,                                 /* gate */
2158*404b540aSrobert   unshare_all_rtl,                      /* execute */
2159*404b540aSrobert   NULL,                                 /* sub */
2160*404b540aSrobert   NULL,                                 /* next */
2161*404b540aSrobert   0,                                    /* static_pass_number */
2162*404b540aSrobert   0,                                    /* tv_id */
2163*404b540aSrobert   0,                                    /* properties_required */
2164*404b540aSrobert   0,                                    /* properties_provided */
2165*404b540aSrobert   0,                                    /* properties_destroyed */
2166*404b540aSrobert   0,                                    /* todo_flags_start */
2167*404b540aSrobert   TODO_dump_func,                       /* todo_flags_finish */
2168*404b540aSrobert   0                                     /* letter */
2169*404b540aSrobert };
2170*404b540aSrobert 
2171*404b540aSrobert 
2172*404b540aSrobert /* Check that ORIG is not marked when it should not be and mark ORIG as in
2173*404b540aSrobert    use.  Recursively does the same for subexpressions.  */
2174*404b540aSrobert 
2175*404b540aSrobert static void
2176*404b540aSrobert verify_rtx_sharing (rtx orig, rtx insn)
2177*404b540aSrobert {
2178*404b540aSrobert   rtx x = orig;
2179*404b540aSrobert   int i;
2180*404b540aSrobert   enum rtx_code code;
2181*404b540aSrobert   const char *format_ptr;
2182*404b540aSrobert 
2183*404b540aSrobert   if (x == 0)
2184*404b540aSrobert     return;
2185*404b540aSrobert 
2186*404b540aSrobert   code = GET_CODE (x);
2187*404b540aSrobert 
2188*404b540aSrobert   /* These types may be freely shared.  */
2189*404b540aSrobert 
2190*404b540aSrobert   switch (code)
2191*404b540aSrobert     {
2192*404b540aSrobert     case REG:
2193*404b540aSrobert     case CONST_INT:
2194*404b540aSrobert     case CONST_DOUBLE:
2195*404b540aSrobert     case CONST_VECTOR:
2196*404b540aSrobert     case SYMBOL_REF:
2197*404b540aSrobert     case LABEL_REF:
2198*404b540aSrobert     case CODE_LABEL:
2199*404b540aSrobert     case PC:
2200*404b540aSrobert     case CC0:
2201*404b540aSrobert     case SCRATCH:
2202*404b540aSrobert       return;
2203*404b540aSrobert       /* A SCRATCH must be shared because each one represents a distinct value.  */
2204*404b540aSrobert     case CLOBBER:
2205*404b540aSrobert       if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2206*404b540aSrobert 	return;
2207*404b540aSrobert       break;
2208*404b540aSrobert 
2209*404b540aSrobert     case CONST:
2210*404b540aSrobert       /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
2211*404b540aSrobert 	 a LABEL_REF, it isn't sharable.  */
2212*404b540aSrobert       if (GET_CODE (XEXP (x, 0)) == PLUS
2213*404b540aSrobert 	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2214*404b540aSrobert 	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2215*404b540aSrobert 	return;
2216*404b540aSrobert       break;
2217*404b540aSrobert 
2218*404b540aSrobert     case MEM:
2219*404b540aSrobert       /* A MEM is allowed to be shared if its address is constant.  */
2220*404b540aSrobert       if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2221*404b540aSrobert 	  || reload_completed || reload_in_progress)
2222*404b540aSrobert 	return;
2223*404b540aSrobert 
2224*404b540aSrobert       break;
2225*404b540aSrobert 
2226*404b540aSrobert     default:
2227*404b540aSrobert       break;
2228*404b540aSrobert     }
2229*404b540aSrobert 
2230*404b540aSrobert   /* This rtx may not be shared.  If it has already been seen,
2231*404b540aSrobert      replace it with a copy of itself.  */
2232*404b540aSrobert #ifdef ENABLE_CHECKING
2233*404b540aSrobert   if (RTX_FLAG (x, used))
2234*404b540aSrobert     {
2235*404b540aSrobert       error ("invalid rtl sharing found in the insn");
2236*404b540aSrobert       debug_rtx (insn);
2237*404b540aSrobert       error ("shared rtx");
2238*404b540aSrobert       debug_rtx (x);
2239*404b540aSrobert       internal_error ("internal consistency failure");
2240*404b540aSrobert     }
2241*404b540aSrobert #endif
2242*404b540aSrobert   gcc_assert (!RTX_FLAG (x, used));
2243*404b540aSrobert 
2244*404b540aSrobert   RTX_FLAG (x, used) = 1;
2245*404b540aSrobert 
2246*404b540aSrobert   /* Now scan the subexpressions recursively.  */
2247*404b540aSrobert 
2248*404b540aSrobert   format_ptr = GET_RTX_FORMAT (code);
2249*404b540aSrobert 
2250*404b540aSrobert   for (i = 0; i < GET_RTX_LENGTH (code); i++)
2251*404b540aSrobert     {
2252*404b540aSrobert       switch (*format_ptr++)
2253*404b540aSrobert 	{
2254*404b540aSrobert 	case 'e':
2255*404b540aSrobert 	  verify_rtx_sharing (XEXP (x, i), insn);
2256*404b540aSrobert 	  break;
2257*404b540aSrobert 
2258*404b540aSrobert 	case 'E':
2259*404b540aSrobert 	  if (XVEC (x, i) != NULL)
2260*404b540aSrobert 	    {
2261*404b540aSrobert 	      int j;
2262*404b540aSrobert 	      int len = XVECLEN (x, i);
2263*404b540aSrobert 
2264*404b540aSrobert 	      for (j = 0; j < len; j++)
2265*404b540aSrobert 		{
2266*404b540aSrobert 		  /* We allow sharing of ASM_OPERANDS inside a single
2267*404b540aSrobert 		     instruction.  */
2268*404b540aSrobert 		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2269*404b540aSrobert 		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2270*404b540aSrobert 			  == ASM_OPERANDS))
2271*404b540aSrobert 		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2272*404b540aSrobert 		  else
2273*404b540aSrobert 		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
2274*404b540aSrobert 		}
2275*404b540aSrobert 	    }
2276*404b540aSrobert 	  break;
2277*404b540aSrobert 	}
2278*404b540aSrobert     }
2279*404b540aSrobert   return;
2280*404b540aSrobert }
2281*404b540aSrobert 
2282*404b540aSrobert /* Go through all the RTL insn bodies and check that there is no unexpected
2283*404b540aSrobert    sharing in between the subexpressions.  */
2284*404b540aSrobert 
2285*404b540aSrobert void
2286*404b540aSrobert verify_rtl_sharing (void)
2287*404b540aSrobert {
2288*404b540aSrobert   rtx p;
2289*404b540aSrobert 
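  /* First clear every used bit, then mark each subexpression in turn;
     verify_rtx_sharing will report an internal error for any rtx that is
     reached twice but is not of a freely shareable kind.  */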
2290*404b540aSrobert   for (p = get_insns (); p; p = NEXT_INSN (p))
2291*404b540aSrobert     if (INSN_P (p))
2292*404b540aSrobert       {
2293*404b540aSrobert 	reset_used_flags (PATTERN (p));
2294*404b540aSrobert 	reset_used_flags (REG_NOTES (p));
2295*404b540aSrobert 	reset_used_flags (LOG_LINKS (p));
2296*404b540aSrobert       }
2297*404b540aSrobert 
2298*404b540aSrobert   for (p = get_insns (); p; p = NEXT_INSN (p))
2299*404b540aSrobert     if (INSN_P (p))
2300*404b540aSrobert       {
2301*404b540aSrobert 	verify_rtx_sharing (PATTERN (p), p);
2302*404b540aSrobert 	verify_rtx_sharing (REG_NOTES (p), p);
2303*404b540aSrobert 	verify_rtx_sharing (LOG_LINKS (p), p);
2304*404b540aSrobert       }
2305*404b540aSrobert }
2306*404b540aSrobert 
2307*404b540aSrobert /* Go through all the RTL insn bodies and copy any invalid shared structure.
2308*404b540aSrobert    Assumes the mark bits are cleared at entry.  */
2309*404b540aSrobert 
2310*404b540aSrobert void
2311*404b540aSrobert unshare_all_rtl_in_chain (rtx insn)
2312*404b540aSrobert {
2313*404b540aSrobert   for (; insn; insn = NEXT_INSN (insn))
2314*404b540aSrobert     if (INSN_P (insn))
2315*404b540aSrobert       {
2316*404b540aSrobert 	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2317*404b540aSrobert 	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2318*404b540aSrobert 	LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2319*404b540aSrobert       }
2320*404b540aSrobert }
2321*404b540aSrobert 
2322*404b540aSrobert /* Go through all virtual stack slots of a function and copy any
2323*404b540aSrobert    shared structure.  */
2324*404b540aSrobert static void
2325*404b540aSrobert unshare_all_decls (tree blk)
2326*404b540aSrobert {
2327*404b540aSrobert   tree t;
2328*404b540aSrobert 
2329*404b540aSrobert   /* Copy shared decls.  */
2330*404b540aSrobert   for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2331*404b540aSrobert     if (DECL_RTL_SET_P (t))
2332*404b540aSrobert       SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2333*404b540aSrobert 
2334*404b540aSrobert   /* Now process sub-blocks.  */
2335*404b540aSrobert   for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2336*404b540aSrobert     unshare_all_decls (t);
2337*404b540aSrobert }
2338*404b540aSrobert 
2339*404b540aSrobert /* Go through all virtual stack slots of a function and mark them as
2340*404b540aSrobert    not shared.  */
2341*404b540aSrobert static void
2342*404b540aSrobert reset_used_decls (tree blk)
2343*404b540aSrobert {
2344*404b540aSrobert   tree t;
2345*404b540aSrobert 
2346*404b540aSrobert   /* Mark decls.  */
2347*404b540aSrobert   for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2348*404b540aSrobert     if (DECL_RTL_SET_P (t))
2349*404b540aSrobert       reset_used_flags (DECL_RTL (t));
2350*404b540aSrobert 
2351*404b540aSrobert   /* Now process sub-blocks.  */
2352*404b540aSrobert   for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2353*404b540aSrobert     reset_used_decls (t);
2354*404b540aSrobert }
2355*404b540aSrobert 
2356*404b540aSrobert /* Mark ORIG as in use, and return a copy of it if it was already in use.
2357*404b540aSrobert    Recursively does the same for subexpressions.  Uses
2358*404b540aSrobert    copy_rtx_if_shared_1 to reduce stack space.  */
2359*404b540aSrobert 
2360*404b540aSrobert rtx
2361*404b540aSrobert copy_rtx_if_shared (rtx orig)
2362*404b540aSrobert {
2363*404b540aSrobert   copy_rtx_if_shared_1 (&orig);
2364*404b540aSrobert   return orig;
2365*404b540aSrobert }
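/* A minimal sketch of the usual unsharing protocol, assuming the caller
   holds some rtx X whose used bits may be stale (illustrative, not a
   quotation from any caller):

	reset_used_flags (x);
	x = copy_rtx_if_shared (x);

   Afterward every non-shareable subexpression of X is marked in use, and
   any subexpression that had been reached twice has been replaced by a
   shallow copy, leaving X properly unshared.  */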
2366*404b540aSrobert 
2367*404b540aSrobert /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2368*404b540aSrobert    use.  Recursively does the same for subexpressions.  */
2369*404b540aSrobert 
2370*404b540aSrobert static void
2371*404b540aSrobert copy_rtx_if_shared_1 (rtx *orig1)
2372*404b540aSrobert {
2373*404b540aSrobert   rtx x;
2374*404b540aSrobert   int i;
2375*404b540aSrobert   enum rtx_code code;
2376*404b540aSrobert   rtx *last_ptr;
2377*404b540aSrobert   const char *format_ptr;
2378*404b540aSrobert   int copied = 0;
2379*404b540aSrobert   int length;
2380*404b540aSrobert 
2381*404b540aSrobert   /* Repeat is used to turn tail-recursion into iteration.  */
2382*404b540aSrobert repeat:
2383*404b540aSrobert   x = *orig1;
2384*404b540aSrobert 
2385*404b540aSrobert   if (x == 0)
2386*404b540aSrobert     return;
2387*404b540aSrobert 
2388*404b540aSrobert   code = GET_CODE (x);
2389*404b540aSrobert 
2390*404b540aSrobert   /* These types may be freely shared.  */
2391*404b540aSrobert 
2392*404b540aSrobert   switch (code)
2393*404b540aSrobert     {
2394*404b540aSrobert     case REG:
2395*404b540aSrobert     case CONST_INT:
2396*404b540aSrobert     case CONST_DOUBLE:
2397*404b540aSrobert     case CONST_VECTOR:
2398*404b540aSrobert     case SYMBOL_REF:
2399*404b540aSrobert     case LABEL_REF:
2400*404b540aSrobert     case CODE_LABEL:
2401*404b540aSrobert     case PC:
2402*404b540aSrobert     case CC0:
2403*404b540aSrobert     case SCRATCH:
2404*404b540aSrobert       /* A SCRATCH must be shared because each one represents a distinct value.  */
2405*404b540aSrobert       return;
2406*404b540aSrobert     case CLOBBER:
2407*404b540aSrobert       if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2408*404b540aSrobert 	return;
2409*404b540aSrobert       break;
2410*404b540aSrobert 
2411*404b540aSrobert     case CONST:
2412*404b540aSrobert       /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
2413*404b540aSrobert 	 a LABEL_REF, it isn't sharable.  */
2414*404b540aSrobert       if (GET_CODE (XEXP (x, 0)) == PLUS
2415*404b540aSrobert 	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2416*404b540aSrobert 	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2417*404b540aSrobert 	return;
2418*404b540aSrobert       break;
2419*404b540aSrobert 
2420*404b540aSrobert     case INSN:
2421*404b540aSrobert     case JUMP_INSN:
2422*404b540aSrobert     case CALL_INSN:
2423*404b540aSrobert     case NOTE:
2424*404b540aSrobert     case BARRIER:
2425*404b540aSrobert       /* The chain of insns is not being copied.  */
2426*404b540aSrobert       return;
2427*404b540aSrobert 
2428*404b540aSrobert     default:
2429*404b540aSrobert       break;
2430*404b540aSrobert     }
2431*404b540aSrobert 
2432*404b540aSrobert   /* This rtx may not be shared.  If it has already been seen,
2433*404b540aSrobert      replace it with a copy of itself.  */
2434*404b540aSrobert 
2435*404b540aSrobert   if (RTX_FLAG (x, used))
2436*404b540aSrobert     {
2437*404b540aSrobert       x = shallow_copy_rtx (x);
2438*404b540aSrobert       copied = 1;
2439*404b540aSrobert     }
2440*404b540aSrobert   RTX_FLAG (x, used) = 1;
2441*404b540aSrobert 
2442*404b540aSrobert   /* Now scan the subexpressions recursively.
2443*404b540aSrobert      We can store any replaced subexpressions directly into X
2444*404b540aSrobert      since we know X is not shared!  Any vectors in X
2445*404b540aSrobert      must be copied if X was copied.  */
2446*404b540aSrobert 
2447*404b540aSrobert   format_ptr = GET_RTX_FORMAT (code);
2448*404b540aSrobert   length = GET_RTX_LENGTH (code);
2449*404b540aSrobert   last_ptr = NULL;
2450*404b540aSrobert 
2451*404b540aSrobert   for (i = 0; i < length; i++)
2452*404b540aSrobert     {
2453*404b540aSrobert       switch (*format_ptr++)
2454*404b540aSrobert 	{
2455*404b540aSrobert 	case 'e':
2456*404b540aSrobert           if (last_ptr)
2457*404b540aSrobert             copy_rtx_if_shared_1 (last_ptr);
2458*404b540aSrobert 	  last_ptr = &XEXP (x, i);
2459*404b540aSrobert 	  break;
2460*404b540aSrobert 
2461*404b540aSrobert 	case 'E':
2462*404b540aSrobert 	  if (XVEC (x, i) != NULL)
2463*404b540aSrobert 	    {
2464*404b540aSrobert 	      int j;
2465*404b540aSrobert 	      int len = XVECLEN (x, i);
2466*404b540aSrobert 
2467*404b540aSrobert               /* Copy the vector iff we copied the rtx and the length
2468*404b540aSrobert 		 is nonzero.  */
2469*404b540aSrobert 	      if (copied && len > 0)
2470*404b540aSrobert 		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2471*404b540aSrobert 
2472*404b540aSrobert               /* Call recursively on all inside the vector.  */
2473*404b540aSrobert 	      for (j = 0; j < len; j++)
2474*404b540aSrobert                 {
2475*404b540aSrobert 		  if (last_ptr)
2476*404b540aSrobert 		    copy_rtx_if_shared_1 (last_ptr);
2477*404b540aSrobert                   last_ptr = &XVECEXP (x, i, j);
2478*404b540aSrobert                 }
2479*404b540aSrobert 	    }
2480*404b540aSrobert 	  break;
2481*404b540aSrobert 	}
2482*404b540aSrobert     }
2483*404b540aSrobert   *orig1 = x;
2484*404b540aSrobert   if (last_ptr)
2485*404b540aSrobert     {
2486*404b540aSrobert       orig1 = last_ptr;
2487*404b540aSrobert       goto repeat;
2488*404b540aSrobert     }
2489*404b540aSrobert   return;
2490*404b540aSrobert }
2491*404b540aSrobert 
2492*404b540aSrobert /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2493*404b540aSrobert    to look for shared sub-parts.  */
2494*404b540aSrobert 
2495*404b540aSrobert void
2496*404b540aSrobert reset_used_flags (rtx x)
2497*404b540aSrobert {
2498*404b540aSrobert   int i, j;
2499*404b540aSrobert   enum rtx_code code;
2500*404b540aSrobert   const char *format_ptr;
2501*404b540aSrobert   int length;
2502*404b540aSrobert 
2503*404b540aSrobert   /* Repeat is used to turn tail-recursion into iteration.  */
2504*404b540aSrobert repeat:
2505*404b540aSrobert   if (x == 0)
2506*404b540aSrobert     return;
2507*404b540aSrobert 
2508*404b540aSrobert   code = GET_CODE (x);
2509*404b540aSrobert 
2510*404b540aSrobert   /* These types may be freely shared so we needn't do any resetting
2511*404b540aSrobert      for them.  */
2512*404b540aSrobert 
2513*404b540aSrobert   switch (code)
2514*404b540aSrobert     {
2515*404b540aSrobert     case REG:
2516*404b540aSrobert     case CONST_INT:
2517*404b540aSrobert     case CONST_DOUBLE:
2518*404b540aSrobert     case CONST_VECTOR:
2519*404b540aSrobert     case SYMBOL_REF:
2520*404b540aSrobert     case CODE_LABEL:
2521*404b540aSrobert     case PC:
2522*404b540aSrobert     case CC0:
2523*404b540aSrobert       return;
2524*404b540aSrobert 
2525*404b540aSrobert     case INSN:
2526*404b540aSrobert     case JUMP_INSN:
2527*404b540aSrobert     case CALL_INSN:
2528*404b540aSrobert     case NOTE:
2529*404b540aSrobert     case LABEL_REF:
2530*404b540aSrobert     case BARRIER:
2531*404b540aSrobert       /* The chain of insns is not being copied.  */
2532*404b540aSrobert       return;
2533*404b540aSrobert 
2534*404b540aSrobert     default:
2535*404b540aSrobert       break;
2536*404b540aSrobert     }
2537*404b540aSrobert 
2538*404b540aSrobert   RTX_FLAG (x, used) = 0;
2539*404b540aSrobert 
2540*404b540aSrobert   format_ptr = GET_RTX_FORMAT (code);
2541*404b540aSrobert   length = GET_RTX_LENGTH (code);
2542*404b540aSrobert 
2543*404b540aSrobert   for (i = 0; i < length; i++)
2544*404b540aSrobert     {
2545*404b540aSrobert       switch (*format_ptr++)
2546*404b540aSrobert 	{
2547*404b540aSrobert 	case 'e':
2548*404b540aSrobert           if (i == length-1)
2549*404b540aSrobert             {
2550*404b540aSrobert               x = XEXP (x, i);
2551*404b540aSrobert 	      goto repeat;
2552*404b540aSrobert             }
2553*404b540aSrobert 	  reset_used_flags (XEXP (x, i));
2554*404b540aSrobert 	  break;
2555*404b540aSrobert 
2556*404b540aSrobert 	case 'E':
2557*404b540aSrobert 	  for (j = 0; j < XVECLEN (x, i); j++)
2558*404b540aSrobert 	    reset_used_flags (XVECEXP (x, i, j));
2559*404b540aSrobert 	  break;
2560*404b540aSrobert 	}
2561*404b540aSrobert     }
2562*404b540aSrobert }
2563*404b540aSrobert 
2564*404b540aSrobert /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2565*404b540aSrobert    to look for shared sub-parts.  */
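/* (Marking X in advance this way makes a later copy_rtx_if_shared treat
   every subexpression of X as already in use, forcing it to copy rather
   than reuse them.)  */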
2566*404b540aSrobert 
2567*404b540aSrobert void
2568*404b540aSrobert set_used_flags (rtx x)
2569*404b540aSrobert {
2570*404b540aSrobert   int i, j;
2571*404b540aSrobert   enum rtx_code code;
2572*404b540aSrobert   const char *format_ptr;
2573*404b540aSrobert 
2574*404b540aSrobert   if (x == 0)
2575*404b540aSrobert     return;
2576*404b540aSrobert 
2577*404b540aSrobert   code = GET_CODE (x);
2578*404b540aSrobert 
2579*404b540aSrobert   /* These types may be freely shared so we needn't do any resetting
2580*404b540aSrobert      for them.  */
2581*404b540aSrobert 
2582*404b540aSrobert   switch (code)
2583*404b540aSrobert     {
2584*404b540aSrobert     case REG:
2585*404b540aSrobert     case CONST_INT:
2586*404b540aSrobert     case CONST_DOUBLE:
2587*404b540aSrobert     case CONST_VECTOR:
2588*404b540aSrobert     case SYMBOL_REF:
2589*404b540aSrobert     case CODE_LABEL:
2590*404b540aSrobert     case PC:
2591*404b540aSrobert     case CC0:
2592*404b540aSrobert       return;
2593*404b540aSrobert 
2594*404b540aSrobert     case INSN:
2595*404b540aSrobert     case JUMP_INSN:
2596*404b540aSrobert     case CALL_INSN:
2597*404b540aSrobert     case NOTE:
2598*404b540aSrobert     case LABEL_REF:
2599*404b540aSrobert     case BARRIER:
2600*404b540aSrobert       /* The chain of insns is not being copied.  */
2601*404b540aSrobert       return;
2602*404b540aSrobert 
2603*404b540aSrobert     default:
2604*404b540aSrobert       break;
2605*404b540aSrobert     }
2606*404b540aSrobert 
2607*404b540aSrobert   RTX_FLAG (x, used) = 1;
2608*404b540aSrobert 
2609*404b540aSrobert   format_ptr = GET_RTX_FORMAT (code);
2610*404b540aSrobert   for (i = 0; i < GET_RTX_LENGTH (code); i++)
2611*404b540aSrobert     {
2612*404b540aSrobert       switch (*format_ptr++)
2613*404b540aSrobert 	{
2614*404b540aSrobert 	case 'e':
2615*404b540aSrobert 	  set_used_flags (XEXP (x, i));
2616*404b540aSrobert 	  break;
2617*404b540aSrobert 
2618*404b540aSrobert 	case 'E':
2619*404b540aSrobert 	  for (j = 0; j < XVECLEN (x, i); j++)
2620*404b540aSrobert 	    set_used_flags (XVECEXP (x, i, j));
2621*404b540aSrobert 	  break;
2622*404b540aSrobert 	}
2623*404b540aSrobert     }
2624*404b540aSrobert }
2625*404b540aSrobert 
2626*404b540aSrobert /* Copy X if necessary so that it won't be altered by changes in OTHER.
2627*404b540aSrobert    Return X or the rtx for the pseudo reg the value of X was copied into.
2628*404b540aSrobert    OTHER must be valid as a SET_DEST.  */
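/* For example (illustrative): if OTHER is a hard register that is
   mentioned in X, the value of X is first copied into a fresh pseudo,
   so that a subsequent store into OTHER cannot clobber it.  */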
2629*404b540aSrobert 
2630*404b540aSrobert rtx
2631*404b540aSrobert make_safe_from (rtx x, rtx other)
2632*404b540aSrobert {
2633*404b540aSrobert   while (1)
2634*404b540aSrobert     switch (GET_CODE (other))
2635*404b540aSrobert       {
2636*404b540aSrobert       case SUBREG:
2637*404b540aSrobert 	other = SUBREG_REG (other);
2638*404b540aSrobert 	break;
2639*404b540aSrobert       case STRICT_LOW_PART:
2640*404b540aSrobert       case SIGN_EXTEND:
2641*404b540aSrobert       case ZERO_EXTEND:
2642*404b540aSrobert 	other = XEXP (other, 0);
2643*404b540aSrobert 	break;
2644*404b540aSrobert       default:
2645*404b540aSrobert 	goto done;
2646*404b540aSrobert       }
2647*404b540aSrobert  done:
2648*404b540aSrobert   if ((MEM_P (other)
2649*404b540aSrobert        && ! CONSTANT_P (x)
2650*404b540aSrobert        && !REG_P (x)
2651*404b540aSrobert        && GET_CODE (x) != SUBREG)
2652*404b540aSrobert       || (REG_P (other)
2653*404b540aSrobert 	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
2654*404b540aSrobert 	      || reg_mentioned_p (other, x))))
2655*404b540aSrobert     {
2656*404b540aSrobert       rtx temp = gen_reg_rtx (GET_MODE (x));
2657*404b540aSrobert       emit_move_insn (temp, x);
2658*404b540aSrobert       return temp;
2659*404b540aSrobert     }
2660*404b540aSrobert   return x;
2661*404b540aSrobert }
2662*404b540aSrobert 
2663*404b540aSrobert /* Emission of insns (adding them to the doubly-linked list).  */
2664*404b540aSrobert 
2665*404b540aSrobert /* Return the first insn of the current sequence or current function.  */
2666*404b540aSrobert 
2667*404b540aSrobert rtx
2668*404b540aSrobert get_insns (void)
2669*404b540aSrobert {
2670*404b540aSrobert   return first_insn;
2671*404b540aSrobert }
2672*404b540aSrobert 
2673*404b540aSrobert /* Specify a new insn as the first in the chain.  */
2674*404b540aSrobert 
2675*404b540aSrobert void
2676*404b540aSrobert set_first_insn (rtx insn)
2677*404b540aSrobert {
2678*404b540aSrobert   gcc_assert (!PREV_INSN (insn));
2679*404b540aSrobert   first_insn = insn;
2680*404b540aSrobert }
2681*404b540aSrobert 
2682*404b540aSrobert /* Return the last insn emitted in current sequence or current function.  */
2683*404b540aSrobert 
2684*404b540aSrobert rtx
2685*404b540aSrobert get_last_insn (void)
2686*404b540aSrobert {
2687*404b540aSrobert   return last_insn;
2688*404b540aSrobert }
2689*404b540aSrobert 
2690*404b540aSrobert /* Specify a new insn as the last in the chain.  */
2691*404b540aSrobert 
2692*404b540aSrobert void
2693*404b540aSrobert set_last_insn (rtx insn)
2694*404b540aSrobert {
2695*404b540aSrobert   gcc_assert (!NEXT_INSN (insn));
2696*404b540aSrobert   last_insn = insn;
2697*404b540aSrobert }
2698*404b540aSrobert 
2699*404b540aSrobert /* Return the last insn emitted, even if it is in a sequence now pushed.  */
2700*404b540aSrobert 
2701*404b540aSrobert rtx
2702*404b540aSrobert get_last_insn_anywhere (void)
2703*404b540aSrobert {
2704*404b540aSrobert   struct sequence_stack *stack;
2705*404b540aSrobert   if (last_insn)
2706*404b540aSrobert     return last_insn;
2707*404b540aSrobert   for (stack = seq_stack; stack; stack = stack->next)
2708*404b540aSrobert     if (stack->last != 0)
2709*404b540aSrobert       return stack->last;
2710*404b540aSrobert   return 0;
2711*404b540aSrobert }
2712*404b540aSrobert 
2713*404b540aSrobert /* Return the first nonnote insn emitted in current sequence or current
2714*404b540aSrobert    function.  This routine looks inside SEQUENCEs.  */
2715*404b540aSrobert 
2716*404b540aSrobert rtx
2717*404b540aSrobert get_first_nonnote_insn (void)
2718*404b540aSrobert {
2719*404b540aSrobert   rtx insn = first_insn;
2720*404b540aSrobert 
2721*404b540aSrobert   if (insn)
2722*404b540aSrobert     {
2723*404b540aSrobert       if (NOTE_P (insn))
2724*404b540aSrobert 	for (insn = next_insn (insn);
2725*404b540aSrobert 	     insn && NOTE_P (insn);
2726*404b540aSrobert 	     insn = next_insn (insn))
2727*404b540aSrobert 	  continue;
2728*404b540aSrobert       else
2729*404b540aSrobert 	{
2730*404b540aSrobert 	  if (NONJUMP_INSN_P (insn)
2731*404b540aSrobert 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2732*404b540aSrobert 	    insn = XVECEXP (PATTERN (insn), 0, 0);
2733*404b540aSrobert 	}
2734*404b540aSrobert     }
2735*404b540aSrobert 
2736*404b540aSrobert   return insn;
2737*404b540aSrobert }
2738*404b540aSrobert 
2739*404b540aSrobert /* Return the last nonnote insn emitted in current sequence or current
2740*404b540aSrobert    function.  This routine looks inside SEQUENCEs.  */
2741*404b540aSrobert 
2742*404b540aSrobert rtx
2743*404b540aSrobert get_last_nonnote_insn (void)
2744*404b540aSrobert {
2745*404b540aSrobert   rtx insn = last_insn;
2746*404b540aSrobert 
2747*404b540aSrobert   if (insn)
2748*404b540aSrobert     {
2749*404b540aSrobert       if (NOTE_P (insn))
2750*404b540aSrobert 	for (insn = previous_insn (insn);
2751*404b540aSrobert 	     insn && NOTE_P (insn);
2752*404b540aSrobert 	     insn = previous_insn (insn))
2753*404b540aSrobert 	  continue;
2754*404b540aSrobert       else
2755*404b540aSrobert 	{
2756*404b540aSrobert 	  if (NONJUMP_INSN_P (insn)
2757*404b540aSrobert 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2758*404b540aSrobert 	    insn = XVECEXP (PATTERN (insn), 0,
2759*404b540aSrobert 			    XVECLEN (PATTERN (insn), 0) - 1);
2760*404b540aSrobert 	}
2761*404b540aSrobert     }
2762*404b540aSrobert 
2763*404b540aSrobert   return insn;
2764*404b540aSrobert }
2765*404b540aSrobert 
2766*404b540aSrobert /* Return a number larger than any instruction's uid in this function.  */
2767*404b540aSrobert 
2768*404b540aSrobert int
2769*404b540aSrobert get_max_uid (void)
2770*404b540aSrobert {
2771*404b540aSrobert   return cur_insn_uid;
2772*404b540aSrobert }
2773*404b540aSrobert 
2774*404b540aSrobert /* Renumber instructions so that no instruction UIDs are wasted.  */
2775*404b540aSrobert 
2776*404b540aSrobert void
2777*404b540aSrobert renumber_insns (void)
2778*404b540aSrobert {
2779*404b540aSrobert   rtx insn;
2780*404b540aSrobert 
2781*404b540aSrobert   /* If we're not supposed to renumber instructions, don't.  */
2782*404b540aSrobert   if (!flag_renumber_insns)
2783*404b540aSrobert     return;
2784*404b540aSrobert 
2785*404b540aSrobert   /* If there aren't that many instructions, then it's not really
2786*404b540aSrobert      worth renumbering them.  */
2787*404b540aSrobert   if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2788*404b540aSrobert     return;
2789*404b540aSrobert 
2790*404b540aSrobert   cur_insn_uid = 1;
2791*404b540aSrobert 
2792*404b540aSrobert   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2793*404b540aSrobert     {
2794*404b540aSrobert       if (dump_file)
2795*404b540aSrobert 	fprintf (dump_file, "Renumbering insn %d to %d\n",
2796*404b540aSrobert 		 INSN_UID (insn), cur_insn_uid);
2797*404b540aSrobert       INSN_UID (insn) = cur_insn_uid++;
2798*404b540aSrobert     }
2799*404b540aSrobert }
2800*404b540aSrobert 
2801*404b540aSrobert /* Return the next insn.  If it is a SEQUENCE, return the first insn
2802*404b540aSrobert    of the sequence.  */
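/* (A SEQUENCE typically groups an insn together with the insns filling
   its delay slots, so stepping inside it yields the real next insn.)  */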
2803*404b540aSrobert 
2804*404b540aSrobert rtx
2805*404b540aSrobert next_insn (rtx insn)
2806*404b540aSrobert {
2807*404b540aSrobert   if (insn)
2808*404b540aSrobert     {
2809*404b540aSrobert       insn = NEXT_INSN (insn);
2810*404b540aSrobert       if (insn && NONJUMP_INSN_P (insn)
2811*404b540aSrobert 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
2812*404b540aSrobert 	insn = XVECEXP (PATTERN (insn), 0, 0);
2813*404b540aSrobert     }
2814*404b540aSrobert 
2815*404b540aSrobert   return insn;
2816*404b540aSrobert }
2817*404b540aSrobert 
2818*404b540aSrobert /* Return the previous insn.  If it is a SEQUENCE, return the last insn
2819*404b540aSrobert    of the sequence.  */
2820*404b540aSrobert 
2821*404b540aSrobert rtx
2822*404b540aSrobert previous_insn (rtx insn)
2823*404b540aSrobert {
2824*404b540aSrobert   if (insn)
2825*404b540aSrobert     {
2826*404b540aSrobert       insn = PREV_INSN (insn);
2827*404b540aSrobert       if (insn && NONJUMP_INSN_P (insn)
2828*404b540aSrobert 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
2829*404b540aSrobert 	insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2830*404b540aSrobert     }
2831*404b540aSrobert 
2832*404b540aSrobert   return insn;
2833*404b540aSrobert }
2834*404b540aSrobert 
2835*404b540aSrobert /* Return the next insn after INSN that is not a NOTE.  This routine does not
2836*404b540aSrobert    look inside SEQUENCEs.  */
2837*404b540aSrobert 
2838*404b540aSrobert rtx
2839*404b540aSrobert next_nonnote_insn (rtx insn)
2840*404b540aSrobert {
2841*404b540aSrobert   while (insn)
2842*404b540aSrobert     {
2843*404b540aSrobert       insn = NEXT_INSN (insn);
2844*404b540aSrobert       if (insn == 0 || !NOTE_P (insn))
2845*404b540aSrobert 	break;
2846*404b540aSrobert     }
2847*404b540aSrobert 
2848*404b540aSrobert   return insn;
2849*404b540aSrobert }
2850*404b540aSrobert 
2851*404b540aSrobert /* Return the previous insn before INSN that is not a NOTE.  This routine does
2852*404b540aSrobert    not look inside SEQUENCEs.  */
2853*404b540aSrobert 
2854*404b540aSrobert rtx
2855*404b540aSrobert prev_nonnote_insn (rtx insn)
2856*404b540aSrobert {
2857*404b540aSrobert   while (insn)
2858*404b540aSrobert     {
2859*404b540aSrobert       insn = PREV_INSN (insn);
2860*404b540aSrobert       if (insn == 0 || !NOTE_P (insn))
2861*404b540aSrobert 	break;
2862*404b540aSrobert     }
2863*404b540aSrobert 
2864*404b540aSrobert   return insn;
2865*404b540aSrobert }
2866*404b540aSrobert 
2867*404b540aSrobert /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2868*404b540aSrobert    or 0, if there is none.  This routine does not look inside
2869*404b540aSrobert    SEQUENCEs.  */
2870*404b540aSrobert 
2871*404b540aSrobert rtx
2872*404b540aSrobert next_real_insn (rtx insn)
2873*404b540aSrobert {
2874*404b540aSrobert   while (insn)
2875*404b540aSrobert     {
2876*404b540aSrobert       insn = NEXT_INSN (insn);
2877*404b540aSrobert       if (insn == 0 || INSN_P (insn))
2878*404b540aSrobert 	break;
2879*404b540aSrobert     }
2880*404b540aSrobert 
2881*404b540aSrobert   return insn;
2882*404b540aSrobert }
2883*404b540aSrobert 
2884*404b540aSrobert /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2885*404b540aSrobert    or 0, if there is none.  This routine does not look inside
2886*404b540aSrobert    SEQUENCEs.  */
2887*404b540aSrobert 
2888*404b540aSrobert rtx
2889*404b540aSrobert prev_real_insn (rtx insn)
2890*404b540aSrobert {
2891*404b540aSrobert   while (insn)
2892*404b540aSrobert     {
2893*404b540aSrobert       insn = PREV_INSN (insn);
2894*404b540aSrobert       if (insn == 0 || INSN_P (insn))
2895*404b540aSrobert 	break;
2896*404b540aSrobert     }
2897*404b540aSrobert 
2898*404b540aSrobert   return insn;
2899*404b540aSrobert }
2900*404b540aSrobert 
2901*404b540aSrobert /* Return the last CALL_INSN in the current list, or 0 if there is none.
2902*404b540aSrobert    This routine does not look inside SEQUENCEs.  */
2903*404b540aSrobert 
2904*404b540aSrobert rtx
2905*404b540aSrobert last_call_insn (void)
2906*404b540aSrobert {
2907*404b540aSrobert   rtx insn;
2908*404b540aSrobert 
2909*404b540aSrobert   for (insn = get_last_insn ();
2910*404b540aSrobert        insn && !CALL_P (insn);
2911*404b540aSrobert        insn = PREV_INSN (insn))
2912*404b540aSrobert     ;
2913*404b540aSrobert 
2914*404b540aSrobert   return insn;
2915*404b540aSrobert }
2916*404b540aSrobert 
2917*404b540aSrobert /* Return nonzero if INSN really does something.  CALL_INSNs and
2918*404b540aSrobert    JUMP_INSNs are always active; an ordinary INSN is active unless reload
2919*404b540aSrobert    has completed and its body is just a USE or CLOBBER.  */
2920*404b540aSrobert 
2921*404b540aSrobert int
2922*404b540aSrobert active_insn_p (rtx insn)
2923*404b540aSrobert {
2924*404b540aSrobert   return (CALL_P (insn) || JUMP_P (insn)
2925*404b540aSrobert 	  || (NONJUMP_INSN_P (insn)
2926*404b540aSrobert 	      && (! reload_completed
2927*404b540aSrobert 		  || (GET_CODE (PATTERN (insn)) != USE
2928*404b540aSrobert 		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
2929*404b540aSrobert }
2930*404b540aSrobert 
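/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as next_real_insn.  */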
2931*404b540aSrobert rtx
2932*404b540aSrobert next_active_insn (rtx insn)
2933*404b540aSrobert {
2934*404b540aSrobert   while (insn)
2935*404b540aSrobert     {
2936*404b540aSrobert       insn = NEXT_INSN (insn);
2937*404b540aSrobert       if (insn == 0 || active_insn_p (insn))
2938*404b540aSrobert 	break;
2939*404b540aSrobert     }
2940*404b540aSrobert 
2941*404b540aSrobert   return insn;
2942*404b540aSrobert }
2943*404b540aSrobert 
2944*404b540aSrobert /* Find the last insn before INSN that really does something.  This routine
2945*404b540aSrobert    does not look inside SEQUENCEs.  Until reload has completed, this is the
2946*404b540aSrobert    same as prev_real_insn.  */
2947*404b540aSrobert 
2948*404b540aSrobert rtx
2949*404b540aSrobert prev_active_insn (rtx insn)
2950*404b540aSrobert {
2951*404b540aSrobert   while (insn)
2952*404b540aSrobert     {
2953*404b540aSrobert       insn = PREV_INSN (insn);
2954*404b540aSrobert       if (insn == 0 || active_insn_p (insn))
2955*404b540aSrobert 	break;
2956*404b540aSrobert     }
2957*404b540aSrobert 
2958*404b540aSrobert   return insn;
2959*404b540aSrobert }
2960*404b540aSrobert 
2961*404b540aSrobert /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */
2962*404b540aSrobert 
2963*404b540aSrobert rtx
2964*404b540aSrobert next_label (rtx insn)
2965*404b540aSrobert {
2966*404b540aSrobert   while (insn)
2967*404b540aSrobert     {
2968*404b540aSrobert       insn = NEXT_INSN (insn);
2969*404b540aSrobert       if (insn == 0 || LABEL_P (insn))
2970*404b540aSrobert 	break;
2971*404b540aSrobert     }
2972*404b540aSrobert 
2973*404b540aSrobert   return insn;
2974*404b540aSrobert }
2975*404b540aSrobert 
2976*404b540aSrobert /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */
2977*404b540aSrobert 
2978*404b540aSrobert rtx
2979*404b540aSrobert prev_label (rtx insn)
2980*404b540aSrobert {
2981*404b540aSrobert   while (insn)
2982*404b540aSrobert     {
2983*404b540aSrobert       insn = PREV_INSN (insn);
2984*404b540aSrobert       if (insn == 0 || LABEL_P (insn))
2985*404b540aSrobert 	break;
2986*404b540aSrobert     }
2987*404b540aSrobert 
2988*404b540aSrobert   return insn;
2989*404b540aSrobert }
2990*404b540aSrobert 
2991*404b540aSrobert /* Return the last label to mark the same position as LABEL.  Return null
2992*404b540aSrobert    if LABEL itself is null.  */
2993*404b540aSrobert 
2994*404b540aSrobert rtx
2995*404b540aSrobert skip_consecutive_labels (rtx label)
2996*404b540aSrobert {
2997*404b540aSrobert   rtx insn;
2998*404b540aSrobert 
2999*404b540aSrobert   for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3000*404b540aSrobert     if (LABEL_P (insn))
3001*404b540aSrobert       label = insn;
3002*404b540aSrobert 
3003*404b540aSrobert   return label;
3004*404b540aSrobert }
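/* (For instance, when several CODE_LABELs appear back to back, a jump to
   the first can equally well target the last; returning the last one
   lets callers treat such consecutive labels as a single position.)  */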
3005*404b540aSrobert 
3006*404b540aSrobert #ifdef HAVE_cc0
3007*404b540aSrobert /* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
3008*404b540aSrobert    and REG_CC_USER notes so we can find it.  */
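/* (For example, once INSN sits in a delay slot it is no longer the insn
   immediately after its cc0 setter, so the notes record the setter/user
   pairing explicitly for later passes.)  */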
3009*404b540aSrobert 
3010*404b540aSrobert void
3011*404b540aSrobert link_cc0_insns (rtx insn)
3012*404b540aSrobert {
3013*404b540aSrobert   rtx user = next_nonnote_insn (insn);
3014*404b540aSrobert 
3015*404b540aSrobert   if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3016*404b540aSrobert     user = XVECEXP (PATTERN (user), 0, 0);
3017*404b540aSrobert 
3018*404b540aSrobert   REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3019*404b540aSrobert 					REG_NOTES (user));
3020*404b540aSrobert   REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3021*404b540aSrobert }
3022*404b540aSrobert 
3023*404b540aSrobert /* Return the next insn that uses CC0 after INSN, which is assumed to
3024*404b540aSrobert    set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3025*404b540aSrobert    applied to the result of this function should yield INSN).
3026*404b540aSrobert 
3027*404b540aSrobert    Normally, this is simply the next insn.  However, if a REG_CC_USER note
3028*404b540aSrobert    is present, it contains the insn that uses CC0.
3029*404b540aSrobert 
3030*404b540aSrobert    Return 0 if we can't find the insn.  */
3031*404b540aSrobert 
3032*404b540aSrobert rtx
3033*404b540aSrobert next_cc0_user (rtx insn)
3034*404b540aSrobert {
3035*404b540aSrobert   rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3036*404b540aSrobert 
3037*404b540aSrobert   if (note)
3038*404b540aSrobert     return XEXP (note, 0);
3039*404b540aSrobert 
3040*404b540aSrobert   insn = next_nonnote_insn (insn);
3041*404b540aSrobert   if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3042*404b540aSrobert     insn = XVECEXP (PATTERN (insn), 0, 0);
3043*404b540aSrobert 
3044*404b540aSrobert   if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3045*404b540aSrobert     return insn;
3046*404b540aSrobert 
3047*404b540aSrobert   return 0;
3048*404b540aSrobert }
3049*404b540aSrobert 
3050*404b540aSrobert /* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
3051*404b540aSrobert    note, it is the previous insn.  */
3052*404b540aSrobert 
3053*404b540aSrobert rtx
3054*404b540aSrobert prev_cc0_setter (rtx insn)
3055*404b540aSrobert {
3056*404b540aSrobert   rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3057*404b540aSrobert 
3058*404b540aSrobert   if (note)
3059*404b540aSrobert     return XEXP (note, 0);
3060*404b540aSrobert 
3061*404b540aSrobert   insn = prev_nonnote_insn (insn);
3062*404b540aSrobert   gcc_assert (sets_cc0_p (PATTERN (insn)));
3063*404b540aSrobert 
3064*404b540aSrobert   return insn;
3065*404b540aSrobert }
3066*404b540aSrobert #endif
3067*404b540aSrobert 
3068*404b540aSrobert /* Increment the label uses for all labels present in X.  */
3069*404b540aSrobert 
3070*404b540aSrobert static void
3071*404b540aSrobert mark_label_nuses (rtx x)
3072*404b540aSrobert {
3073*404b540aSrobert   enum rtx_code code;
3074*404b540aSrobert   int i, j;
3075*404b540aSrobert   const char *fmt;
3076*404b540aSrobert 
3077*404b540aSrobert   code = GET_CODE (x);
3078*404b540aSrobert   if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3079*404b540aSrobert     LABEL_NUSES (XEXP (x, 0))++;
3080*404b540aSrobert 
3081*404b540aSrobert   fmt = GET_RTX_FORMAT (code);
3082*404b540aSrobert   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3083*404b540aSrobert     {
3084*404b540aSrobert       if (fmt[i] == 'e')
3085*404b540aSrobert 	mark_label_nuses (XEXP (x, i));
3086*404b540aSrobert       else if (fmt[i] == 'E')
3087*404b540aSrobert 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3088*404b540aSrobert 	  mark_label_nuses (XVECEXP (x, i, j));
3089*404b540aSrobert     }
3090*404b540aSrobert }
3091*404b540aSrobert 
3092*404b540aSrobert 
3093*404b540aSrobert /* Try splitting insns that can be split for better scheduling.
3094*404b540aSrobert    PAT is the pattern which might be split.
3095*404b540aSrobert    TRIAL is the insn providing PAT.
3096*404b540aSrobert    LAST is nonzero if we should return the last insn of the sequence produced.
3097*404b540aSrobert 
3098*404b540aSrobert    If this routine succeeds in splitting, it returns the first or last
3099*404b540aSrobert    replacement insn depending on the value of LAST.  Otherwise, it
3100*404b540aSrobert    returns TRIAL.  If the insn to be returned can be split, it will be.  */
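/* Illustrative example (hypothetical target): given a machine description
   with a define_split for double-word moves, splitting the insn

	(set (reg:DI 60) (reg:DI 61))

   would yield a two-insn sequence of single-word moves from split_insns;
   try_split emits that sequence in place of TRIAL and returns its first
   or last insn, as requested by LAST.  The register numbers here are
   made up for the example.  */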
3101*404b540aSrobert 
3102*404b540aSrobert rtx
3103*404b540aSrobert try_split (rtx pat, rtx trial, int last)
3104*404b540aSrobert {
3105*404b540aSrobert   rtx before = PREV_INSN (trial);
3106*404b540aSrobert   rtx after = NEXT_INSN (trial);
3107*404b540aSrobert   int has_barrier = 0;
3108*404b540aSrobert   rtx tem;
3109*404b540aSrobert   rtx note, seq;
3110*404b540aSrobert   int probability;
3111*404b540aSrobert   rtx insn_last, insn;
3112*404b540aSrobert   int njumps = 0;
3113*404b540aSrobert 
3114*404b540aSrobert   if (any_condjump_p (trial)
3115*404b540aSrobert       && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3116*404b540aSrobert     split_branch_probability = INTVAL (XEXP (note, 0));
3117*404b540aSrobert   probability = split_branch_probability;
3118*404b540aSrobert 
3119*404b540aSrobert   seq = split_insns (pat, trial);
3120*404b540aSrobert 
3121*404b540aSrobert   split_branch_probability = -1;
3122*404b540aSrobert 
3123*404b540aSrobert   /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3124*404b540aSrobert      We may need to handle this specially.  */
3125*404b540aSrobert   if (after && BARRIER_P (after))
3126*404b540aSrobert     {
3127*404b540aSrobert       has_barrier = 1;
3128*404b540aSrobert       after = NEXT_INSN (after);
3129*404b540aSrobert     }
3130*404b540aSrobert 
3131*404b540aSrobert   if (!seq)
3132*404b540aSrobert     return trial;
3133*404b540aSrobert 
3134*404b540aSrobert   /* Avoid infinite loop if any insn of the result matches
3135*404b540aSrobert      the original pattern.  */
3136*404b540aSrobert   insn_last = seq;
3137*404b540aSrobert   while (1)
3138*404b540aSrobert     {
3139*404b540aSrobert       if (INSN_P (insn_last)
3140*404b540aSrobert 	  && rtx_equal_p (PATTERN (insn_last), pat))
3141*404b540aSrobert 	return trial;
3142*404b540aSrobert       if (!NEXT_INSN (insn_last))
3143*404b540aSrobert 	break;
3144*404b540aSrobert       insn_last = NEXT_INSN (insn_last);
3145*404b540aSrobert     }
3146*404b540aSrobert 
3147*404b540aSrobert   /* Mark labels.  */
3148*404b540aSrobert   for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3149*404b540aSrobert     {
3150*404b540aSrobert       if (JUMP_P (insn))
3151*404b540aSrobert 	{
3152*404b540aSrobert 	  mark_jump_label (PATTERN (insn), insn, 0);
3153*404b540aSrobert 	  njumps++;
3154*404b540aSrobert 	  if (probability != -1
3155*404b540aSrobert 	      && any_condjump_p (insn)
3156*404b540aSrobert 	      && !find_reg_note (insn, REG_BR_PROB, 0))
3157*404b540aSrobert 	    {
3158*404b540aSrobert 	      /* We can preserve the REG_BR_PROB notes only if exactly
3159*404b540aSrobert 		 one jump is created; otherwise the machine description
3160*404b540aSrobert 		 is responsible for this step, using the
3161*404b540aSrobert 		 split_branch_probability variable.  */
3162*404b540aSrobert 	      gcc_assert (njumps == 1);
3163*404b540aSrobert 	      REG_NOTES (insn)
3164*404b540aSrobert 		= gen_rtx_EXPR_LIST (REG_BR_PROB,
3165*404b540aSrobert 				     GEN_INT (probability),
3166*404b540aSrobert 				     REG_NOTES (insn));
3167*404b540aSrobert 	    }
3168*404b540aSrobert 	}
3169*404b540aSrobert     }
3170*404b540aSrobert 
3171*404b540aSrobert   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3172*404b540aSrobert      in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
3173*404b540aSrobert   if (CALL_P (trial))
3174*404b540aSrobert     {
3175*404b540aSrobert       for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3176*404b540aSrobert 	if (CALL_P (insn))
3177*404b540aSrobert 	  {
3178*404b540aSrobert 	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3179*404b540aSrobert 	    while (*p)
3180*404b540aSrobert 	      p = &XEXP (*p, 1);
3181*404b540aSrobert 	    *p = CALL_INSN_FUNCTION_USAGE (trial);
3182*404b540aSrobert 	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3183*404b540aSrobert 	  }
3184*404b540aSrobert     }
3185*404b540aSrobert 
3186*404b540aSrobert   /* Copy notes, particularly those related to the CFG.  */
3187*404b540aSrobert   for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3188*404b540aSrobert     {
3189*404b540aSrobert       switch (REG_NOTE_KIND (note))
3190*404b540aSrobert 	{
3191*404b540aSrobert 	case REG_EH_REGION:
3192*404b540aSrobert 	  insn = insn_last;
3193*404b540aSrobert 	  while (insn != NULL_RTX)
3194*404b540aSrobert 	    {
3195*404b540aSrobert 	      if (CALL_P (insn)
3196*404b540aSrobert 		  || (flag_non_call_exceptions && INSN_P (insn)
3197*404b540aSrobert 		      && may_trap_p (PATTERN (insn))))
3198*404b540aSrobert 		REG_NOTES (insn)
3199*404b540aSrobert 		  = gen_rtx_EXPR_LIST (REG_EH_REGION,
3200*404b540aSrobert 				       XEXP (note, 0),
3201*404b540aSrobert 				       REG_NOTES (insn));
3202*404b540aSrobert 	      insn = PREV_INSN (insn);
3203*404b540aSrobert 	    }
3204*404b540aSrobert 	  break;
3205*404b540aSrobert 
3206*404b540aSrobert 	case REG_NORETURN:
3207*404b540aSrobert 	case REG_SETJMP:
3208*404b540aSrobert 	  insn = insn_last;
3209*404b540aSrobert 	  while (insn != NULL_RTX)
3210*404b540aSrobert 	    {
3211*404b540aSrobert 	      if (CALL_P (insn))
3212*404b540aSrobert 		REG_NOTES (insn)
3213*404b540aSrobert 		  = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3214*404b540aSrobert 				       XEXP (note, 0),
3215*404b540aSrobert 				       REG_NOTES (insn));
3216*404b540aSrobert 	      insn = PREV_INSN (insn);
3217*404b540aSrobert 	    }
3218*404b540aSrobert 	  break;
3219*404b540aSrobert 
3220*404b540aSrobert 	case REG_NON_LOCAL_GOTO:
3221*404b540aSrobert 	  insn = insn_last;
3222*404b540aSrobert 	  while (insn != NULL_RTX)
3223*404b540aSrobert 	    {
3224*404b540aSrobert 	      if (JUMP_P (insn))
3225*404b540aSrobert 		REG_NOTES (insn)
3226*404b540aSrobert 		  = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3227*404b540aSrobert 				       XEXP (note, 0),
3228*404b540aSrobert 				       REG_NOTES (insn));
3229*404b540aSrobert 	      insn = PREV_INSN (insn);
3230*404b540aSrobert 	    }
3231*404b540aSrobert 	  break;
3232*404b540aSrobert 
3233*404b540aSrobert 	default:
3234*404b540aSrobert 	  break;
3235*404b540aSrobert 	}
3236*404b540aSrobert     }
3237*404b540aSrobert 
3238*404b540aSrobert   /* If there are LABELs inside the split insns, increment the
3239*404b540aSrobert      usage count so we don't delete the label.  */
3240*404b540aSrobert   if (NONJUMP_INSN_P (trial))
3241*404b540aSrobert     {
3242*404b540aSrobert       insn = insn_last;
3243*404b540aSrobert       while (insn != NULL_RTX)
3244*404b540aSrobert 	{
3245*404b540aSrobert 	  if (NONJUMP_INSN_P (insn))
3246*404b540aSrobert 	    mark_label_nuses (PATTERN (insn));
3247*404b540aSrobert 
3248*404b540aSrobert 	  insn = PREV_INSN (insn);
3249*404b540aSrobert 	}
3250*404b540aSrobert     }
3251*404b540aSrobert 
3252*404b540aSrobert   tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3253*404b540aSrobert 
3254*404b540aSrobert   delete_insn (trial);
3255*404b540aSrobert   if (has_barrier)
3256*404b540aSrobert     emit_barrier_after (tem);
3257*404b540aSrobert 
3258*404b540aSrobert   /* Recursively call try_split for each new insn created; by the
3259*404b540aSrobert      time control returns here that insn will be fully split, so
3260*404b540aSrobert      set LAST and continue from the insn after the one returned.
3261*404b540aSrobert      We can't use next_active_insn here since AFTER may be a note.
3262*404b540aSrobert      Ignore deleted insns, which can occur if not optimizing.  */
3263*404b540aSrobert   for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3264*404b540aSrobert     if (! INSN_DELETED_P (tem) && INSN_P (tem))
3265*404b540aSrobert       tem = try_split (PATTERN (tem), tem, 1);
3266*404b540aSrobert 
3267*404b540aSrobert   /* Return either the first or the last insn, depending on which was
3268*404b540aSrobert      requested.  */
3269*404b540aSrobert   return last
3270*404b540aSrobert     ? (after ? PREV_INSN (after) : last_insn)
3271*404b540aSrobert     : NEXT_INSN (before);
3272*404b540aSrobert }
3273*404b540aSrobert 
3274*404b540aSrobert /* Make and return an INSN rtx, initializing all its slots.
3275*404b540aSrobert    Store PATTERN in the pattern slots.  */
3276*404b540aSrobert 
3277*404b540aSrobert rtx
3278*404b540aSrobert make_insn_raw (rtx pattern)
3279*404b540aSrobert {
3280*404b540aSrobert   rtx insn;
3281*404b540aSrobert 
3282*404b540aSrobert   insn = rtx_alloc (INSN);
3283*404b540aSrobert 
3284*404b540aSrobert   INSN_UID (insn) = cur_insn_uid++;
3285*404b540aSrobert   PATTERN (insn) = pattern;
3286*404b540aSrobert   INSN_CODE (insn) = -1;
3287*404b540aSrobert   LOG_LINKS (insn) = NULL;
3288*404b540aSrobert   REG_NOTES (insn) = NULL;
3289*404b540aSrobert   INSN_LOCATOR (insn) = 0;
3290*404b540aSrobert   BLOCK_FOR_INSN (insn) = NULL;
3291*404b540aSrobert 
3292*404b540aSrobert #ifdef ENABLE_RTL_CHECKING
3293*404b540aSrobert   if (insn
3294*404b540aSrobert       && INSN_P (insn)
3295*404b540aSrobert       && (returnjump_p (insn)
3296*404b540aSrobert 	  || (GET_CODE (insn) == SET
3297*404b540aSrobert 	      && SET_DEST (insn) == pc_rtx)))
3298*404b540aSrobert     {
3299*404b540aSrobert       warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3300*404b540aSrobert       debug_rtx (insn);
3301*404b540aSrobert     }
3302*404b540aSrobert #endif
3303*404b540aSrobert 
3304*404b540aSrobert   return insn;
3305*404b540aSrobert }
3306*404b540aSrobert 
3307*404b540aSrobert /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
3308*404b540aSrobert 
3309*404b540aSrobert rtx
3310*404b540aSrobert make_jump_insn_raw (rtx pattern)
3311*404b540aSrobert {
3312*404b540aSrobert   rtx insn;
3313*404b540aSrobert 
3314*404b540aSrobert   insn = rtx_alloc (JUMP_INSN);
3315*404b540aSrobert   INSN_UID (insn) = cur_insn_uid++;
3316*404b540aSrobert 
3317*404b540aSrobert   PATTERN (insn) = pattern;
3318*404b540aSrobert   INSN_CODE (insn) = -1;
3319*404b540aSrobert   LOG_LINKS (insn) = NULL;
3320*404b540aSrobert   REG_NOTES (insn) = NULL;
3321*404b540aSrobert   JUMP_LABEL (insn) = NULL;
3322*404b540aSrobert   INSN_LOCATOR (insn) = 0;
3323*404b540aSrobert   BLOCK_FOR_INSN (insn) = NULL;
3324*404b540aSrobert 
3325*404b540aSrobert   return insn;
3326*404b540aSrobert }
3327*404b540aSrobert 
3328*404b540aSrobert /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
3329*404b540aSrobert 
3330*404b540aSrobert static rtx
3331*404b540aSrobert make_call_insn_raw (rtx pattern)
3332*404b540aSrobert {
3333*404b540aSrobert   rtx insn;
3334*404b540aSrobert 
3335*404b540aSrobert   insn = rtx_alloc (CALL_INSN);
3336*404b540aSrobert   INSN_UID (insn) = cur_insn_uid++;
3337*404b540aSrobert 
3338*404b540aSrobert   PATTERN (insn) = pattern;
3339*404b540aSrobert   INSN_CODE (insn) = -1;
3340*404b540aSrobert   LOG_LINKS (insn) = NULL;
3341*404b540aSrobert   REG_NOTES (insn) = NULL;
3342*404b540aSrobert   CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3343*404b540aSrobert   INSN_LOCATOR (insn) = 0;
3344*404b540aSrobert   BLOCK_FOR_INSN (insn) = NULL;
3345*404b540aSrobert 
3346*404b540aSrobert   return insn;
3347*404b540aSrobert }
3348*404b540aSrobert 
3349*404b540aSrobert /* Add INSN to the end of the doubly-linked list.
3350*404b540aSrobert    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
3351*404b540aSrobert 
3352*404b540aSrobert void
3353*404b540aSrobert add_insn (rtx insn)
3354*404b540aSrobert {
3355*404b540aSrobert   PREV_INSN (insn) = last_insn;
3356*404b540aSrobert   NEXT_INSN (insn) = 0;
3357*404b540aSrobert 
3358*404b540aSrobert   if (NULL != last_insn)
3359*404b540aSrobert     NEXT_INSN (last_insn) = insn;
3360*404b540aSrobert 
3361*404b540aSrobert   if (NULL == first_insn)
3362*404b540aSrobert     first_insn = insn;
3363*404b540aSrobert 
3364*404b540aSrobert   last_insn = insn;
3365*404b540aSrobert }
3366*404b540aSrobert 
3367*404b540aSrobert /* Add INSN into the doubly-linked list after insn AFTER.  This and
3368*404b540aSrobert    the next should be the only functions called to insert an insn once
3369*404b540aSrobert    delay slots have been filled since only they know how to update a
3370*404b540aSrobert    SEQUENCE.  */
3371*404b540aSrobert 
3372*404b540aSrobert void
3373*404b540aSrobert add_insn_after (rtx insn, rtx after)
3374*404b540aSrobert {
3375*404b540aSrobert   rtx next = NEXT_INSN (after);
3376*404b540aSrobert   basic_block bb;
3377*404b540aSrobert 
3378*404b540aSrobert   gcc_assert (!optimize || !INSN_DELETED_P (after));
3379*404b540aSrobert 
3380*404b540aSrobert   NEXT_INSN (insn) = next;
3381*404b540aSrobert   PREV_INSN (insn) = after;
3382*404b540aSrobert 
3383*404b540aSrobert   if (next)
3384*404b540aSrobert     {
3385*404b540aSrobert       PREV_INSN (next) = insn;
3386*404b540aSrobert       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3387*404b540aSrobert 	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3388*404b540aSrobert     }
3389*404b540aSrobert   else if (last_insn == after)
3390*404b540aSrobert     last_insn = insn;
3391*404b540aSrobert   else
3392*404b540aSrobert     {
3393*404b540aSrobert       struct sequence_stack *stack = seq_stack;
3394*404b540aSrobert       /* Scan all pending sequences too.  */
3395*404b540aSrobert       for (; stack; stack = stack->next)
3396*404b540aSrobert 	if (after == stack->last)
3397*404b540aSrobert 	  {
3398*404b540aSrobert 	    stack->last = insn;
3399*404b540aSrobert 	    break;
3400*404b540aSrobert 	  }
3401*404b540aSrobert 
3402*404b540aSrobert       gcc_assert (stack);
3403*404b540aSrobert     }
3404*404b540aSrobert 
3405*404b540aSrobert   if (!BARRIER_P (after)
3406*404b540aSrobert       && !BARRIER_P (insn)
3407*404b540aSrobert       && (bb = BLOCK_FOR_INSN (after)))
3408*404b540aSrobert     {
3409*404b540aSrobert       set_block_for_insn (insn, bb);
3410*404b540aSrobert       if (INSN_P (insn))
3411*404b540aSrobert 	bb->flags |= BB_DIRTY;
3412*404b540aSrobert       /* This should not happen, as the first insn in the BB is always
3413*404b540aSrobert 	 either a NOTE or a LABEL.  */
3414*404b540aSrobert       if (BB_END (bb) == after
3415*404b540aSrobert 	  /* Avoid clobbering of structure when creating new BB.  */
3416*404b540aSrobert 	  && !BARRIER_P (insn)
3417*404b540aSrobert 	  && (!NOTE_P (insn)
3418*404b540aSrobert 	      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3419*404b540aSrobert 	BB_END (bb) = insn;
3420*404b540aSrobert     }
3421*404b540aSrobert 
3422*404b540aSrobert   NEXT_INSN (after) = insn;
3423*404b540aSrobert   if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3424*404b540aSrobert     {
3425*404b540aSrobert       rtx sequence = PATTERN (after);
3426*404b540aSrobert       NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3427*404b540aSrobert     }
3428*404b540aSrobert }
3429*404b540aSrobert 
3430*404b540aSrobert /* Add INSN into the doubly-linked list before insn BEFORE.  This and
3431*404b540aSrobert    the previous should be the only functions called to insert an insn once
3432*404b540aSrobert    delay slots have been filled since only they know how to update a
3433*404b540aSrobert    SEQUENCE.  */
3434*404b540aSrobert 
3435*404b540aSrobert void
3436*404b540aSrobert add_insn_before (rtx insn, rtx before)
3437*404b540aSrobert {
3438*404b540aSrobert   rtx prev = PREV_INSN (before);
3439*404b540aSrobert   basic_block bb;
3440*404b540aSrobert 
3441*404b540aSrobert   gcc_assert (!optimize || !INSN_DELETED_P (before));
3442*404b540aSrobert 
3443*404b540aSrobert   PREV_INSN (insn) = prev;
3444*404b540aSrobert   NEXT_INSN (insn) = before;
3445*404b540aSrobert 
3446*404b540aSrobert   if (prev)
3447*404b540aSrobert     {
3448*404b540aSrobert       NEXT_INSN (prev) = insn;
3449*404b540aSrobert       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3450*404b540aSrobert 	{
3451*404b540aSrobert 	  rtx sequence = PATTERN (prev);
3452*404b540aSrobert 	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3453*404b540aSrobert 	}
3454*404b540aSrobert     }
3455*404b540aSrobert   else if (first_insn == before)
3456*404b540aSrobert     first_insn = insn;
3457*404b540aSrobert   else
3458*404b540aSrobert     {
3459*404b540aSrobert       struct sequence_stack *stack = seq_stack;
3460*404b540aSrobert       /* Scan all pending sequences too.  */
3461*404b540aSrobert       for (; stack; stack = stack->next)
3462*404b540aSrobert 	if (before == stack->first)
3463*404b540aSrobert 	  {
3464*404b540aSrobert 	    stack->first = insn;
3465*404b540aSrobert 	    break;
3466*404b540aSrobert 	  }
3467*404b540aSrobert 
3468*404b540aSrobert       gcc_assert (stack);
3469*404b540aSrobert     }
3470*404b540aSrobert 
3471*404b540aSrobert   if (!BARRIER_P (before)
3472*404b540aSrobert       && !BARRIER_P (insn)
3473*404b540aSrobert       && (bb = BLOCK_FOR_INSN (before)))
3474*404b540aSrobert     {
3475*404b540aSrobert       set_block_for_insn (insn, bb);
3476*404b540aSrobert       if (INSN_P (insn))
3477*404b540aSrobert 	bb->flags |= BB_DIRTY;
3478*404b540aSrobert       /* This should not happen, as the first insn in a BB is always
3479*404b540aSrobert 	 either a NOTE or a LABEL.  */
3480*404b540aSrobert       gcc_assert (BB_HEAD (bb) != insn
3481*404b540aSrobert 		  /* Avoid clobbering of structure when creating new BB.  */
3482*404b540aSrobert 		  || BARRIER_P (insn)
3483*404b540aSrobert 		  || (NOTE_P (insn)
3484*404b540aSrobert 		      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
3485*404b540aSrobert     }
3486*404b540aSrobert 
3487*404b540aSrobert   PREV_INSN (before) = insn;
3488*404b540aSrobert   if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3489*404b540aSrobert     PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3490*404b540aSrobert }
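
/* A minimal usage sketch for the two primitives above (assumed
   context, not taken from a real caller): NEW_INSN and ANCHOR are
   hypothetical insns; both calls keep any SEQUENCE adjacent to the
   splice point consistent.

	add_insn_after (new_insn, anchor);	now ANCHOR precedes NEW_INSN
	add_insn_before (new_insn, anchor);	now NEW_INSN precedes ANCHOR

   Most passes should prefer the emit_insn_{before,after} entry points
   below, which also build an insn from a bare pattern.  */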
3491*404b540aSrobert 
3492*404b540aSrobert /* Remove an insn from its doubly-linked list.  This function knows how
3493*404b540aSrobert    to handle sequences.  */
3494*404b540aSrobert void
3495*404b540aSrobert remove_insn (rtx insn)
3496*404b540aSrobert {
3497*404b540aSrobert   rtx next = NEXT_INSN (insn);
3498*404b540aSrobert   rtx prev = PREV_INSN (insn);
3499*404b540aSrobert   basic_block bb;
3500*404b540aSrobert 
3501*404b540aSrobert   if (prev)
3502*404b540aSrobert     {
3503*404b540aSrobert       NEXT_INSN (prev) = next;
3504*404b540aSrobert       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3505*404b540aSrobert 	{
3506*404b540aSrobert 	  rtx sequence = PATTERN (prev);
3507*404b540aSrobert 	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3508*404b540aSrobert 	}
3509*404b540aSrobert     }
3510*404b540aSrobert   else if (first_insn == insn)
3511*404b540aSrobert     first_insn = next;
3512*404b540aSrobert   else
3513*404b540aSrobert     {
3514*404b540aSrobert       struct sequence_stack *stack = seq_stack;
3515*404b540aSrobert       /* Scan all pending sequences too.  */
3516*404b540aSrobert       for (; stack; stack = stack->next)
3517*404b540aSrobert 	if (insn == stack->first)
3518*404b540aSrobert 	  {
3519*404b540aSrobert 	    stack->first = next;
3520*404b540aSrobert 	    break;
3521*404b540aSrobert 	  }
3522*404b540aSrobert 
3523*404b540aSrobert       gcc_assert (stack);
3524*404b540aSrobert     }
3525*404b540aSrobert 
3526*404b540aSrobert   if (next)
3527*404b540aSrobert     {
3528*404b540aSrobert       PREV_INSN (next) = prev;
3529*404b540aSrobert       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3530*404b540aSrobert 	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3531*404b540aSrobert     }
3532*404b540aSrobert   else if (last_insn == insn)
3533*404b540aSrobert     last_insn = prev;
3534*404b540aSrobert   else
3535*404b540aSrobert     {
3536*404b540aSrobert       struct sequence_stack *stack = seq_stack;
3537*404b540aSrobert       /* Scan all pending sequences too.  */
3538*404b540aSrobert       for (; stack; stack = stack->next)
3539*404b540aSrobert 	if (insn == stack->last)
3540*404b540aSrobert 	  {
3541*404b540aSrobert 	    stack->last = prev;
3542*404b540aSrobert 	    break;
3543*404b540aSrobert 	  }
3544*404b540aSrobert 
3545*404b540aSrobert       gcc_assert (stack);
3546*404b540aSrobert     }
3547*404b540aSrobert   if (!BARRIER_P (insn)
3548*404b540aSrobert       && (bb = BLOCK_FOR_INSN (insn)))
3549*404b540aSrobert     {
3550*404b540aSrobert       if (INSN_P (insn))
3551*404b540aSrobert 	bb->flags |= BB_DIRTY;
3552*404b540aSrobert       if (BB_HEAD (bb) == insn)
3553*404b540aSrobert 	{
3554*404b540aSrobert 	  /* Never delete the basic-block note without deleting the
3555*404b540aSrobert 	     whole basic block.  */
3556*404b540aSrobert 	  gcc_assert (!NOTE_P (insn));
3557*404b540aSrobert 	  BB_HEAD (bb) = next;
3558*404b540aSrobert 	}
3559*404b540aSrobert       if (BB_END (bb) == insn)
3560*404b540aSrobert 	BB_END (bb) = prev;
3561*404b540aSrobert     }
3562*404b540aSrobert }
3563*404b540aSrobert 
3564*404b540aSrobert /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
3565*404b540aSrobert 
3566*404b540aSrobert void
3567*404b540aSrobert add_function_usage_to (rtx call_insn, rtx call_fusage)
3568*404b540aSrobert {
3569*404b540aSrobert   gcc_assert (call_insn && CALL_P (call_insn));
3570*404b540aSrobert 
3571*404b540aSrobert   /* Put the register usage information on the CALL.  If there is already
3572*404b540aSrobert      some usage information, put ours at the end.  */
3573*404b540aSrobert   if (CALL_INSN_FUNCTION_USAGE (call_insn))
3574*404b540aSrobert     {
3575*404b540aSrobert       rtx link;
3576*404b540aSrobert 
3577*404b540aSrobert       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3578*404b540aSrobert 	   link = XEXP (link, 1))
3579*404b540aSrobert 	;
3580*404b540aSrobert 
3581*404b540aSrobert       XEXP (link, 1) = call_fusage;
3582*404b540aSrobert     }
3583*404b540aSrobert   else
3584*404b540aSrobert     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3585*404b540aSrobert }
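
/* A hedged sketch of typical use (adapted, not copied from a real
   caller): record that a libcall implicitly uses a hard register.
   FN_REG and CALL_INSN are hypothetical names here.

	rtx call_fusage = NULL_RTX;
	use_reg (&call_fusage, fn_reg);		build an EXPR_LIST of USEs
	add_function_usage_to (call_insn, call_fusage);
*/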
3586*404b540aSrobert 
3587*404b540aSrobert /* Delete all insns made since FROM.
3588*404b540aSrobert    FROM becomes the new last instruction.  */
3589*404b540aSrobert 
3590*404b540aSrobert void
3591*404b540aSrobert delete_insns_since (rtx from)
3592*404b540aSrobert {
3593*404b540aSrobert   if (from == 0)
3594*404b540aSrobert     first_insn = 0;
3595*404b540aSrobert   else
3596*404b540aSrobert     NEXT_INSN (from) = 0;
3597*404b540aSrobert   last_insn = from;
3598*404b540aSrobert }
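
/* The usual checkpoint/rollback idiom built on this (a sketch; FAILED
   stands in for any caller-specific test):

	rtx checkpoint = get_last_insn ();
	... speculatively emit insns ...
	if (failed)
	  delete_insns_since (checkpoint);	drop everything after it
*/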
3599*404b540aSrobert 
3600*404b540aSrobert /* This function is deprecated; please use sequences instead.
3601*404b540aSrobert 
3602*404b540aSrobert    Move a consecutive bunch of insns to a different place in the chain.
3603*404b540aSrobert    The insns to be moved are those between FROM and TO.
3604*404b540aSrobert    They are moved to a new position after the insn AFTER.
3605*404b540aSrobert    AFTER must not be FROM or TO or any insn in between.
3606*404b540aSrobert 
3607*404b540aSrobert    This function does not know about SEQUENCEs and hence should not be
3608*404b540aSrobert    called after delay-slot filling has been done.  */
3609*404b540aSrobert 
3610*404b540aSrobert void
3611*404b540aSrobert reorder_insns_nobb (rtx from, rtx to, rtx after)
3612*404b540aSrobert {
3613*404b540aSrobert   /* Splice this bunch out of where it is now.  */
3614*404b540aSrobert   if (PREV_INSN (from))
3615*404b540aSrobert     NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3616*404b540aSrobert   if (NEXT_INSN (to))
3617*404b540aSrobert     PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3618*404b540aSrobert   if (last_insn == to)
3619*404b540aSrobert     last_insn = PREV_INSN (from);
3620*404b540aSrobert   if (first_insn == from)
3621*404b540aSrobert     first_insn = NEXT_INSN (to);
3622*404b540aSrobert 
3623*404b540aSrobert   /* Make the new neighbors point to it and it to them.  */
3624*404b540aSrobert   if (NEXT_INSN (after))
3625*404b540aSrobert     PREV_INSN (NEXT_INSN (after)) = to;
3626*404b540aSrobert 
3627*404b540aSrobert   NEXT_INSN (to) = NEXT_INSN (after);
3628*404b540aSrobert   PREV_INSN (from) = after;
3629*404b540aSrobert   NEXT_INSN (after) = from;
3630*404b540aSrobert   if (after == last_insn)
3631*404b540aSrobert     last_insn = to;
3632*404b540aSrobert }
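
/* Since the comment above deprecates this routine, here is a sketch of
   the preferred sequence-based replacement (assuming the insns can be
   regenerated at their new home rather than moved):

	start_sequence ();
	... emit the insns at their new position ...
	seq = get_insns ();
	end_sequence ();
	emit_insn_after (seq, after);
*/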
3633*404b540aSrobert 
3634*404b540aSrobert /* Same as function above, but take care to update BB boundaries.  */
3635*404b540aSrobert void
3636*404b540aSrobert reorder_insns (rtx from, rtx to, rtx after)
3637*404b540aSrobert {
3638*404b540aSrobert   rtx prev = PREV_INSN (from);
3639*404b540aSrobert   basic_block bb, bb2;
3640*404b540aSrobert 
3641*404b540aSrobert   reorder_insns_nobb (from, to, after);
3642*404b540aSrobert 
3643*404b540aSrobert   if (!BARRIER_P (after)
3644*404b540aSrobert       && (bb = BLOCK_FOR_INSN (after)))
3645*404b540aSrobert     {
3646*404b540aSrobert       rtx x;
3647*404b540aSrobert       bb->flags |= BB_DIRTY;
3648*404b540aSrobert 
3649*404b540aSrobert       if (!BARRIER_P (from)
3650*404b540aSrobert 	  && (bb2 = BLOCK_FOR_INSN (from)))
3651*404b540aSrobert 	{
3652*404b540aSrobert 	  if (BB_END (bb2) == to)
3653*404b540aSrobert 	    BB_END (bb2) = prev;
3654*404b540aSrobert 	  bb2->flags |= BB_DIRTY;
3655*404b540aSrobert 	}
3656*404b540aSrobert 
3657*404b540aSrobert       if (BB_END (bb) == after)
3658*404b540aSrobert 	BB_END (bb) = to;
3659*404b540aSrobert 
3660*404b540aSrobert       for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3661*404b540aSrobert 	if (!BARRIER_P (x))
3662*404b540aSrobert 	  set_block_for_insn (x, bb);
3663*404b540aSrobert     }
3664*404b540aSrobert }
3665*404b540aSrobert 
3666*404b540aSrobert /* Return the line note insn preceding INSN.  */
3667*404b540aSrobert 
3668*404b540aSrobert static rtx
3669*404b540aSrobert find_line_note (rtx insn)
3670*404b540aSrobert {
3671*404b540aSrobert   if (no_line_numbers)
3672*404b540aSrobert     return 0;
3673*404b540aSrobert 
3674*404b540aSrobert   for (; insn; insn = PREV_INSN (insn))
3675*404b540aSrobert     if (NOTE_P (insn)
3676*404b540aSrobert 	&& NOTE_LINE_NUMBER (insn) >= 0)
3677*404b540aSrobert       break;
3678*404b540aSrobert 
3679*404b540aSrobert   return insn;
3680*404b540aSrobert }
3681*404b540aSrobert 
3682*404b540aSrobert 
3683*404b540aSrobert /* Emit insn(s) of given code and pattern
3684*404b540aSrobert    at a specified place within the doubly-linked list.
3685*404b540aSrobert 
3686*404b540aSrobert    All of the emit_foo global entry points accept an object
3687*404b540aSrobert    X which is either an insn list or a PATTERN of a single
3688*404b540aSrobert    instruction.
3689*404b540aSrobert 
3690*404b540aSrobert    There are thus a few canonical ways to generate code and
3691*404b540aSrobert    emit it at a specific place in the instruction stream.  For
3692*404b540aSrobert    example, consider the instruction named SPOT and the fact that
3693*404b540aSrobert    we would like to emit some instructions before SPOT.  We might
3694*404b540aSrobert    do it like this:
3695*404b540aSrobert 
3696*404b540aSrobert 	start_sequence ();
3697*404b540aSrobert 	... emit the new instructions ...
3698*404b540aSrobert 	insns_head = get_insns ();
3699*404b540aSrobert 	end_sequence ();
3700*404b540aSrobert 
3701*404b540aSrobert 	emit_insn_before (insns_head, SPOT);
3702*404b540aSrobert 
3703*404b540aSrobert    It used to be common to generate SEQUENCE rtl instead, but that
3704*404b540aSrobert    is a relic of the past that no longer occurs.  The reason is that
3705*404b540aSrobert    SEQUENCE rtl badly fragments RTL memory, since the SEQUENCE
3706*404b540aSrobert    generated would almost certainly die right after it was created.  */
3707*404b540aSrobert 
3708*404b540aSrobert /* Make X be output before the instruction BEFORE.  */
3709*404b540aSrobert 
3710*404b540aSrobert rtx
3711*404b540aSrobert emit_insn_before_noloc (rtx x, rtx before)
3712*404b540aSrobert {
3713*404b540aSrobert   rtx last = before;
3714*404b540aSrobert   rtx insn;
3715*404b540aSrobert 
3716*404b540aSrobert   gcc_assert (before);
3717*404b540aSrobert 
3718*404b540aSrobert   if (x == NULL_RTX)
3719*404b540aSrobert     return last;
3720*404b540aSrobert 
3721*404b540aSrobert   switch (GET_CODE (x))
3722*404b540aSrobert     {
3723*404b540aSrobert     case INSN:
3724*404b540aSrobert     case JUMP_INSN:
3725*404b540aSrobert     case CALL_INSN:
3726*404b540aSrobert     case CODE_LABEL:
3727*404b540aSrobert     case BARRIER:
3728*404b540aSrobert     case NOTE:
3729*404b540aSrobert       insn = x;
3730*404b540aSrobert       while (insn)
3731*404b540aSrobert 	{
3732*404b540aSrobert 	  rtx next = NEXT_INSN (insn);
3733*404b540aSrobert 	  add_insn_before (insn, before);
3734*404b540aSrobert 	  last = insn;
3735*404b540aSrobert 	  insn = next;
3736*404b540aSrobert 	}
3737*404b540aSrobert       break;
3738*404b540aSrobert 
3739*404b540aSrobert #ifdef ENABLE_RTL_CHECKING
3740*404b540aSrobert     case SEQUENCE:
3741*404b540aSrobert       gcc_unreachable ();
3742*404b540aSrobert       break;
3743*404b540aSrobert #endif
3744*404b540aSrobert 
3745*404b540aSrobert     default:
3746*404b540aSrobert       last = make_insn_raw (x);
3747*404b540aSrobert       add_insn_before (last, before);
3748*404b540aSrobert       break;
3749*404b540aSrobert     }
3750*404b540aSrobert 
3751*404b540aSrobert   return last;
3752*404b540aSrobert }
3753*404b540aSrobert 
3754*404b540aSrobert /* Make an instruction with body X and code JUMP_INSN
3755*404b540aSrobert    and output it before the instruction BEFORE.  */
3756*404b540aSrobert 
3757*404b540aSrobert rtx
3758*404b540aSrobert emit_jump_insn_before_noloc (rtx x, rtx before)
3759*404b540aSrobert {
3760*404b540aSrobert   rtx insn, last = NULL_RTX;
3761*404b540aSrobert 
3762*404b540aSrobert   gcc_assert (before);
3763*404b540aSrobert 
3764*404b540aSrobert   switch (GET_CODE (x))
3765*404b540aSrobert     {
3766*404b540aSrobert     case INSN:
3767*404b540aSrobert     case JUMP_INSN:
3768*404b540aSrobert     case CALL_INSN:
3769*404b540aSrobert     case CODE_LABEL:
3770*404b540aSrobert     case BARRIER:
3771*404b540aSrobert     case NOTE:
3772*404b540aSrobert       insn = x;
3773*404b540aSrobert       while (insn)
3774*404b540aSrobert 	{
3775*404b540aSrobert 	  rtx next = NEXT_INSN (insn);
3776*404b540aSrobert 	  add_insn_before (insn, before);
3777*404b540aSrobert 	  last = insn;
3778*404b540aSrobert 	  insn = next;
3779*404b540aSrobert 	}
3780*404b540aSrobert       break;
3781*404b540aSrobert 
3782*404b540aSrobert #ifdef ENABLE_RTL_CHECKING
3783*404b540aSrobert     case SEQUENCE:
3784*404b540aSrobert       gcc_unreachable ();
3785*404b540aSrobert       break;
3786*404b540aSrobert #endif
3787*404b540aSrobert 
3788*404b540aSrobert     default:
3789*404b540aSrobert       last = make_jump_insn_raw (x);
3790*404b540aSrobert       add_insn_before (last, before);
3791*404b540aSrobert       break;
3792*404b540aSrobert     }
3793*404b540aSrobert 
3794*404b540aSrobert   return last;
3795*404b540aSrobert }
3796*404b540aSrobert 
3797*404b540aSrobert /* Make an instruction with body X and code CALL_INSN
3798*404b540aSrobert    and output it before the instruction BEFORE.  */
3799*404b540aSrobert 
3800*404b540aSrobert rtx
3801*404b540aSrobert emit_call_insn_before_noloc (rtx x, rtx before)
3802*404b540aSrobert {
3803*404b540aSrobert   rtx last = NULL_RTX, insn;
3804*404b540aSrobert 
3805*404b540aSrobert   gcc_assert (before);
3806*404b540aSrobert 
3807*404b540aSrobert   switch (GET_CODE (x))
3808*404b540aSrobert     {
3809*404b540aSrobert     case INSN:
3810*404b540aSrobert     case JUMP_INSN:
3811*404b540aSrobert     case CALL_INSN:
3812*404b540aSrobert     case CODE_LABEL:
3813*404b540aSrobert     case BARRIER:
3814*404b540aSrobert     case NOTE:
3815*404b540aSrobert       insn = x;
3816*404b540aSrobert       while (insn)
3817*404b540aSrobert 	{
3818*404b540aSrobert 	  rtx next = NEXT_INSN (insn);
3819*404b540aSrobert 	  add_insn_before (insn, before);
3820*404b540aSrobert 	  last = insn;
3821*404b540aSrobert 	  insn = next;
3822*404b540aSrobert 	}
3823*404b540aSrobert       break;
3824*404b540aSrobert 
3825*404b540aSrobert #ifdef ENABLE_RTL_CHECKING
3826*404b540aSrobert     case SEQUENCE:
3827*404b540aSrobert       gcc_unreachable ();
3828*404b540aSrobert       break;
3829*404b540aSrobert #endif
3830*404b540aSrobert 
3831*404b540aSrobert     default:
3832*404b540aSrobert       last = make_call_insn_raw (x);
3833*404b540aSrobert       add_insn_before (last, before);
3834*404b540aSrobert       break;
3835*404b540aSrobert     }
3836*404b540aSrobert 
3837*404b540aSrobert   return last;
3838*404b540aSrobert }
3839*404b540aSrobert 
3840*404b540aSrobert /* Make an insn of code BARRIER
3841*404b540aSrobert    and output it before the insn BEFORE.  */
3842*404b540aSrobert 
3843*404b540aSrobert rtx
3844*404b540aSrobert emit_barrier_before (rtx before)
3845*404b540aSrobert {
3846*404b540aSrobert   rtx insn = rtx_alloc (BARRIER);
3847*404b540aSrobert 
3848*404b540aSrobert   INSN_UID (insn) = cur_insn_uid++;
3849*404b540aSrobert 
3850*404b540aSrobert   add_insn_before (insn, before);
3851*404b540aSrobert   return insn;
3852*404b540aSrobert }
3853*404b540aSrobert 
3854*404b540aSrobert /* Emit the label LABEL before the insn BEFORE.  */
3855*404b540aSrobert 
3856*404b540aSrobert rtx
3857*404b540aSrobert emit_label_before (rtx label, rtx before)
3858*404b540aSrobert {
3859*404b540aSrobert   /* This can be called twice for the same label as a result of the
3860*404b540aSrobert      confusion that follows a syntax error!  So make it harmless.  */
3861*404b540aSrobert   if (INSN_UID (label) == 0)
3862*404b540aSrobert     {
3863*404b540aSrobert       INSN_UID (label) = cur_insn_uid++;
3864*404b540aSrobert       add_insn_before (label, before);
3865*404b540aSrobert     }
3866*404b540aSrobert 
3867*404b540aSrobert   return label;
3868*404b540aSrobert }
3869*404b540aSrobert 
3870*404b540aSrobert /* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
3871*404b540aSrobert 
3872*404b540aSrobert rtx
3873*404b540aSrobert emit_note_before (int subtype, rtx before)
3874*404b540aSrobert {
3875*404b540aSrobert   rtx note = rtx_alloc (NOTE);
3876*404b540aSrobert   INSN_UID (note) = cur_insn_uid++;
3877*404b540aSrobert #ifndef USE_MAPPED_LOCATION
3878*404b540aSrobert   NOTE_SOURCE_FILE (note) = 0;
3879*404b540aSrobert #endif
3880*404b540aSrobert   NOTE_LINE_NUMBER (note) = subtype;
3881*404b540aSrobert   BLOCK_FOR_INSN (note) = NULL;
3882*404b540aSrobert 
3883*404b540aSrobert   add_insn_before (note, before);
3884*404b540aSrobert   return note;
3885*404b540aSrobert }
3886*404b540aSrobert 
3887*404b540aSrobert /* Helper for emit_insn_after; handles lists of instructions
3888*404b540aSrobert    efficiently.  */
3889*404b540aSrobert 
3890*404b540aSrobert static rtx emit_insn_after_1 (rtx, rtx);
3891*404b540aSrobert 
3892*404b540aSrobert static rtx
3893*404b540aSrobert emit_insn_after_1 (rtx first, rtx after)
3894*404b540aSrobert {
3895*404b540aSrobert   rtx last;
3896*404b540aSrobert   rtx after_after;
3897*404b540aSrobert   basic_block bb;
3898*404b540aSrobert 
3899*404b540aSrobert   if (!BARRIER_P (after)
3900*404b540aSrobert       && (bb = BLOCK_FOR_INSN (after)))
3901*404b540aSrobert     {
3902*404b540aSrobert       bb->flags |= BB_DIRTY;
3903*404b540aSrobert       for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3904*404b540aSrobert 	if (!BARRIER_P (last))
3905*404b540aSrobert 	  set_block_for_insn (last, bb);
3906*404b540aSrobert       if (!BARRIER_P (last))
3907*404b540aSrobert 	set_block_for_insn (last, bb);
3908*404b540aSrobert       if (BB_END (bb) == after)
3909*404b540aSrobert 	BB_END (bb) = last;
3910*404b540aSrobert     }
3911*404b540aSrobert   else
3912*404b540aSrobert     for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3913*404b540aSrobert       continue;
3914*404b540aSrobert 
3915*404b540aSrobert   after_after = NEXT_INSN (after);
3916*404b540aSrobert 
3917*404b540aSrobert   NEXT_INSN (after) = first;
3918*404b540aSrobert   PREV_INSN (first) = after;
3919*404b540aSrobert   NEXT_INSN (last) = after_after;
3920*404b540aSrobert   if (after_after)
3921*404b540aSrobert     PREV_INSN (after_after) = last;
3922*404b540aSrobert 
3923*404b540aSrobert   if (after == last_insn)
3924*404b540aSrobert     last_insn = last;
3925*404b540aSrobert   return last;
3926*404b540aSrobert }
3927*404b540aSrobert 
3928*404b540aSrobert /* Make X be output after the insn AFTER.  */
3929*404b540aSrobert 
3930*404b540aSrobert rtx
3931*404b540aSrobert emit_insn_after_noloc (rtx x, rtx after)
3932*404b540aSrobert {
3933*404b540aSrobert   rtx last = after;
3934*404b540aSrobert 
3935*404b540aSrobert   gcc_assert (after);
3936*404b540aSrobert 
3937*404b540aSrobert   if (x == NULL_RTX)
3938*404b540aSrobert     return last;
3939*404b540aSrobert 
3940*404b540aSrobert   switch (GET_CODE (x))
3941*404b540aSrobert     {
3942*404b540aSrobert     case INSN:
3943*404b540aSrobert     case JUMP_INSN:
3944*404b540aSrobert     case CALL_INSN:
3945*404b540aSrobert     case CODE_LABEL:
3946*404b540aSrobert     case BARRIER:
3947*404b540aSrobert     case NOTE:
3948*404b540aSrobert       last = emit_insn_after_1 (x, after);
3949*404b540aSrobert       break;
3950*404b540aSrobert 
3951*404b540aSrobert #ifdef ENABLE_RTL_CHECKING
3952*404b540aSrobert     case SEQUENCE:
3953*404b540aSrobert       gcc_unreachable ();
3954*404b540aSrobert       break;
3955*404b540aSrobert #endif
3956*404b540aSrobert 
3957*404b540aSrobert     default:
3958*404b540aSrobert       last = make_insn_raw (x);
3959*404b540aSrobert       add_insn_after (last, after);
3960*404b540aSrobert       break;
3961*404b540aSrobert     }
3962*404b540aSrobert 
3963*404b540aSrobert   return last;
3964*404b540aSrobert }
3965*404b540aSrobert 
3966*404b540aSrobert /* Similar to emit_insn_after, except that line notes are inserted so
3967*404b540aSrobert    that the new insn acts as if it were at FROM.  */
3968*404b540aSrobert 
3969*404b540aSrobert void
3970*404b540aSrobert emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
3971*404b540aSrobert {
3972*404b540aSrobert   rtx from_line = find_line_note (from);
3973*404b540aSrobert   rtx after_line = find_line_note (after);
3974*404b540aSrobert   rtx insn = emit_insn_after (x, after);
3975*404b540aSrobert 
3976*404b540aSrobert   if (from_line)
3977*404b540aSrobert     emit_note_copy_after (from_line, after);
3978*404b540aSrobert 
3979*404b540aSrobert   if (after_line)
3980*404b540aSrobert     emit_note_copy_after (after_line, insn);
3981*404b540aSrobert }
3982*404b540aSrobert 
3983*404b540aSrobert /* Make an insn of code JUMP_INSN with body X
3984*404b540aSrobert    and output it after the insn AFTER.  */
3985*404b540aSrobert 
3986*404b540aSrobert rtx
3987*404b540aSrobert emit_jump_insn_after_noloc (rtx x, rtx after)
3988*404b540aSrobert {
3989*404b540aSrobert   rtx last;
3990*404b540aSrobert 
3991*404b540aSrobert   gcc_assert (after);
3992*404b540aSrobert 
3993*404b540aSrobert   switch (GET_CODE (x))
3994*404b540aSrobert     {
3995*404b540aSrobert     case INSN:
3996*404b540aSrobert     case JUMP_INSN:
3997*404b540aSrobert     case CALL_INSN:
3998*404b540aSrobert     case CODE_LABEL:
3999*404b540aSrobert     case BARRIER:
4000*404b540aSrobert     case NOTE:
4001*404b540aSrobert       last = emit_insn_after_1 (x, after);
4002*404b540aSrobert       break;
4003*404b540aSrobert 
4004*404b540aSrobert #ifdef ENABLE_RTL_CHECKING
4005*404b540aSrobert     case SEQUENCE:
4006*404b540aSrobert       gcc_unreachable ();
4007*404b540aSrobert       break;
4008*404b540aSrobert #endif
4009*404b540aSrobert 
4010*404b540aSrobert     default:
4011*404b540aSrobert       last = make_jump_insn_raw (x);
4012*404b540aSrobert       add_insn_after (last, after);
4013*404b540aSrobert       break;
4014*404b540aSrobert     }
4015*404b540aSrobert 
4016*404b540aSrobert   return last;
4017*404b540aSrobert }
4018*404b540aSrobert 
4019*404b540aSrobert /* Make an instruction with body X and code CALL_INSN
4020*404b540aSrobert    and output it after the instruction AFTER.  */
4021*404b540aSrobert 
4022*404b540aSrobert rtx
4023*404b540aSrobert emit_call_insn_after_noloc (rtx x, rtx after)
4024*404b540aSrobert {
4025*404b540aSrobert   rtx last;
4026*404b540aSrobert 
4027*404b540aSrobert   gcc_assert (after);
4028*404b540aSrobert 
4029*404b540aSrobert   switch (GET_CODE (x))
4030*404b540aSrobert     {
4031*404b540aSrobert     case INSN:
4032*404b540aSrobert     case JUMP_INSN:
4033*404b540aSrobert     case CALL_INSN:
4034*404b540aSrobert     case CODE_LABEL:
4035*404b540aSrobert     case BARRIER:
4036*404b540aSrobert     case NOTE:
4037*404b540aSrobert       last = emit_insn_after_1 (x, after);
4038*404b540aSrobert       break;
4039*404b540aSrobert 
4040*404b540aSrobert #ifdef ENABLE_RTL_CHECKING
4041*404b540aSrobert     case SEQUENCE:
4042*404b540aSrobert       gcc_unreachable ();
4043*404b540aSrobert       break;
4044*404b540aSrobert #endif
4045*404b540aSrobert 
4046*404b540aSrobert     default:
4047*404b540aSrobert       last = make_call_insn_raw (x);
4048*404b540aSrobert       add_insn_after (last, after);
4049*404b540aSrobert       break;
4050*404b540aSrobert     }
4051*404b540aSrobert 
4052*404b540aSrobert   return last;
4053*404b540aSrobert }
4054*404b540aSrobert 
4055*404b540aSrobert /* Make an insn of code BARRIER
4056*404b540aSrobert    and output it after the insn AFTER.  */
4057*404b540aSrobert 
4058*404b540aSrobert rtx
4059*404b540aSrobert emit_barrier_after (rtx after)
4060*404b540aSrobert {
4061*404b540aSrobert   rtx insn = rtx_alloc (BARRIER);
4062*404b540aSrobert 
4063*404b540aSrobert   INSN_UID (insn) = cur_insn_uid++;
4064*404b540aSrobert 
4065*404b540aSrobert   add_insn_after (insn, after);
4066*404b540aSrobert   return insn;
4067*404b540aSrobert }
4068*404b540aSrobert 
4069*404b540aSrobert /* Emit the label LABEL after the insn AFTER.  */
4070*404b540aSrobert 
4071*404b540aSrobert rtx
4072*404b540aSrobert emit_label_after (rtx label, rtx after)
4073*404b540aSrobert {
4074*404b540aSrobert   /* This can be called twice for the same label
4075*404b540aSrobert      as a result of the confusion that follows a syntax error!
4076*404b540aSrobert      So make it harmless.  */
4077*404b540aSrobert   if (INSN_UID (label) == 0)
4078*404b540aSrobert     {
4079*404b540aSrobert       INSN_UID (label) = cur_insn_uid++;
4080*404b540aSrobert       add_insn_after (label, after);
4081*404b540aSrobert     }
4082*404b540aSrobert 
4083*404b540aSrobert   return label;
4084*404b540aSrobert }
4085*404b540aSrobert 
4086*404b540aSrobert /* Emit a note of subtype SUBTYPE after the insn AFTER.  */
4087*404b540aSrobert 
4088*404b540aSrobert rtx
4089*404b540aSrobert emit_note_after (int subtype, rtx after)
4090*404b540aSrobert {
4091*404b540aSrobert   rtx note = rtx_alloc (NOTE);
4092*404b540aSrobert   INSN_UID (note) = cur_insn_uid++;
4093*404b540aSrobert #ifndef USE_MAPPED_LOCATION
4094*404b540aSrobert   NOTE_SOURCE_FILE (note) = 0;
4095*404b540aSrobert #endif
4096*404b540aSrobert   NOTE_LINE_NUMBER (note) = subtype;
4097*404b540aSrobert   BLOCK_FOR_INSN (note) = NULL;
4098*404b540aSrobert   add_insn_after (note, after);
4099*404b540aSrobert   return note;
4100*404b540aSrobert }
4101*404b540aSrobert 
4102*404b540aSrobert /* Emit a copy of note ORIG after the insn AFTER.  */
4103*404b540aSrobert 
4104*404b540aSrobert rtx
4105*404b540aSrobert emit_note_copy_after (rtx orig, rtx after)
4106*404b540aSrobert {
4107*404b540aSrobert   rtx note;
4108*404b540aSrobert 
4109*404b540aSrobert   if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4110*404b540aSrobert     {
4111*404b540aSrobert       cur_insn_uid++;
4112*404b540aSrobert       return 0;
4113*404b540aSrobert     }
4114*404b540aSrobert 
4115*404b540aSrobert   note = rtx_alloc (NOTE);
4116*404b540aSrobert   INSN_UID (note) = cur_insn_uid++;
4117*404b540aSrobert   NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4118*404b540aSrobert   NOTE_DATA (note) = NOTE_DATA (orig);
4119*404b540aSrobert   BLOCK_FOR_INSN (note) = NULL;
4120*404b540aSrobert   add_insn_after (note, after);
4121*404b540aSrobert   return note;
4122*404b540aSrobert }
4123*404b540aSrobert 
4124*404b540aSrobert /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
4125*404b540aSrobert rtx
4126*404b540aSrobert emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4127*404b540aSrobert {
4128*404b540aSrobert   rtx last = emit_insn_after_noloc (pattern, after);
4129*404b540aSrobert 
4130*404b540aSrobert   if (pattern == NULL_RTX || !loc)
4131*404b540aSrobert     return last;
4132*404b540aSrobert 
4133*404b540aSrobert   after = NEXT_INSN (after);
4134*404b540aSrobert   while (1)
4135*404b540aSrobert     {
4136*404b540aSrobert       if (active_insn_p (after) && !INSN_LOCATOR (after))
4137*404b540aSrobert 	INSN_LOCATOR (after) = loc;
4138*404b540aSrobert       if (after == last)
4139*404b540aSrobert 	break;
4140*404b540aSrobert       after = NEXT_INSN (after);
4141*404b540aSrobert     }
4142*404b540aSrobert   return last;
4143*404b540aSrobert }
4144*404b540aSrobert 
4145*404b540aSrobert /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4146*404b540aSrobert rtx
4147*404b540aSrobert emit_insn_after (rtx pattern, rtx after)
4148*404b540aSrobert {
4149*404b540aSrobert   if (INSN_P (after))
4150*404b540aSrobert     return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4151*404b540aSrobert   else
4152*404b540aSrobert     return emit_insn_after_noloc (pattern, after);
4153*404b540aSrobert }
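
/* Usage sketch: the wrapper above attributes the new insn to AFTER's
   source location.  DEST and SRC are hypothetical operands.

	rtx insn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, src), after);

   When AFTER is an active insn carrying a locator, INSN inherits
   INSN_LOCATOR (after) unless it already had one of its own.  */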
4154*404b540aSrobert 
4155*404b540aSrobert /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
4156*404b540aSrobert rtx
4157*404b540aSrobert emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4158*404b540aSrobert {
4159*404b540aSrobert   rtx last = emit_jump_insn_after_noloc (pattern, after);
4160*404b540aSrobert 
4161*404b540aSrobert   if (pattern == NULL_RTX || !loc)
4162*404b540aSrobert     return last;
4163*404b540aSrobert 
4164*404b540aSrobert   after = NEXT_INSN (after);
4165*404b540aSrobert   while (1)
4166*404b540aSrobert     {
4167*404b540aSrobert       if (active_insn_p (after) && !INSN_LOCATOR (after))
4168*404b540aSrobert 	INSN_LOCATOR (after) = loc;
4169*404b540aSrobert       if (after == last)
4170*404b540aSrobert 	break;
4171*404b540aSrobert       after = NEXT_INSN (after);
4172*404b540aSrobert     }
4173*404b540aSrobert   return last;
4174*404b540aSrobert }
4175*404b540aSrobert 
4176*404b540aSrobert /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4177*404b540aSrobert rtx
4178*404b540aSrobert emit_jump_insn_after (rtx pattern, rtx after)
4179*404b540aSrobert {
4180*404b540aSrobert   if (INSN_P (after))
4181*404b540aSrobert     return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4182*404b540aSrobert   else
4183*404b540aSrobert     return emit_jump_insn_after_noloc (pattern, after);
4184*404b540aSrobert }
4185*404b540aSrobert 
4186*404b540aSrobert /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
4187*404b540aSrobert rtx
4188*404b540aSrobert emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4189*404b540aSrobert {
4190*404b540aSrobert   rtx last = emit_call_insn_after_noloc (pattern, after);
4191*404b540aSrobert 
4192*404b540aSrobert   if (pattern == NULL_RTX || !loc)
4193*404b540aSrobert     return last;
4194*404b540aSrobert 
4195*404b540aSrobert   after = NEXT_INSN (after);
4196*404b540aSrobert   while (1)
4197*404b540aSrobert     {
4198*404b540aSrobert       if (active_insn_p (after) && !INSN_LOCATOR (after))
4199*404b540aSrobert 	INSN_LOCATOR (after) = loc;
4200*404b540aSrobert       if (after == last)
4201*404b540aSrobert 	break;
4202*404b540aSrobert       after = NEXT_INSN (after);
4203*404b540aSrobert     }
4204*404b540aSrobert   return last;
4205*404b540aSrobert }
4206*404b540aSrobert 
4207*404b540aSrobert /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4208*404b540aSrobert rtx
4209*404b540aSrobert emit_call_insn_after (rtx pattern, rtx after)
4210*404b540aSrobert {
4211*404b540aSrobert   if (INSN_P (after))
4212*404b540aSrobert     return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4213*404b540aSrobert   else
4214*404b540aSrobert     return emit_call_insn_after_noloc (pattern, after);
4215*404b540aSrobert }
4216*404b540aSrobert 
4217*404b540aSrobert /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
4218*404b540aSrobert rtx
4219*404b540aSrobert emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4220*404b540aSrobert {
4221*404b540aSrobert   rtx first = PREV_INSN (before);
4222*404b540aSrobert   rtx last = emit_insn_before_noloc (pattern, before);
4223*404b540aSrobert 
4224*404b540aSrobert   if (pattern == NULL_RTX || !loc)
4225*404b540aSrobert     return last;
4226*404b540aSrobert 
4227*404b540aSrobert   first = NEXT_INSN (first);
4228*404b540aSrobert   while (1)
4229*404b540aSrobert     {
4230*404b540aSrobert       if (active_insn_p (first) && !INSN_LOCATOR (first))
4231*404b540aSrobert 	INSN_LOCATOR (first) = loc;
4232*404b540aSrobert       if (first == last)
4233*404b540aSrobert 	break;
4234*404b540aSrobert       first = NEXT_INSN (first);
4235*404b540aSrobert     }
4236*404b540aSrobert   return last;
4237*404b540aSrobert }
4238*404b540aSrobert 
4239*404b540aSrobert /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
4240*404b540aSrobert rtx
4241*404b540aSrobert emit_insn_before (rtx pattern, rtx before)
4242*404b540aSrobert {
4243*404b540aSrobert   if (INSN_P (before))
4244*404b540aSrobert     return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4245*404b540aSrobert   else
4246*404b540aSrobert     return emit_insn_before_noloc (pattern, before);
4247*404b540aSrobert }
4248*404b540aSrobert 
4249*404b540aSrobert /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
4250*404b540aSrobert rtx
4251*404b540aSrobert emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4252*404b540aSrobert {
4253*404b540aSrobert   rtx first = PREV_INSN (before);
4254*404b540aSrobert   rtx last = emit_jump_insn_before_noloc (pattern, before);
4255*404b540aSrobert 
4256*404b540aSrobert   if (pattern == NULL_RTX || !loc)
4257*404b540aSrobert     return last;
4258*404b540aSrobert 
4259*404b540aSrobert   first = NEXT_INSN (first);
4260*404b540aSrobert   while (1)
4261*404b540aSrobert     {
4262*404b540aSrobert       if (active_insn_p (first) && !INSN_LOCATOR (first))
4263*404b540aSrobert 	INSN_LOCATOR (first) = loc;
4264*404b540aSrobert       if (first == last)
4265*404b540aSrobert 	break;
4266*404b540aSrobert       first = NEXT_INSN (first);
4267*404b540aSrobert     }
4268*404b540aSrobert   return last;
4269*404b540aSrobert }
4270*404b540aSrobert 
4271*404b540aSrobert /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
4272*404b540aSrobert rtx
4273*404b540aSrobert emit_jump_insn_before (rtx pattern, rtx before)
4274*404b540aSrobert {
4275*404b540aSrobert   if (INSN_P (before))
4276*404b540aSrobert     return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4277*404b540aSrobert   else
4278*404b540aSrobert     return emit_jump_insn_before_noloc (pattern, before);
4279*404b540aSrobert }
4280*404b540aSrobert 
4281*404b540aSrobert /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
4282*404b540aSrobert rtx
4283*404b540aSrobert emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4284*404b540aSrobert {
4285*404b540aSrobert   rtx first = PREV_INSN (before);
4286*404b540aSrobert   rtx last = emit_call_insn_before_noloc (pattern, before);
4287*404b540aSrobert 
4288*404b540aSrobert   if (pattern == NULL_RTX || !loc)
4289*404b540aSrobert     return last;
4290*404b540aSrobert 
4291*404b540aSrobert   first = NEXT_INSN (first);
4292*404b540aSrobert   while (1)
4293*404b540aSrobert     {
4294*404b540aSrobert       if (active_insn_p (first) && !INSN_LOCATOR (first))
4295*404b540aSrobert 	INSN_LOCATOR (first) = loc;
4296*404b540aSrobert       if (first == last)
4297*404b540aSrobert 	break;
4298*404b540aSrobert       first = NEXT_INSN (first);
4299*404b540aSrobert     }
4300*404b540aSrobert   return last;
4301*404b540aSrobert }
4302*404b540aSrobert 
4303*404b540aSrobert /* Like emit_call_insn_before_noloc,
4304*404b540aSrobert    but set INSN_LOCATOR according to BEFORE.  */
4305*404b540aSrobert rtx
4306*404b540aSrobert emit_call_insn_before (rtx pattern, rtx before)
4307*404b540aSrobert {
4308*404b540aSrobert   if (INSN_P (before))
4309*404b540aSrobert     return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4310*404b540aSrobert   else
4311*404b540aSrobert     return emit_call_insn_before_noloc (pattern, before);
4312*404b540aSrobert }
4313*404b540aSrobert 
4314*404b540aSrobert /* Take X and emit it at the end of the doubly-linked
4315*404b540aSrobert    INSN list.
4316*404b540aSrobert 
4317*404b540aSrobert    Returns the last insn emitted.  */
4318*404b540aSrobert 
4319*404b540aSrobert rtx
4320*404b540aSrobert emit_insn (rtx x)
4321*404b540aSrobert {
4322*404b540aSrobert   rtx last = last_insn;
4323*404b540aSrobert   rtx insn;
4324*404b540aSrobert 
4325*404b540aSrobert   if (x == NULL_RTX)
4326*404b540aSrobert     return last;
4327*404b540aSrobert 
4328*404b540aSrobert   switch (GET_CODE (x))
4329*404b540aSrobert     {
4330*404b540aSrobert     case INSN:
4331*404b540aSrobert     case JUMP_INSN:
4332*404b540aSrobert     case CALL_INSN:
4333*404b540aSrobert     case CODE_LABEL:
4334*404b540aSrobert     case BARRIER:
4335*404b540aSrobert     case NOTE:
4336*404b540aSrobert       insn = x;
4337*404b540aSrobert       while (insn)
4338*404b540aSrobert 	{
4339*404b540aSrobert 	  rtx next = NEXT_INSN (insn);
4340*404b540aSrobert 	  add_insn (insn);
4341*404b540aSrobert 	  last = insn;
4342*404b540aSrobert 	  insn = next;
4343*404b540aSrobert 	}
4344*404b540aSrobert       break;
4345*404b540aSrobert 
4346*404b540aSrobert #ifdef ENABLE_RTL_CHECKING
4347*404b540aSrobert     case SEQUENCE:
4348*404b540aSrobert       gcc_unreachable ();
4349*404b540aSrobert       break;
4350*404b540aSrobert #endif
4351*404b540aSrobert 
4352*404b540aSrobert     default:
4353*404b540aSrobert       last = make_insn_raw (x);
4354*404b540aSrobert       add_insn (last);
4355*404b540aSrobert       break;
4356*404b540aSrobert     }
4357*404b540aSrobert 
4358*404b540aSrobert   return last;
4359*404b540aSrobert }
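
/* Both call forms are accepted (a sketch; DEST, SRC and INSNS_HEAD are
   hypothetical): a bare pattern is wrapped via make_insn_raw, while an
   insn list is spliced onto the chain as-is.

	emit_insn (gen_rtx_SET (VOIDmode, dest, src));	wrap and append
	emit_insn (insns_head);				append whole list
*/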
4360*404b540aSrobert 
4361*404b540aSrobert /* Make an insn of code JUMP_INSN with pattern X
4362*404b540aSrobert    and add it to the end of the doubly-linked list.  */
4363*404b540aSrobert 
4364*404b540aSrobert rtx
4365*404b540aSrobert emit_jump_insn (rtx x)
4366*404b540aSrobert {
4367*404b540aSrobert   rtx last = NULL_RTX, insn;
4368*404b540aSrobert 
4369*404b540aSrobert   switch (GET_CODE (x))
4370*404b540aSrobert     {
4371*404b540aSrobert     case INSN:
4372*404b540aSrobert     case JUMP_INSN:
4373*404b540aSrobert     case CALL_INSN:
4374*404b540aSrobert     case CODE_LABEL:
4375*404b540aSrobert     case BARRIER:
4376*404b540aSrobert     case NOTE:
4377*404b540aSrobert       insn = x;
4378*404b540aSrobert       while (insn)
4379*404b540aSrobert 	{
4380*404b540aSrobert 	  rtx next = NEXT_INSN (insn);
4381*404b540aSrobert 	  add_insn (insn);
4382*404b540aSrobert 	  last = insn;
4383*404b540aSrobert 	  insn = next;
4384*404b540aSrobert 	}
4385*404b540aSrobert       break;
4386*404b540aSrobert 
4387*404b540aSrobert #ifdef ENABLE_RTL_CHECKING
4388*404b540aSrobert     case SEQUENCE:
4389*404b540aSrobert       gcc_unreachable ();
4390*404b540aSrobert       break;
4391*404b540aSrobert #endif
4392*404b540aSrobert 
4393*404b540aSrobert     default:
4394*404b540aSrobert       last = make_jump_insn_raw (x);
4395*404b540aSrobert       add_insn (last);
4396*404b540aSrobert       break;
4397*404b540aSrobert     }
4398*404b540aSrobert 
4399*404b540aSrobert   return last;
4400*404b540aSrobert }
4401*404b540aSrobert 
4402*404b540aSrobert /* Make an insn of code CALL_INSN with pattern X
4403*404b540aSrobert    and add it to the end of the doubly-linked list.  */
4404*404b540aSrobert 
4405*404b540aSrobert rtx
4406*404b540aSrobert emit_call_insn (rtx x)
4407*404b540aSrobert {
4408*404b540aSrobert   rtx insn;
4409*404b540aSrobert 
4410*404b540aSrobert   switch (GET_CODE (x))
4411*404b540aSrobert     {
4412*404b540aSrobert     case INSN:
4413*404b540aSrobert     case JUMP_INSN:
4414*404b540aSrobert     case CALL_INSN:
4415*404b540aSrobert     case CODE_LABEL:
4416*404b540aSrobert     case BARRIER:
4417*404b540aSrobert     case NOTE:
4418*404b540aSrobert       insn = emit_insn (x);
4419*404b540aSrobert       break;
4420*404b540aSrobert 
4421*404b540aSrobert #ifdef ENABLE_RTL_CHECKING
4422*404b540aSrobert     case SEQUENCE:
4423*404b540aSrobert       gcc_unreachable ();
4424*404b540aSrobert       break;
4425*404b540aSrobert #endif
4426*404b540aSrobert 
4427*404b540aSrobert     default:
4428*404b540aSrobert       insn = make_call_insn_raw (x);
4429*404b540aSrobert       add_insn (insn);
4430*404b540aSrobert       break;
4431*404b540aSrobert     }
4432*404b540aSrobert 
4433*404b540aSrobert   return insn;
4434*404b540aSrobert }
4435*404b540aSrobert 
4436*404b540aSrobert /* Add the label LABEL to the end of the doubly-linked list.  */
4437*404b540aSrobert 
4438*404b540aSrobert rtx
4439*404b540aSrobert emit_label (rtx label)
4440*404b540aSrobert {
4441*404b540aSrobert   /* This can be called twice for the same label
4442*404b540aSrobert      as a result of the confusion that follows a syntax error!
4443*404b540aSrobert      So make it harmless.  */
4444*404b540aSrobert   if (INSN_UID (label) == 0)
4445*404b540aSrobert     {
4446*404b540aSrobert       INSN_UID (label) = cur_insn_uid++;
4447*404b540aSrobert       add_insn (label);
4448*404b540aSrobert     }
4449*404b540aSrobert   return label;
4450*404b540aSrobert }
4451*404b540aSrobert 
4452*404b540aSrobert /* Make an insn of code BARRIER
4453*404b540aSrobert    and add it to the end of the doubly-linked list.  */
4454*404b540aSrobert 
4455*404b540aSrobert rtx
4456*404b540aSrobert emit_barrier (void)
4457*404b540aSrobert {
4458*404b540aSrobert   rtx barrier = rtx_alloc (BARRIER);
4459*404b540aSrobert   INSN_UID (barrier) = cur_insn_uid++;
4460*404b540aSrobert   add_insn (barrier);
4461*404b540aSrobert   return barrier;
4462*404b540aSrobert }
4463*404b540aSrobert 
4464*404b540aSrobert /* Make a line-number NOTE insn for LOCATION and add it to the end
4465*404b540aSrobert    of the doubly-linked list, but only if line numbers are desired for
4466*404b540aSrobert    debugging info and LOCATION doesn't match the previous one.  */
4467*404b540aSrobert 
4468*404b540aSrobert rtx
4469*404b540aSrobert emit_line_note (location_t location)
4470*404b540aSrobert {
4471*404b540aSrobert   rtx note;
4472*404b540aSrobert 
4473*404b540aSrobert #ifdef USE_MAPPED_LOCATION
4474*404b540aSrobert   if (location == last_location)
4475*404b540aSrobert     return NULL_RTX;
4476*404b540aSrobert #else
4477*404b540aSrobert   if (location.file && last_location.file
4478*404b540aSrobert       && !strcmp (location.file, last_location.file)
4479*404b540aSrobert       && location.line == last_location.line)
4480*404b540aSrobert     return NULL_RTX;
4481*404b540aSrobert #endif
4482*404b540aSrobert   last_location = location;
4483*404b540aSrobert 
4484*404b540aSrobert   if (no_line_numbers)
4485*404b540aSrobert     {
4486*404b540aSrobert       cur_insn_uid++;
4487*404b540aSrobert       return NULL_RTX;
4488*404b540aSrobert     }
4489*404b540aSrobert 
4490*404b540aSrobert #ifdef USE_MAPPED_LOCATION
4491*404b540aSrobert   note = emit_note ((int) location);
4492*404b540aSrobert #else
4493*404b540aSrobert   note = emit_note (location.line);
4494*404b540aSrobert   NOTE_SOURCE_FILE (note) = location.file;
4495*404b540aSrobert #endif
4496*404b540aSrobert 
4497*404b540aSrobert   return note;
4498*404b540aSrobert }
4499*404b540aSrobert 
4500*404b540aSrobert /* Emit a copy of note ORIG.  */
4501*404b540aSrobert 
4502*404b540aSrobert rtx
4503*404b540aSrobert emit_note_copy (rtx orig)
4504*404b540aSrobert {
4505*404b540aSrobert   rtx note;
4506*404b540aSrobert 
4507*404b540aSrobert   if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4508*404b540aSrobert     {
4509*404b540aSrobert       cur_insn_uid++;
4510*404b540aSrobert       return NULL_RTX;
4511*404b540aSrobert     }
4512*404b540aSrobert 
4513*404b540aSrobert   note = rtx_alloc (NOTE);
4514*404b540aSrobert 
4515*404b540aSrobert   INSN_UID (note) = cur_insn_uid++;
4516*404b540aSrobert   NOTE_DATA (note) = NOTE_DATA (orig);
4517*404b540aSrobert   NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4518*404b540aSrobert   BLOCK_FOR_INSN (note) = NULL;
4519*404b540aSrobert   add_insn (note);
4520*404b540aSrobert 
4521*404b540aSrobert   return note;
4522*404b540aSrobert }
4523*404b540aSrobert 
4524*404b540aSrobert /* Make an insn of code NOTE, with subtype NOTE_NO,
4525*404b540aSrobert    and add it to the end of the doubly-linked list.  */
4526*404b540aSrobert 
4527*404b540aSrobert rtx
4528*404b540aSrobert emit_note (int note_no)
4529*404b540aSrobert {
4530*404b540aSrobert   rtx note;
4531*404b540aSrobert 
4532*404b540aSrobert   note = rtx_alloc (NOTE);
4533*404b540aSrobert   INSN_UID (note) = cur_insn_uid++;
4534*404b540aSrobert   NOTE_LINE_NUMBER (note) = note_no;
4535*404b540aSrobert   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4536*404b540aSrobert   BLOCK_FOR_INSN (note) = NULL;
4537*404b540aSrobert   add_insn (note);
4538*404b540aSrobert   return note;
4539*404b540aSrobert }
4540*404b540aSrobert 
4541*404b540aSrobert /* Cause next statement to emit a line note even if the line number
4542*404b540aSrobert    has not changed.  */
4543*404b540aSrobert 
4544*404b540aSrobert void
4545*404b540aSrobert force_next_line_note (void)
4546*404b540aSrobert {
4547*404b540aSrobert #ifdef USE_MAPPED_LOCATION
4548*404b540aSrobert   last_location = -1;
4549*404b540aSrobert #else
4550*404b540aSrobert   last_location.line = -1;
4551*404b540aSrobert #endif
4552*404b540aSrobert }
4553*404b540aSrobert 
4554*404b540aSrobert /* Place a note of KIND on insn INSN with DATUM as the datum.  If a
4555*404b540aSrobert    note of this type already exists, its datum is replaced.  */
4556*404b540aSrobert 
4557*404b540aSrobert rtx
4558*404b540aSrobert set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4559*404b540aSrobert {
4560*404b540aSrobert   rtx note = find_reg_note (insn, kind, NULL_RTX);
4561*404b540aSrobert 
4562*404b540aSrobert   switch (kind)
4563*404b540aSrobert     {
4564*404b540aSrobert     case REG_EQUAL:
4565*404b540aSrobert     case REG_EQUIV:
4566*404b540aSrobert       /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4567*404b540aSrobert 	 has multiple sets (some callers assume single_set
4568*404b540aSrobert 	 means the insn only has one set, when in fact it
4569*404b540aSrobert 	 means the insn only has one *useful* set).  */
4570*404b540aSrobert       if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4571*404b540aSrobert 	{
4572*404b540aSrobert 	  gcc_assert (!note);
4573*404b540aSrobert 	  return NULL_RTX;
4574*404b540aSrobert 	}
4575*404b540aSrobert 
4576*404b540aSrobert       /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4577*404b540aSrobert 	 It serves no useful purpose and breaks eliminate_regs.  */
4578*404b540aSrobert       if (GET_CODE (datum) == ASM_OPERANDS)
4579*404b540aSrobert 	return NULL_RTX;
4580*404b540aSrobert       break;
4581*404b540aSrobert 
4582*404b540aSrobert     default:
4583*404b540aSrobert       break;
4584*404b540aSrobert     }
4585*404b540aSrobert 
4586*404b540aSrobert   if (note)
4587*404b540aSrobert     {
4588*404b540aSrobert       XEXP (note, 0) = datum;
4589*404b540aSrobert       return note;
4590*404b540aSrobert     }
4591*404b540aSrobert 
4592*404b540aSrobert   REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4593*404b540aSrobert   return REG_NOTES (insn);
4594*404b540aSrobert }
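
/* A hedged example (TARGET, TEMP, A, B and MODE are hypothetical):
   tell later passes that a multi-insn expansion computes A * B.

	rtx insn = emit_move_insn (target, temp);
	set_unique_reg_note (insn, REG_EQUAL, gen_rtx_MULT (mode, a, b));
*/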
4595*404b540aSrobert 
4596*404b540aSrobert /* Return an indication of which type of insn should have X as a body.
4597*404b540aSrobert    The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */
4598*404b540aSrobert 
4599*404b540aSrobert static enum rtx_code
4600*404b540aSrobert classify_insn (rtx x)
4601*404b540aSrobert {
4602*404b540aSrobert   if (LABEL_P (x))
4603*404b540aSrobert     return CODE_LABEL;
4604*404b540aSrobert   if (GET_CODE (x) == CALL)
4605*404b540aSrobert     return CALL_INSN;
4606*404b540aSrobert   if (GET_CODE (x) == RETURN)
4607*404b540aSrobert     return JUMP_INSN;
4608*404b540aSrobert   if (GET_CODE (x) == SET)
4609*404b540aSrobert     {
4610*404b540aSrobert       if (SET_DEST (x) == pc_rtx)
4611*404b540aSrobert 	return JUMP_INSN;
4612*404b540aSrobert       else if (GET_CODE (SET_SRC (x)) == CALL)
4613*404b540aSrobert 	return CALL_INSN;
4614*404b540aSrobert       else
4615*404b540aSrobert 	return INSN;
4616*404b540aSrobert     }
4617*404b540aSrobert   if (GET_CODE (x) == PARALLEL)
4618*404b540aSrobert     {
4619*404b540aSrobert       int j;
4620*404b540aSrobert       for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4621*404b540aSrobert 	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4622*404b540aSrobert 	  return CALL_INSN;
4623*404b540aSrobert 	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4624*404b540aSrobert 		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4625*404b540aSrobert 	  return JUMP_INSN;
4626*404b540aSrobert 	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4627*404b540aSrobert 		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4628*404b540aSrobert 	  return CALL_INSN;
4629*404b540aSrobert     }
4630*404b540aSrobert   return INSN;
4631*404b540aSrobert }
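
/* For instance (a sketch; LABEL and REG are hypothetical operands):

	classify_insn (gen_rtx_SET (VOIDmode, pc_rtx, label))	=> JUMP_INSN
	classify_insn (gen_rtx_SET (VOIDmode, reg, const0_rtx))	=> INSN
*/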
4632*404b540aSrobert 
4633*404b540aSrobert /* Emit the rtl pattern X as an appropriate kind of insn.
4634*404b540aSrobert    If X is a label, it is simply added into the insn chain.  */
4635*404b540aSrobert 
4636*404b540aSrobert rtx
4637*404b540aSrobert emit (rtx x)
4638*404b540aSrobert {
4639*404b540aSrobert   enum rtx_code code = classify_insn (x);
4640*404b540aSrobert 
4641*404b540aSrobert   switch (code)
4642*404b540aSrobert     {
4643*404b540aSrobert     case CODE_LABEL:
4644*404b540aSrobert       return emit_label (x);
4645*404b540aSrobert     case INSN:
4646*404b540aSrobert       return emit_insn (x);
4647*404b540aSrobert     case JUMP_INSN:
4648*404b540aSrobert       {
4649*404b540aSrobert 	rtx insn = emit_jump_insn (x);
4650*404b540aSrobert 	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4651*404b540aSrobert 	  return emit_barrier ();
4652*404b540aSrobert 	return insn;
4653*404b540aSrobert       }
4654*404b540aSrobert     case CALL_INSN:
4655*404b540aSrobert       return emit_call_insn (x);
4656*404b540aSrobert     default:
4657*404b540aSrobert       gcc_unreachable ();
4658*404b540aSrobert     }
4659*404b540aSrobert }
4660*404b540aSrobert 
4661*404b540aSrobert /* Space for free sequence stack entries.  */
4662*404b540aSrobert static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4663*404b540aSrobert 
4664*404b540aSrobert /* Begin emitting insns to a sequence.  If this sequence will contain
4665*404b540aSrobert    something that might cause the compiler to pop arguments to function
4666*404b540aSrobert    calls (because those pops have previously been deferred; see
4667*404b540aSrobert    INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4668*404b540aSrobert    before calling this function.  That will ensure that the deferred
4669*404b540aSrobert    pops are not accidentally emitted in the middle of this sequence.  */
4670*404b540aSrobert 
4671*404b540aSrobert void
4672*404b540aSrobert start_sequence (void)
4673*404b540aSrobert {
4674*404b540aSrobert   struct sequence_stack *tem;
4675*404b540aSrobert 
4676*404b540aSrobert   if (free_sequence_stack != NULL)
4677*404b540aSrobert     {
4678*404b540aSrobert       tem = free_sequence_stack;
4679*404b540aSrobert       free_sequence_stack = tem->next;
4680*404b540aSrobert     }
4681*404b540aSrobert   else
4682*404b540aSrobert     tem = ggc_alloc (sizeof (struct sequence_stack));
4683*404b540aSrobert 
4684*404b540aSrobert   tem->next = seq_stack;
4685*404b540aSrobert   tem->first = first_insn;
4686*404b540aSrobert   tem->last = last_insn;
4687*404b540aSrobert 
4688*404b540aSrobert   seq_stack = tem;
4689*404b540aSrobert 
4690*404b540aSrobert   first_insn = 0;
4691*404b540aSrobert   last_insn = 0;
4692*404b540aSrobert }
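
/* A sketch of the pattern the comment above describes (TEMP, X and
   INSN are hypothetical):

	do_pending_stack_adjust ();	flush deferred pops first
	start_sequence ();
	emit_move_insn (temp, x);
	seq = get_insns ();
	end_sequence ();
	emit_insn_before (seq, insn);
*/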
4693*404b540aSrobert 
4694*404b540aSrobert /* Set up the insn chain starting with FIRST as the current sequence,
4695*404b540aSrobert    saving the previously current one.  See the documentation for
4696*404b540aSrobert    start_sequence for more information about how to use this function.  */
4697*404b540aSrobert 
4698*404b540aSrobert void
4699*404b540aSrobert push_to_sequence (rtx first)
4700*404b540aSrobert {
4701*404b540aSrobert   rtx last;
4702*404b540aSrobert 
4703*404b540aSrobert   start_sequence ();
4704*404b540aSrobert 
4705*404b540aSrobert   for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4706*404b540aSrobert 
4707*404b540aSrobert   first_insn = first;
4708*404b540aSrobert   last_insn = last;
4709*404b540aSrobert }
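
/* Sketch: resume emitting at the end of a previously saved chain
   (SAVED_CHAIN is hypothetical), then save it again.

	push_to_sequence (saved_chain);
	emit_insn (pattern);		appended after the old last insn
	saved_chain = get_insns ();
	end_sequence ();
*/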
4710*404b540aSrobert 
4711*404b540aSrobert /* Set up the outer-level insn chain
4712*404b540aSrobert    as the current sequence, saving the previously current one.  */
4713*404b540aSrobert 
4714*404b540aSrobert void
4715*404b540aSrobert push_topmost_sequence (void)
4716*404b540aSrobert {
4717*404b540aSrobert   struct sequence_stack *stack, *top = NULL;
4718*404b540aSrobert 
4719*404b540aSrobert   start_sequence ();
4720*404b540aSrobert 
4721*404b540aSrobert   for (stack = seq_stack; stack; stack = stack->next)
4722*404b540aSrobert     top = stack;
4723*404b540aSrobert 
4724*404b540aSrobert   first_insn = top->first;
4725*404b540aSrobert   last_insn = top->last;
4726*404b540aSrobert }
4727*404b540aSrobert 
4728*404b540aSrobert /* After emitting to the outer-level insn chain, update the outer-level
4729*404b540aSrobert    insn chain, and restore the previous saved state.  */
4730*404b540aSrobert 
4731*404b540aSrobert void
4732*404b540aSrobert pop_topmost_sequence (void)
4733*404b540aSrobert {
4734*404b540aSrobert   struct sequence_stack *stack, *top = NULL;
4735*404b540aSrobert 
4736*404b540aSrobert   for (stack = seq_stack; stack; stack = stack->next)
4737*404b540aSrobert     top = stack;
4738*404b540aSrobert 
4739*404b540aSrobert   top->first = first_insn;
4740*404b540aSrobert   top->last = last_insn;
4741*404b540aSrobert 
4742*404b540aSrobert   end_sequence ();
4743*404b540aSrobert }
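
/* The common idiom built from the previous two functions: emit SEQ at
   the very start of the function even while nested inside other
   sequences (SEQ is hypothetical).

	push_topmost_sequence ();
	emit_insn_after (seq, get_insns ());	after the function's first insn
	pop_topmost_sequence ();
*/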

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  first_insn = tem->first;
  last_insn = tem->last;
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
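
/* A minimal sketch of the whole sequence lifecycle (not part of the
   original file; the helper name is hypothetical).  Note the ordering the
   comment above insists on: get_insns must come before end_sequence, or
   the freshly built chain is lost.  */

static rtx
build_move_seq_sketch (rtx target, rtx source)
{
  rtx seq;

  start_sequence ();		/* open a fresh, empty insn chain */
  emit_move_insn (target, source);
  seq = get_insns ();		/* fetch the chain *before* ending */
  end_sequence ();		/* pop back to the saved chain */
  return seq;			/* caller splices SEQ where needed */
}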

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}

/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (struct emit_status *es)
{
  rtx *ptr = es->x_regno_reg_rtx;
  ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
}


/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
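
/* A minimal usage sketch (not part of the original file; the helper name
   is hypothetical): duplicate INSN's pattern and emit the copy right
   after it.  Unlike copy_rtx, copy_insn gives the copy its own SCRATCHes,
   shared consistently across the whole pattern.  */

static rtx
duplicate_insn_sketch (rtx insn)
{
  rtx pat = copy_insn (PATTERN (insn));
  return emit_insn_after (pat, insn);
}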

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  struct function *f = cfun;

  f->emit = ggc_alloc (sizeof (struct emit_status));
  first_insn = NULL;
  last_insn = NULL;
  cur_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  f->emit->regno_pointer_align
    = ggc_alloc_cleared (f->emit->regno_pointer_align_length
			 * sizeof (unsigned char));

  regno_reg_rtx
    = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  static_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs (f->emit);

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}
/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
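
/* A minimal usage sketch (not part of the original file; the helper name
   is hypothetical, and it assumes the target provides V4SImode): because
   every element below is CONST1_RTX (SImode), the call falls through to
   the shared CONST1_RTX (V4SImode) instead of allocating a fresh
   CONST_VECTOR.  */

static rtx
const_one_v4si_sketch (void)
{
  rtvec v = rtvec_alloc (4);
  int i;

  for (i = 0; i < 4; i++)
    RTVEC_ELT (v, i) = const1_rtx;	/* the same rtx as CONST1_RTX (SImode) */
  return gen_rtx_CONST_VECTOR (V4SImode, v);
}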

/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (int line_numbers)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_once ();

  /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
     tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
					  HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0,   0,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst1,   1,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst2,   2,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst3,   3,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst10, 10,  0, double_mode);
  REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
  REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);

  /* Initialize mathematical constants for constant folding builtins.
     These constants need to be given to at least 160 bits of precision.  */
  real_from_string (&dconstpi,
    "3.1415926535897932384626433832795028841971693993751058209749445923078");
  real_from_string (&dconste,
    "2.7182818284590452353602874713526624977572470936999595749669676277241");

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      REAL_VALUE_TYPE *r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
}

/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new;
  rtx note1, note2, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new), new, 0);

  INSN_LOCATOR (new) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
     make them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  REG_NOTES (new)
	    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
					      XEXP (link, 0),
					      REG_NOTES (new)));
	else
	  REG_NOTES (new)
	    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
					      XEXP (link, 0),
					      REG_NOTES (new)));
      }

  /* Fix the libcall sequences.  */
  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
    {
      rtx p = new;
      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
	p = PREV_INSN (p);
      XEXP (note1, 0) = p;
      XEXP (note2, 0) = new;
    }
  INSN_CODE (new) = INSN_CODE (insn);
  return new;
}
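
/* A minimal usage sketch (not part of the original file; the helper name
   is hypothetical, and every insn in the run is assumed to be an INSN,
   JUMP_INSN, or CALL_INSN): copy the insns FIRST through LAST, in order,
   after AFTER, using each returned copy as the next insertion point.  */

static rtx
duplicate_run_sketch (rtx first, rtx last, rtx after)
{
  rtx insn;

  for (insn = first; ; insn = NEXT_INSN (insn))
    {
      after = emit_copy_of_insn_after (insn, after);
      if (insn == last)
	break;
    }
  return after;	/* the copy of LAST */
}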

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
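
/* A minimal usage sketch (not part of the original file; the helper name
   is hypothetical): asking for the same hard-register clobber twice
   returns the one cached rtx, so repeated clobbers allocate no new RTL.
   Sharing the pattern is safe here because CLOBBERs of hard registers
   are deliberately shareable; see the CLOBBER case in copy_insn_1.  */

static void
clobber_word_reg0_sketch (void)
{
  emit_insn (gen_hard_reg_clobber (word_mode, 0));	/* allocates and caches */
  emit_insn (gen_hard_reg_clobber (word_mode, 0));	/* reuses the cache */
}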

#include "gt-emit-rtl.h"