xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/emit-rtl.c (revision d909946ca08dceb44d7d0f22ec9488679695d976)
1 /* Emit RTL for the GCC expander.
2    Copyright (C) 1987-2013 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 
21 /* Middle-to-low level generation of rtx code and insns.
22 
23    This file contains support functions for creating rtl expressions
24    and manipulating them in the doubly-linked chain of insns.
25 
26    The patterns of the insns are created by machine-dependent
27    routines in insn-emit.c, which is generated automatically from
28    the machine description.  These routines make the individual rtx's
29    of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30    which are automatically generated from rtl.def; what is machine
31    dependent is the kind of rtx's they make and what arguments they
32    use.  */
33 
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "diagnostic-core.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "tm_p.h"
42 #include "flags.h"
43 #include "function.h"
44 #include "expr.h"
45 #include "regs.h"
46 #include "hard-reg-set.h"
47 #include "hashtab.h"
48 #include "insn-config.h"
49 #include "recog.h"
50 #include "bitmap.h"
51 #include "basic-block.h"
52 #include "ggc.h"
53 #include "debug.h"
54 #include "langhooks.h"
55 #include "df.h"
56 #include "params.h"
57 #include "target.h"
58 
59 struct target_rtl default_target_rtl;
60 #if SWITCHABLE_TARGET
61 struct target_rtl *this_target_rtl = &default_target_rtl;
62 #endif
63 
64 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
65 
66 /* Commonly used modes.  */
67 
68 enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
69 enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
70 enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
71 enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
72 
73 /* Datastructures maintained for currently processed function in RTL form.  */
74 
75 struct rtl_data x_rtl;
76 
77 /* Indexed by pseudo register number, gives the rtx for that pseudo.
78    Allocated in parallel with regno_pointer_align.
79    FIXME: We could put it into emit_status struct, but gengtype is not able to deal
80    with length attribute nested in top level structures.  */
81 
82 rtx * regno_reg_rtx;
83 
84 /* This is *not* reset after each function.  It gives each CODE_LABEL
85    in the entire compilation a unique label number.  */
86 
87 static GTY(()) int label_num = 1;
88 
89 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
90    the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
91    record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
92    is set only for MODE_INT and MODE_VECTOR_INT modes.  */
93 
94 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
95 
96 rtx const_true_rtx;
97 
98 REAL_VALUE_TYPE dconst0;
99 REAL_VALUE_TYPE dconst1;
100 REAL_VALUE_TYPE dconst2;
101 REAL_VALUE_TYPE dconstm1;
102 REAL_VALUE_TYPE dconsthalf;
103 
104 /* Record fixed-point constant 0 and 1.  */
105 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
106 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
107 
108 /* We make one copy of (const_int C) where C is in
109    [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
110    to save space during the compilation and simplify comparisons of
111    integers.  */
112 
113 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
114 
115 /* Standard pieces of rtx, to be substituted directly into things.  */
116 rtx pc_rtx;
117 rtx ret_rtx;
118 rtx simple_return_rtx;
119 rtx cc0_rtx;
120 
121 /* A hash table storing CONST_INTs whose absolute value is greater
122    than MAX_SAVED_CONST_INT.  */
123 
124 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
125      htab_t const_int_htab;
126 
127 /* A hash table storing memory attribute structures.  */
128 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
129      htab_t mem_attrs_htab;
130 
131 /* A hash table storing register attribute structures.  */
132 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
133      htab_t reg_attrs_htab;
134 
135 /* A hash table storing all CONST_DOUBLEs.  */
136 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
137      htab_t const_double_htab;
138 
139 /* A hash table storing all CONST_FIXEDs.  */
140 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
141      htab_t const_fixed_htab;
142 
143 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
144 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
145 #define first_label_num (crtl->emit.x_first_label_num)
146 
147 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
148 static void set_used_decls (tree);
149 static void mark_label_nuses (rtx);
150 static hashval_t const_int_htab_hash (const void *);
151 static int const_int_htab_eq (const void *, const void *);
152 static hashval_t const_double_htab_hash (const void *);
153 static int const_double_htab_eq (const void *, const void *);
154 static rtx lookup_const_double (rtx);
155 static hashval_t const_fixed_htab_hash (const void *);
156 static int const_fixed_htab_eq (const void *, const void *);
157 static rtx lookup_const_fixed (rtx);
158 static hashval_t mem_attrs_htab_hash (const void *);
159 static int mem_attrs_htab_eq (const void *, const void *);
160 static hashval_t reg_attrs_htab_hash (const void *);
161 static int reg_attrs_htab_eq (const void *, const void *);
162 static reg_attrs *get_reg_attrs (tree, int);
163 static rtx gen_const_vector (enum machine_mode, int);
164 static void copy_rtx_if_shared_1 (rtx *orig);
165 
166 /* Probability of the conditional branch currently proceeded by try_split.
167    Set to -1 otherwise.  */
168 int split_branch_probability = -1;
169 
170 /* Returns a hash code for X (which is a really a CONST_INT).  */
171 
172 static hashval_t
173 const_int_htab_hash (const void *x)
174 {
175   return (hashval_t) INTVAL ((const_rtx) x);
176 }
177 
178 /* Returns nonzero if the value represented by X (which is really a
179    CONST_INT) is the same as that given by Y (which is really a
180    HOST_WIDE_INT *).  */
181 
182 static int
183 const_int_htab_eq (const void *x, const void *y)
184 {
185   return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
186 }
187 
188 /* Returns a hash code for X (which is really a CONST_DOUBLE).  */
189 static hashval_t
190 const_double_htab_hash (const void *x)
191 {
192   const_rtx const value = (const_rtx) x;
193   hashval_t h;
194 
195   if (GET_MODE (value) == VOIDmode)
196     h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
197   else
198     {
199       h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
200       /* MODE is used in the comparison, so it should be in the hash.  */
201       h ^= GET_MODE (value);
202     }
203   return h;
204 }
205 
206 /* Returns nonzero if the value represented by X (really a ...)
207    is the same as that represented by Y (really a ...) */
208 static int
209 const_double_htab_eq (const void *x, const void *y)
210 {
211   const_rtx const a = (const_rtx)x, b = (const_rtx)y;
212 
213   if (GET_MODE (a) != GET_MODE (b))
214     return 0;
215   if (GET_MODE (a) == VOIDmode)
216     return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
217 	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
218   else
219     return real_identical (CONST_DOUBLE_REAL_VALUE (a),
220 			   CONST_DOUBLE_REAL_VALUE (b));
221 }
222 
223 /* Returns a hash code for X (which is really a CONST_FIXED).  */
224 
225 static hashval_t
226 const_fixed_htab_hash (const void *x)
227 {
228   const_rtx const value = (const_rtx) x;
229   hashval_t h;
230 
231   h = fixed_hash (CONST_FIXED_VALUE (value));
232   /* MODE is used in the comparison, so it should be in the hash.  */
233   h ^= GET_MODE (value);
234   return h;
235 }
236 
237 /* Returns nonzero if the value represented by X (really a ...)
238    is the same as that represented by Y (really a ...).  */
239 
240 static int
241 const_fixed_htab_eq (const void *x, const void *y)
242 {
243   const_rtx const a = (const_rtx) x, b = (const_rtx) y;
244 
245   if (GET_MODE (a) != GET_MODE (b))
246     return 0;
247   return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
248 }
249 
250 /* Returns a hash code for X (which is a really a mem_attrs *).  */
251 
252 static hashval_t
253 mem_attrs_htab_hash (const void *x)
254 {
255   const mem_attrs *const p = (const mem_attrs *) x;
256 
257   return (p->alias ^ (p->align * 1000)
258 	  ^ (p->addrspace * 4000)
259 	  ^ ((p->offset_known_p ? p->offset : 0) * 50000)
260 	  ^ ((p->size_known_p ? p->size : 0) * 2500000)
261 	  ^ (size_t) iterative_hash_expr (p->expr, 0));
262 }
263 
264 /* Return true if the given memory attributes are equal.  */
265 
266 bool
267 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
268 {
269   return (p->alias == q->alias
270 	  && p->offset_known_p == q->offset_known_p
271 	  && (!p->offset_known_p || p->offset == q->offset)
272 	  && p->size_known_p == q->size_known_p
273 	  && (!p->size_known_p || p->size == q->size)
274 	  && p->align == q->align
275 	  && p->addrspace == q->addrspace
276 	  && (p->expr == q->expr
277 	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
278 		  && operand_equal_p (p->expr, q->expr, 0))));
279 }
280 
281 /* Returns nonzero if the value represented by X (which is really a
282    mem_attrs *) is the same as that given by Y (which is also really a
283    mem_attrs *).  */
284 
285 static int
286 mem_attrs_htab_eq (const void *x, const void *y)
287 {
288   return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
289 }
290 
291 /* Set MEM's memory attributes so that they are the same as ATTRS.  */
292 
293 static void
294 set_mem_attrs (rtx mem, mem_attrs *attrs)
295 {
296   void **slot;
297 
298   /* If everything is the default, we can just clear the attributes.  */
299   if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
300     {
301       MEM_ATTRS (mem) = 0;
302       return;
303     }
304 
305   slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
306   if (*slot == 0)
307     {
308       *slot = ggc_alloc_mem_attrs ();
309       memcpy (*slot, attrs, sizeof (mem_attrs));
310     }
311 
312   MEM_ATTRS (mem) = (mem_attrs *) *slot;
313 }
314 
315 /* Returns a hash code for X (which is a really a reg_attrs *).  */
316 
317 static hashval_t
318 reg_attrs_htab_hash (const void *x)
319 {
320   const reg_attrs *const p = (const reg_attrs *) x;
321 
322   return ((p->offset * 1000) ^ (intptr_t) p->decl);
323 }
324 
325 /* Returns nonzero if the value represented by X (which is really a
326    reg_attrs *) is the same as that given by Y (which is also really a
327    reg_attrs *).  */
328 
329 static int
330 reg_attrs_htab_eq (const void *x, const void *y)
331 {
332   const reg_attrs *const p = (const reg_attrs *) x;
333   const reg_attrs *const q = (const reg_attrs *) y;
334 
335   return (p->decl == q->decl && p->offset == q->offset);
336 }
337 /* Allocate a new reg_attrs structure and insert it into the hash table if
338    one identical to it is not already in the table.  We are doing this for
339    MEM of mode MODE.  */
340 
341 static reg_attrs *
342 get_reg_attrs (tree decl, int offset)
343 {
344   reg_attrs attrs;
345   void **slot;
346 
347   /* If everything is the default, we can just return zero.  */
348   if (decl == 0 && offset == 0)
349     return 0;
350 
351   attrs.decl = decl;
352   attrs.offset = offset;
353 
354   slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
355   if (*slot == 0)
356     {
357       *slot = ggc_alloc_reg_attrs ();
358       memcpy (*slot, &attrs, sizeof (reg_attrs));
359     }
360 
361   return (reg_attrs *) *slot;
362 }
363 
364 
365 #if !HAVE_blockage
366 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
367    and to block register equivalences to be seen across this insn.  */
368 
369 rtx
370 gen_blockage (void)
371 {
372   rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
373   MEM_VOLATILE_P (x) = true;
374   return x;
375 }
376 #endif
377 
378 
379 /* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
380    don't attempt to share with the various global pieces of rtl (such as
381    frame_pointer_rtx).  */
382 
383 rtx
384 gen_raw_REG (enum machine_mode mode, int regno)
385 {
386   rtx x = gen_rtx_raw_REG (mode, regno);
387   ORIGINAL_REGNO (x) = regno;
388   return x;
389 }
390 
391 /* There are some RTL codes that require special attention; the generation
392    functions do the raw handling.  If you add to this list, modify
393    special_rtx in gengenrtl.c as well.  */
394 
395 rtx
396 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
397 {
398   void **slot;
399 
400   if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
401     return const_int_rtx[arg + MAX_SAVED_CONST_INT];
402 
403 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
404   if (const_true_rtx && arg == STORE_FLAG_VALUE)
405     return const_true_rtx;
406 #endif
407 
408   /* Look up the CONST_INT in the hash table.  */
409   slot = htab_find_slot_with_hash (const_int_htab, &arg,
410 				   (hashval_t) arg, INSERT);
411   if (*slot == 0)
412     *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
413 
414   return (rtx) *slot;
415 }
416 
417 rtx
418 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
419 {
420   return GEN_INT (trunc_int_for_mode (c, mode));
421 }
422 
423 /* CONST_DOUBLEs might be created from pairs of integers, or from
424    REAL_VALUE_TYPEs.  Also, their length is known only at run time,
425    so we cannot use gen_rtx_raw_CONST_DOUBLE.  */
426 
427 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
428    hash table.  If so, return its counterpart; otherwise add it
429    to the hash table and return it.  */
430 static rtx
431 lookup_const_double (rtx real)
432 {
433   void **slot = htab_find_slot (const_double_htab, real, INSERT);
434   if (*slot == 0)
435     *slot = real;
436 
437   return (rtx) *slot;
438 }
439 
440 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
441    VALUE in mode MODE.  */
442 rtx
443 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
444 {
445   rtx real = rtx_alloc (CONST_DOUBLE);
446   PUT_MODE (real, mode);
447 
448   real->u.rv = value;
449 
450   return lookup_const_double (real);
451 }
452 
453 /* Determine whether FIXED, a CONST_FIXED, already exists in the
454    hash table.  If so, return its counterpart; otherwise add it
455    to the hash table and return it.  */
456 
457 static rtx
458 lookup_const_fixed (rtx fixed)
459 {
460   void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
461   if (*slot == 0)
462     *slot = fixed;
463 
464   return (rtx) *slot;
465 }
466 
467 /* Return a CONST_FIXED rtx for a fixed-point value specified by
468    VALUE in mode MODE.  */
469 
470 rtx
471 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
472 {
473   rtx fixed = rtx_alloc (CONST_FIXED);
474   PUT_MODE (fixed, mode);
475 
476   fixed->u.fv = value;
477 
478   return lookup_const_fixed (fixed);
479 }
480 
481 /* Constructs double_int from rtx CST.  */
482 
483 double_int
484 rtx_to_double_int (const_rtx cst)
485 {
486   double_int r;
487 
488   if (CONST_INT_P (cst))
489       r = double_int::from_shwi (INTVAL (cst));
490   else if (CONST_DOUBLE_AS_INT_P (cst))
491     {
492       r.low = CONST_DOUBLE_LOW (cst);
493       r.high = CONST_DOUBLE_HIGH (cst);
494     }
495   else
496     gcc_unreachable ();
497 
498   return r;
499 }
500 
501 
502 /* Return a CONST_DOUBLE or CONST_INT for a value specified as
503    a double_int.  */
504 
505 rtx
506 immed_double_int_const (double_int i, enum machine_mode mode)
507 {
508   return immed_double_const (i.low, i.high, mode);
509 }
510 
511 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
512    of ints: I0 is the low-order word and I1 is the high-order word.
513    For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
514    implied upper bits are copies of the high bit of i1.  The value
515    itself is neither signed nor unsigned.  Do not use this routine for
516    non-integer modes; convert to REAL_VALUE_TYPE and use
517    CONST_DOUBLE_FROM_REAL_VALUE.  */
518 
519 rtx
520 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
521 {
522   rtx value;
523   unsigned int i;
524 
525   /* There are the following cases (note that there are no modes with
526      HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
527 
528      1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
529 	gen_int_mode.
530      2) If the value of the integer fits into HOST_WIDE_INT anyway
531         (i.e., i1 consists only from copies of the sign bit, and sign
532 	of i0 and i1 are the same), then we return a CONST_INT for i0.
533      3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
534   if (mode != VOIDmode)
535     {
536       gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
537 		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
538 		  /* We can get a 0 for an error mark.  */
539 		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
540 		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
541 
542       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
543 	return gen_int_mode (i0, mode);
544     }
545 
546   /* If this integer fits in one word, return a CONST_INT.  */
547   if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
548     return GEN_INT (i0);
549 
550   /* We use VOIDmode for integers.  */
551   value = rtx_alloc (CONST_DOUBLE);
552   PUT_MODE (value, VOIDmode);
553 
554   CONST_DOUBLE_LOW (value) = i0;
555   CONST_DOUBLE_HIGH (value) = i1;
556 
557   for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
558     XWINT (value, i) = 0;
559 
560   return lookup_const_double (value);
561 }
562 
563 rtx
564 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
565 {
566   /* In case the MD file explicitly references the frame pointer, have
567      all such references point to the same frame pointer.  This is
568      used during frame pointer elimination to distinguish the explicit
569      references to these registers from pseudos that happened to be
570      assigned to them.
571 
572      If we have eliminated the frame pointer or arg pointer, we will
573      be using it as a normal register, for example as a spill
574      register.  In such cases, we might be accessing it in a mode that
575      is not Pmode and therefore cannot use the pre-allocated rtx.
576 
577      Also don't do this when we are making new REGs in reload, since
578      we don't want to get confused with the real pointers.  */
579 
580   if (mode == Pmode && !reload_in_progress && !lra_in_progress)
581     {
582       if (regno == FRAME_POINTER_REGNUM
583 	  && (!reload_completed || frame_pointer_needed))
584 	return frame_pointer_rtx;
585 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
586       if (regno == HARD_FRAME_POINTER_REGNUM
587 	  && (!reload_completed || frame_pointer_needed))
588 	return hard_frame_pointer_rtx;
589 #endif
590 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
591       if (regno == ARG_POINTER_REGNUM)
592 	return arg_pointer_rtx;
593 #endif
594 #ifdef RETURN_ADDRESS_POINTER_REGNUM
595       if (regno == RETURN_ADDRESS_POINTER_REGNUM)
596 	return return_address_pointer_rtx;
597 #endif
598       if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
599 	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
600 	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
601 	return pic_offset_table_rtx;
602       if (regno == STACK_POINTER_REGNUM)
603 	return stack_pointer_rtx;
604     }
605 
606 #if 0
607   /* If the per-function register table has been set up, try to re-use
608      an existing entry in that table to avoid useless generation of RTL.
609 
610      This code is disabled for now until we can fix the various backends
611      which depend on having non-shared hard registers in some cases.   Long
612      term we want to re-enable this code as it can significantly cut down
613      on the amount of useless RTL that gets generated.
614 
615      We'll also need to fix some code that runs after reload that wants to
616      set ORIGINAL_REGNO.  */
617 
618   if (cfun
619       && cfun->emit
620       && regno_reg_rtx
621       && regno < FIRST_PSEUDO_REGISTER
622       && reg_raw_mode[regno] == mode)
623     return regno_reg_rtx[regno];
624 #endif
625 
626   return gen_raw_REG (mode, regno);
627 }
628 
629 rtx
630 gen_rtx_MEM (enum machine_mode mode, rtx addr)
631 {
632   rtx rt = gen_rtx_raw_MEM (mode, addr);
633 
634   /* This field is not cleared by the mere allocation of the rtx, so
635      we clear it here.  */
636   MEM_ATTRS (rt) = 0;
637 
638   return rt;
639 }
640 
641 /* Generate a memory referring to non-trapping constant memory.  */
642 
643 rtx
644 gen_const_mem (enum machine_mode mode, rtx addr)
645 {
646   rtx mem = gen_rtx_MEM (mode, addr);
647   MEM_READONLY_P (mem) = 1;
648   MEM_NOTRAP_P (mem) = 1;
649   return mem;
650 }
651 
652 /* Generate a MEM referring to fixed portions of the frame, e.g., register
653    save areas.  */
654 
655 rtx
656 gen_frame_mem (enum machine_mode mode, rtx addr)
657 {
658   rtx mem = gen_rtx_MEM (mode, addr);
659   MEM_NOTRAP_P (mem) = 1;
660   set_mem_alias_set (mem, get_frame_alias_set ());
661   return mem;
662 }
663 
664 /* Generate a MEM referring to a temporary use of the stack, not part
665     of the fixed stack frame.  For example, something which is pushed
666     by a target splitter.  */
667 rtx
668 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
669 {
670   rtx mem = gen_rtx_MEM (mode, addr);
671   MEM_NOTRAP_P (mem) = 1;
672   if (!cfun->calls_alloca)
673     set_mem_alias_set (mem, get_frame_alias_set ());
674   return mem;
675 }
676 
677 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
678    this construct would be valid, and false otherwise.  */
679 
680 bool
681 validate_subreg (enum machine_mode omode, enum machine_mode imode,
682 		 const_rtx reg, unsigned int offset)
683 {
684   unsigned int isize = GET_MODE_SIZE (imode);
685   unsigned int osize = GET_MODE_SIZE (omode);
686 
687   /* All subregs must be aligned.  */
688   if (offset % osize != 0)
689     return false;
690 
691   /* The subreg offset cannot be outside the inner object.  */
692   if (offset >= isize)
693     return false;
694 
695   /* ??? This should not be here.  Temporarily continue to allow word_mode
696      subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
697      Generally, backends are doing something sketchy but it'll take time to
698      fix them all.  */
699   if (omode == word_mode)
700     ;
701   /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
702      is the culprit here, and not the backends.  */
703   else if (osize >= UNITS_PER_WORD && isize >= osize)
704     ;
705   /* Allow component subregs of complex and vector.  Though given the below
706      extraction rules, it's not always clear what that means.  */
707   else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
708 	   && GET_MODE_INNER (imode) == omode)
709     ;
710   /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
711      i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
712      represent this.  It's questionable if this ought to be represented at
713      all -- why can't this all be hidden in post-reload splitters that make
714      arbitrarily mode changes to the registers themselves.  */
715   else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
716     ;
717   /* Subregs involving floating point modes are not allowed to
718      change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
719      (subreg:SI (reg:DF) 0) isn't.  */
720   else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
721     {
722       if (! (isize == osize
723 	     /* LRA can use subreg to store a floating point value in
724 		an integer mode.  Although the floating point and the
725 		integer modes need the same number of hard registers,
726 		the size of floating point mode can be less than the
727 		integer mode.  LRA also uses subregs for a register
728 		should be used in different mode in on insn.  */
729 	     || lra_in_progress))
730 	return false;
731     }
732 
733   /* Paradoxical subregs must have offset zero.  */
734   if (osize > isize)
735     return offset == 0;
736 
737   /* This is a normal subreg.  Verify that the offset is representable.  */
738 
739   /* For hard registers, we already have most of these rules collected in
740      subreg_offset_representable_p.  */
741   if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
742     {
743       unsigned int regno = REGNO (reg);
744 
745 #ifdef CANNOT_CHANGE_MODE_CLASS
746       if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
747 	  && GET_MODE_INNER (imode) == omode)
748 	;
749       else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
750 	return false;
751 #endif
752 
753       return subreg_offset_representable_p (regno, imode, offset, omode);
754     }
755 
756   /* For pseudo registers, we want most of the same checks.  Namely:
757      If the register no larger than a word, the subreg must be lowpart.
758      If the register is larger than a word, the subreg must be the lowpart
759      of a subword.  A subreg does *not* perform arbitrary bit extraction.
760      Given that we've already checked mode/offset alignment, we only have
761      to check subword subregs here.  */
762   if (osize < UNITS_PER_WORD
763       && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
764     {
765       enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
766       unsigned int low_off = subreg_lowpart_offset (omode, wmode);
767       if (offset % UNITS_PER_WORD != low_off)
768 	return false;
769     }
770   return true;
771 }
772 
773 rtx
774 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
775 {
776   gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
777   return gen_rtx_raw_SUBREG (mode, reg, offset);
778 }
779 
780 /* Generate a SUBREG representing the least-significant part of REG if MODE
781    is smaller than mode of REG, otherwise paradoxical SUBREG.  */
782 
783 rtx
784 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
785 {
786   enum machine_mode inmode;
787 
788   inmode = GET_MODE (reg);
789   if (inmode == VOIDmode)
790     inmode = mode;
791   return gen_rtx_SUBREG (mode, reg,
792 			 subreg_lowpart_offset (mode, inmode));
793 }
794 
795 
796 /* Create an rtvec and stores within it the RTXen passed in the arguments.  */
797 
798 rtvec
799 gen_rtvec (int n, ...)
800 {
801   int i;
802   rtvec rt_val;
803   va_list p;
804 
805   va_start (p, n);
806 
807   /* Don't allocate an empty rtvec...  */
808   if (n == 0)
809     {
810       va_end (p);
811       return NULL_RTVEC;
812     }
813 
814   rt_val = rtvec_alloc (n);
815 
816   for (i = 0; i < n; i++)
817     rt_val->elem[i] = va_arg (p, rtx);
818 
819   va_end (p);
820   return rt_val;
821 }
822 
823 rtvec
824 gen_rtvec_v (int n, rtx *argp)
825 {
826   int i;
827   rtvec rt_val;
828 
829   /* Don't allocate an empty rtvec...  */
830   if (n == 0)
831     return NULL_RTVEC;
832 
833   rt_val = rtvec_alloc (n);
834 
835   for (i = 0; i < n; i++)
836     rt_val->elem[i] = *argp++;
837 
838   return rt_val;
839 }
840 
841 /* Return the number of bytes between the start of an OUTER_MODE
842    in-memory value and the start of an INNER_MODE in-memory value,
843    given that the former is a lowpart of the latter.  It may be a
844    paradoxical lowpart, in which case the offset will be negative
845    on big-endian targets.  */
846 
847 int
848 byte_lowpart_offset (enum machine_mode outer_mode,
849 		     enum machine_mode inner_mode)
850 {
851   if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
852     return subreg_lowpart_offset (outer_mode, inner_mode);
853   else
854     return -subreg_lowpart_offset (inner_mode, outer_mode);
855 }
856 
857 /* Generate a REG rtx for a new pseudo register of mode MODE.
858    This pseudo is assigned the next sequential register number.  */
859 
860 rtx
861 gen_reg_rtx (enum machine_mode mode)
862 {
863   rtx val;
864   unsigned int align = GET_MODE_ALIGNMENT (mode);
865 
866   gcc_assert (can_create_pseudo_p ());
867 
868   /* If a virtual register with bigger mode alignment is generated,
869      increase stack alignment estimation because it might be spilled
870      to stack later.  */
871   if (SUPPORTS_STACK_ALIGNMENT
872       && crtl->stack_alignment_estimated < align
873       && !crtl->stack_realign_processed)
874     {
875       unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
876       if (crtl->stack_alignment_estimated < min_align)
877 	crtl->stack_alignment_estimated = min_align;
878     }
879 
880   if (generating_concat_p
881       && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
882 	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
883     {
884       /* For complex modes, don't make a single pseudo.
885 	 Instead, make a CONCAT of two pseudos.
886 	 This allows noncontiguous allocation of the real and imaginary parts,
887 	 which makes much better code.  Besides, allocating DCmode
888 	 pseudos overstrains reload on some machines like the 386.  */
889       rtx realpart, imagpart;
890       enum machine_mode partmode = GET_MODE_INNER (mode);
891 
892       realpart = gen_reg_rtx (partmode);
893       imagpart = gen_reg_rtx (partmode);
894       return gen_rtx_CONCAT (mode, realpart, imagpart);
895     }
896 
897   /* Make sure regno_pointer_align, and regno_reg_rtx are large
898      enough to have an element for this pseudo reg number.  */
899 
900   if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
901     {
902       int old_size = crtl->emit.regno_pointer_align_length;
903       char *tmp;
904       rtx *new1;
905 
906       tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
907       memset (tmp + old_size, 0, old_size);
908       crtl->emit.regno_pointer_align = (unsigned char *) tmp;
909 
910       new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
911       memset (new1 + old_size, 0, old_size * sizeof (rtx));
912       regno_reg_rtx = new1;
913 
914       crtl->emit.regno_pointer_align_length = old_size * 2;
915     }
916 
917   val = gen_raw_REG (mode, reg_rtx_no);
918   regno_reg_rtx[reg_rtx_no++] = val;
919   return val;
920 }
921 
922 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */
923 
924 bool
925 reg_is_parm_p (rtx reg)
926 {
927   tree decl;
928 
929   gcc_assert (REG_P (reg));
930   decl = REG_EXPR (reg);
931   return (decl && TREE_CODE (decl) == PARM_DECL);
932 }
933 
934 /* Update NEW with the same attributes as REG, but with OFFSET added
935    to the REG_OFFSET.  */
936 
937 static void
938 update_reg_offset (rtx new_rtx, rtx reg, int offset)
939 {
940   REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
941 				   REG_OFFSET (reg) + offset);
942 }
943 
944 /* Generate a register with same attributes as REG, but with OFFSET
945    added to the REG_OFFSET.  */
946 
947 rtx
948 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
949 		    int offset)
950 {
951   rtx new_rtx = gen_rtx_REG (mode, regno);
952 
953   update_reg_offset (new_rtx, reg, offset);
954   return new_rtx;
955 }
956 
957 /* Generate a new pseudo-register with the same attributes as REG, but
958    with OFFSET added to the REG_OFFSET.  */
959 
960 rtx
961 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
962 {
963   rtx new_rtx = gen_reg_rtx (mode);
964 
965   update_reg_offset (new_rtx, reg, offset);
966   return new_rtx;
967 }
968 
969 /* Adjust REG in-place so that it has mode MODE.  It is assumed that the
970    new register is a (possibly paradoxical) lowpart of the old one.  */
971 
972 void
973 adjust_reg_mode (rtx reg, enum machine_mode mode)
974 {
975   update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
976   PUT_MODE (reg, mode);
977 }
978 
979 /* Copy REG's attributes from X, if X has any attributes.  If REG and X
980    have different modes, REG is a (possibly paradoxical) lowpart of X.  */
981 
982 void
983 set_reg_attrs_from_value (rtx reg, rtx x)
984 {
985   int offset;
986   bool can_be_reg_pointer = true;
987 
988   /* Don't call mark_reg_pointer for incompatible pointer sign
989      extension.  */
990   while (GET_CODE (x) == SIGN_EXTEND
991 	 || GET_CODE (x) == ZERO_EXTEND
992 	 || GET_CODE (x) == TRUNCATE
993 	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
994     {
995 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
996       if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
997 	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
998 	can_be_reg_pointer = false;
999 #endif
1000       x = XEXP (x, 0);
1001     }
1002 
1003   /* Hard registers can be reused for multiple purposes within the same
1004      function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1005      on them is wrong.  */
1006   if (HARD_REGISTER_P (reg))
1007     return;
1008 
1009   offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1010   if (MEM_P (x))
1011     {
1012       if (MEM_OFFSET_KNOWN_P (x))
1013 	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1014 					 MEM_OFFSET (x) + offset);
1015       if (can_be_reg_pointer && MEM_POINTER (x))
1016 	mark_reg_pointer (reg, 0);
1017     }
1018   else if (REG_P (x))
1019     {
1020       if (REG_ATTRS (x))
1021 	update_reg_offset (reg, x, offset);
1022       if (can_be_reg_pointer && REG_POINTER (x))
1023 	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1024     }
1025 }
1026 
1027 /* Generate a REG rtx for a new pseudo register, copying the mode
1028    and attributes from X.  */
1029 
1030 rtx
1031 gen_reg_rtx_and_attrs (rtx x)
1032 {
1033   rtx reg = gen_reg_rtx (GET_MODE (x));
1034   set_reg_attrs_from_value (reg, x);
1035   return reg;
1036 }
1037 
1038 /* Set the register attributes for registers contained in PARM_RTX.
1039    Use needed values from memory attributes of MEM.  */
1040 
1041 void
1042 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1043 {
1044   if (REG_P (parm_rtx))
1045     set_reg_attrs_from_value (parm_rtx, mem);
1046   else if (GET_CODE (parm_rtx) == PARALLEL)
1047     {
1048       /* Check for a NULL entry in the first slot, used to indicate that the
1049 	 parameter goes both on the stack and in registers.  */
1050       int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1051       for (; i < XVECLEN (parm_rtx, 0); i++)
1052 	{
1053 	  rtx x = XVECEXP (parm_rtx, 0, i);
1054 	  if (REG_P (XEXP (x, 0)))
1055 	    REG_ATTRS (XEXP (x, 0))
1056 	      = get_reg_attrs (MEM_EXPR (mem),
1057 			       INTVAL (XEXP (x, 1)));
1058 	}
1059     }
1060 }
1061 
1062 /* Set the REG_ATTRS for registers in value X, given that X represents
1063    decl T.  */
1064 
1065 void
1066 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1067 {
1068   if (GET_CODE (x) == SUBREG)
1069     {
1070       gcc_assert (subreg_lowpart_p (x));
1071       x = SUBREG_REG (x);
1072     }
1073   if (REG_P (x))
1074     REG_ATTRS (x)
1075       = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1076 					       DECL_MODE (t)));
1077   if (GET_CODE (x) == CONCAT)
1078     {
1079       if (REG_P (XEXP (x, 0)))
1080         REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1081       if (REG_P (XEXP (x, 1)))
1082 	REG_ATTRS (XEXP (x, 1))
1083 	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1084     }
1085   if (GET_CODE (x) == PARALLEL)
1086     {
1087       int i, start;
1088 
1089       /* Check for a NULL entry, used to indicate that the parameter goes
1090 	 both on the stack and in registers.  */
1091       if (XEXP (XVECEXP (x, 0, 0), 0))
1092 	start = 0;
1093       else
1094 	start = 1;
1095 
1096       for (i = start; i < XVECLEN (x, 0); i++)
1097 	{
1098 	  rtx y = XVECEXP (x, 0, i);
1099 	  if (REG_P (XEXP (y, 0)))
1100 	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1101 	}
1102     }
1103 }
1104 
1105 /* Assign the RTX X to declaration T.  */
1106 
1107 void
1108 set_decl_rtl (tree t, rtx x)
1109 {
1110   DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1111   if (x)
1112     set_reg_attrs_for_decl_rtl (t, x);
1113 }
1114 
1115 /* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
1116    if the ABI requires the parameter to be passed by reference.  */
1117 
1118 void
1119 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1120 {
1121   DECL_INCOMING_RTL (t) = x;
1122   if (x && !by_reference_p)
1123     set_reg_attrs_for_decl_rtl (t, x);
1124 }
1125 
1126 /* Identify REG (which may be a CONCAT) as a user register.  */
1127 
1128 void
1129 mark_user_reg (rtx reg)
1130 {
1131   if (GET_CODE (reg) == CONCAT)
1132     {
1133       REG_USERVAR_P (XEXP (reg, 0)) = 1;
1134       REG_USERVAR_P (XEXP (reg, 1)) = 1;
1135     }
1136   else
1137     {
1138       gcc_assert (REG_P (reg));
1139       REG_USERVAR_P (reg) = 1;
1140     }
1141 }
1142 
1143 /* Identify REG as a probable pointer register and show its alignment
1144    as ALIGN, if nonzero.  */
1145 
1146 void
1147 mark_reg_pointer (rtx reg, int align)
1148 {
1149   if (! REG_POINTER (reg))
1150     {
1151       REG_POINTER (reg) = 1;
1152 
1153       if (align)
1154 	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1155     }
1156   else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1157     /* We can no-longer be sure just how aligned this pointer is.  */
1158     REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1159 }
1160 
1161 /* Return 1 plus largest pseudo reg number used in the current function.  */
1162 
1163 int
1164 max_reg_num (void)
1165 {
1166   return reg_rtx_no;
1167 }
1168 
1169 /* Return 1 + the largest label number used so far in the current function.  */
1170 
1171 int
1172 max_label_num (void)
1173 {
1174   return label_num;
1175 }
1176 
1177 /* Return first label number used in this function (if any were used).  */
1178 
1179 int
1180 get_first_label_num (void)
1181 {
1182   return first_label_num;
1183 }
1184 
1185 /* If the rtx for label was created during the expansion of a nested
1186    function, then first_label_num won't include this label number.
1187    Fix this now so that array indices work later.  */
1188 
1189 void
1190 maybe_set_first_label_num (rtx x)
1191 {
1192   if (CODE_LABEL_NUMBER (x) < first_label_num)
1193     first_label_num = CODE_LABEL_NUMBER (x);
1194 }
1195 
1196 /* Return a value representing some low-order bits of X, where the number
1197    of low-order bits is given by MODE.  Note that no conversion is done
1198    between floating-point and fixed-point values, rather, the bit
1199    representation is returned.
1200 
1201    This function handles the cases in common between gen_lowpart, below,
1202    and two variants in cse.c and combine.c.  These are the cases that can
1203    be safely handled at all points in the compilation.
1204 
1205    If this is not a case we can handle, return 0.  */
1206 
1207 rtx
1208 gen_lowpart_common (enum machine_mode mode, rtx x)
1209 {
1210   int msize = GET_MODE_SIZE (mode);
1211   int xsize;
1212   int offset = 0;
1213   enum machine_mode innermode;
1214 
1215   /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1216      so we have to make one up.  Yuk.  */
1217   innermode = GET_MODE (x);
1218   if (CONST_INT_P (x)
1219       && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1220     innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1221   else if (innermode == VOIDmode)
1222     innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);
1223 
1224   xsize = GET_MODE_SIZE (innermode);
1225 
1226   gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1227 
1228   if (innermode == mode)
1229     return x;
1230 
1231   /* MODE must occupy no more words than the mode of X.  */
1232   if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1233       > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1234     return 0;
1235 
1236   /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
1237   if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1238     return 0;
1239 
1240   offset = subreg_lowpart_offset (mode, innermode);
1241 
1242   if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1243       && (GET_MODE_CLASS (mode) == MODE_INT
1244 	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1245     {
1246       /* If we are getting the low-order part of something that has been
1247 	 sign- or zero-extended, we can either just use the object being
1248 	 extended or make a narrower extension.  If we want an even smaller
1249 	 piece than the size of the object being extended, call ourselves
1250 	 recursively.
1251 
1252 	 This case is used mostly by combine and cse.  */
1253 
1254       if (GET_MODE (XEXP (x, 0)) == mode)
1255 	return XEXP (x, 0);
1256       else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1257 	return gen_lowpart_common (mode, XEXP (x, 0));
1258       else if (msize < xsize)
1259 	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1260     }
1261   else if (GET_CODE (x) == SUBREG || REG_P (x)
1262 	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1263 	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
1264     return simplify_gen_subreg (mode, x, innermode, offset);
1265 
1266   /* Otherwise, we can't do this.  */
1267   return 0;
1268 }
1269 
1270 rtx
1271 gen_highpart (enum machine_mode mode, rtx x)
1272 {
1273   unsigned int msize = GET_MODE_SIZE (mode);
1274   rtx result;
1275 
1276   /* This case loses if X is a subreg.  To catch bugs early,
1277      complain if an invalid MODE is used even in other cases.  */
1278   gcc_assert (msize <= UNITS_PER_WORD
1279 	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1280 
1281   result = simplify_gen_subreg (mode, x, GET_MODE (x),
1282 				subreg_highpart_offset (mode, GET_MODE (x)));
1283   gcc_assert (result);
1284 
1285   /* simplify_gen_subreg is not guaranteed to return a valid operand for
1286      the target if we have a MEM.  gen_highpart must return a valid operand,
1287      emitting code if necessary to do so.  */
1288   if (MEM_P (result))
1289     {
1290       result = validize_mem (result);
1291       gcc_assert (result);
1292     }
1293 
1294   return result;
1295 }
1296 
1297 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1298    be VOIDmode constant.  */
1299 rtx
1300 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1301 {
1302   if (GET_MODE (exp) != VOIDmode)
1303     {
1304       gcc_assert (GET_MODE (exp) == innermode);
1305       return gen_highpart (outermode, exp);
1306     }
1307   return simplify_gen_subreg (outermode, exp, innermode,
1308 			      subreg_highpart_offset (outermode, innermode));
1309 }
1310 
1311 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */
1312 
1313 unsigned int
1314 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1315 {
1316   unsigned int offset = 0;
1317   int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1318 
1319   if (difference > 0)
1320     {
1321       if (WORDS_BIG_ENDIAN)
1322 	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1323       if (BYTES_BIG_ENDIAN)
1324 	offset += difference % UNITS_PER_WORD;
1325     }
1326 
1327   return offset;
1328 }
1329 
1330 /* Return offset in bytes to get OUTERMODE high part
1331    of the value in mode INNERMODE stored in memory in target format.  */
1332 unsigned int
1333 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1334 {
1335   unsigned int offset = 0;
1336   int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1337 
1338   gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1339 
1340   if (difference > 0)
1341     {
1342       if (! WORDS_BIG_ENDIAN)
1343 	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1344       if (! BYTES_BIG_ENDIAN)
1345 	offset += difference % UNITS_PER_WORD;
1346     }
1347 
1348   return offset;
1349 }
1350 
1351 /* Return 1 iff X, assumed to be a SUBREG,
1352    refers to the least significant part of its containing reg.
1353    If X is not a SUBREG, always return 1 (it is its own low part!).  */
1354 
1355 int
1356 subreg_lowpart_p (const_rtx x)
1357 {
1358   if (GET_CODE (x) != SUBREG)
1359     return 1;
1360   else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1361     return 0;
1362 
1363   return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1364 	  == SUBREG_BYTE (x));
1365 }
1366 
1367 /* Return true if X is a paradoxical subreg, false otherwise.  */
1368 bool
1369 paradoxical_subreg_p (const_rtx x)
1370 {
1371   if (GET_CODE (x) != SUBREG)
1372     return false;
1373   return (GET_MODE_PRECISION (GET_MODE (x))
1374 	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1375 }
1376 
1377 /* Return subword OFFSET of operand OP.
1378    The word number, OFFSET, is interpreted as the word number starting
1379    at the low-order address.  OFFSET 0 is the low-order word if not
1380    WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1381 
1382    If we cannot extract the required word, we return zero.  Otherwise,
1383    an rtx corresponding to the requested word will be returned.
1384 
1385    VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
1386    reload has completed, a valid address will always be returned.  After
1387    reload, if a valid address cannot be returned, we return zero.
1388 
1389    If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1390    it is the responsibility of the caller.
1391 
1392    MODE is the mode of OP in case it is a CONST_INT.
1393 
1394    ??? This is still rather broken for some cases.  The problem for the
1395    moment is that all callers of this thing provide no 'goal mode' to
1396    tell us to work with.  This exists because all callers were written
1397    in a word based SUBREG world.
1398    Now use of this function can be deprecated by simplify_subreg in most
1399    cases.
1400  */
1401 
1402 rtx
1403 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1404 {
1405   if (mode == VOIDmode)
1406     mode = GET_MODE (op);
1407 
1408   gcc_assert (mode != VOIDmode);
1409 
1410   /* If OP is narrower than a word, fail.  */
1411   if (mode != BLKmode
1412       && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1413     return 0;
1414 
1415   /* If we want a word outside OP, return zero.  */
1416   if (mode != BLKmode
1417       && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1418     return const0_rtx;
1419 
1420   /* Form a new MEM at the requested address.  */
1421   if (MEM_P (op))
1422     {
1423       rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1424 
1425       if (! validate_address)
1426 	return new_rtx;
1427 
1428       else if (reload_completed)
1429 	{
1430 	  if (! strict_memory_address_addr_space_p (word_mode,
1431 						    XEXP (new_rtx, 0),
1432 						    MEM_ADDR_SPACE (op)))
1433 	    return 0;
1434 	}
1435       else
1436 	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1437     }
1438 
1439   /* Rest can be handled by simplify_subreg.  */
1440   return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1441 }
1442 
1443 /* Similar to `operand_subword', but never return 0.  If we can't
1444    extract the required subword, put OP into a register and try again.
1445    The second attempt must succeed.  We always validate the address in
1446    this case.
1447 
1448    MODE is the mode of OP, in case it is CONST_INT.  */
1449 
1450 rtx
1451 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1452 {
1453   rtx result = operand_subword (op, offset, 1, mode);
1454 
1455   if (result)
1456     return result;
1457 
1458   if (mode != BLKmode && mode != VOIDmode)
1459     {
1460       /* If this is a register which can not be accessed by words, copy it
1461 	 to a pseudo register.  */
1462       if (REG_P (op))
1463 	op = copy_to_reg (op);
1464       else
1465 	op = force_reg (mode, op);
1466     }
1467 
1468   result = operand_subword (op, offset, 1, mode);
1469   gcc_assert (result);
1470 
1471   return result;
1472 }
1473 
1474 /* Returns 1 if both MEM_EXPR can be considered equal
1475    and 0 otherwise.  */
1476 
1477 int
1478 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1479 {
1480   if (expr1 == expr2)
1481     return 1;
1482 
1483   if (! expr1 || ! expr2)
1484     return 0;
1485 
1486   if (TREE_CODE (expr1) != TREE_CODE (expr2))
1487     return 0;
1488 
1489   return operand_equal_p (expr1, expr2, 0);
1490 }
1491 
1492 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1493    bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1494    -1 if not known.  */
1495 
1496 int
1497 get_mem_align_offset (rtx mem, unsigned int align)
1498 {
1499   tree expr;
1500   unsigned HOST_WIDE_INT offset;
1501 
1502   /* This function can't use
1503      if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1504 	 || (MAX (MEM_ALIGN (mem),
1505 	          MAX (align, get_object_alignment (MEM_EXPR (mem))))
1506 	     < align))
1507        return -1;
1508      else
1509        return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1510      for two reasons:
1511      - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1512        for <variable>.  get_inner_reference doesn't handle it and
1513        even if it did, the alignment in that case needs to be determined
1514        from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1515      - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1516        isn't sufficiently aligned, the object it is in might be.  */
1517   gcc_assert (MEM_P (mem));
1518   expr = MEM_EXPR (mem);
1519   if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1520     return -1;
1521 
1522   offset = MEM_OFFSET (mem);
1523   if (DECL_P (expr))
1524     {
1525       if (DECL_ALIGN (expr) < align)
1526 	return -1;
1527     }
1528   else if (INDIRECT_REF_P (expr))
1529     {
1530       if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1531 	return -1;
1532     }
1533   else if (TREE_CODE (expr) == COMPONENT_REF)
1534     {
1535       while (1)
1536 	{
1537 	  tree inner = TREE_OPERAND (expr, 0);
1538 	  tree field = TREE_OPERAND (expr, 1);
1539 	  tree byte_offset = component_ref_field_offset (expr);
1540 	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1541 
1542 	  if (!byte_offset
1543 	      || !host_integerp (byte_offset, 1)
1544 	      || !host_integerp (bit_offset, 1))
1545 	    return -1;
1546 
1547 	  offset += tree_low_cst (byte_offset, 1);
1548 	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1549 
1550 	  if (inner == NULL_TREE)
1551 	    {
1552 	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1553 		  < (unsigned int) align)
1554 		return -1;
1555 	      break;
1556 	    }
1557 	  else if (DECL_P (inner))
1558 	    {
1559 	      if (DECL_ALIGN (inner) < align)
1560 		return -1;
1561 	      break;
1562 	    }
1563 	  else if (TREE_CODE (inner) != COMPONENT_REF)
1564 	    return -1;
1565 	  expr = inner;
1566 	}
1567     }
1568   else
1569     return -1;
1570 
1571   return offset & ((align / BITS_PER_UNIT) - 1);
1572 }
1573 
1574 /* Given REF (a MEM) and T, either the type of X or the expression
1575    corresponding to REF, set the memory attributes.  OBJECTP is nonzero
1576    if we are making a new object of this type.  BITPOS is nonzero if
1577    there is an offset outstanding on T that will be applied later.  */
1578 
1579 void
1580 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1581 				 HOST_WIDE_INT bitpos)
1582 {
1583   HOST_WIDE_INT apply_bitpos = 0;
1584   tree type;
1585   struct mem_attrs attrs, *defattrs, *refattrs;
1586   addr_space_t as;
1587 
1588   /* It can happen that type_for_mode was given a mode for which there
1589      is no language-level type.  In which case it returns NULL, which
1590      we can see here.  */
1591   if (t == NULL_TREE)
1592     return;
1593 
1594   type = TYPE_P (t) ? t : TREE_TYPE (t);
1595   if (type == error_mark_node)
1596     return;
1597 
1598   /* If we have already set DECL_RTL = ref, get_alias_set will get the
1599      wrong answer, as it assumes that DECL_RTL already has the right alias
1600      info.  Callers should not set DECL_RTL until after the call to
1601      set_mem_attributes.  */
1602   gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1603 
1604   memset (&attrs, 0, sizeof (attrs));
1605 
1606   /* Get the alias set from the expression or type (perhaps using a
1607      front-end routine) and use it.  */
1608   attrs.alias = get_alias_set (t);
1609 
1610   MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1611   MEM_POINTER (ref) = POINTER_TYPE_P (type);
1612 
1613   /* Default values from pre-existing memory attributes if present.  */
1614   refattrs = MEM_ATTRS (ref);
1615   if (refattrs)
1616     {
1617       /* ??? Can this ever happen?  Calling this routine on a MEM that
1618 	 already carries memory attributes should probably be invalid.  */
1619       attrs.expr = refattrs->expr;
1620       attrs.offset_known_p = refattrs->offset_known_p;
1621       attrs.offset = refattrs->offset;
1622       attrs.size_known_p = refattrs->size_known_p;
1623       attrs.size = refattrs->size;
1624       attrs.align = refattrs->align;
1625     }
1626 
1627   /* Otherwise, default values from the mode of the MEM reference.  */
1628   else
1629     {
1630       defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1631       gcc_assert (!defattrs->expr);
1632       gcc_assert (!defattrs->offset_known_p);
1633 
1634       /* Respect mode size.  */
1635       attrs.size_known_p = defattrs->size_known_p;
1636       attrs.size = defattrs->size;
1637       /* ??? Is this really necessary?  We probably should always get
1638 	 the size from the type below.  */
1639 
1640       /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1641          if T is an object, always compute the object alignment below.  */
1642       if (TYPE_P (t))
1643 	attrs.align = defattrs->align;
1644       else
1645 	attrs.align = BITS_PER_UNIT;
1646       /* ??? If T is a type, respecting mode alignment may *also* be wrong
1647 	 e.g. if the type carries an alignment attribute.  Should we be
1648 	 able to simply always use TYPE_ALIGN?  */
1649     }
1650 
1651   /* We can set the alignment from the type if we are making an object,
1652      this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
1653   if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1654     attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1655 
1656   else if (TREE_CODE (t) == MEM_REF)
1657     {
1658       tree op0 = TREE_OPERAND (t, 0);
1659       if (TREE_CODE (op0) == ADDR_EXPR
1660 	  && (DECL_P (TREE_OPERAND (op0, 0))
1661 	      || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
1662 	{
1663 	  if (DECL_P (TREE_OPERAND (op0, 0)))
1664 	    attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
1665 	  else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1666 	    {
1667 	      attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
1668 #ifdef CONSTANT_ALIGNMENT
1669 	      attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
1670 						attrs.align);
1671 #endif
1672 	    }
1673 	  if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1674 	    {
1675 	      unsigned HOST_WIDE_INT ioff
1676 		= TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1677 	      unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1678 	      attrs.align = MIN (aoff, attrs.align);
1679 	    }
1680 	}
1681       else
1682 	/* ??? This isn't fully correct, we can't set the alignment from the
1683 	   type in all cases.  */
1684 	attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1685     }
1686 
1687   else if (TREE_CODE (t) == TARGET_MEM_REF)
1688     /* ??? This isn't fully correct, we can't set the alignment from the
1689        type in all cases.  */
1690     attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1691 
1692   /* If the size is known, we can set that.  */
1693   tree new_size = TYPE_SIZE_UNIT (type);
1694 
1695   /* If T is not a type, we may be able to deduce some more information about
1696      the expression.  */
1697   if (! TYPE_P (t))
1698     {
1699       tree base;
1700       bool align_computed = false;
1701 
1702       if (TREE_THIS_VOLATILE (t))
1703 	MEM_VOLATILE_P (ref) = 1;
1704 
1705       /* Now remove any conversions: they don't change what the underlying
1706 	 object is.  Likewise for SAVE_EXPR.  */
1707       while (CONVERT_EXPR_P (t)
1708 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
1709 	     || TREE_CODE (t) == SAVE_EXPR)
1710 	t = TREE_OPERAND (t, 0);
1711 
1712       /* Note whether this expression can trap.  */
1713       MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1714 
1715       base = get_base_address (t);
1716       if (base)
1717 	{
1718 	  if (DECL_P (base)
1719 	      && TREE_READONLY (base)
1720 	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1721 	      && !TREE_THIS_VOLATILE (base))
1722 	    MEM_READONLY_P (ref) = 1;
1723 
1724 	  /* Mark static const strings readonly as well.  */
1725 	  if (TREE_CODE (base) == STRING_CST
1726 	      && TREE_READONLY (base)
1727 	      && TREE_STATIC (base))
1728 	    MEM_READONLY_P (ref) = 1;
1729 
1730 	  if (TREE_CODE (base) == MEM_REF
1731 	      || TREE_CODE (base) == TARGET_MEM_REF)
1732 	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1733 								      0))));
1734 	  else
1735 	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1736 	}
1737       else
1738 	as = TYPE_ADDR_SPACE (type);
1739 
1740       /* If this expression uses it's parent's alias set, mark it such
1741 	 that we won't change it.  */
1742       if (component_uses_parent_alias_set (t))
1743 	MEM_KEEP_ALIAS_SET_P (ref) = 1;
1744 
1745       /* If this is a decl, set the attributes of the MEM from it.  */
1746       if (DECL_P (t))
1747 	{
1748 	  attrs.expr = t;
1749 	  attrs.offset_known_p = true;
1750 	  attrs.offset = 0;
1751 	  apply_bitpos = bitpos;
1752 	  new_size = DECL_SIZE_UNIT (t);
1753 	  attrs.align = DECL_ALIGN (t);
1754 	  align_computed = true;
1755 	}
1756 
1757       /* If this is a constant, we know the alignment.  */
1758       else if (CONSTANT_CLASS_P (t))
1759 	{
1760 	  attrs.align = TYPE_ALIGN (type);
1761 #ifdef CONSTANT_ALIGNMENT
1762 	  attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
1763 #endif
1764 	  align_computed = true;
1765 	}
1766 
1767       /* If this is a field reference, record it.  */
1768       else if (TREE_CODE (t) == COMPONENT_REF)
1769 	{
1770 	  attrs.expr = t;
1771 	  attrs.offset_known_p = true;
1772 	  attrs.offset = 0;
1773 	  apply_bitpos = bitpos;
1774 	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1775 	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1776 	}
1777 
1778       /* If this is an array reference, look for an outer field reference.  */
1779       else if (TREE_CODE (t) == ARRAY_REF)
1780 	{
1781 	  tree off_tree = size_zero_node;
1782 	  /* We can't modify t, because we use it at the end of the
1783 	     function.  */
1784 	  tree t2 = t;
1785 
1786 	  do
1787 	    {
1788 	      tree index = TREE_OPERAND (t2, 1);
1789 	      tree low_bound = array_ref_low_bound (t2);
1790 	      tree unit_size = array_ref_element_size (t2);
1791 
1792 	      /* We assume all arrays have sizes that are a multiple of a byte.
1793 		 First subtract the lower bound, if any, in the type of the
1794 		 index, then convert to sizetype and multiply by the size of
1795 		 the array element.  */
1796 	      if (! integer_zerop (low_bound))
1797 		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1798 				     index, low_bound);
1799 
1800 	      off_tree = size_binop (PLUS_EXPR,
1801 				     size_binop (MULT_EXPR,
1802 						 fold_convert (sizetype,
1803 							       index),
1804 						 unit_size),
1805 				     off_tree);
1806 	      t2 = TREE_OPERAND (t2, 0);
1807 	    }
1808 	  while (TREE_CODE (t2) == ARRAY_REF);
1809 
1810 	  if (DECL_P (t2))
1811 	    {
1812 	      attrs.expr = t2;
1813 	      attrs.offset_known_p = false;
1814 	      if (host_integerp (off_tree, 1))
1815 		{
1816 		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1817 		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1818 		  attrs.align = DECL_ALIGN (t2);
1819 		  if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
1820 	            attrs.align = aoff;
1821 		  align_computed = true;
1822 		  attrs.offset_known_p = true;
1823 		  attrs.offset = ioff;
1824 		  apply_bitpos = bitpos;
1825 		}
1826 	    }
1827 	  else if (TREE_CODE (t2) == COMPONENT_REF)
1828 	    {
1829 	      attrs.expr = t2;
1830 	      attrs.offset_known_p = false;
1831 	      if (host_integerp (off_tree, 1))
1832 		{
1833 		  attrs.offset_known_p = true;
1834 		  attrs.offset = tree_low_cst (off_tree, 1);
1835 		  apply_bitpos = bitpos;
1836 		}
1837 	      /* ??? Any reason the field size would be different than
1838 		 the size we got from the type?  */
1839 	    }
1840 	}
1841 
1842       /* If this is an indirect reference, record it.  */
1843       else if (TREE_CODE (t) == MEM_REF
1844 	       || TREE_CODE (t) == TARGET_MEM_REF)
1845 	{
1846 	  attrs.expr = t;
1847 	  attrs.offset_known_p = true;
1848 	  attrs.offset = 0;
1849 	  apply_bitpos = bitpos;
1850 	}
1851 
1852       if (!align_computed)
1853 	{
1854 	  unsigned int obj_align;
1855 	  unsigned HOST_WIDE_INT obj_bitpos;
1856 	  get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1857 	  obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1858 	  if (obj_bitpos != 0)
1859 	    obj_align = (obj_bitpos & -obj_bitpos);
1860 	  attrs.align = MAX (attrs.align, obj_align);
1861 	}
1862     }
1863   else
1864     as = TYPE_ADDR_SPACE (type);
1865 
1866   if (host_integerp (new_size, 1))
1867     {
1868       attrs.size_known_p = true;
1869       attrs.size = tree_low_cst (new_size, 1);
1870     }
1871 
1872   /* If we modified OFFSET based on T, then subtract the outstanding
1873      bit position offset.  Similarly, increase the size of the accessed
1874      object to contain the negative offset.  */
1875   if (apply_bitpos)
1876     {
1877       gcc_assert (attrs.offset_known_p);
1878       attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1879       if (attrs.size_known_p)
1880 	attrs.size += apply_bitpos / BITS_PER_UNIT;
1881     }
1882 
1883   /* Now set the attributes we computed above.  */
1884   attrs.addrspace = as;
1885   set_mem_attrs (ref, &attrs);
1886 }
1887 
1888 void
1889 set_mem_attributes (rtx ref, tree t, int objectp)
1890 {
1891   set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1892 }
1893 
1894 /* Set the alias set of MEM to SET.  */
1895 
1896 void
1897 set_mem_alias_set (rtx mem, alias_set_type set)
1898 {
1899   struct mem_attrs attrs;
1900 
1901   /* If the new and old alias sets don't conflict, something is wrong.  */
1902   gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1903   attrs = *get_mem_attrs (mem);
1904   attrs.alias = set;
1905   set_mem_attrs (mem, &attrs);
1906 }
1907 
1908 /* Set the address space of MEM to ADDRSPACE (target-defined).  */
1909 
1910 void
1911 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1912 {
1913   struct mem_attrs attrs;
1914 
1915   attrs = *get_mem_attrs (mem);
1916   attrs.addrspace = addrspace;
1917   set_mem_attrs (mem, &attrs);
1918 }
1919 
1920 /* Set the alignment of MEM to ALIGN bits.  */
1921 
1922 void
1923 set_mem_align (rtx mem, unsigned int align)
1924 {
1925   struct mem_attrs attrs;
1926 
1927   attrs = *get_mem_attrs (mem);
1928   attrs.align = align;
1929   set_mem_attrs (mem, &attrs);
1930 }
1931 
1932 /* Set the expr for MEM to EXPR.  */
1933 
1934 void
1935 set_mem_expr (rtx mem, tree expr)
1936 {
1937   struct mem_attrs attrs;
1938 
1939   attrs = *get_mem_attrs (mem);
1940   attrs.expr = expr;
1941   set_mem_attrs (mem, &attrs);
1942 }
1943 
1944 /* Set the offset of MEM to OFFSET.  */
1945 
1946 void
1947 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1948 {
1949   struct mem_attrs attrs;
1950 
1951   attrs = *get_mem_attrs (mem);
1952   attrs.offset_known_p = true;
1953   attrs.offset = offset;
1954   set_mem_attrs (mem, &attrs);
1955 }
1956 
1957 /* Clear the offset of MEM.  */
1958 
1959 void
1960 clear_mem_offset (rtx mem)
1961 {
1962   struct mem_attrs attrs;
1963 
1964   attrs = *get_mem_attrs (mem);
1965   attrs.offset_known_p = false;
1966   set_mem_attrs (mem, &attrs);
1967 }
1968 
1969 /* Set the size of MEM to SIZE.  */
1970 
1971 void
1972 set_mem_size (rtx mem, HOST_WIDE_INT size)
1973 {
1974   struct mem_attrs attrs;
1975 
1976   attrs = *get_mem_attrs (mem);
1977   attrs.size_known_p = true;
1978   attrs.size = size;
1979   set_mem_attrs (mem, &attrs);
1980 }
1981 
1982 /* Clear the size of MEM.  */
1983 
1984 void
1985 clear_mem_size (rtx mem)
1986 {
1987   struct mem_attrs attrs;
1988 
1989   attrs = *get_mem_attrs (mem);
1990   attrs.size_known_p = false;
1991   set_mem_attrs (mem, &attrs);
1992 }
1993 
1994 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1995    and its address changed to ADDR.  (VOIDmode means don't change the mode.
1996    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
1997    returned memory location is required to be valid.  The memory
1998    attributes are not changed.  */
1999 
2000 static rtx
2001 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
2002 {
2003   addr_space_t as;
2004   rtx new_rtx;
2005 
2006   gcc_assert (MEM_P (memref));
2007   as = MEM_ADDR_SPACE (memref);
2008   if (mode == VOIDmode)
2009     mode = GET_MODE (memref);
2010   if (addr == 0)
2011     addr = XEXP (memref, 0);
2012   if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2013       && (!validate || memory_address_addr_space_p (mode, addr, as)))
2014     return memref;
2015 
2016   if (validate)
2017     {
2018       if (reload_in_progress || reload_completed)
2019 	gcc_assert (memory_address_addr_space_p (mode, addr, as));
2020       else
2021 	addr = memory_address_addr_space (mode, addr, as);
2022     }
2023 
2024   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2025     return memref;
2026 
2027   new_rtx = gen_rtx_MEM (mode, addr);
2028   MEM_COPY_ATTRIBUTES (new_rtx, memref);
2029   return new_rtx;
2030 }
2031 
2032 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2033    way we are changing MEMREF, so we only preserve the alias set.  */
2034 
2035 rtx
2036 change_address (rtx memref, enum machine_mode mode, rtx addr)
2037 {
2038   rtx new_rtx = change_address_1 (memref, mode, addr, 1);
2039   enum machine_mode mmode = GET_MODE (new_rtx);
2040   struct mem_attrs attrs, *defattrs;
2041 
2042   attrs = *get_mem_attrs (memref);
2043   defattrs = mode_mem_attrs[(int) mmode];
2044   attrs.expr = NULL_TREE;
2045   attrs.offset_known_p = false;
2046   attrs.size_known_p = defattrs->size_known_p;
2047   attrs.size = defattrs->size;
2048   attrs.align = defattrs->align;
2049 
2050   /* If there are no changes, just return the original memory reference.  */
2051   if (new_rtx == memref)
2052     {
2053       if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2054 	return new_rtx;
2055 
2056       new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2057       MEM_COPY_ATTRIBUTES (new_rtx, memref);
2058     }
2059 
2060   set_mem_attrs (new_rtx, &attrs);
2061   return new_rtx;
2062 }
2063 
2064 /* Return a memory reference like MEMREF, but with its mode changed
2065    to MODE and its address offset by OFFSET bytes.  If VALIDATE is
2066    nonzero, the memory address is forced to be valid.
2067    If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2068    and the caller is responsible for adjusting MEMREF base register.
2069    If ADJUST_OBJECT is zero, the underlying object associated with the
2070    memory reference is left unchanged and the caller is responsible for
2071    dealing with it.  Otherwise, if the new memory reference is outside
2072    the underlying object, even partially, then the object is dropped.
2073    SIZE, if nonzero, is the size of an access in cases where MODE
2074    has no inherent size.  */
2075 
2076 rtx
2077 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2078 		  int validate, int adjust_address, int adjust_object,
2079 		  HOST_WIDE_INT size)
2080 {
2081   rtx addr = XEXP (memref, 0);
2082   rtx new_rtx;
2083   enum machine_mode address_mode;
2084   int pbits;
2085   struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2086   unsigned HOST_WIDE_INT max_align;
2087 #ifdef POINTERS_EXTEND_UNSIGNED
2088   enum machine_mode pointer_mode
2089     = targetm.addr_space.pointer_mode (attrs.addrspace);
2090 #endif
2091 
2092   /* VOIDmode means no mode change for change_address_1.  */
2093   if (mode == VOIDmode)
2094     mode = GET_MODE (memref);
2095 
2096   /* Take the size of non-BLKmode accesses from the mode.  */
2097   defattrs = mode_mem_attrs[(int) mode];
2098   if (defattrs->size_known_p)
2099     size = defattrs->size;
2100 
2101   /* If there are no changes, just return the original memory reference.  */
2102   if (mode == GET_MODE (memref) && !offset
2103       && (size == 0 || (attrs.size_known_p && attrs.size == size))
2104       && (!validate || memory_address_addr_space_p (mode, addr,
2105 						    attrs.addrspace)))
2106     return memref;
2107 
2108   /* ??? Prefer to create garbage instead of creating shared rtl.
2109      This may happen even if offset is nonzero -- consider
2110      (plus (plus reg reg) const_int) -- so do this always.  */
2111   addr = copy_rtx (addr);
2112 
2113   /* Convert a possibly large offset to a signed value within the
2114      range of the target address space.  */
2115   address_mode = get_address_mode (memref);
2116   pbits = GET_MODE_BITSIZE (address_mode);
2117   if (HOST_BITS_PER_WIDE_INT > pbits)
2118     {
2119       int shift = HOST_BITS_PER_WIDE_INT - pbits;
2120       offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2121 		>> shift);
2122     }
2123 
2124   if (adjust_address)
2125     {
2126       /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2127 	 object, we can merge it into the LO_SUM.  */
2128       if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2129 	  && offset >= 0
2130 	  && (unsigned HOST_WIDE_INT) offset
2131 	      < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2132 	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2133 			       plus_constant (address_mode,
2134 					      XEXP (addr, 1), offset));
2135 #ifdef POINTERS_EXTEND_UNSIGNED
2136       /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2137 	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
2138 	 the fact that pointers are not allowed to overflow.  */
2139       else if (POINTERS_EXTEND_UNSIGNED > 0
2140 	       && GET_CODE (addr) == ZERO_EXTEND
2141 	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
2142 	       && trunc_int_for_mode (offset, pointer_mode) == offset)
2143 	addr = gen_rtx_ZERO_EXTEND (address_mode,
2144 				    plus_constant (pointer_mode,
2145 						   XEXP (addr, 0), offset));
2146 #endif
2147       else
2148 	addr = plus_constant (address_mode, addr, offset);
2149     }
2150 
2151   new_rtx = change_address_1 (memref, mode, addr, validate);
2152 
2153   /* If the address is a REG, change_address_1 rightfully returns memref,
2154      but this would destroy memref's MEM_ATTRS.  */
2155   if (new_rtx == memref && offset != 0)
2156     new_rtx = copy_rtx (new_rtx);
2157 
2158   /* Conservatively drop the object if we don't know where we start from.  */
2159   if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2160     {
2161       attrs.expr = NULL_TREE;
2162       attrs.alias = 0;
2163     }
2164 
2165   /* Compute the new values of the memory attributes due to this adjustment.
2166      We add the offsets and update the alignment.  */
2167   if (attrs.offset_known_p)
2168     {
2169       attrs.offset += offset;
2170 
2171       /* Drop the object if the new left end is not within its bounds.  */
2172       if (adjust_object && attrs.offset < 0)
2173 	{
2174 	  attrs.expr = NULL_TREE;
2175 	  attrs.alias = 0;
2176 	}
2177     }
2178 
2179   /* Compute the new alignment by taking the MIN of the alignment and the
2180      lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2181      if zero.  */
2182   if (offset != 0)
2183     {
2184       max_align = (offset & -offset) * BITS_PER_UNIT;
2185       attrs.align = MIN (attrs.align, max_align);
2186     }
2187 
2188   if (size)
2189     {
2190       /* Drop the object if the new right end is not within its bounds.  */
2191       if (adjust_object && (offset + size) > attrs.size)
2192 	{
2193 	  attrs.expr = NULL_TREE;
2194 	  attrs.alias = 0;
2195 	}
2196       attrs.size_known_p = true;
2197       attrs.size = size;
2198     }
2199   else if (attrs.size_known_p)
2200     {
2201       gcc_assert (!adjust_object);
2202       attrs.size -= offset;
2203       /* ??? The store_by_pieces machinery generates negative sizes,
2204 	 so don't assert for that here.  */
2205     }
2206 
2207   set_mem_attrs (new_rtx, &attrs);
2208 
2209   return new_rtx;
2210 }
2211 
2212 /* Return a memory reference like MEMREF, but with its mode changed
2213    to MODE and its address changed to ADDR, which is assumed to be
2214    MEMREF offset by OFFSET bytes.  If VALIDATE is
2215    nonzero, the memory address is forced to be valid.  */
2216 
2217 rtx
2218 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2219 			     HOST_WIDE_INT offset, int validate)
2220 {
2221   memref = change_address_1 (memref, VOIDmode, addr, validate);
2222   return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2223 }
2224 
2225 /* Return a memory reference like MEMREF, but whose address is changed by
2226    adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
2227    known to be in OFFSET (possibly 1).  */
2228 
2229 rtx
2230 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2231 {
2232   rtx new_rtx, addr = XEXP (memref, 0);
2233   enum machine_mode address_mode;
2234   struct mem_attrs attrs, *defattrs;
2235 
2236   attrs = *get_mem_attrs (memref);
2237   address_mode = get_address_mode (memref);
2238   new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2239 
2240   /* At this point we don't know _why_ the address is invalid.  It
2241      could have secondary memory references, multiplies or anything.
2242 
2243      However, if we did go and rearrange things, we can wind up not
2244      being able to recognize the magic around pic_offset_table_rtx.
2245      This stuff is fragile, and is yet another example of why it is
2246      bad to expose PIC machinery too early.  */
2247   if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2248 				     attrs.addrspace)
2249       && GET_CODE (addr) == PLUS
2250       && XEXP (addr, 0) == pic_offset_table_rtx)
2251     {
2252       addr = force_reg (GET_MODE (addr), addr);
2253       new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2254     }
2255 
2256   update_temp_slot_address (XEXP (memref, 0), new_rtx);
2257   new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2258 
2259   /* If there are no changes, just return the original memory reference.  */
2260   if (new_rtx == memref)
2261     return new_rtx;
2262 
2263   /* Update the alignment to reflect the offset.  Reset the offset, which
2264      we don't know.  */
2265   defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2266   attrs.offset_known_p = false;
2267   attrs.size_known_p = defattrs->size_known_p;
2268   attrs.size = defattrs->size;
2269   attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2270   set_mem_attrs (new_rtx, &attrs);
2271   return new_rtx;
2272 }
2273 
2274 /* Return a memory reference like MEMREF, but with its address changed to
2275    ADDR.  The caller is asserting that the actual piece of memory pointed
2276    to is the same, just the form of the address is being changed, such as
2277    by putting something into a register.  */
2278 
2279 rtx
2280 replace_equiv_address (rtx memref, rtx addr)
2281 {
2282   /* change_address_1 copies the memory attribute structure without change
2283      and that's exactly what we want here.  */
2284   update_temp_slot_address (XEXP (memref, 0), addr);
2285   return change_address_1 (memref, VOIDmode, addr, 1);
2286 }
2287 
2288 /* Likewise, but the reference is not required to be valid.  */
2289 
2290 rtx
2291 replace_equiv_address_nv (rtx memref, rtx addr)
2292 {
2293   return change_address_1 (memref, VOIDmode, addr, 0);
2294 }
2295 
2296 /* Return a memory reference like MEMREF, but with its mode widened to
2297    MODE and offset by OFFSET.  This would be used by targets that e.g.
2298    cannot issue QImode memory operations and have to use SImode memory
2299    operations plus masking logic.  */
2300 
2301 rtx
2302 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2303 {
2304   rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2305   struct mem_attrs attrs;
2306   unsigned int size = GET_MODE_SIZE (mode);
2307 
2308   /* If there are no changes, just return the original memory reference.  */
2309   if (new_rtx == memref)
2310     return new_rtx;
2311 
2312   attrs = *get_mem_attrs (new_rtx);
2313 
2314   /* If we don't know what offset we were at within the expression, then
2315      we can't know if we've overstepped the bounds.  */
2316   if (! attrs.offset_known_p)
2317     attrs.expr = NULL_TREE;
2318 
2319   while (attrs.expr)
2320     {
2321       if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2322 	{
2323 	  tree field = TREE_OPERAND (attrs.expr, 1);
2324 	  tree offset = component_ref_field_offset (attrs.expr);
2325 
2326 	  if (! DECL_SIZE_UNIT (field))
2327 	    {
2328 	      attrs.expr = NULL_TREE;
2329 	      break;
2330 	    }
2331 
2332 	  /* Is the field at least as large as the access?  If so, ok,
2333 	     otherwise strip back to the containing structure.  */
2334 	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2335 	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2336 	      && attrs.offset >= 0)
2337 	    break;
2338 
2339 	  if (! host_integerp (offset, 1))
2340 	    {
2341 	      attrs.expr = NULL_TREE;
2342 	      break;
2343 	    }
2344 
2345 	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
2346 	  attrs.offset += tree_low_cst (offset, 1);
2347 	  attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2348 			   / BITS_PER_UNIT);
2349 	}
2350       /* Similarly for the decl.  */
2351       else if (DECL_P (attrs.expr)
2352 	       && DECL_SIZE_UNIT (attrs.expr)
2353 	       && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2354 	       && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2355 	       && (! attrs.offset_known_p || attrs.offset >= 0))
2356 	break;
2357       else
2358 	{
2359 	  /* The widened memory access overflows the expression, which means
2360 	     that it could alias another expression.  Zap it.  */
2361 	  attrs.expr = NULL_TREE;
2362 	  break;
2363 	}
2364     }
2365 
2366   if (! attrs.expr)
2367     attrs.offset_known_p = false;
2368 
2369   /* The widened memory may alias other stuff, so zap the alias set.  */
2370   /* ??? Maybe use get_alias_set on any remaining expression.  */
2371   attrs.alias = 0;
2372   attrs.size_known_p = true;
2373   attrs.size = size;
2374   set_mem_attrs (new_rtx, &attrs);
2375   return new_rtx;
2376 }
2377 
2378 /* A fake decl that is used as the MEM_EXPR of spill slots.  */
2379 static GTY(()) tree spill_slot_decl;
2380 
2381 tree
2382 get_spill_slot_decl (bool force_build_p)
2383 {
2384   tree d = spill_slot_decl;
2385   rtx rd;
2386   struct mem_attrs attrs;
2387 
2388   if (d || !force_build_p)
2389     return d;
2390 
2391   d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2392 		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
2393   DECL_ARTIFICIAL (d) = 1;
2394   DECL_IGNORED_P (d) = 1;
2395   TREE_USED (d) = 1;
2396   spill_slot_decl = d;
2397 
2398   rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2399   MEM_NOTRAP_P (rd) = 1;
2400   attrs = *mode_mem_attrs[(int) BLKmode];
2401   attrs.alias = new_alias_set ();
2402   attrs.expr = d;
2403   set_mem_attrs (rd, &attrs);
2404   SET_DECL_RTL (d, rd);
2405 
2406   return d;
2407 }
2408 
2409 /* Given MEM, a result from assign_stack_local, fill in the memory
2410    attributes as appropriate for a register allocator spill slot.
2411    These slots are not aliasable by other memory.  We arrange for
2412    them all to use a single MEM_EXPR, so that the aliasing code can
2413    work properly in the case of shared spill slots.  */
2414 
2415 void
2416 set_mem_attrs_for_spill (rtx mem)
2417 {
2418   struct mem_attrs attrs;
2419   rtx addr;
2420 
2421   attrs = *get_mem_attrs (mem);
2422   attrs.expr = get_spill_slot_decl (true);
2423   attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2424   attrs.addrspace = ADDR_SPACE_GENERIC;
2425 
2426   /* We expect the incoming memory to be of the form:
2427 	(mem:MODE (plus (reg sfp) (const_int offset)))
2428      with perhaps the plus missing for offset = 0.  */
2429   addr = XEXP (mem, 0);
2430   attrs.offset_known_p = true;
2431   attrs.offset = 0;
2432   if (GET_CODE (addr) == PLUS
2433       && CONST_INT_P (XEXP (addr, 1)))
2434     attrs.offset = INTVAL (XEXP (addr, 1));
2435 
2436   set_mem_attrs (mem, &attrs);
2437   MEM_NOTRAP_P (mem) = 1;
2438 }
2439 
2440 /* Return a newly created CODE_LABEL rtx with a unique label number.  */
2441 
2442 rtx
2443 gen_label_rtx (void)
2444 {
2445   return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2446 			     NULL, label_num++, NULL);
2447 }
2448 
2449 /* For procedure integration.  */
2450 
2451 /* Install new pointers to the first and last insns in the chain.
2452    Also, set cur_insn_uid to one higher than the last in use.
2453    Used for an inline-procedure after copying the insn chain.  */
2454 
2455 void
2456 set_new_first_and_last_insn (rtx first, rtx last)
2457 {
2458   rtx insn;
2459 
2460   set_first_insn (first);
2461   set_last_insn (last);
2462   cur_insn_uid = 0;
2463 
2464   if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2465     {
2466       int debug_count = 0;
2467 
2468       cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2469       cur_debug_insn_uid = 0;
2470 
2471       for (insn = first; insn; insn = NEXT_INSN (insn))
2472 	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2473 	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2474 	else
2475 	  {
2476 	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2477 	    if (DEBUG_INSN_P (insn))
2478 	      debug_count++;
2479 	  }
2480 
2481       if (debug_count)
2482 	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2483       else
2484 	cur_debug_insn_uid++;
2485     }
2486   else
2487     for (insn = first; insn; insn = NEXT_INSN (insn))
2488       cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2489 
2490   cur_insn_uid++;
2491 }
2492 
2493 /* Go through all the RTL insn bodies and copy any invalid shared
2494    structure.  This routine should only be called once.  */
2495 
2496 static void
2497 unshare_all_rtl_1 (rtx insn)
2498 {
2499   /* Unshare just about everything else.  */
2500   unshare_all_rtl_in_chain (insn);
2501 
2502   /* Make sure the addresses of stack slots found outside the insn chain
2503      (such as, in DECL_RTL of a variable) are not shared
2504      with the insn chain.
2505 
2506      This special care is necessary when the stack slot MEM does not
2507      actually appear in the insn chain.  If it does appear, its address
2508      is unshared from all else at that point.  */
2509   stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2510 }
2511 
2512 /* Go through all the RTL insn bodies and copy any invalid shared
2513    structure, again.  This is a fairly expensive thing to do so it
2514    should be done sparingly.  */
2515 
2516 void
2517 unshare_all_rtl_again (rtx insn)
2518 {
2519   rtx p;
2520   tree decl;
2521 
2522   for (p = insn; p; p = NEXT_INSN (p))
2523     if (INSN_P (p))
2524       {
2525 	reset_used_flags (PATTERN (p));
2526 	reset_used_flags (REG_NOTES (p));
2527 	if (CALL_P (p))
2528 	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2529       }
2530 
2531   /* Make sure that virtual stack slots are not shared.  */
2532   set_used_decls (DECL_INITIAL (cfun->decl));
2533 
2534   /* Make sure that virtual parameters are not shared.  */
2535   for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2536     set_used_flags (DECL_RTL (decl));
2537 
2538   reset_used_flags (stack_slot_list);
2539 
2540   unshare_all_rtl_1 (insn);
2541 }
2542 
2543 unsigned int
2544 unshare_all_rtl (void)
2545 {
2546   unshare_all_rtl_1 (get_insns ());
2547   return 0;
2548 }
2549 
2550 
2551 /* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2552    Recursively does the same for subexpressions.  */
2553 
2554 static void
2555 verify_rtx_sharing (rtx orig, rtx insn)
2556 {
2557   rtx x = orig;
2558   int i;
2559   enum rtx_code code;
2560   const char *format_ptr;
2561 
2562   if (x == 0)
2563     return;
2564 
2565   code = GET_CODE (x);
2566 
2567   /* These types may be freely shared.  */
2568 
2569   switch (code)
2570     {
2571     case REG:
2572     case DEBUG_EXPR:
2573     case VALUE:
2574     CASE_CONST_ANY:
2575     case SYMBOL_REF:
2576     case LABEL_REF:
2577     case CODE_LABEL:
2578     case PC:
2579     case CC0:
2580     case RETURN:
2581     case SIMPLE_RETURN:
2582     case SCRATCH:
2583       /* SCRATCH must be shared because they represent distinct values.  */
2584       return;
2585     case CLOBBER:
2586       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2587          clobbers or clobbers of hard registers that originated as pseudos.
2588          This is needed to allow safe register renaming.  */
2589       if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2590 	  && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2591 	return;
2592       break;
2593 
2594     case CONST:
2595       if (shared_const_p (orig))
2596 	return;
2597       break;
2598 
2599     case MEM:
2600       /* A MEM is allowed to be shared if its address is constant.  */
2601       if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2602 	  || reload_completed || reload_in_progress)
2603 	return;
2604 
2605       break;
2606 
2607     default:
2608       break;
2609     }
2610 
2611   /* This rtx may not be shared.  If it has already been seen,
2612      replace it with a copy of itself.  */
2613 #ifdef ENABLE_CHECKING
2614   if (RTX_FLAG (x, used))
2615     {
2616       error ("invalid rtl sharing found in the insn");
2617       debug_rtx (insn);
2618       error ("shared rtx");
2619       debug_rtx (x);
2620       internal_error ("internal consistency failure");
2621     }
2622 #endif
2623   gcc_assert (!RTX_FLAG (x, used));
2624 
2625   RTX_FLAG (x, used) = 1;
2626 
2627   /* Now scan the subexpressions recursively.  */
2628 
2629   format_ptr = GET_RTX_FORMAT (code);
2630 
2631   for (i = 0; i < GET_RTX_LENGTH (code); i++)
2632     {
2633       switch (*format_ptr++)
2634 	{
2635 	case 'e':
2636 	  verify_rtx_sharing (XEXP (x, i), insn);
2637 	  break;
2638 
2639 	case 'E':
2640 	  if (XVEC (x, i) != NULL)
2641 	    {
2642 	      int j;
2643 	      int len = XVECLEN (x, i);
2644 
2645 	      for (j = 0; j < len; j++)
2646 		{
2647 		  /* We allow sharing of ASM_OPERANDS inside single
2648 		     instruction.  */
2649 		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2650 		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2651 			  == ASM_OPERANDS))
2652 		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2653 		  else
2654 		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
2655 		}
2656 	    }
2657 	  break;
2658 	}
2659     }
2660   return;
2661 }
2662 
2663 /* Go through all the RTL insn bodies and clear all the USED bits.  */
2664 
2665 static void
2666 reset_all_used_flags (void)
2667 {
2668   rtx p;
2669 
2670   for (p = get_insns (); p; p = NEXT_INSN (p))
2671     if (INSN_P (p))
2672       {
2673 	reset_used_flags (PATTERN (p));
2674 	reset_used_flags (REG_NOTES (p));
2675 	if (CALL_P (p))
2676 	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2677 	if (GET_CODE (PATTERN (p)) == SEQUENCE)
2678 	  {
2679 	    int i;
2680 	    rtx q, sequence = PATTERN (p);
2681 
2682 	    for (i = 0; i < XVECLEN (sequence, 0); i++)
2683 	      {
2684 		q = XVECEXP (sequence, 0, i);
2685 		gcc_assert (INSN_P (q));
2686 		reset_used_flags (PATTERN (q));
2687 		reset_used_flags (REG_NOTES (q));
2688 		if (CALL_P (q))
2689 		  reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
2690 	      }
2691 	  }
2692       }
2693 }
2694 
2695 /* Go through all the RTL insn bodies and check that there is no unexpected
2696    sharing in between the subexpressions.  */
2697 
2698 DEBUG_FUNCTION void
2699 verify_rtl_sharing (void)
2700 {
2701   rtx p;
2702 
2703   timevar_push (TV_VERIFY_RTL_SHARING);
2704 
2705   reset_all_used_flags ();
2706 
2707   for (p = get_insns (); p; p = NEXT_INSN (p))
2708     if (INSN_P (p))
2709       {
2710 	verify_rtx_sharing (PATTERN (p), p);
2711 	verify_rtx_sharing (REG_NOTES (p), p);
2712 	if (CALL_P (p))
2713 	  verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
2714       }
2715 
2716   reset_all_used_flags ();
2717 
2718   timevar_pop (TV_VERIFY_RTL_SHARING);
2719 }
2720 
2721 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2722    Assumes the mark bits are cleared at entry.  */
2723 
2724 void
2725 unshare_all_rtl_in_chain (rtx insn)
2726 {
2727   for (; insn; insn = NEXT_INSN (insn))
2728     if (INSN_P (insn))
2729       {
2730 	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2731 	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2732 	if (CALL_P (insn))
2733 	  CALL_INSN_FUNCTION_USAGE (insn)
2734 	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2735       }
2736 }
2737 
2738 /* Go through all virtual stack slots of a function and mark them as
2739    shared.  We never replace the DECL_RTLs themselves with a copy,
2740    but expressions mentioned into a DECL_RTL cannot be shared with
2741    expressions in the instruction stream.
2742 
2743    Note that reload may convert pseudo registers into memories in-place.
2744    Pseudo registers are always shared, but MEMs never are.  Thus if we
2745    reset the used flags on MEMs in the instruction stream, we must set
2746    them again on MEMs that appear in DECL_RTLs.  */
2747 
2748 static void
2749 set_used_decls (tree blk)
2750 {
2751   tree t;
2752 
2753   /* Mark decls.  */
2754   for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2755     if (DECL_RTL_SET_P (t))
2756       set_used_flags (DECL_RTL (t));
2757 
2758   /* Now process sub-blocks.  */
2759   for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2760     set_used_decls (t);
2761 }
2762 
2763 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2764    Recursively does the same for subexpressions.  Uses
2765    copy_rtx_if_shared_1 to reduce stack space.  */
2766 
2767 rtx
2768 copy_rtx_if_shared (rtx orig)
2769 {
2770   copy_rtx_if_shared_1 (&orig);
2771   return orig;
2772 }
2773 
2774 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2775    use.  Recursively does the same for subexpressions.  */
2776 
2777 static void
2778 copy_rtx_if_shared_1 (rtx *orig1)
2779 {
2780   rtx x;
2781   int i;
2782   enum rtx_code code;
2783   rtx *last_ptr;
2784   const char *format_ptr;
2785   int copied = 0;
2786   int length;
2787 
2788   /* Repeat is used to turn tail-recursion into iteration.  */
2789 repeat:
2790   x = *orig1;
2791 
2792   if (x == 0)
2793     return;
2794 
2795   code = GET_CODE (x);
2796 
2797   /* These types may be freely shared.  */
2798 
2799   switch (code)
2800     {
2801     case REG:
2802     case DEBUG_EXPR:
2803     case VALUE:
2804     CASE_CONST_ANY:
2805     case SYMBOL_REF:
2806     case LABEL_REF:
2807     case CODE_LABEL:
2808     case PC:
2809     case CC0:
2810     case RETURN:
2811     case SIMPLE_RETURN:
2812     case SCRATCH:
2813       /* SCRATCH must be shared because they represent distinct values.  */
2814       return;
2815     case CLOBBER:
2816       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2817          clobbers or clobbers of hard registers that originated as pseudos.
2818          This is needed to allow safe register renaming.  */
2819       if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2820 	  && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2821 	return;
2822       break;
2823 
2824     case CONST:
2825       if (shared_const_p (x))
2826 	return;
2827       break;
2828 
2829     case DEBUG_INSN:
2830     case INSN:
2831     case JUMP_INSN:
2832     case CALL_INSN:
2833     case NOTE:
2834     case BARRIER:
2835       /* The chain of insns is not being copied.  */
2836       return;
2837 
2838     default:
2839       break;
2840     }
2841 
2842   /* This rtx may not be shared.  If it has already been seen,
2843      replace it with a copy of itself.  */
2844 
2845   if (RTX_FLAG (x, used))
2846     {
2847       x = shallow_copy_rtx (x);
2848       copied = 1;
2849     }
2850   RTX_FLAG (x, used) = 1;
2851 
2852   /* Now scan the subexpressions recursively.
2853      We can store any replaced subexpressions directly into X
2854      since we know X is not shared!  Any vectors in X
2855      must be copied if X was copied.  */
2856 
2857   format_ptr = GET_RTX_FORMAT (code);
2858   length = GET_RTX_LENGTH (code);
2859   last_ptr = NULL;
2860 
2861   for (i = 0; i < length; i++)
2862     {
2863       switch (*format_ptr++)
2864 	{
2865 	case 'e':
2866           if (last_ptr)
2867             copy_rtx_if_shared_1 (last_ptr);
2868 	  last_ptr = &XEXP (x, i);
2869 	  break;
2870 
2871 	case 'E':
2872 	  if (XVEC (x, i) != NULL)
2873 	    {
2874 	      int j;
2875 	      int len = XVECLEN (x, i);
2876 
2877               /* Copy the vector iff I copied the rtx and the length
2878 		 is nonzero.  */
2879 	      if (copied && len > 0)
2880 		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2881 
2882               /* Call recursively on all inside the vector.  */
2883 	      for (j = 0; j < len; j++)
2884                 {
2885 		  if (last_ptr)
2886 		    copy_rtx_if_shared_1 (last_ptr);
2887                   last_ptr = &XVECEXP (x, i, j);
2888                 }
2889 	    }
2890 	  break;
2891 	}
2892     }
2893   *orig1 = x;
2894   if (last_ptr)
2895     {
2896       orig1 = last_ptr;
2897       goto repeat;
2898     }
2899   return;
2900 }
2901 
2902 /* Set the USED bit in X and its non-shareable subparts to FLAG.  */
2903 
2904 static void
2905 mark_used_flags (rtx x, int flag)
2906 {
2907   int i, j;
2908   enum rtx_code code;
2909   const char *format_ptr;
2910   int length;
2911 
2912   /* Repeat is used to turn tail-recursion into iteration.  */
2913 repeat:
2914   if (x == 0)
2915     return;
2916 
2917   code = GET_CODE (x);
2918 
2919   /* These types may be freely shared so we needn't do any resetting
2920      for them.  */
2921 
2922   switch (code)
2923     {
2924     case REG:
2925     case DEBUG_EXPR:
2926     case VALUE:
2927     CASE_CONST_ANY:
2928     case SYMBOL_REF:
2929     case CODE_LABEL:
2930     case PC:
2931     case CC0:
2932     case RETURN:
2933     case SIMPLE_RETURN:
2934       return;
2935 
2936     case DEBUG_INSN:
2937     case INSN:
2938     case JUMP_INSN:
2939     case CALL_INSN:
2940     case NOTE:
2941     case LABEL_REF:
2942     case BARRIER:
2943       /* The chain of insns is not being copied.  */
2944       return;
2945 
2946     default:
2947       break;
2948     }
2949 
2950   RTX_FLAG (x, used) = flag;
2951 
2952   format_ptr = GET_RTX_FORMAT (code);
2953   length = GET_RTX_LENGTH (code);
2954 
2955   for (i = 0; i < length; i++)
2956     {
2957       switch (*format_ptr++)
2958 	{
2959 	case 'e':
2960           if (i == length-1)
2961             {
2962               x = XEXP (x, i);
2963 	      goto repeat;
2964             }
2965 	  mark_used_flags (XEXP (x, i), flag);
2966 	  break;
2967 
2968 	case 'E':
2969 	  for (j = 0; j < XVECLEN (x, i); j++)
2970 	    mark_used_flags (XVECEXP (x, i, j), flag);
2971 	  break;
2972 	}
2973     }
2974 }
2975 
2976 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2977    to look for shared sub-parts.  */
2978 
2979 void
2980 reset_used_flags (rtx x)
2981 {
2982   mark_used_flags (x, 0);
2983 }
2984 
2985 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2986    to look for shared sub-parts.  */
2987 
2988 void
2989 set_used_flags (rtx x)
2990 {
2991   mark_used_flags (x, 1);
2992 }
2993 
2994 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2995    Return X or the rtx for the pseudo reg the value of X was copied into.
2996    OTHER must be valid as a SET_DEST.  */
2997 
2998 rtx
2999 make_safe_from (rtx x, rtx other)
3000 {
3001   while (1)
3002     switch (GET_CODE (other))
3003       {
3004       case SUBREG:
3005 	other = SUBREG_REG (other);
3006 	break;
3007       case STRICT_LOW_PART:
3008       case SIGN_EXTEND:
3009       case ZERO_EXTEND:
3010 	other = XEXP (other, 0);
3011 	break;
3012       default:
3013 	goto done;
3014       }
3015  done:
3016   if ((MEM_P (other)
3017        && ! CONSTANT_P (x)
3018        && !REG_P (x)
3019        && GET_CODE (x) != SUBREG)
3020       || (REG_P (other)
3021 	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
3022 	      || reg_mentioned_p (other, x))))
3023     {
3024       rtx temp = gen_reg_rtx (GET_MODE (x));
3025       emit_move_insn (temp, x);
3026       return temp;
3027     }
3028   return x;
3029 }
3030 
3031 /* Emission of insns (adding them to the doubly-linked list).  */
3032 
3033 /* Return the last insn emitted, even if it is in a sequence now pushed.  */
3034 
3035 rtx
3036 get_last_insn_anywhere (void)
3037 {
3038   struct sequence_stack *stack;
3039   if (get_last_insn ())
3040     return get_last_insn ();
3041   for (stack = seq_stack; stack; stack = stack->next)
3042     if (stack->last != 0)
3043       return stack->last;
3044   return 0;
3045 }
3046 
3047 /* Return the first nonnote insn emitted in current sequence or current
3048    function.  This routine looks inside SEQUENCEs.  */
3049 
3050 rtx
3051 get_first_nonnote_insn (void)
3052 {
3053   rtx insn = get_insns ();
3054 
3055   if (insn)
3056     {
3057       if (NOTE_P (insn))
3058 	for (insn = next_insn (insn);
3059 	     insn && NOTE_P (insn);
3060 	     insn = next_insn (insn))
3061 	  continue;
3062       else
3063 	{
3064 	  if (NONJUMP_INSN_P (insn)
3065 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
3066 	    insn = XVECEXP (PATTERN (insn), 0, 0);
3067 	}
3068     }
3069 
3070   return insn;
3071 }
3072 
3073 /* Return the last nonnote insn emitted in current sequence or current
3074    function.  This routine looks inside SEQUENCEs.  */
3075 
3076 rtx
3077 get_last_nonnote_insn (void)
3078 {
3079   rtx insn = get_last_insn ();
3080 
3081   if (insn)
3082     {
3083       if (NOTE_P (insn))
3084 	for (insn = previous_insn (insn);
3085 	     insn && NOTE_P (insn);
3086 	     insn = previous_insn (insn))
3087 	  continue;
3088       else
3089 	{
3090 	  if (NONJUMP_INSN_P (insn)
3091 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
3092 	    insn = XVECEXP (PATTERN (insn), 0,
3093 			    XVECLEN (PATTERN (insn), 0) - 1);
3094 	}
3095     }
3096 
3097   return insn;
3098 }
3099 
3100 /* Return the number of actual (non-debug) insns emitted in this
3101    function.  */
3102 
3103 int
3104 get_max_insn_count (void)
3105 {
3106   int n = cur_insn_uid;
3107 
3108   /* The table size must be stable across -g, to avoid codegen
3109      differences due to debug insns, and not be affected by
3110      -fmin-insn-uid, to avoid excessive table size and to simplify
3111      debugging of -fcompare-debug failures.  */
3112   if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3113     n -= cur_debug_insn_uid;
3114   else
3115     n -= MIN_NONDEBUG_INSN_UID;
3116 
3117   return n;
3118 }
3119 
3120 
3121 /* Return the next insn.  If it is a SEQUENCE, return the first insn
3122    of the sequence.  */
3123 
3124 rtx
3125 next_insn (rtx insn)
3126 {
3127   if (insn)
3128     {
3129       insn = NEXT_INSN (insn);
3130       if (insn && NONJUMP_INSN_P (insn)
3131 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
3132 	insn = XVECEXP (PATTERN (insn), 0, 0);
3133     }
3134 
3135   return insn;
3136 }
3137 
3138 /* Return the previous insn.  If it is a SEQUENCE, return the last insn
3139    of the sequence.  */
3140 
3141 rtx
3142 previous_insn (rtx insn)
3143 {
3144   if (insn)
3145     {
3146       insn = PREV_INSN (insn);
3147       if (insn && NONJUMP_INSN_P (insn)
3148 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
3149 	insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3150     }
3151 
3152   return insn;
3153 }
3154 
3155 /* Return the next insn after INSN that is not a NOTE.  This routine does not
3156    look inside SEQUENCEs.  */
3157 
3158 rtx
3159 next_nonnote_insn (rtx insn)
3160 {
3161   while (insn)
3162     {
3163       insn = NEXT_INSN (insn);
3164       if (insn == 0 || !NOTE_P (insn))
3165 	break;
3166     }
3167 
3168   return insn;
3169 }
3170 
3171 /* Return the next insn after INSN that is not a NOTE, but stop the
3172    search before we enter another basic block.  This routine does not
3173    look inside SEQUENCEs.  */
3174 
3175 rtx
3176 next_nonnote_insn_bb (rtx insn)
3177 {
3178   while (insn)
3179     {
3180       insn = NEXT_INSN (insn);
3181       if (insn == 0 || !NOTE_P (insn))
3182 	break;
3183       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3184 	return NULL_RTX;
3185     }
3186 
3187   return insn;
3188 }
3189 
3190 /* Return the previous insn before INSN that is not a NOTE.  This routine does
3191    not look inside SEQUENCEs.  */
3192 
3193 rtx
3194 prev_nonnote_insn (rtx insn)
3195 {
3196   while (insn)
3197     {
3198       insn = PREV_INSN (insn);
3199       if (insn == 0 || !NOTE_P (insn))
3200 	break;
3201     }
3202 
3203   return insn;
3204 }
3205 
3206 /* Return the previous insn before INSN that is not a NOTE, but stop
3207    the search before we enter another basic block.  This routine does
3208    not look inside SEQUENCEs.  */
3209 
3210 rtx
3211 prev_nonnote_insn_bb (rtx insn)
3212 {
3213   while (insn)
3214     {
3215       insn = PREV_INSN (insn);
3216       if (insn == 0 || !NOTE_P (insn))
3217 	break;
3218       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3219 	return NULL_RTX;
3220     }
3221 
3222   return insn;
3223 }
3224 
3225 /* Return the next insn after INSN that is not a DEBUG_INSN.  This
3226    routine does not look inside SEQUENCEs.  */
3227 
3228 rtx
3229 next_nondebug_insn (rtx insn)
3230 {
3231   while (insn)
3232     {
3233       insn = NEXT_INSN (insn);
3234       if (insn == 0 || !DEBUG_INSN_P (insn))
3235 	break;
3236     }
3237 
3238   return insn;
3239 }
3240 
3241 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3242    This routine does not look inside SEQUENCEs.  */
3243 
3244 rtx
3245 prev_nondebug_insn (rtx insn)
3246 {
3247   while (insn)
3248     {
3249       insn = PREV_INSN (insn);
3250       if (insn == 0 || !DEBUG_INSN_P (insn))
3251 	break;
3252     }
3253 
3254   return insn;
3255 }
3256 
3257 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3258    This routine does not look inside SEQUENCEs.  */
3259 
3260 rtx
3261 next_nonnote_nondebug_insn (rtx insn)
3262 {
3263   while (insn)
3264     {
3265       insn = NEXT_INSN (insn);
3266       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3267 	break;
3268     }
3269 
3270   return insn;
3271 }
3272 
3273 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3274    This routine does not look inside SEQUENCEs.  */
3275 
3276 rtx
3277 prev_nonnote_nondebug_insn (rtx insn)
3278 {
3279   while (insn)
3280     {
3281       insn = PREV_INSN (insn);
3282       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3283 	break;
3284     }
3285 
3286   return insn;
3287 }
3288 
3289 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3290    or 0, if there is none.  This routine does not look inside
3291    SEQUENCEs.  */
3292 
3293 rtx
3294 next_real_insn (rtx insn)
3295 {
3296   while (insn)
3297     {
3298       insn = NEXT_INSN (insn);
3299       if (insn == 0 || INSN_P (insn))
3300 	break;
3301     }
3302 
3303   return insn;
3304 }
3305 
3306 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3307    or 0, if there is none.  This routine does not look inside
3308    SEQUENCEs.  */
3309 
3310 rtx
3311 prev_real_insn (rtx insn)
3312 {
3313   while (insn)
3314     {
3315       insn = PREV_INSN (insn);
3316       if (insn == 0 || INSN_P (insn))
3317 	break;
3318     }
3319 
3320   return insn;
3321 }
3322 
3323 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3324    This routine does not look inside SEQUENCEs.  */
3325 
3326 rtx
3327 last_call_insn (void)
3328 {
3329   rtx insn;
3330 
3331   for (insn = get_last_insn ();
3332        insn && !CALL_P (insn);
3333        insn = PREV_INSN (insn))
3334     ;
3335 
3336   return insn;
3337 }
3338 
3339 /* Find the next insn after INSN that really does something.  This routine
3340    does not look inside SEQUENCEs.  After reload this also skips over
3341    standalone USE and CLOBBER insn.  */
3342 
3343 int
3344 active_insn_p (const_rtx insn)
3345 {
3346   return (CALL_P (insn) || JUMP_P (insn)
3347 	  || (NONJUMP_INSN_P (insn)
3348 	      && (! reload_completed
3349 		  || (GET_CODE (PATTERN (insn)) != USE
3350 		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
3351 }
3352 
3353 rtx
3354 next_active_insn (rtx insn)
3355 {
3356   while (insn)
3357     {
3358       insn = NEXT_INSN (insn);
3359       if (insn == 0 || active_insn_p (insn))
3360 	break;
3361     }
3362 
3363   return insn;
3364 }
3365 
3366 /* Find the last insn before INSN that really does something.  This routine
3367    does not look inside SEQUENCEs.  After reload this also skips over
3368    standalone USE and CLOBBER insn.  */
3369 
3370 rtx
3371 prev_active_insn (rtx insn)
3372 {
3373   while (insn)
3374     {
3375       insn = PREV_INSN (insn);
3376       if (insn == 0 || active_insn_p (insn))
3377 	break;
3378     }
3379 
3380   return insn;
3381 }
3382 
3383 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */
3384 
3385 rtx
3386 next_label (rtx insn)
3387 {
3388   while (insn)
3389     {
3390       insn = NEXT_INSN (insn);
3391       if (insn == 0 || LABEL_P (insn))
3392 	break;
3393     }
3394 
3395   return insn;
3396 }
3397 
3398 /* Return the last label to mark the same position as LABEL.  Return LABEL
3399    itself if it is null or any return rtx.  */
3400 
3401 rtx
3402 skip_consecutive_labels (rtx label)
3403 {
3404   rtx insn;
3405 
3406   if (label && ANY_RETURN_P (label))
3407     return label;
3408 
3409   for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3410     if (LABEL_P (insn))
3411       label = insn;
3412 
3413   return label;
3414 }
3415 
3416 #ifdef HAVE_cc0
3417 /* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
3418    and REG_CC_USER notes so we can find it.  */
3419 
3420 void
3421 link_cc0_insns (rtx insn)
3422 {
3423   rtx user = next_nonnote_insn (insn);
3424 
3425   if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3426     user = XVECEXP (PATTERN (user), 0, 0);
3427 
3428   add_reg_note (user, REG_CC_SETTER, insn);
3429   add_reg_note (insn, REG_CC_USER, user);
3430 }
3431 
3432 /* Return the next insn that uses CC0 after INSN, which is assumed to
3433    set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3434    applied to the result of this function should yield INSN).
3435 
3436    Normally, this is simply the next insn.  However, if a REG_CC_USER note
3437    is present, it contains the insn that uses CC0.
3438 
3439    Return 0 if we can't find the insn.  */
3440 
3441 rtx
3442 next_cc0_user (rtx insn)
3443 {
3444   rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3445 
3446   if (note)
3447     return XEXP (note, 0);
3448 
3449   insn = next_nonnote_insn (insn);
3450   if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3451     insn = XVECEXP (PATTERN (insn), 0, 0);
3452 
3453   if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3454     return insn;
3455 
3456   return 0;
3457 }
3458 
3459 /* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
3460    note, it is the previous insn.  */
3461 
3462 rtx
3463 prev_cc0_setter (rtx insn)
3464 {
3465   rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3466 
3467   if (note)
3468     return XEXP (note, 0);
3469 
3470   insn = prev_nonnote_insn (insn);
3471   gcc_assert (sets_cc0_p (PATTERN (insn)));
3472 
3473   return insn;
3474 }
3475 #endif
3476 
3477 #ifdef AUTO_INC_DEC
3478 /* Find a RTX_AUTOINC class rtx which matches DATA.  */
3479 
3480 static int
3481 find_auto_inc (rtx *xp, void *data)
3482 {
3483   rtx x = *xp;
3484   rtx reg = (rtx) data;
3485 
3486   if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3487     return 0;
3488 
3489   switch (GET_CODE (x))
3490     {
3491       case PRE_DEC:
3492       case PRE_INC:
3493       case POST_DEC:
3494       case POST_INC:
3495       case PRE_MODIFY:
3496       case POST_MODIFY:
3497 	if (rtx_equal_p (reg, XEXP (x, 0)))
3498 	  return 1;
3499 	break;
3500 
3501       default:
3502 	gcc_unreachable ();
3503     }
3504   return -1;
3505 }
3506 #endif
3507 
3508 /* Increment the label uses for all labels present in rtx.  */
3509 
3510 static void
3511 mark_label_nuses (rtx x)
3512 {
3513   enum rtx_code code;
3514   int i, j;
3515   const char *fmt;
3516 
3517   code = GET_CODE (x);
3518   if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3519     LABEL_NUSES (XEXP (x, 0))++;
3520 
3521   fmt = GET_RTX_FORMAT (code);
3522   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3523     {
3524       if (fmt[i] == 'e')
3525 	mark_label_nuses (XEXP (x, i));
3526       else if (fmt[i] == 'E')
3527 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3528 	  mark_label_nuses (XVECEXP (x, i, j));
3529     }
3530 }
3531 
3532 
3533 /* Try splitting insns that can be split for better scheduling.
3534    PAT is the pattern which might split.
3535    TRIAL is the insn providing PAT.
3536    LAST is nonzero if we should return the last insn of the sequence produced.
3537 
3538    If this routine succeeds in splitting, it returns the first or last
3539    replacement insn depending on the value of LAST.  Otherwise, it
3540    returns TRIAL.  If the insn to be returned can be split, it will be.  */
3541 
3542 rtx
3543 try_split (rtx pat, rtx trial, int last)
3544 {
3545   rtx before = PREV_INSN (trial);
3546   rtx after = NEXT_INSN (trial);
3547   int has_barrier = 0;
3548   rtx note, seq, tem;
3549   int probability;
3550   rtx insn_last, insn;
3551   int njumps = 0;
3552 
3553   /* We're not good at redistributing frame information.  */
3554   if (RTX_FRAME_RELATED_P (trial))
3555     return trial;
3556 
3557   if (any_condjump_p (trial)
3558       && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3559     split_branch_probability = INTVAL (XEXP (note, 0));
3560   probability = split_branch_probability;
3561 
3562   seq = split_insns (pat, trial);
3563 
3564   split_branch_probability = -1;
3565 
3566   /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3567      We may need to handle this specially.  */
3568   if (after && BARRIER_P (after))
3569     {
3570       has_barrier = 1;
3571       after = NEXT_INSN (after);
3572     }
3573 
3574   if (!seq)
3575     return trial;
3576 
3577   /* Avoid infinite loop if any insn of the result matches
3578      the original pattern.  */
3579   insn_last = seq;
3580   while (1)
3581     {
3582       if (INSN_P (insn_last)
3583 	  && rtx_equal_p (PATTERN (insn_last), pat))
3584 	return trial;
3585       if (!NEXT_INSN (insn_last))
3586 	break;
3587       insn_last = NEXT_INSN (insn_last);
3588     }
3589 
3590   /* We will be adding the new sequence to the function.  The splitters
3591      may have introduced invalid RTL sharing, so unshare the sequence now.  */
3592   unshare_all_rtl_in_chain (seq);
3593 
3594   /* Mark labels.  */
3595   for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3596     {
3597       if (JUMP_P (insn))
3598 	{
3599 	  mark_jump_label (PATTERN (insn), insn, 0);
3600 	  njumps++;
3601 	  if (probability != -1
3602 	      && any_condjump_p (insn)
3603 	      && !find_reg_note (insn, REG_BR_PROB, 0))
3604 	    {
3605 	      /* We can preserve the REG_BR_PROB notes only if exactly
3606 		 one jump is created, otherwise the machine description
3607 		 is responsible for this step using
3608 		 split_branch_probability variable.  */
3609 	      gcc_assert (njumps == 1);
3610 	      add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3611 	    }
3612 	}
3613     }
3614 
3615   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3616      in SEQ and copy any additional information across.  */
3617   if (CALL_P (trial))
3618     {
3619       for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3620 	if (CALL_P (insn))
3621 	  {
3622 	    rtx next, *p;
3623 
3624 	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3625 	       target may have explicitly specified.  */
3626 	    p = &CALL_INSN_FUNCTION_USAGE (insn);
3627 	    while (*p)
3628 	      p = &XEXP (*p, 1);
3629 	    *p = CALL_INSN_FUNCTION_USAGE (trial);
3630 
3631 	    /* If the old call was a sibling call, the new one must
3632 	       be too.  */
3633 	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3634 
3635 	    /* If the new call is the last instruction in the sequence,
3636 	       it will effectively replace the old call in-situ.  Otherwise
3637 	       we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3638 	       so that it comes immediately after the new call.  */
3639 	    if (NEXT_INSN (insn))
3640 	      for (next = NEXT_INSN (trial);
3641 		   next && NOTE_P (next);
3642 		   next = NEXT_INSN (next))
3643 		if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3644 		  {
3645 		    remove_insn (next);
3646 		    add_insn_after (next, insn, NULL);
3647 		    break;
3648 		  }
3649 	  }
3650     }
3651 
3652   /* Copy notes, particularly those related to the CFG.  */
3653   for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3654     {
3655       switch (REG_NOTE_KIND (note))
3656 	{
3657 	case REG_EH_REGION:
3658 	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
3659 	  break;
3660 
3661 	case REG_NORETURN:
3662 	case REG_SETJMP:
3663 	case REG_TM:
3664 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3665 	    {
3666 	      if (CALL_P (insn))
3667 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3668 	    }
3669 	  break;
3670 
3671 	case REG_NON_LOCAL_GOTO:
3672 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3673 	    {
3674 	      if (JUMP_P (insn))
3675 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3676 	    }
3677 	  break;
3678 
3679 #ifdef AUTO_INC_DEC
3680 	case REG_INC:
3681 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3682 	    {
3683 	      rtx reg = XEXP (note, 0);
3684 	      if (!FIND_REG_INC_NOTE (insn, reg)
3685 		  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3686 		add_reg_note (insn, REG_INC, reg);
3687 	    }
3688 	  break;
3689 #endif
3690 
3691 	case REG_ARGS_SIZE:
3692 	  fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3693 	  break;
3694 
3695 	default:
3696 	  break;
3697 	}
3698     }
3699 
3700   /* If there are LABELS inside the split insns increment the
3701      usage count so we don't delete the label.  */
3702   if (INSN_P (trial))
3703     {
3704       insn = insn_last;
3705       while (insn != NULL_RTX)
3706 	{
3707 	  /* JUMP_P insns have already been "marked" above.  */
3708 	  if (NONJUMP_INSN_P (insn))
3709 	    mark_label_nuses (PATTERN (insn));
3710 
3711 	  insn = PREV_INSN (insn);
3712 	}
3713     }
3714 
3715   tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3716 
3717   delete_insn (trial);
3718   if (has_barrier)
3719     emit_barrier_after (tem);
3720 
3721   /* Recursively call try_split for each new insn created; by the
3722      time control returns here that insn will be fully split, so
3723      set LAST and continue from the insn after the one returned.
3724      We can't use next_active_insn here since AFTER may be a note.
3725      Ignore deleted insns, which can be occur if not optimizing.  */
3726   for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3727     if (! INSN_DELETED_P (tem) && INSN_P (tem))
3728       tem = try_split (PATTERN (tem), tem, 1);
3729 
3730   /* Return either the first or the last insn, depending on which was
3731      requested.  */
3732   return last
3733     ? (after ? PREV_INSN (after) : get_last_insn ())
3734     : NEXT_INSN (before);
3735 }
3736 
3737 /* Make and return an INSN rtx, initializing all its slots.
3738    Store PATTERN in the pattern slots.  */
3739 
3740 rtx
3741 make_insn_raw (rtx pattern)
3742 {
3743   rtx insn;
3744 
3745   insn = rtx_alloc (INSN);
3746 
3747   INSN_UID (insn) = cur_insn_uid++;
3748   PATTERN (insn) = pattern;
3749   INSN_CODE (insn) = -1;
3750   REG_NOTES (insn) = NULL;
3751   INSN_LOCATION (insn) = curr_insn_location ();
3752   BLOCK_FOR_INSN (insn) = NULL;
3753 
3754 #ifdef ENABLE_RTL_CHECKING
3755   if (insn
3756       && INSN_P (insn)
3757       && (returnjump_p (insn)
3758 	  || (GET_CODE (insn) == SET
3759 	      && SET_DEST (insn) == pc_rtx)))
3760     {
3761       warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3762       debug_rtx (insn);
3763     }
3764 #endif
3765 
3766   return insn;
3767 }
3768 
3769 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */
3770 
3771 static rtx
3772 make_debug_insn_raw (rtx pattern)
3773 {
3774   rtx insn;
3775 
3776   insn = rtx_alloc (DEBUG_INSN);
3777   INSN_UID (insn) = cur_debug_insn_uid++;
3778   if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3779     INSN_UID (insn) = cur_insn_uid++;
3780 
3781   PATTERN (insn) = pattern;
3782   INSN_CODE (insn) = -1;
3783   REG_NOTES (insn) = NULL;
3784   INSN_LOCATION (insn) = curr_insn_location ();
3785   BLOCK_FOR_INSN (insn) = NULL;
3786 
3787   return insn;
3788 }
3789 
3790 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
3791 
3792 static rtx
3793 make_jump_insn_raw (rtx pattern)
3794 {
3795   rtx insn;
3796 
3797   insn = rtx_alloc (JUMP_INSN);
3798   INSN_UID (insn) = cur_insn_uid++;
3799 
3800   PATTERN (insn) = pattern;
3801   INSN_CODE (insn) = -1;
3802   REG_NOTES (insn) = NULL;
3803   JUMP_LABEL (insn) = NULL;
3804   INSN_LOCATION (insn) = curr_insn_location ();
3805   BLOCK_FOR_INSN (insn) = NULL;
3806 
3807   return insn;
3808 }
3809 
3810 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
3811 
3812 static rtx
3813 make_call_insn_raw (rtx pattern)
3814 {
3815   rtx insn;
3816 
3817   insn = rtx_alloc (CALL_INSN);
3818   INSN_UID (insn) = cur_insn_uid++;
3819 
3820   PATTERN (insn) = pattern;
3821   INSN_CODE (insn) = -1;
3822   REG_NOTES (insn) = NULL;
3823   CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3824   INSN_LOCATION (insn) = curr_insn_location ();
3825   BLOCK_FOR_INSN (insn) = NULL;
3826 
3827   return insn;
3828 }
3829 
3830 /* Add INSN to the end of the doubly-linked list.
3831    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
3832 
3833 void
3834 add_insn (rtx insn)
3835 {
3836   PREV_INSN (insn) = get_last_insn();
3837   NEXT_INSN (insn) = 0;
3838 
3839   if (NULL != get_last_insn())
3840     NEXT_INSN (get_last_insn ()) = insn;
3841 
3842   if (NULL == get_insns ())
3843     set_first_insn (insn);
3844 
3845   set_last_insn (insn);
3846 }
3847 
3848 /* Add INSN into the doubly-linked list after insn AFTER.  This and
3849    the next should be the only functions called to insert an insn once
3850    delay slots have been filled since only they know how to update a
3851    SEQUENCE.  */
3852 
3853 void
3854 add_insn_after (rtx insn, rtx after, basic_block bb)
3855 {
3856   rtx next = NEXT_INSN (after);
3857 
3858   gcc_assert (!optimize || !INSN_DELETED_P (after));
3859 
3860   NEXT_INSN (insn) = next;
3861   PREV_INSN (insn) = after;
3862 
3863   if (next)
3864     {
3865       PREV_INSN (next) = insn;
3866       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3867 	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3868     }
3869   else if (get_last_insn () == after)
3870     set_last_insn (insn);
3871   else
3872     {
3873       struct sequence_stack *stack = seq_stack;
3874       /* Scan all pending sequences too.  */
3875       for (; stack; stack = stack->next)
3876 	if (after == stack->last)
3877 	  {
3878 	    stack->last = insn;
3879 	    break;
3880 	  }
3881 
3882       gcc_assert (stack);
3883     }
3884 
3885   if (!BARRIER_P (after)
3886       && !BARRIER_P (insn)
3887       && (bb = BLOCK_FOR_INSN (after)))
3888     {
3889       set_block_for_insn (insn, bb);
3890       if (INSN_P (insn))
3891 	df_insn_rescan (insn);
3892       /* Should not happen as first in the BB is always
3893 	 either NOTE or LABEL.  */
3894       if (BB_END (bb) == after
3895 	  /* Avoid clobbering of structure when creating new BB.  */
3896 	  && !BARRIER_P (insn)
3897 	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
3898 	BB_END (bb) = insn;
3899     }
3900 
3901   NEXT_INSN (after) = insn;
3902   if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3903     {
3904       rtx sequence = PATTERN (after);
3905       NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3906     }
3907 }
3908 
3909 /* Add INSN into the doubly-linked list before insn BEFORE.  This and
3910    the previous should be the only functions called to insert an insn
3911    once delay slots have been filled since only they know how to
3912    update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
3913    bb from before.  */
3914 
3915 void
3916 add_insn_before (rtx insn, rtx before, basic_block bb)
3917 {
3918   rtx prev = PREV_INSN (before);
3919 
3920   gcc_assert (!optimize || !INSN_DELETED_P (before));
3921 
3922   PREV_INSN (insn) = prev;
3923   NEXT_INSN (insn) = before;
3924 
3925   if (prev)
3926     {
3927       NEXT_INSN (prev) = insn;
3928       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3929 	{
3930 	  rtx sequence = PATTERN (prev);
3931 	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3932 	}
3933     }
3934   else if (get_insns () == before)
3935     set_first_insn (insn);
3936   else
3937     {
3938       struct sequence_stack *stack = seq_stack;
3939       /* Scan all pending sequences too.  */
3940       for (; stack; stack = stack->next)
3941 	if (before == stack->first)
3942 	  {
3943 	    stack->first = insn;
3944 	    break;
3945 	  }
3946 
3947       gcc_assert (stack);
3948     }
3949 
3950   if (!bb
3951       && !BARRIER_P (before)
3952       && !BARRIER_P (insn))
3953     bb = BLOCK_FOR_INSN (before);
3954 
3955   if (bb)
3956     {
3957       set_block_for_insn (insn, bb);
3958       if (INSN_P (insn))
3959 	df_insn_rescan (insn);
3960       /* Should not happen as first in the BB is always either NOTE or
3961 	 LABEL.  */
3962       gcc_assert (BB_HEAD (bb) != insn
3963 		  /* Avoid clobbering of structure when creating new BB.  */
3964 		  || BARRIER_P (insn)
3965 		  || NOTE_INSN_BASIC_BLOCK_P (insn));
3966     }
3967 
3968   PREV_INSN (before) = insn;
3969   if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3970     PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3971 }
3972 
3973 
3974 /* Replace insn with an deleted instruction note.  */
3975 
3976 void
3977 set_insn_deleted (rtx insn)
3978 {
3979   df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3980   PUT_CODE (insn, NOTE);
3981   NOTE_KIND (insn) = NOTE_INSN_DELETED;
3982 }
3983 
3984 
3985 /* Remove an insn from its doubly-linked list.  This function knows how
3986    to handle sequences.  */
3987 void
3988 remove_insn (rtx insn)
3989 {
3990   rtx next = NEXT_INSN (insn);
3991   rtx prev = PREV_INSN (insn);
3992   basic_block bb;
3993 
3994   /* Later in the code, the block will be marked dirty.  */
3995   df_insn_delete (NULL, INSN_UID (insn));
3996 
3997   if (prev)
3998     {
3999       NEXT_INSN (prev) = next;
4000       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4001 	{
4002 	  rtx sequence = PATTERN (prev);
4003 	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
4004 	}
4005     }
4006   else if (get_insns () == insn)
4007     {
4008       if (next)
4009         PREV_INSN (next) = NULL;
4010       set_first_insn (next);
4011     }
4012   else
4013     {
4014       struct sequence_stack *stack = seq_stack;
4015       /* Scan all pending sequences too.  */
4016       for (; stack; stack = stack->next)
4017 	if (insn == stack->first)
4018 	  {
4019 	    stack->first = next;
4020 	    break;
4021 	  }
4022 
4023       gcc_assert (stack);
4024     }
4025 
4026   if (next)
4027     {
4028       PREV_INSN (next) = prev;
4029       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4030 	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4031     }
4032   else if (get_last_insn () == insn)
4033     set_last_insn (prev);
4034   else
4035     {
4036       struct sequence_stack *stack = seq_stack;
4037       /* Scan all pending sequences too.  */
4038       for (; stack; stack = stack->next)
4039 	if (insn == stack->last)
4040 	  {
4041 	    stack->last = prev;
4042 	    break;
4043 	  }
4044 
4045       gcc_assert (stack);
4046     }
4047   if (!BARRIER_P (insn)
4048       && (bb = BLOCK_FOR_INSN (insn)))
4049     {
4050       if (NONDEBUG_INSN_P (insn))
4051 	df_set_bb_dirty (bb);
4052       if (BB_HEAD (bb) == insn)
4053 	{
4054 	  /* Never ever delete the basic block note without deleting whole
4055 	     basic block.  */
4056 	  gcc_assert (!NOTE_P (insn));
4057 	  BB_HEAD (bb) = next;
4058 	}
4059       if (BB_END (bb) == insn)
4060 	BB_END (bb) = prev;
4061     }
4062 }
4063 
4064 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
4065 
4066 void
4067 add_function_usage_to (rtx call_insn, rtx call_fusage)
4068 {
4069   gcc_assert (call_insn && CALL_P (call_insn));
4070 
4071   /* Put the register usage information on the CALL.  If there is already
4072      some usage information, put ours at the end.  */
4073   if (CALL_INSN_FUNCTION_USAGE (call_insn))
4074     {
4075       rtx link;
4076 
4077       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4078 	   link = XEXP (link, 1))
4079 	;
4080 
4081       XEXP (link, 1) = call_fusage;
4082     }
4083   else
4084     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4085 }
4086 
4087 /* Delete all insns made since FROM.
4088    FROM becomes the new last instruction.  */
4089 
4090 void
4091 delete_insns_since (rtx from)
4092 {
4093   if (from == 0)
4094     set_first_insn (0);
4095   else
4096     NEXT_INSN (from) = 0;
4097   set_last_insn (from);
4098 }
4099 
4100 /* This function is deprecated, please use sequences instead.
4101 
4102    Move a consecutive bunch of insns to a different place in the chain.
4103    The insns to be moved are those between FROM and TO.
4104    They are moved to a new position after the insn AFTER.
4105    AFTER must not be FROM or TO or any insn in between.
4106 
4107    This function does not know about SEQUENCEs and hence should not be
4108    called after delay-slot filling has been done.  */
4109 
4110 void
4111 reorder_insns_nobb (rtx from, rtx to, rtx after)
4112 {
4113 #ifdef ENABLE_CHECKING
4114   rtx x;
4115   for (x = from; x != to; x = NEXT_INSN (x))
4116     gcc_assert (after != x);
4117   gcc_assert (after != to);
4118 #endif
4119 
4120   /* Splice this bunch out of where it is now.  */
4121   if (PREV_INSN (from))
4122     NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4123   if (NEXT_INSN (to))
4124     PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4125   if (get_last_insn () == to)
4126     set_last_insn (PREV_INSN (from));
4127   if (get_insns () == from)
4128     set_first_insn (NEXT_INSN (to));
4129 
4130   /* Make the new neighbors point to it and it to them.  */
4131   if (NEXT_INSN (after))
4132     PREV_INSN (NEXT_INSN (after)) = to;
4133 
4134   NEXT_INSN (to) = NEXT_INSN (after);
4135   PREV_INSN (from) = after;
4136   NEXT_INSN (after) = from;
4137   if (after == get_last_insn())
4138     set_last_insn (to);
4139 }
4140 
4141 /* Same as function above, but take care to update BB boundaries.  */
4142 void
4143 reorder_insns (rtx from, rtx to, rtx after)
4144 {
4145   rtx prev = PREV_INSN (from);
4146   basic_block bb, bb2;
4147 
4148   reorder_insns_nobb (from, to, after);
4149 
4150   if (!BARRIER_P (after)
4151       && (bb = BLOCK_FOR_INSN (after)))
4152     {
4153       rtx x;
4154       df_set_bb_dirty (bb);
4155 
4156       if (!BARRIER_P (from)
4157 	  && (bb2 = BLOCK_FOR_INSN (from)))
4158 	{
4159 	  if (BB_END (bb2) == to)
4160 	    BB_END (bb2) = prev;
4161 	  df_set_bb_dirty (bb2);
4162 	}
4163 
4164       if (BB_END (bb) == after)
4165 	BB_END (bb) = to;
4166 
4167       for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4168 	if (!BARRIER_P (x))
4169 	  df_insn_change_bb (x, bb);
4170     }
4171 }
4172 
4173 
4174 /* Emit insn(s) of given code and pattern
4175    at a specified place within the doubly-linked list.
4176 
4177    All of the emit_foo global entry points accept an object
4178    X which is either an insn list or a PATTERN of a single
4179    instruction.
4180 
4181    There are thus a few canonical ways to generate code and
4182    emit it at a specific place in the instruction stream.  For
4183    example, consider the instruction named SPOT and the fact that
4184    we would like to emit some instructions before SPOT.  We might
4185    do it like this:
4186 
4187 	start_sequence ();
4188 	... emit the new instructions ...
4189 	insns_head = get_insns ();
4190 	end_sequence ();
4191 
4192 	emit_insn_before (insns_head, SPOT);
4193 
4194    It used to be common to generate SEQUENCE rtl instead, but that
4195    is a relic of the past which no longer occurs.  The reason is that
4196    SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4197    generated would almost certainly die right after it was created.  */
4198 
4199 static rtx
4200 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4201                            rtx (*make_raw) (rtx))
4202 {
4203   rtx insn;
4204 
4205   gcc_assert (before);
4206 
4207   if (x == NULL_RTX)
4208     return last;
4209 
4210   switch (GET_CODE (x))
4211     {
4212     case DEBUG_INSN:
4213     case INSN:
4214     case JUMP_INSN:
4215     case CALL_INSN:
4216     case CODE_LABEL:
4217     case BARRIER:
4218     case NOTE:
4219       insn = x;
4220       while (insn)
4221 	{
4222 	  rtx next = NEXT_INSN (insn);
4223 	  add_insn_before (insn, before, bb);
4224 	  last = insn;
4225 	  insn = next;
4226 	}
4227       break;
4228 
4229 #ifdef ENABLE_RTL_CHECKING
4230     case SEQUENCE:
4231       gcc_unreachable ();
4232       break;
4233 #endif
4234 
4235     default:
4236       last = (*make_raw) (x);
4237       add_insn_before (last, before, bb);
4238       break;
4239     }
4240 
4241   return last;
4242 }
4243 
4244 /* Make X be output before the instruction BEFORE.  */
4245 
4246 rtx
4247 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4248 {
4249   return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4250 }
4251 
4252 /* Make an instruction with body X and code JUMP_INSN
4253    and output it before the instruction BEFORE.  */
4254 
4255 rtx
4256 emit_jump_insn_before_noloc (rtx x, rtx before)
4257 {
4258   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4259 				    make_jump_insn_raw);
4260 }
4261 
4262 /* Make an instruction with body X and code CALL_INSN
4263    and output it before the instruction BEFORE.  */
4264 
4265 rtx
4266 emit_call_insn_before_noloc (rtx x, rtx before)
4267 {
4268   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4269 				    make_call_insn_raw);
4270 }
4271 
4272 /* Make an instruction with body X and code DEBUG_INSN
4273    and output it before the instruction BEFORE.  */
4274 
4275 rtx
4276 emit_debug_insn_before_noloc (rtx x, rtx before)
4277 {
4278   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4279 				    make_debug_insn_raw);
4280 }
4281 
4282 /* Make an insn of code BARRIER
4283    and output it before the insn BEFORE.  */
4284 
4285 rtx
4286 emit_barrier_before (rtx before)
4287 {
4288   rtx insn = rtx_alloc (BARRIER);
4289 
4290   INSN_UID (insn) = cur_insn_uid++;
4291 
4292   add_insn_before (insn, before, NULL);
4293   return insn;
4294 }
4295 
4296 /* Emit the label LABEL before the insn BEFORE.  */
4297 
4298 rtx
4299 emit_label_before (rtx label, rtx before)
4300 {
4301   gcc_checking_assert (INSN_UID (label) == 0);
4302   INSN_UID (label) = cur_insn_uid++;
4303   add_insn_before (label, before, NULL);
4304   return label;
4305 }
4306 
4307 /* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
4308 
4309 rtx
4310 emit_note_before (enum insn_note subtype, rtx before)
4311 {
4312   rtx note = rtx_alloc (NOTE);
4313   INSN_UID (note) = cur_insn_uid++;
4314   NOTE_KIND (note) = subtype;
4315   BLOCK_FOR_INSN (note) = NULL;
4316   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4317 
4318   add_insn_before (note, before, NULL);
4319   return note;
4320 }
4321 
4322 /* Helper for emit_insn_after, handles lists of instructions
4323    efficiently.  */
4324 
4325 static rtx
4326 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4327 {
4328   rtx last;
4329   rtx after_after;
4330   if (!bb && !BARRIER_P (after))
4331     bb = BLOCK_FOR_INSN (after);
4332 
4333   if (bb)
4334     {
4335       df_set_bb_dirty (bb);
4336       for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4337 	if (!BARRIER_P (last))
4338 	  {
4339 	    set_block_for_insn (last, bb);
4340 	    df_insn_rescan (last);
4341 	  }
4342       if (!BARRIER_P (last))
4343 	{
4344 	  set_block_for_insn (last, bb);
4345 	  df_insn_rescan (last);
4346 	}
4347       if (BB_END (bb) == after)
4348 	BB_END (bb) = last;
4349     }
4350   else
4351     for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4352       continue;
4353 
4354   after_after = NEXT_INSN (after);
4355 
4356   NEXT_INSN (after) = first;
4357   PREV_INSN (first) = after;
4358   NEXT_INSN (last) = after_after;
4359   if (after_after)
4360     PREV_INSN (after_after) = last;
4361 
4362   if (after == get_last_insn())
4363     set_last_insn (last);
4364 
4365   return last;
4366 }
4367 
4368 static rtx
4369 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4370 			  rtx (*make_raw)(rtx))
4371 {
4372   rtx last = after;
4373 
4374   gcc_assert (after);
4375 
4376   if (x == NULL_RTX)
4377     return last;
4378 
4379   switch (GET_CODE (x))
4380     {
4381     case DEBUG_INSN:
4382     case INSN:
4383     case JUMP_INSN:
4384     case CALL_INSN:
4385     case CODE_LABEL:
4386     case BARRIER:
4387     case NOTE:
4388       last = emit_insn_after_1 (x, after, bb);
4389       break;
4390 
4391 #ifdef ENABLE_RTL_CHECKING
4392     case SEQUENCE:
4393       gcc_unreachable ();
4394       break;
4395 #endif
4396 
4397     default:
4398       last = (*make_raw) (x);
4399       add_insn_after (last, after, bb);
4400       break;
4401     }
4402 
4403   return last;
4404 }
4405 
4406 /* Make X be output after the insn AFTER and set the BB of insn.  If
4407    BB is NULL, an attempt is made to infer the BB from AFTER.  */
4408 
4409 rtx
4410 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4411 {
4412   return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4413 }
4414 
4415 
4416 /* Make an insn of code JUMP_INSN with body X
4417    and output it after the insn AFTER.  */
4418 
4419 rtx
4420 emit_jump_insn_after_noloc (rtx x, rtx after)
4421 {
4422   return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4423 }
4424 
4425 /* Make an instruction with body X and code CALL_INSN
4426    and output it after the instruction AFTER.  */
4427 
4428 rtx
4429 emit_call_insn_after_noloc (rtx x, rtx after)
4430 {
4431   return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4432 }
4433 
4434 /* Make an instruction with body X and code CALL_INSN
4435    and output it after the instruction AFTER.  */
4436 
4437 rtx
4438 emit_debug_insn_after_noloc (rtx x, rtx after)
4439 {
4440   return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4441 }
4442 
4443 /* Make an insn of code BARRIER
4444    and output it after the insn AFTER.  */
4445 
4446 rtx
4447 emit_barrier_after (rtx after)
4448 {
4449   rtx insn = rtx_alloc (BARRIER);
4450 
4451   INSN_UID (insn) = cur_insn_uid++;
4452 
4453   add_insn_after (insn, after, NULL);
4454   return insn;
4455 }
4456 
4457 /* Emit the label LABEL after the insn AFTER.  */
4458 
4459 rtx
4460 emit_label_after (rtx label, rtx after)
4461 {
4462   gcc_checking_assert (INSN_UID (label) == 0);
4463   INSN_UID (label) = cur_insn_uid++;
4464   add_insn_after (label, after, NULL);
4465   return label;
4466 }
4467 
4468 /* Emit a note of subtype SUBTYPE after the insn AFTER.  */
4469 
4470 rtx
4471 emit_note_after (enum insn_note subtype, rtx after)
4472 {
4473   rtx note = rtx_alloc (NOTE);
4474   INSN_UID (note) = cur_insn_uid++;
4475   NOTE_KIND (note) = subtype;
4476   BLOCK_FOR_INSN (note) = NULL;
4477   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4478   add_insn_after (note, after, NULL);
4479   return note;
4480 }
4481 
4482 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4483    MAKE_RAW indicates how to turn PATTERN into a real insn.  */
4484 
4485 static rtx
4486 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4487 			   rtx (*make_raw) (rtx))
4488 {
4489   rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4490 
4491   if (pattern == NULL_RTX || !loc)
4492     return last;
4493 
4494   after = NEXT_INSN (after);
4495   while (1)
4496     {
4497       if (active_insn_p (after) && !INSN_LOCATION (after))
4498 	INSN_LOCATION (after) = loc;
4499       if (after == last)
4500 	break;
4501       after = NEXT_INSN (after);
4502     }
4503   return last;
4504 }
4505 
4506 /* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
4507    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
4508    any DEBUG_INSNs.  */
4509 
4510 static rtx
4511 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4512 		    rtx (*make_raw) (rtx))
4513 {
4514   rtx prev = after;
4515 
4516   if (skip_debug_insns)
4517     while (DEBUG_INSN_P (prev))
4518       prev = PREV_INSN (prev);
4519 
4520   if (INSN_P (prev))
4521     return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4522 				      make_raw);
4523   else
4524     return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4525 }
4526 
4527 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4528 rtx
4529 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4530 {
4531   return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4532 }
4533 
4534 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4535 rtx
4536 emit_insn_after (rtx pattern, rtx after)
4537 {
4538   return emit_pattern_after (pattern, after, true, make_insn_raw);
4539 }
4540 
4541 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4542 rtx
4543 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4544 {
4545   return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4546 }
4547 
4548 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4549 rtx
4550 emit_jump_insn_after (rtx pattern, rtx after)
4551 {
4552   return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4553 }
4554 
4555 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4556 rtx
4557 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4558 {
4559   return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4560 }
4561 
4562 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4563 rtx
4564 emit_call_insn_after (rtx pattern, rtx after)
4565 {
4566   return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4567 }
4568 
4569 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4570 rtx
4571 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4572 {
4573   return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4574 }
4575 
4576 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4577 rtx
4578 emit_debug_insn_after (rtx pattern, rtx after)
4579 {
4580   return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4581 }
4582 
4583 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4584    MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
4585    indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4586    CALL_INSN, etc.  */
4587 
4588 static rtx
4589 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4590 			    rtx (*make_raw) (rtx))
4591 {
4592   rtx first = PREV_INSN (before);
4593   rtx last = emit_pattern_before_noloc (pattern, before,
4594                                         insnp ? before : NULL_RTX,
4595                                         NULL, make_raw);
4596 
4597   if (pattern == NULL_RTX || !loc)
4598     return last;
4599 
4600   if (!first)
4601     first = get_insns ();
4602   else
4603     first = NEXT_INSN (first);
4604   while (1)
4605     {
4606       if (active_insn_p (first) && !INSN_LOCATION (first))
4607 	INSN_LOCATION (first) = loc;
4608       if (first == last)
4609 	break;
4610       first = NEXT_INSN (first);
4611     }
4612   return last;
4613 }
4614 
4615 /* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
4616    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
4617    before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
4618    INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */
4619 
4620 static rtx
4621 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4622 		     bool insnp, rtx (*make_raw) (rtx))
4623 {
4624   rtx next = before;
4625 
4626   if (skip_debug_insns)
4627     while (DEBUG_INSN_P (next))
4628       next = PREV_INSN (next);
4629 
4630   if (INSN_P (next))
4631     return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4632 				       insnp, make_raw);
4633   else
4634     return emit_pattern_before_noloc (pattern, before,
4635                                       insnp ? before : NULL_RTX,
4636                                       NULL, make_raw);
4637 }
4638 
4639 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4640 rtx
4641 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4642 {
4643   return emit_pattern_before_setloc (pattern, before, loc, true,
4644 				     make_insn_raw);
4645 }
4646 
4647 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
4648 rtx
4649 emit_insn_before (rtx pattern, rtx before)
4650 {
4651   return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4652 }
4653 
4654 /* like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4655 rtx
4656 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4657 {
4658   return emit_pattern_before_setloc (pattern, before, loc, false,
4659 				     make_jump_insn_raw);
4660 }
4661 
4662 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
4663 rtx
4664 emit_jump_insn_before (rtx pattern, rtx before)
4665 {
4666   return emit_pattern_before (pattern, before, true, false,
4667 			      make_jump_insn_raw);
4668 }
4669 
4670 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4671 rtx
4672 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4673 {
4674   return emit_pattern_before_setloc (pattern, before, loc, false,
4675 				     make_call_insn_raw);
4676 }
4677 
4678 /* Like emit_call_insn_before_noloc,
4679    but set insn_location according to BEFORE.  */
4680 rtx
4681 emit_call_insn_before (rtx pattern, rtx before)
4682 {
4683   return emit_pattern_before (pattern, before, true, false,
4684 			      make_call_insn_raw);
4685 }
4686 
4687 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4688 rtx
4689 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4690 {
4691   return emit_pattern_before_setloc (pattern, before, loc, false,
4692 				     make_debug_insn_raw);
4693 }
4694 
4695 /* Like emit_debug_insn_before_noloc,
4696    but set insn_location according to BEFORE.  */
4697 rtx
4698 emit_debug_insn_before (rtx pattern, rtx before)
4699 {
4700   return emit_pattern_before (pattern, before, false, false,
4701 			      make_debug_insn_raw);
4702 }
4703 
4704 /* Take X and emit it at the end of the doubly-linked
4705    INSN list.
4706 
4707    Returns the last insn emitted.  */
4708 
4709 rtx
4710 emit_insn (rtx x)
4711 {
4712   rtx last = get_last_insn();
4713   rtx insn;
4714 
4715   if (x == NULL_RTX)
4716     return last;
4717 
4718   switch (GET_CODE (x))
4719     {
4720     case DEBUG_INSN:
4721     case INSN:
4722     case JUMP_INSN:
4723     case CALL_INSN:
4724     case CODE_LABEL:
4725     case BARRIER:
4726     case NOTE:
4727       insn = x;
4728       while (insn)
4729 	{
4730 	  rtx next = NEXT_INSN (insn);
4731 	  add_insn (insn);
4732 	  last = insn;
4733 	  insn = next;
4734 	}
4735       break;
4736 
4737 #ifdef ENABLE_RTL_CHECKING
4738     case SEQUENCE:
4739       gcc_unreachable ();
4740       break;
4741 #endif
4742 
4743     default:
4744       last = make_insn_raw (x);
4745       add_insn (last);
4746       break;
4747     }
4748 
4749   return last;
4750 }
4751 
4752 /* Make an insn of code DEBUG_INSN with pattern X
4753    and add it to the end of the doubly-linked list.  */
4754 
4755 rtx
4756 emit_debug_insn (rtx x)
4757 {
4758   rtx last = get_last_insn();
4759   rtx insn;
4760 
4761   if (x == NULL_RTX)
4762     return last;
4763 
4764   switch (GET_CODE (x))
4765     {
4766     case DEBUG_INSN:
4767     case INSN:
4768     case JUMP_INSN:
4769     case CALL_INSN:
4770     case CODE_LABEL:
4771     case BARRIER:
4772     case NOTE:
4773       insn = x;
4774       while (insn)
4775 	{
4776 	  rtx next = NEXT_INSN (insn);
4777 	  add_insn (insn);
4778 	  last = insn;
4779 	  insn = next;
4780 	}
4781       break;
4782 
4783 #ifdef ENABLE_RTL_CHECKING
4784     case SEQUENCE:
4785       gcc_unreachable ();
4786       break;
4787 #endif
4788 
4789     default:
4790       last = make_debug_insn_raw (x);
4791       add_insn (last);
4792       break;
4793     }
4794 
4795   return last;
4796 }
4797 
4798 /* Make an insn of code JUMP_INSN with pattern X
4799    and add it to the end of the doubly-linked list.  */
4800 
4801 rtx
4802 emit_jump_insn (rtx x)
4803 {
4804   rtx last = NULL_RTX, insn;
4805 
4806   switch (GET_CODE (x))
4807     {
4808     case DEBUG_INSN:
4809     case INSN:
4810     case JUMP_INSN:
4811     case CALL_INSN:
4812     case CODE_LABEL:
4813     case BARRIER:
4814     case NOTE:
4815       insn = x;
4816       while (insn)
4817 	{
4818 	  rtx next = NEXT_INSN (insn);
4819 	  add_insn (insn);
4820 	  last = insn;
4821 	  insn = next;
4822 	}
4823       break;
4824 
4825 #ifdef ENABLE_RTL_CHECKING
4826     case SEQUENCE:
4827       gcc_unreachable ();
4828       break;
4829 #endif
4830 
4831     default:
4832       last = make_jump_insn_raw (x);
4833       add_insn (last);
4834       break;
4835     }
4836 
4837   return last;
4838 }
4839 
4840 /* Make an insn of code CALL_INSN with pattern X
4841    and add it to the end of the doubly-linked list.  */
4842 
4843 rtx
4844 emit_call_insn (rtx x)
4845 {
4846   rtx insn;
4847 
4848   switch (GET_CODE (x))
4849     {
4850     case DEBUG_INSN:
4851     case INSN:
4852     case JUMP_INSN:
4853     case CALL_INSN:
4854     case CODE_LABEL:
4855     case BARRIER:
4856     case NOTE:
4857       insn = emit_insn (x);
4858       break;
4859 
4860 #ifdef ENABLE_RTL_CHECKING
4861     case SEQUENCE:
4862       gcc_unreachable ();
4863       break;
4864 #endif
4865 
4866     default:
4867       insn = make_call_insn_raw (x);
4868       add_insn (insn);
4869       break;
4870     }
4871 
4872   return insn;
4873 }
4874 
4875 /* Add the label LABEL to the end of the doubly-linked list.  */
4876 
4877 rtx
4878 emit_label (rtx label)
4879 {
4880   gcc_checking_assert (INSN_UID (label) == 0);
4881   INSN_UID (label) = cur_insn_uid++;
4882   add_insn (label);
4883   return label;
4884 }
4885 
4886 /* Make an insn of code BARRIER
4887    and add it to the end of the doubly-linked list.  */
4888 
4889 rtx
4890 emit_barrier (void)
4891 {
4892   rtx barrier = rtx_alloc (BARRIER);
4893   INSN_UID (barrier) = cur_insn_uid++;
4894   add_insn (barrier);
4895   return barrier;
4896 }
4897 
4898 /* Emit a copy of note ORIG.  */
4899 
4900 rtx
4901 emit_note_copy (rtx orig)
4902 {
4903   rtx note;
4904 
4905   note = rtx_alloc (NOTE);
4906 
4907   INSN_UID (note) = cur_insn_uid++;
4908   NOTE_DATA (note) = NOTE_DATA (orig);
4909   NOTE_KIND (note) = NOTE_KIND (orig);
4910   BLOCK_FOR_INSN (note) = NULL;
4911   add_insn (note);
4912 
4913   return note;
4914 }
4915 
4916 /* Make an insn of code NOTE or type NOTE_NO
4917    and add it to the end of the doubly-linked list.  */
4918 
4919 rtx
4920 emit_note (enum insn_note kind)
4921 {
4922   rtx note;
4923 
4924   note = rtx_alloc (NOTE);
4925   INSN_UID (note) = cur_insn_uid++;
4926   NOTE_KIND (note) = kind;
4927   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4928   BLOCK_FOR_INSN (note) = NULL;
4929   add_insn (note);
4930   return note;
4931 }
4932 
4933 /* Emit a clobber of lvalue X.  */
4934 
4935 rtx
4936 emit_clobber (rtx x)
4937 {
4938   /* CONCATs should not appear in the insn stream.  */
4939   if (GET_CODE (x) == CONCAT)
4940     {
4941       emit_clobber (XEXP (x, 0));
4942       return emit_clobber (XEXP (x, 1));
4943     }
4944   return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4945 }
4946 
4947 /* Return a sequence of insns to clobber lvalue X.  */
4948 
4949 rtx
4950 gen_clobber (rtx x)
4951 {
4952   rtx seq;
4953 
4954   start_sequence ();
4955   emit_clobber (x);
4956   seq = get_insns ();
4957   end_sequence ();
4958   return seq;
4959 }
4960 
4961 /* Emit a use of rvalue X.  */
4962 
4963 rtx
4964 emit_use (rtx x)
4965 {
4966   /* CONCATs should not appear in the insn stream.  */
4967   if (GET_CODE (x) == CONCAT)
4968     {
4969       emit_use (XEXP (x, 0));
4970       return emit_use (XEXP (x, 1));
4971     }
4972   return emit_insn (gen_rtx_USE (VOIDmode, x));
4973 }
4974 
4975 /* Return a sequence of insns to use rvalue X.  */
4976 
4977 rtx
4978 gen_use (rtx x)
4979 {
4980   rtx seq;
4981 
4982   start_sequence ();
4983   emit_use (x);
4984   seq = get_insns ();
4985   end_sequence ();
4986   return seq;
4987 }
4988 
4989 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4990    note of this type already exists, remove it first.  */
4991 
4992 rtx
4993 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4994 {
4995   rtx note = find_reg_note (insn, kind, NULL_RTX);
4996 
4997   switch (kind)
4998     {
4999     case REG_EQUAL:
5000     case REG_EQUIV:
5001       /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5002 	 has multiple sets (some callers assume single_set
5003 	 means the insn only has one set, when in fact it
5004 	 means the insn only has one * useful * set).  */
5005       if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5006 	{
5007 	  gcc_assert (!note);
5008 	  return NULL_RTX;
5009 	}
5010 
5011       /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5012 	 It serves no useful purpose and breaks eliminate_regs.  */
5013       if (GET_CODE (datum) == ASM_OPERANDS)
5014 	return NULL_RTX;
5015 
5016       if (note)
5017 	{
5018 	  XEXP (note, 0) = datum;
5019 	  df_notes_rescan (insn);
5020 	  return note;
5021 	}
5022       break;
5023 
5024     default:
5025       if (note)
5026 	{
5027 	  XEXP (note, 0) = datum;
5028 	  return note;
5029 	}
5030       break;
5031     }
5032 
5033   add_reg_note (insn, kind, datum);
5034 
5035   switch (kind)
5036     {
5037     case REG_EQUAL:
5038     case REG_EQUIV:
5039       df_notes_rescan (insn);
5040       break;
5041     default:
5042       break;
5043     }
5044 
5045   return REG_NOTES (insn);
5046 }
5047 
5048 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
5049 rtx
5050 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5051 {
5052   rtx set = single_set (insn);
5053 
5054   if (set && SET_DEST (set) == dst)
5055     return set_unique_reg_note (insn, kind, datum);
5056   return NULL_RTX;
5057 }
5058 
5059 /* Return an indication of which type of insn should have X as a body.
5060    The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */
5061 
5062 static enum rtx_code
5063 classify_insn (rtx x)
5064 {
5065   if (LABEL_P (x))
5066     return CODE_LABEL;
5067   if (GET_CODE (x) == CALL)
5068     return CALL_INSN;
5069   if (ANY_RETURN_P (x))
5070     return JUMP_INSN;
5071   if (GET_CODE (x) == SET)
5072     {
5073       if (SET_DEST (x) == pc_rtx)
5074 	return JUMP_INSN;
5075       else if (GET_CODE (SET_SRC (x)) == CALL)
5076 	return CALL_INSN;
5077       else
5078 	return INSN;
5079     }
5080   if (GET_CODE (x) == PARALLEL)
5081     {
5082       int j;
5083       for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5084 	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5085 	  return CALL_INSN;
5086 	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5087 		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5088 	  return JUMP_INSN;
5089 	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5090 		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5091 	  return CALL_INSN;
5092     }
5093   return INSN;
5094 }
5095 
5096 /* Emit the rtl pattern X as an appropriate kind of insn.
5097    If X is a label, it is simply added into the insn chain.  */
5098 
5099 rtx
5100 emit (rtx x)
5101 {
5102   enum rtx_code code = classify_insn (x);
5103 
5104   switch (code)
5105     {
5106     case CODE_LABEL:
5107       return emit_label (x);
5108     case INSN:
5109       return emit_insn (x);
5110     case  JUMP_INSN:
5111       {
5112 	rtx insn = emit_jump_insn (x);
5113 	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5114 	  return emit_barrier ();
5115 	return insn;
5116       }
5117     case CALL_INSN:
5118       return emit_call_insn (x);
5119     case DEBUG_INSN:
5120       return emit_debug_insn (x);
5121     default:
5122       gcc_unreachable ();
5123     }
5124 }
5125 
5126 /* Space for free sequence stack entries.  */
5127 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5128 
5129 /* Begin emitting insns to a sequence.  If this sequence will contain
5130    something that might cause the compiler to pop arguments to function
5131    calls (because those pops have previously been deferred; see
5132    INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5133    before calling this function.  That will ensure that the deferred
5134    pops are not accidentally emitted in the middle of this sequence.  */
5135 
5136 void
5137 start_sequence (void)
5138 {
5139   struct sequence_stack *tem;
5140 
5141   if (free_sequence_stack != NULL)
5142     {
5143       tem = free_sequence_stack;
5144       free_sequence_stack = tem->next;
5145     }
5146   else
5147     tem = ggc_alloc_sequence_stack ();
5148 
5149   tem->next = seq_stack;
5150   tem->first = get_insns ();
5151   tem->last = get_last_insn ();
5152 
5153   seq_stack = tem;
5154 
5155   set_first_insn (0);
5156   set_last_insn (0);
5157 }
5158 
5159 /* Set up the insn chain starting with FIRST as the current sequence,
5160    saving the previously current one.  See the documentation for
5161    start_sequence for more information about how to use this function.  */
5162 
5163 void
5164 push_to_sequence (rtx first)
5165 {
5166   rtx last;
5167 
5168   start_sequence ();
5169 
5170   for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5171     ;
5172 
5173   set_first_insn (first);
5174   set_last_insn (last);
5175 }
5176 
5177 /* Like push_to_sequence, but take the last insn as an argument to avoid
5178    looping through the list.  */
5179 
5180 void
5181 push_to_sequence2 (rtx first, rtx last)
5182 {
5183   start_sequence ();
5184 
5185   set_first_insn (first);
5186   set_last_insn (last);
5187 }
5188 
5189 /* Set up the outer-level insn chain
5190    as the current sequence, saving the previously current one.  */
5191 
5192 void
5193 push_topmost_sequence (void)
5194 {
5195   struct sequence_stack *stack, *top = NULL;
5196 
5197   start_sequence ();
5198 
5199   for (stack = seq_stack; stack; stack = stack->next)
5200     top = stack;
5201 
5202   set_first_insn (top->first);
5203   set_last_insn (top->last);
5204 }
5205 
5206 /* After emitting to the outer-level insn chain, update the outer-level
5207    insn chain, and restore the previous saved state.  */
5208 
5209 void
5210 pop_topmost_sequence (void)
5211 {
5212   struct sequence_stack *stack, *top = NULL;
5213 
5214   for (stack = seq_stack; stack; stack = stack->next)
5215     top = stack;
5216 
5217   top->first = get_insns ();
5218   top->last = get_last_insn ();
5219 
5220   end_sequence ();
5221 }
5222 
5223 /* After emitting to a sequence, restore previous saved state.
5224 
5225    To get the contents of the sequence just made, you must call
5226    `get_insns' *before* calling here.
5227 
5228    If the compiler might have deferred popping arguments while
5229    generating this sequence, and this sequence will not be immediately
5230    inserted into the instruction stream, use do_pending_stack_adjust
5231    before calling get_insns.  That will ensure that the deferred
5232    pops are inserted into this sequence, and not into some random
5233    location in the instruction stream.  See INHIBIT_DEFER_POP for more
5234    information about deferred popping of arguments.  */
5235 
5236 void
5237 end_sequence (void)
5238 {
5239   struct sequence_stack *tem = seq_stack;
5240 
5241   set_first_insn (tem->first);
5242   set_last_insn (tem->last);
5243   seq_stack = tem->next;
5244 
5245   memset (tem, 0, sizeof (*tem));
5246   tem->next = free_sequence_stack;
5247   free_sequence_stack = tem;
5248 }
5249 
5250 /* Return 1 if currently emitting into a sequence.  */
5251 
5252 int
5253 in_sequence_p (void)
5254 {
5255   return seq_stack != 0;
5256 }
5257 
5258 /* Put the various virtual registers into REGNO_REG_RTX.  */
5259 
5260 static void
5261 init_virtual_regs (void)
5262 {
5263   regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5264   regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5265   regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5266   regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5267   regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5268   regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5269     = virtual_preferred_stack_boundary_rtx;
5270 }
5271 
5272 
5273 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
5274 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5275 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5276 static int copy_insn_n_scratches;
5277 
5278 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5279    copied an ASM_OPERANDS.
5280    In that case, it is the original input-operand vector.  */
5281 static rtvec orig_asm_operands_vector;
5282 
5283 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5284    copied an ASM_OPERANDS.
5285    In that case, it is the copied input-operand vector.  */
5286 static rtvec copy_asm_operands_vector;
5287 
5288 /* Likewise for the constraints vector.  */
5289 static rtvec orig_asm_constraints_vector;
5290 static rtvec copy_asm_constraints_vector;
5291 
5292 /* Recursively create a new copy of an rtx for copy_insn.
5293    This function differs from copy_rtx in that it handles SCRATCHes and
5294    ASM_OPERANDs properly.
5295    Normally, this function is not used directly; use copy_insn as front end.
5296    However, you could first copy an insn pattern with copy_insn and then use
5297    this function afterwards to properly copy any REG_NOTEs containing
5298    SCRATCHes.  */
5299 
5300 rtx
5301 copy_insn_1 (rtx orig)
5302 {
5303   rtx copy;
5304   int i, j;
5305   RTX_CODE code;
5306   const char *format_ptr;
5307 
5308   if (orig == NULL)
5309     return NULL;
5310 
5311   code = GET_CODE (orig);
5312 
5313   switch (code)
5314     {
5315     case REG:
5316     case DEBUG_EXPR:
5317     CASE_CONST_ANY:
5318     case SYMBOL_REF:
5319     case CODE_LABEL:
5320     case PC:
5321     case CC0:
5322     case RETURN:
5323     case SIMPLE_RETURN:
5324       return orig;
5325     case CLOBBER:
5326       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5327          clobbers or clobbers of hard registers that originated as pseudos.
5328          This is needed to allow safe register renaming.  */
5329       if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5330 	  && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5331 	return orig;
5332       break;
5333 
5334     case SCRATCH:
5335       for (i = 0; i < copy_insn_n_scratches; i++)
5336 	if (copy_insn_scratch_in[i] == orig)
5337 	  return copy_insn_scratch_out[i];
5338       break;
5339 
5340     case CONST:
5341       if (shared_const_p (orig))
5342 	return orig;
5343       break;
5344 
5345       /* A MEM with a constant address is not sharable.  The problem is that
5346 	 the constant address may need to be reloaded.  If the mem is shared,
5347 	 then reloading one copy of this mem will cause all copies to appear
5348 	 to have been reloaded.  */
5349 
5350     default:
5351       break;
5352     }
5353 
5354   /* Copy the various flags, fields, and other information.  We assume
5355      that all fields need copying, and then clear the fields that should
5356      not be copied.  That is the sensible default behavior, and forces
5357      us to explicitly document why we are *not* copying a flag.  */
5358   copy = shallow_copy_rtx (orig);
5359 
5360   /* We do not copy the USED flag, which is used as a mark bit during
5361      walks over the RTL.  */
5362   RTX_FLAG (copy, used) = 0;
5363 
5364   /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
5365   if (INSN_P (orig))
5366     {
5367       RTX_FLAG (copy, jump) = 0;
5368       RTX_FLAG (copy, call) = 0;
5369       RTX_FLAG (copy, frame_related) = 0;
5370     }
5371 
5372   format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5373 
5374   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5375     switch (*format_ptr++)
5376       {
5377       case 'e':
5378 	if (XEXP (orig, i) != NULL)
5379 	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5380 	break;
5381 
5382       case 'E':
5383       case 'V':
5384 	if (XVEC (orig, i) == orig_asm_constraints_vector)
5385 	  XVEC (copy, i) = copy_asm_constraints_vector;
5386 	else if (XVEC (orig, i) == orig_asm_operands_vector)
5387 	  XVEC (copy, i) = copy_asm_operands_vector;
5388 	else if (XVEC (orig, i) != NULL)
5389 	  {
5390 	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5391 	    for (j = 0; j < XVECLEN (copy, i); j++)
5392 	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5393 	  }
5394 	break;
5395 
5396       case 't':
5397       case 'w':
5398       case 'i':
5399       case 's':
5400       case 'S':
5401       case 'u':
5402       case '0':
5403 	/* These are left unchanged.  */
5404 	break;
5405 
5406       default:
5407 	gcc_unreachable ();
5408       }
5409 
5410   if (code == SCRATCH)
5411     {
5412       i = copy_insn_n_scratches++;
5413       gcc_assert (i < MAX_RECOG_OPERANDS);
5414       copy_insn_scratch_in[i] = orig;
5415       copy_insn_scratch_out[i] = copy;
5416     }
5417   else if (code == ASM_OPERANDS)
5418     {
5419       orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5420       copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5421       orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5422       copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5423     }
5424 
5425   return copy;
5426 }
5427 
5428 /* Create a new copy of an rtx.
5429    This function differs from copy_rtx in that it handles SCRATCHes and
5430    ASM_OPERANDs properly.
5431    INSN doesn't really have to be a full INSN; it could be just the
5432    pattern.  */
5433 rtx
5434 copy_insn (rtx insn)
5435 {
5436   copy_insn_n_scratches = 0;
5437   orig_asm_operands_vector = 0;
5438   orig_asm_constraints_vector = 0;
5439   copy_asm_operands_vector = 0;
5440   copy_asm_constraints_vector = 0;
5441   return copy_insn_1 (insn);
5442 }
5443 
5444 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5445    on that assumption that INSN itself remains in its original place.  */
5446 
5447 rtx
5448 copy_delay_slot_insn (rtx insn)
5449 {
5450   /* Copy INSN with its rtx_code, all its notes, location etc.  */
5451   insn = copy_rtx (insn);
5452   INSN_UID (insn) = cur_insn_uid++;
5453   return insn;
5454 }
5455 
5456 /* Initialize data structures and variables in this file
5457    before generating rtl for each function.  */
5458 
5459 void
5460 init_emit (void)
5461 {
5462   set_first_insn (NULL);
5463   set_last_insn (NULL);
5464   if (MIN_NONDEBUG_INSN_UID)
5465     cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5466   else
5467     cur_insn_uid = 1;
5468   cur_debug_insn_uid = 1;
5469   reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5470   first_label_num = label_num;
5471   seq_stack = NULL;
5472 
5473   /* Init the tables that describe all the pseudo regs.  */
5474 
5475   crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5476 
5477   crtl->emit.regno_pointer_align
5478     = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5479 
5480   regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5481 
5482   /* Put copies of all the hard registers into regno_reg_rtx.  */
5483   memcpy (regno_reg_rtx,
5484 	  initial_regno_reg_rtx,
5485 	  FIRST_PSEUDO_REGISTER * sizeof (rtx));
5486 
5487   /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
5488   init_virtual_regs ();
5489 
5490   /* Indicate that the virtual registers and stack locations are
5491      all pointers.  */
5492   REG_POINTER (stack_pointer_rtx) = 1;
5493   REG_POINTER (frame_pointer_rtx) = 1;
5494   REG_POINTER (hard_frame_pointer_rtx) = 1;
5495   REG_POINTER (arg_pointer_rtx) = 1;
5496 
5497   REG_POINTER (virtual_incoming_args_rtx) = 1;
5498   REG_POINTER (virtual_stack_vars_rtx) = 1;
5499   REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5500   REG_POINTER (virtual_outgoing_args_rtx) = 1;
5501   REG_POINTER (virtual_cfa_rtx) = 1;
5502 
5503 #ifdef STACK_BOUNDARY
5504   REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5505   REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5506   REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5507   REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5508 
5509   REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5510   REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5511   REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5512   REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5513   REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5514 #endif
5515 
5516 #ifdef INIT_EXPANDERS
5517   INIT_EXPANDERS;
5518 #endif
5519 }
5520 
5521 /* Generate a vector constant for mode MODE and constant value CONSTANT.  */
5522 
5523 static rtx
5524 gen_const_vector (enum machine_mode mode, int constant)
5525 {
5526   rtx tem;
5527   rtvec v;
5528   int units, i;
5529   enum machine_mode inner;
5530 
5531   units = GET_MODE_NUNITS (mode);
5532   inner = GET_MODE_INNER (mode);
5533 
5534   gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5535 
5536   v = rtvec_alloc (units);
5537 
5538   /* We need to call this function after we set the scalar const_tiny_rtx
5539      entries.  */
5540   gcc_assert (const_tiny_rtx[constant][(int) inner]);
5541 
5542   for (i = 0; i < units; ++i)
5543     RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5544 
5545   tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5546   return tem;
5547 }
5548 
5549 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
5550    all elements are zero, and the one vector when all elements are one.  */
5551 rtx
5552 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5553 {
5554   enum machine_mode inner = GET_MODE_INNER (mode);
5555   int nunits = GET_MODE_NUNITS (mode);
5556   rtx x;
5557   int i;
5558 
5559   /* Check to see if all of the elements have the same value.  */
5560   x = RTVEC_ELT (v, nunits - 1);
5561   for (i = nunits - 2; i >= 0; i--)
5562     if (RTVEC_ELT (v, i) != x)
5563       break;
5564 
5565   /* If the values are all the same, check to see if we can use one of the
5566      standard constant vectors.  */
5567   if (i == -1)
5568     {
5569       if (x == CONST0_RTX (inner))
5570 	return CONST0_RTX (mode);
5571       else if (x == CONST1_RTX (inner))
5572 	return CONST1_RTX (mode);
5573       else if (x == CONSTM1_RTX (inner))
5574 	return CONSTM1_RTX (mode);
5575     }
5576 
5577   return gen_rtx_raw_CONST_VECTOR (mode, v);
5578 }
5579 
5580 /* Initialise global register information required by all functions.  */
5581 
5582 void
5583 init_emit_regs (void)
5584 {
5585   int i;
5586   enum machine_mode mode;
5587   mem_attrs *attrs;
5588 
5589   /* Reset register attributes */
5590   htab_empty (reg_attrs_htab);
5591 
5592   /* We need reg_raw_mode, so initialize the modes now.  */
5593   init_reg_modes_target ();
5594 
5595   /* Assign register numbers to the globally defined register rtx.  */
5596   stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5597   frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5598   hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5599   arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5600   virtual_incoming_args_rtx =
5601     gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5602   virtual_stack_vars_rtx =
5603     gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5604   virtual_stack_dynamic_rtx =
5605     gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5606   virtual_outgoing_args_rtx =
5607     gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5608   virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5609   virtual_preferred_stack_boundary_rtx =
5610     gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5611 
5612   /* Initialize RTL for commonly used hard registers.  These are
5613      copied into regno_reg_rtx as we begin to compile each function.  */
5614   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5615     initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5616 
5617 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5618   return_address_pointer_rtx
5619     = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5620 #endif
5621 
5622   if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5623     pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5624   else
5625     pic_offset_table_rtx = NULL_RTX;
5626 
5627   for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5628     {
5629       mode = (enum machine_mode) i;
5630       attrs = ggc_alloc_cleared_mem_attrs ();
5631       attrs->align = BITS_PER_UNIT;
5632       attrs->addrspace = ADDR_SPACE_GENERIC;
5633       if (mode != BLKmode)
5634 	{
5635 	  attrs->size_known_p = true;
5636 	  attrs->size = GET_MODE_SIZE (mode);
5637 	  if (STRICT_ALIGNMENT)
5638 	    attrs->align = GET_MODE_ALIGNMENT (mode);
5639 	}
5640       mode_mem_attrs[i] = attrs;
5641     }
5642 }
5643 
5644 /* Create some permanent unique rtl objects shared between all functions.  */
5645 
5646 void
5647 init_emit_once (void)
5648 {
5649   int i;
5650   enum machine_mode mode;
5651   enum machine_mode double_mode;
5652 
5653   /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5654      hash tables.  */
5655   const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5656 				    const_int_htab_eq, NULL);
5657 
5658   const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5659 				       const_double_htab_eq, NULL);
5660 
5661   const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5662 				      const_fixed_htab_eq, NULL);
5663 
5664   mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5665 				    mem_attrs_htab_eq, NULL);
5666   reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5667 				    reg_attrs_htab_eq, NULL);
5668 
5669   /* Compute the word and byte modes.  */
5670 
5671   byte_mode = VOIDmode;
5672   word_mode = VOIDmode;
5673   double_mode = VOIDmode;
5674 
5675   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5676        mode != VOIDmode;
5677        mode = GET_MODE_WIDER_MODE (mode))
5678     {
5679       if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5680 	  && byte_mode == VOIDmode)
5681 	byte_mode = mode;
5682 
5683       if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5684 	  && word_mode == VOIDmode)
5685 	word_mode = mode;
5686     }
5687 
5688   for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5689        mode != VOIDmode;
5690        mode = GET_MODE_WIDER_MODE (mode))
5691     {
5692       if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5693 	  && double_mode == VOIDmode)
5694 	double_mode = mode;
5695     }
5696 
5697   ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5698 
5699 #ifdef INIT_EXPANDERS
5700   /* This is to initialize {init|mark|free}_machine_status before the first
5701      call to push_function_context_to.  This is needed by the Chill front
5702      end which calls push_function_context_to before the first call to
5703      init_function_start.  */
5704   INIT_EXPANDERS;
5705 #endif
5706 
5707   /* Create the unique rtx's for certain rtx codes and operand values.  */
5708 
5709   /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5710      tries to use these variables.  */
5711   for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5712     const_int_rtx[i + MAX_SAVED_CONST_INT] =
5713       gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5714 
5715   if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5716       && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5717     const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5718   else
5719     const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5720 
5721   REAL_VALUE_FROM_INT (dconst0,   0,  0, double_mode);
5722   REAL_VALUE_FROM_INT (dconst1,   1,  0, double_mode);
5723   REAL_VALUE_FROM_INT (dconst2,   2,  0, double_mode);
5724 
5725   dconstm1 = dconst1;
5726   dconstm1.sign = 1;
5727 
5728   dconsthalf = dconst1;
5729   SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5730 
5731   for (i = 0; i < 3; i++)
5732     {
5733       const REAL_VALUE_TYPE *const r =
5734 	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5735 
5736       for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5737 	   mode != VOIDmode;
5738 	   mode = GET_MODE_WIDER_MODE (mode))
5739 	const_tiny_rtx[i][(int) mode] =
5740 	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5741 
5742       for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5743 	   mode != VOIDmode;
5744 	   mode = GET_MODE_WIDER_MODE (mode))
5745 	const_tiny_rtx[i][(int) mode] =
5746 	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5747 
5748       const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5749 
5750       for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5751 	   mode != VOIDmode;
5752 	   mode = GET_MODE_WIDER_MODE (mode))
5753 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5754 
5755       for (mode = MIN_MODE_PARTIAL_INT;
5756 	   mode <= MAX_MODE_PARTIAL_INT;
5757 	   mode = (enum machine_mode)((int)(mode) + 1))
5758 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5759     }
5760 
5761   const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5762 
5763   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5764        mode != VOIDmode;
5765        mode = GET_MODE_WIDER_MODE (mode))
5766     const_tiny_rtx[3][(int) mode] = constm1_rtx;
5767 
5768   for (mode = MIN_MODE_PARTIAL_INT;
5769        mode <= MAX_MODE_PARTIAL_INT;
5770        mode = (enum machine_mode)((int)(mode) + 1))
5771     const_tiny_rtx[3][(int) mode] = constm1_rtx;
5772 
5773   for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5774        mode != VOIDmode;
5775        mode = GET_MODE_WIDER_MODE (mode))
5776     {
5777       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5778       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5779     }
5780 
5781   for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5782        mode != VOIDmode;
5783        mode = GET_MODE_WIDER_MODE (mode))
5784     {
5785       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5786       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5787     }
5788 
5789   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5790        mode != VOIDmode;
5791        mode = GET_MODE_WIDER_MODE (mode))
5792     {
5793       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5794       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5795       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5796     }
5797 
5798   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5799        mode != VOIDmode;
5800        mode = GET_MODE_WIDER_MODE (mode))
5801     {
5802       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5803       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5804     }
5805 
5806   for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5807        mode != VOIDmode;
5808        mode = GET_MODE_WIDER_MODE (mode))
5809     {
5810       FCONST0(mode).data.high = 0;
5811       FCONST0(mode).data.low = 0;
5812       FCONST0(mode).mode = mode;
5813       const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5814 				      FCONST0 (mode), mode);
5815     }
5816 
5817   for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5818        mode != VOIDmode;
5819        mode = GET_MODE_WIDER_MODE (mode))
5820     {
5821       FCONST0(mode).data.high = 0;
5822       FCONST0(mode).data.low = 0;
5823       FCONST0(mode).mode = mode;
5824       const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5825 				      FCONST0 (mode), mode);
5826     }
5827 
5828   for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5829        mode != VOIDmode;
5830        mode = GET_MODE_WIDER_MODE (mode))
5831     {
5832       FCONST0(mode).data.high = 0;
5833       FCONST0(mode).data.low = 0;
5834       FCONST0(mode).mode = mode;
5835       const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5836 				      FCONST0 (mode), mode);
5837 
5838       /* We store the value 1.  */
5839       FCONST1(mode).data.high = 0;
5840       FCONST1(mode).data.low = 0;
5841       FCONST1(mode).mode = mode;
5842       FCONST1(mode).data
5843 	= double_int_one.lshift (GET_MODE_FBIT (mode),
5844 				 HOST_BITS_PER_DOUBLE_INT,
5845 				 SIGNED_FIXED_POINT_MODE_P (mode));
5846       const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5847 				      FCONST1 (mode), mode);
5848     }
5849 
5850   for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5851        mode != VOIDmode;
5852        mode = GET_MODE_WIDER_MODE (mode))
5853     {
5854       FCONST0(mode).data.high = 0;
5855       FCONST0(mode).data.low = 0;
5856       FCONST0(mode).mode = mode;
5857       const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5858 				      FCONST0 (mode), mode);
5859 
5860       /* We store the value 1.  */
5861       FCONST1(mode).data.high = 0;
5862       FCONST1(mode).data.low = 0;
5863       FCONST1(mode).mode = mode;
5864       FCONST1(mode).data
5865 	= double_int_one.lshift (GET_MODE_FBIT (mode),
5866 				 HOST_BITS_PER_DOUBLE_INT,
5867 				 SIGNED_FIXED_POINT_MODE_P (mode));
5868       const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5869 				      FCONST1 (mode), mode);
5870     }
5871 
5872   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5873        mode != VOIDmode;
5874        mode = GET_MODE_WIDER_MODE (mode))
5875     {
5876       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5877     }
5878 
5879   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5880        mode != VOIDmode;
5881        mode = GET_MODE_WIDER_MODE (mode))
5882     {
5883       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5884     }
5885 
5886   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5887        mode != VOIDmode;
5888        mode = GET_MODE_WIDER_MODE (mode))
5889     {
5890       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5891       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5892     }
5893 
5894   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5895        mode != VOIDmode;
5896        mode = GET_MODE_WIDER_MODE (mode))
5897     {
5898       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5899       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5900     }
5901 
5902   for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5903     if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5904       const_tiny_rtx[0][i] = const0_rtx;
5905 
5906   const_tiny_rtx[0][(int) BImode] = const0_rtx;
5907   if (STORE_FLAG_VALUE == 1)
5908     const_tiny_rtx[1][(int) BImode] = const1_rtx;
5909 
5910   pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
5911   ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
5912   simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
5913   cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
5914 }
5915 
5916 /* Produce exact duplicate of insn INSN after AFTER.
5917    Care updating of libcall regions if present.  */
5918 
5919 rtx
5920 emit_copy_of_insn_after (rtx insn, rtx after)
5921 {
5922   rtx new_rtx, link;
5923 
5924   switch (GET_CODE (insn))
5925     {
5926     case INSN:
5927       new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5928       break;
5929 
5930     case JUMP_INSN:
5931       new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5932       break;
5933 
5934     case DEBUG_INSN:
5935       new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5936       break;
5937 
5938     case CALL_INSN:
5939       new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5940       if (CALL_INSN_FUNCTION_USAGE (insn))
5941 	CALL_INSN_FUNCTION_USAGE (new_rtx)
5942 	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5943       SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5944       RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5945       RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5946       RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
5947 	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
5948       break;
5949 
5950     default:
5951       gcc_unreachable ();
5952     }
5953 
5954   /* Update LABEL_NUSES.  */
5955   mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
5956 
5957   INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
5958 
5959   /* If the old insn is frame related, then so is the new one.  This is
5960      primarily needed for IA-64 unwind info which marks epilogue insns,
5961      which may be duplicated by the basic block reordering code.  */
5962   RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
5963 
5964   /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5965      will make them.  REG_LABEL_TARGETs are created there too, but are
5966      supposed to be sticky, so we copy them.  */
5967   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5968     if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
5969       {
5970 	if (GET_CODE (link) == EXPR_LIST)
5971 	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
5972 			copy_insn_1 (XEXP (link, 0)));
5973 	else
5974 	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
5975       }
5976 
5977   INSN_CODE (new_rtx) = INSN_CODE (insn);
5978   return new_rtx;
5979 }
5980 
5981 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5982 rtx
5983 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5984 {
5985   if (hard_reg_clobbers[mode][regno])
5986     return hard_reg_clobbers[mode][regno];
5987   else
5988     return (hard_reg_clobbers[mode][regno] =
5989 	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5990 }
5991 
5992 location_t prologue_location;
5993 location_t epilogue_location;
5994 
5995 /* Hold current location information and last location information, so the
5996    datastructures are built lazily only when some instructions in given
5997    place are needed.  */
5998 static location_t curr_location;
5999 
6000 /* Allocate insn location datastructure.  */
6001 void
6002 insn_locations_init (void)
6003 {
6004   prologue_location = epilogue_location = 0;
6005   curr_location = UNKNOWN_LOCATION;
6006 }
6007 
6008 /* At the end of emit stage, clear current location.  */
6009 void
6010 insn_locations_finalize (void)
6011 {
6012   epilogue_location = curr_location;
6013   curr_location = UNKNOWN_LOCATION;
6014 }
6015 
6016 /* Set current location.  */
6017 void
6018 set_curr_insn_location (location_t location)
6019 {
6020   curr_location = location;
6021 }
6022 
6023 /* Get current location.  */
6024 location_t
6025 curr_insn_location (void)
6026 {
6027   return curr_location;
6028 }
6029 
6030 /* Return lexical scope block insn belongs to.  */
6031 tree
6032 insn_scope (const_rtx insn)
6033 {
6034   return LOCATION_BLOCK (INSN_LOCATION (insn));
6035 }
6036 
6037 /* Return line number of the statement that produced this insn.  */
6038 int
6039 insn_line (const_rtx insn)
6040 {
6041   return LOCATION_LINE (INSN_LOCATION (insn));
6042 }
6043 
6044 /* Return source file of the statement that produced this insn.  */
6045 const char *
6046 insn_file (const_rtx insn)
6047 {
6048   return LOCATION_FILE (INSN_LOCATION (insn));
6049 }
6050 
6051 /* Return true if memory model MODEL requires a pre-operation (release-style)
6052    barrier or a post-operation (acquire-style) barrier.  While not universal,
6053    this function matches behavior of several targets.  */
6054 
6055 bool
6056 need_atomic_barrier_p (enum memmodel model, bool pre)
6057 {
6058   switch (model & MEMMODEL_MASK)
6059     {
6060     case MEMMODEL_RELAXED:
6061     case MEMMODEL_CONSUME:
6062       return false;
6063     case MEMMODEL_RELEASE:
6064       return pre;
6065     case MEMMODEL_ACQUIRE:
6066       return !pre;
6067     case MEMMODEL_ACQ_REL:
6068     case MEMMODEL_SEQ_CST:
6069       return true;
6070     default:
6071       gcc_unreachable ();
6072     }
6073 }
6074 
6075 #include "gt-emit-rtl.h"
6076