xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/emit-rtl.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "real.h"
#include "tree.h"
#include "fold-const.h"
#include "varasm.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "tree-eh.h"
#include "tm_p.h"
#include "flags.h"
#include "stringpool.h"
#include "hashtab.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "regs.h"
#include "recog.h"
#include "bitmap.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "builtins.h"
#include "rtl-iter.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

machine_mode byte_mode;		/* Mode whose width is BITS_PER_UNIT.  */
machine_mode word_mode;		/* Mode whose width is BITS_PER_WORD.  */
machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
machine_mode ptr_mode;		/* Mode whose width is POINTER_SIZE.  */
/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in top-level
   structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record the fixed-point constants 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_hasher<rtx>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_hasher<reg_attrs *>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (also really a CONST_DOUBLE).  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure for decl DECL and offset OFFSET,
   and insert it into the hash table if one identical to it is not
   already in the table.  */
395 
396 static reg_attrs *
397 get_reg_attrs (tree decl, int offset)
398 {
399   reg_attrs attrs;
400 
401   /* If everything is the default, we can just return zero.  */
402   if (decl == 0 && offset == 0)
403     return 0;
404 
405   attrs.decl = decl;
406   attrs.offset = offset;
407 
408   reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
409   if (*slot == 0)
410     {
411       *slot = ggc_alloc<reg_attrs> ();
412       memcpy (*slot, &attrs, sizeof (reg_attrs));
413     }
414 
415   return *slot;
416 }
417 
418 
419 #if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   insns across it and to block register equivalences from being recognized
   across this insn.  */
422 
423 rtx
424 gen_blockage (void)
425 {
426   rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
427   MEM_VOLATILE_P (x) = true;
428   return x;
429 }
430 #endif
431 
432 
433 /* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
434    don't attempt to share with the various global pieces of rtl (such as
435    frame_pointer_rtx).  */
436 
437 rtx
438 gen_raw_REG (machine_mode mode, int regno)
439 {
440   rtx x = gen_rtx_raw_REG (mode, regno);
441   ORIGINAL_REGNO (x) = regno;
442   return x;
443 }
444 
445 /* There are some RTL codes that require special attention; the generation
446    functions do the raw handling.  If you add to this list, modify
447    special_rtx in gengenrtl.c as well.  */
448 
449 rtx_expr_list *
450 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
451 {
452   return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
453 						 expr_list));
454 }
455 
456 rtx_insn_list *
457 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
458 {
459   return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
460 						 insn_list));
461 }
462 
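/* Raw constructor for an INSN: build the insn directly from all of its
   fields.  Note that this only fills in the PREV_INSN/NEXT_INSN fields
   it is given; unlike the emit_* functions later in this file, it does
   not splice the result into an existing insn chain.  */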
rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

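/* Return the unique CONST_INT rtx for value ARG.  Values in
   [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] come from the
   pre-allocated const_int_rtx array; all other values are cached in
   const_int_htab, so CONST_INTs can be compared with pointer
   equality.  */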
rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}

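/* Like GEN_INT, but canonicalize C for MODE first by sign-extending it
   from the precision of MODE.  For example, gen_int_mode (0xff, QImode)
   yields constm1_rtx, since 0xff truncated to 8 bits and sign-extended
   is -1.  */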
rtx
gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
      r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of I1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., I1 consists only of copies of the sign bit, and the signs
	of I0 and I1 are the same), then we return a CONST_INT for I0.
     3) Otherwise, we create a CONST_DOUBLE for I0 and I1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
		  || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

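/* Return a REG rtx for register number REGNO in mode MODE.  For the
   special hard registers (frame pointer, arg pointer, etc.) this
   returns the shared pre-allocated rtx where that is safe; see the
   comment in the body below.  */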
rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

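/* Return a new MEM rtx of mode MODE referencing address ADDR.  Its
   memory attributes start out cleared and are filled in later, e.g. by
   set_mem_attributes.  */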
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a MEM referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves?  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use a subreg to store a floating-point value in
		an integer mode.  Although the floating-point and the
		integer modes need the same number of hard registers,
		the size of the floating-point mode can be less than
		that of the integer mode.  LRA also uses subregs when a
		register must be used in different modes within one
		insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be a lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}

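/* Return (subreg:MODE (REG) OFFSET), asserting via validate_subreg
   that the combination is representable.  */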
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical
   SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

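/* Return a VAR_LOCATION rtx of mode MODE recording that variable DECL
   can be found at location LOC, with initialization status STATUS.  */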
rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}


/* Create an rtvec and store within it the RTXen passed as arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

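/* Like gen_rtvec, but take the N rtxs from the array ARGP rather than
   from a variable argument list.  */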
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

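/* Overload of the above that reads the N elements from an array of
   rtx_insn pointers.  */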
rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}


/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);
  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

/* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				   REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values; rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}

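/* Return the high-order part of X as a value of mode MODE; the
   counterpart of gen_lowpart.  Unlike gen_lowpart_common, this asserts
   instead of returning 0 when the highpart cannot be formed.  */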
rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return the offset in bytes needed to get the OUTERMODE high part
   of a value of mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Use of this function can now be replaced by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Return 1 if the two MEM_EXPRs can be considered equal,
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
	          MAX (align, get_object_alignment (MEM_EXPR (mem))))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !tree_fits_uhwi_p (byte_offset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += tree_to_uhwi (byte_offset);
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

1752   /* It can happen that type_for_mode was given a mode for which there
1753      is no language-level type, in which case it returns NULL and we can
1754      see that here.  */
1755   if (t == NULL_TREE)
1756     return;
1757 
1758   type = TYPE_P (t) ? t : TREE_TYPE (t);
1759   if (type == error_mark_node)
1760     return;
1761 
1762   /* If we have already set DECL_RTL = ref, get_alias_set will get the
1763      wrong answer, as it assumes that DECL_RTL already has the right alias
1764      info.  Callers should not set DECL_RTL until after the call to
1765      set_mem_attributes.  */
1766   gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1767 
1768   memset (&attrs, 0, sizeof (attrs));
1769 
1770   /* Get the alias set from the expression or type (perhaps using a
1771      front-end routine) and use it.  */
1772   attrs.alias = get_alias_set (t);
1773 
1774   MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1775   MEM_POINTER (ref) = POINTER_TYPE_P (type);
1776 
1777   /* Default values from pre-existing memory attributes if present.  */
1778   refattrs = MEM_ATTRS (ref);
1779   if (refattrs)
1780     {
1781       /* ??? Can this ever happen?  Calling this routine on a MEM that
1782 	 already carries memory attributes should probably be invalid.  */
1783       attrs.expr = refattrs->expr;
1784       attrs.offset_known_p = refattrs->offset_known_p;
1785       attrs.offset = refattrs->offset;
1786       attrs.size_known_p = refattrs->size_known_p;
1787       attrs.size = refattrs->size;
1788       attrs.align = refattrs->align;
1789     }
1790 
1791   /* Otherwise, default values from the mode of the MEM reference.  */
1792   else
1793     {
1794       defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1795       gcc_assert (!defattrs->expr);
1796       gcc_assert (!defattrs->offset_known_p);
1797 
1798       /* Respect mode size.  */
1799       attrs.size_known_p = defattrs->size_known_p;
1800       attrs.size = defattrs->size;
1801       /* ??? Is this really necessary?  We probably should always get
1802 	 the size from the type below.  */
1803 
1804       /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1805          if T is an object, always compute the object alignment below.  */
1806       if (TYPE_P (t))
1807 	attrs.align = defattrs->align;
1808       else
1809 	attrs.align = BITS_PER_UNIT;
1810       /* ??? If T is a type, respecting mode alignment may *also* be wrong
1811 	 e.g. if the type carries an alignment attribute.  Should we be
1812 	 able to simply always use TYPE_ALIGN?  */
1813     }
1814 
1815   /* We can set the alignment from the type if we are making an object,
1816      this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
1817   if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1818     attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1819 
1820   /* If the size is known, we can set that.  */
1821   tree new_size = TYPE_SIZE_UNIT (type);
1822 
1823   /* The address-space is that of the type.  */
1824   as = TYPE_ADDR_SPACE (type);
1825 
1826   /* If T is not a type, we may be able to deduce some more information about
1827      the expression.  */
1828   if (! TYPE_P (t))
1829     {
1830       tree base;
1831 
1832       if (TREE_THIS_VOLATILE (t))
1833 	MEM_VOLATILE_P (ref) = 1;
1834 
1835       /* Now remove any conversions: they don't change what the underlying
1836 	 object is.  Likewise for SAVE_EXPR.  */
1837       while (CONVERT_EXPR_P (t)
1838 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
1839 	     || TREE_CODE (t) == SAVE_EXPR)
1840 	t = TREE_OPERAND (t, 0);
1841 
1842       /* Note whether this expression can trap.  */
1843       MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1844 
1845       base = get_base_address (t);
1846       if (base)
1847 	{
1848 	  if (DECL_P (base)
1849 	      && TREE_READONLY (base)
1850 	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1851 	      && !TREE_THIS_VOLATILE (base))
1852 	    MEM_READONLY_P (ref) = 1;
1853 
1854 	  /* Mark static const strings readonly as well.  */
1855 	  if (TREE_CODE (base) == STRING_CST
1856 	      && TREE_READONLY (base)
1857 	      && TREE_STATIC (base))
1858 	    MEM_READONLY_P (ref) = 1;
1859 
1860 	  /* Address-space information is on the base object.  */
1861 	  if (TREE_CODE (base) == MEM_REF
1862 	      || TREE_CODE (base) == TARGET_MEM_REF)
1863 	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1864 								      0))));
1865 	  else
1866 	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1867 	}
1868 
1869       /* If this expression uses its parent's alias set, mark it such
1870 	 that we won't change it.  */
1871       if (component_uses_parent_alias_set_from (t) != NULL_TREE)
1872 	MEM_KEEP_ALIAS_SET_P (ref) = 1;
1873 
1874       /* If this is a decl, set the attributes of the MEM from it.  */
1875       if (DECL_P (t))
1876 	{
1877 	  attrs.expr = t;
1878 	  attrs.offset_known_p = true;
1879 	  attrs.offset = 0;
1880 	  apply_bitpos = bitpos;
1881 	  new_size = DECL_SIZE_UNIT (t);
1882 	}
1883 
1884       /* ???  If we end up with a constant here do record a MEM_EXPR.  */
1885       else if (CONSTANT_CLASS_P (t))
1886 	;
1887 
1888       /* If this is a field reference, record it.  */
1889       else if (TREE_CODE (t) == COMPONENT_REF)
1890 	{
1891 	  attrs.expr = t;
1892 	  attrs.offset_known_p = true;
1893 	  attrs.offset = 0;
1894 	  apply_bitpos = bitpos;
1895 	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1896 	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
1897 	}
1898 
1899       /* If this is an array reference, look for an outer field reference.  */
1900       else if (TREE_CODE (t) == ARRAY_REF)
1901 	{
1902 	  tree off_tree = size_zero_node;
1903 	  /* We can't modify t, because we use it at the end of the
1904 	     function.  */
1905 	  tree t2 = t;
1906 
1907 	  do
1908 	    {
1909 	      tree index = TREE_OPERAND (t2, 1);
1910 	      tree low_bound = array_ref_low_bound (t2);
1911 	      tree unit_size = array_ref_element_size (t2);
1912 
1913 	      /* We assume all arrays have sizes that are a multiple of a byte.
1914 		 First subtract the lower bound, if any, in the type of the
1915 		 index, then convert to sizetype and multiply by the size of
1916 		 the array element.  */
1917 	      if (! integer_zerop (low_bound))
1918 		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1919 				     index, low_bound);
1920 
1921 	      off_tree = size_binop (PLUS_EXPR,
1922 				     size_binop (MULT_EXPR,
1923 						 fold_convert (sizetype,
1924 							       index),
1925 						 unit_size),
1926 				     off_tree);
1927 	      t2 = TREE_OPERAND (t2, 0);
1928 	    }
1929 	  while (TREE_CODE (t2) == ARRAY_REF);
1930 
1931 	  if (DECL_P (t2)
1932 	      || TREE_CODE (t2) == COMPONENT_REF)
1933 	    {
1934 	      attrs.expr = t2;
1935 	      attrs.offset_known_p = false;
1936 	      if (tree_fits_uhwi_p (off_tree))
1937 		{
1938 		  attrs.offset_known_p = true;
1939 		  attrs.offset = tree_to_uhwi (off_tree);
1940 		  apply_bitpos = bitpos;
1941 		}
1942 	    }
1943 	  /* Else do not record a MEM_EXPR.  */
1944 	}
1945 
1946       /* If this is an indirect reference, record it.  */
1947       else if (TREE_CODE (t) == MEM_REF
1948 	       || TREE_CODE (t) == TARGET_MEM_REF)
1949 	{
1950 	  attrs.expr = t;
1951 	  attrs.offset_known_p = true;
1952 	  attrs.offset = 0;
1953 	  apply_bitpos = bitpos;
1954 	}
1955 
1956       /* Compute the alignment.  */
1957       unsigned int obj_align;
1958       unsigned HOST_WIDE_INT obj_bitpos;
1959       get_object_alignment_1 (t, &obj_align, &obj_bitpos);
1960       obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1961       if (obj_bitpos != 0)
1962 	obj_align = (obj_bitpos & -obj_bitpos);
1963       attrs.align = MAX (attrs.align, obj_align);
1964     }
1965 
1966   if (tree_fits_uhwi_p (new_size))
1967     {
1968       attrs.size_known_p = true;
1969       attrs.size = tree_to_uhwi (new_size);
1970     }
1971 
1972   /* If we modified OFFSET based on T, then subtract the outstanding
1973      bit position offset.  Similarly, increase the size of the accessed
1974      object to contain the negative offset.  */
1975   if (apply_bitpos)
1976     {
1977       gcc_assert (attrs.offset_known_p);
1978       attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1979       if (attrs.size_known_p)
1980 	attrs.size += apply_bitpos / BITS_PER_UNIT;
1981     }
1982 
1983   /* Now set the attributes we computed above.  */
1984   attrs.addrspace = as;
1985   set_mem_attrs (ref, &attrs);
1986 }
1987 
1988 void
1989 set_mem_attributes (rtx ref, tree t, int objectp)
1990 {
1991   set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1992 }
1993 
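/* Usage sketch (DECL and ADDR are hypothetical): expansion code creates
   the MEM and attaches the attributes before installing DECL_RTL, as the
   assertion in set_mem_attributes_minus_bitpos requires:

     rtx ref = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (ref, decl, 1);
     SET_DECL_RTL (decl, ref);

   REF then carries the alias set, alignment, size and MEM_EXPR derived
   from DECL.  */
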
1994 /* Set the alias set of MEM to SET.  */
1995 
1996 void
1997 set_mem_alias_set (rtx mem, alias_set_type set)
1998 {
1999   struct mem_attrs attrs;
2000 
2001   /* If the new and old alias sets don't conflict, something is wrong.  */
2002   gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2003   attrs = *get_mem_attrs (mem);
2004   attrs.alias = set;
2005   set_mem_attrs (mem, &attrs);
2006 }
2007 
2008 /* Set the address space of MEM to ADDRSPACE (target-defined).  */
2009 
2010 void
2011 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2012 {
2013   struct mem_attrs attrs;
2014 
2015   attrs = *get_mem_attrs (mem);
2016   attrs.addrspace = addrspace;
2017   set_mem_attrs (mem, &attrs);
2018 }
2019 
2020 /* Set the alignment of MEM to ALIGN bits.  */
2021 
2022 void
2023 set_mem_align (rtx mem, unsigned int align)
2024 {
2025   struct mem_attrs attrs;
2026 
2027   attrs = *get_mem_attrs (mem);
2028   attrs.align = align;
2029   set_mem_attrs (mem, &attrs);
2030 }
2031 
2032 /* Set the expr for MEM to EXPR.  */
2033 
2034 void
2035 set_mem_expr (rtx mem, tree expr)
2036 {
2037   struct mem_attrs attrs;
2038 
2039   attrs = *get_mem_attrs (mem);
2040   attrs.expr = expr;
2041   set_mem_attrs (mem, &attrs);
2042 }
2043 
2044 /* Set the offset of MEM to OFFSET.  */
2045 
2046 void
2047 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
2048 {
2049   struct mem_attrs attrs;
2050 
2051   attrs = *get_mem_attrs (mem);
2052   attrs.offset_known_p = true;
2053   attrs.offset = offset;
2054   set_mem_attrs (mem, &attrs);
2055 }
2056 
2057 /* Clear the offset of MEM.  */
2058 
2059 void
2060 clear_mem_offset (rtx mem)
2061 {
2062   struct mem_attrs attrs;
2063 
2064   attrs = *get_mem_attrs (mem);
2065   attrs.offset_known_p = false;
2066   set_mem_attrs (mem, &attrs);
2067 }
2068 
2069 /* Set the size of MEM to SIZE.  */
2070 
2071 void
2072 set_mem_size (rtx mem, HOST_WIDE_INT size)
2073 {
2074   struct mem_attrs attrs;
2075 
2076   attrs = *get_mem_attrs (mem);
2077   attrs.size_known_p = true;
2078   attrs.size = size;
2079   set_mem_attrs (mem, &attrs);
2080 }
2081 
2082 /* Clear the size of MEM.  */
2083 
2084 void
2085 clear_mem_size (rtx mem)
2086 {
2087   struct mem_attrs attrs;
2088 
2089   attrs = *get_mem_attrs (mem);
2090   attrs.size_known_p = false;
2091   set_mem_attrs (mem, &attrs);
2092 }
2093 
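/* Sketch of the accessor family above (MEM hypothetical): each helper
   copies the attribute block, updates a single field and reinstalls it,
   so the previous mem_attrs are never modified in place.  For example

     set_mem_align (mem, 64);
     set_mem_offset (mem, 8);
     clear_mem_size (mem);

   leaves MEM claiming 64-bit alignment, a known offset of 8 within its
   MEM_EXPR, and an unknown size.  */
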
2094 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2095    and its address changed to ADDR.  (VOIDmode means don't change the mode.
2096    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
2097    returned memory location is required to be valid.  INPLACE is true if any
2098    changes can be made directly to MEMREF or false if MEMREF must be treated
2099    as immutable.
2100 
2101    The memory attributes are not changed.  */
2102 
2103 static rtx
2104 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2105 		  bool inplace)
2106 {
2107   addr_space_t as;
2108   rtx new_rtx;
2109 
2110   gcc_assert (MEM_P (memref));
2111   as = MEM_ADDR_SPACE (memref);
2112   if (mode == VOIDmode)
2113     mode = GET_MODE (memref);
2114   if (addr == 0)
2115     addr = XEXP (memref, 0);
2116   if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2117       && (!validate || memory_address_addr_space_p (mode, addr, as)))
2118     return memref;
2119 
2120   /* Don't validate the address for LRA.  LRA can make the address valid
2121      by itself in the most efficient way.  */
2122   if (validate && !lra_in_progress)
2123     {
2124       if (reload_in_progress || reload_completed)
2125 	gcc_assert (memory_address_addr_space_p (mode, addr, as));
2126       else
2127 	addr = memory_address_addr_space (mode, addr, as);
2128     }
2129 
2130   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2131     return memref;
2132 
2133   if (inplace)
2134     {
2135       XEXP (memref, 0) = addr;
2136       return memref;
2137     }
2138 
2139   new_rtx = gen_rtx_MEM (mode, addr);
2140   MEM_COPY_ATTRIBUTES (new_rtx, memref);
2141   return new_rtx;
2142 }
2143 
2144 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2145    way we are changing MEMREF, so we only preserve the alias set.  */
2146 
2147 rtx
2148 change_address (rtx memref, machine_mode mode, rtx addr)
2149 {
2150   rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2151   machine_mode mmode = GET_MODE (new_rtx);
2152   struct mem_attrs attrs, *defattrs;
2153 
2154   attrs = *get_mem_attrs (memref);
2155   defattrs = mode_mem_attrs[(int) mmode];
2156   attrs.expr = NULL_TREE;
2157   attrs.offset_known_p = false;
2158   attrs.size_known_p = defattrs->size_known_p;
2159   attrs.size = defattrs->size;
2160   attrs.align = defattrs->align;
2161 
2162   /* If there are no changes, just return the original memory reference.  */
2163   if (new_rtx == memref)
2164     {
2165       if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2166 	return new_rtx;
2167 
2168       new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2169       MEM_COPY_ATTRIBUTES (new_rtx, memref);
2170     }
2171 
2172   set_mem_attrs (new_rtx, &attrs);
2173   return new_rtx;
2174 }
2175 
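/* Example (sketch): retargeting a MEM to an unrelated address while
   keeping only the alias set:

     rtx word = change_address (mem, SImode, new_addr);

   Expr, offset and size are dropped because nothing is known about how
   NEW_ADDR relates to the old address.  */
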
2176 /* Return a memory reference like MEMREF, but with its mode changed
2177    to MODE and its address offset by OFFSET bytes.  If VALIDATE is
2178    nonzero, the memory address is forced to be valid.
2179    If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2180    and the caller is responsible for adjusting the MEMREF base register.
2181    If ADJUST_OBJECT is zero, the underlying object associated with the
2182    memory reference is left unchanged and the caller is responsible for
2183    dealing with it.  Otherwise, if the new memory reference is outside
2184    the underlying object, even partially, then the object is dropped.
2185    SIZE, if nonzero, is the size of an access in cases where MODE
2186    has no inherent size.  */
2187 
2188 rtx
2189 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
2190 		  int validate, int adjust_address, int adjust_object,
2191 		  HOST_WIDE_INT size)
2192 {
2193   rtx addr = XEXP (memref, 0);
2194   rtx new_rtx;
2195   machine_mode address_mode;
2196   int pbits;
2197   struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2198   unsigned HOST_WIDE_INT max_align;
2199 #ifdef POINTERS_EXTEND_UNSIGNED
2200   machine_mode pointer_mode
2201     = targetm.addr_space.pointer_mode (attrs.addrspace);
2202 #endif
2203 
2204   /* VOIDmode means no mode change for change_address_1.  */
2205   if (mode == VOIDmode)
2206     mode = GET_MODE (memref);
2207 
2208   /* Take the size of non-BLKmode accesses from the mode.  */
2209   defattrs = mode_mem_attrs[(int) mode];
2210   if (defattrs->size_known_p)
2211     size = defattrs->size;
2212 
2213   /* If there are no changes, just return the original memory reference.  */
2214   if (mode == GET_MODE (memref) && !offset
2215       && (size == 0 || (attrs.size_known_p && attrs.size == size))
2216       && (!validate || memory_address_addr_space_p (mode, addr,
2217 						    attrs.addrspace)))
2218     return memref;
2219 
2220   /* ??? Prefer to create garbage instead of creating shared rtl.
2221      This may happen even if offset is nonzero -- consider
2222      (plus (plus reg reg) const_int) -- so do this always.  */
2223   addr = copy_rtx (addr);
2224 
2225   /* Convert a possibly large offset to a signed value within the
2226      range of the target address space.  */
2227   address_mode = get_address_mode (memref);
2228   pbits = GET_MODE_BITSIZE (address_mode);
2229   if (HOST_BITS_PER_WIDE_INT > pbits)
2230     {
2231       int shift = HOST_BITS_PER_WIDE_INT - pbits;
2232       offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2233 		>> shift);
2234     }
2235 
2236   if (adjust_address)
2237     {
2238       /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2239 	 object, we can merge it into the LO_SUM.  */
2240       if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2241 	  && offset >= 0
2242 	  && (unsigned HOST_WIDE_INT) offset
2243 	      < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2244 	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2245 			       plus_constant (address_mode,
2246 					      XEXP (addr, 1), offset));
2247 #ifdef POINTERS_EXTEND_UNSIGNED
2248       /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2249 	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
2250 	 the fact that pointers are not allowed to overflow.  */
2251       else if (POINTERS_EXTEND_UNSIGNED > 0
2252 	       && GET_CODE (addr) == ZERO_EXTEND
2253 	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
2254 	       && trunc_int_for_mode (offset, pointer_mode) == offset)
2255 	addr = gen_rtx_ZERO_EXTEND (address_mode,
2256 				    plus_constant (pointer_mode,
2257 						   XEXP (addr, 0), offset));
2258 #endif
2259       else
2260 	addr = plus_constant (address_mode, addr, offset);
2261     }
2262 
2263   new_rtx = change_address_1 (memref, mode, addr, validate, false);
2264 
2265   /* If the address is a REG, change_address_1 rightfully returns memref,
2266      but this would destroy memref's MEM_ATTRS.  */
2267   if (new_rtx == memref && offset != 0)
2268     new_rtx = copy_rtx (new_rtx);
2269 
2270   /* Conservatively drop the object if we don't know where we start from.  */
2271   if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2272     {
2273       attrs.expr = NULL_TREE;
2274       attrs.alias = 0;
2275     }
2276 
2277   /* Compute the new values of the memory attributes due to this adjustment.
2278      We add the offsets and update the alignment.  */
2279   if (attrs.offset_known_p)
2280     {
2281       attrs.offset += offset;
2282 
2283       /* Drop the object if the new left end is not within its bounds.  */
2284       if (adjust_object && attrs.offset < 0)
2285 	{
2286 	  attrs.expr = NULL_TREE;
2287 	  attrs.alias = 0;
2288 	}
2289     }
2290 
2291   /* Compute the new alignment by taking the MIN of the alignment and the
2292      lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2293      is zero.  */
2294   if (offset != 0)
2295     {
2296       max_align = (offset & -offset) * BITS_PER_UNIT;
2297       attrs.align = MIN (attrs.align, max_align);
2298     }
2299 
2300   if (size)
2301     {
2302       /* Drop the object if the new right end is not within its bounds.  */
2303       if (adjust_object && (offset + size) > attrs.size)
2304 	{
2305 	  attrs.expr = NULL_TREE;
2306 	  attrs.alias = 0;
2307 	}
2308       attrs.size_known_p = true;
2309       attrs.size = size;
2310     }
2311   else if (attrs.size_known_p)
2312     {
2313       gcc_assert (!adjust_object);
2314       attrs.size -= offset;
2315       /* ??? The store_by_pieces machinery generates negative sizes,
2316 	 so don't assert for that here.  */
2317     }
2318 
2319   set_mem_attrs (new_rtx, &attrs);
2320 
2321   return new_rtx;
2322 }
2323 
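/* Callers normally go through the adjust_address wrapper macro rather
   than calling the function above directly.  As a sketch (the exact
   expansion lives in the headers),

     rtx hi = adjust_address (mem, SImode, 4);

   performs a validated address adjustment by 4 bytes with a mode change
   to SImode, leaving the underlying object alone.  */
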
2324 /* Return a memory reference like MEMREF, but with its mode changed
2325    to MODE and its address changed to ADDR, which is assumed to be
2326    MEMREF offset by OFFSET bytes.  If VALIDATE is
2327    nonzero, the memory address is forced to be valid.  */
2328 
2329 rtx
2330 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2331 			     HOST_WIDE_INT offset, int validate)
2332 {
2333   memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2334   return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2335 }
2336 
2337 /* Return a memory reference like MEMREF, but whose address is changed by
2338    adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
2339    known to be in OFFSET (possibly 1).  */
2340 
2341 rtx
2342 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2343 {
2344   rtx new_rtx, addr = XEXP (memref, 0);
2345   machine_mode address_mode;
2346   struct mem_attrs attrs, *defattrs;
2347 
2348   attrs = *get_mem_attrs (memref);
2349   address_mode = get_address_mode (memref);
2350   new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2351 
2352   /* At this point we don't know _why_ the address is invalid.  It
2353      could have secondary memory references, multiplies or anything.
2354 
2355      However, if we did go and rearrange things, we can wind up not
2356      being able to recognize the magic around pic_offset_table_rtx.
2357      This stuff is fragile, and is yet another example of why it is
2358      bad to expose PIC machinery too early.  */
2359   if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2360 				     attrs.addrspace)
2361       && GET_CODE (addr) == PLUS
2362       && XEXP (addr, 0) == pic_offset_table_rtx)
2363     {
2364       addr = force_reg (GET_MODE (addr), addr);
2365       new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2366     }
2367 
2368   update_temp_slot_address (XEXP (memref, 0), new_rtx);
2369   new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2370 
2371   /* If there are no changes, just return the original memory reference.  */
2372   if (new_rtx == memref)
2373     return new_rtx;
2374 
2375   /* Update the alignment to reflect the offset.  Reset the offset, which
2376      we don't know.  */
2377   defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2378   attrs.offset_known_p = false;
2379   attrs.size_known_p = defattrs->size_known_p;
2380   attrs.size = defattrs->size;
2381   attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2382   set_mem_attrs (new_rtx, &attrs);
2383   return new_rtx;
2384 }
2385 
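/* Sketch (REG_IDX hypothetical): adding a runtime index whose low three
   bits are known to be zero:

     rtx elt = offset_address (mem, reg_idx, 8);

   This caps MEM_ALIGN at 8 * BITS_PER_UNIT and forgets the offset,
   which is no longer a compile-time constant.  */
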
2386 /* Return a memory reference like MEMREF, but with its address changed to
2387    ADDR.  The caller is asserting that the actual piece of memory pointed
2388    to is the same, just the form of the address is being changed, such as
2389    by putting something into a register.  INPLACE is true if any changes
2390    can be made directly to MEMREF or false if MEMREF must be treated as
2391    immutable.  */
2392 
2393 rtx
2394 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2395 {
2396   /* change_address_1 copies the memory attribute structure without change
2397      and that's exactly what we want here.  */
2398   update_temp_slot_address (XEXP (memref, 0), addr);
2399   return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2400 }
2401 
2402 /* Likewise, but the reference is not required to be valid.  */
2403 
2404 rtx
2405 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2406 {
2407   return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2408 }
2409 
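/* Sketch: legitimizing an address without losing the attributes:

     rtx addr = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, addr, false);

   The MEM still names the same bytes, so expr, offset, size, alignment
   and alias set all survive.  */
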
2410 /* Return a memory reference like MEMREF, but with its mode widened to
2411    MODE and offset by OFFSET.  This would be used by targets that e.g.
2412    cannot issue QImode memory operations and have to use SImode memory
2413    operations plus masking logic.  */
2414 
2415 rtx
2416 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
2417 {
2418   rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2419   struct mem_attrs attrs;
2420   unsigned int size = GET_MODE_SIZE (mode);
2421 
2422   /* If there are no changes, just return the original memory reference.  */
2423   if (new_rtx == memref)
2424     return new_rtx;
2425 
2426   attrs = *get_mem_attrs (new_rtx);
2427 
2428   /* If we don't know what offset we were at within the expression, then
2429      we can't know if we've overstepped the bounds.  */
2430   if (! attrs.offset_known_p)
2431     attrs.expr = NULL_TREE;
2432 
2433   while (attrs.expr)
2434     {
2435       if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2436 	{
2437 	  tree field = TREE_OPERAND (attrs.expr, 1);
2438 	  tree offset = component_ref_field_offset (attrs.expr);
2439 
2440 	  if (! DECL_SIZE_UNIT (field))
2441 	    {
2442 	      attrs.expr = NULL_TREE;
2443 	      break;
2444 	    }
2445 
2446 	  /* Is the field at least as large as the access?  If so, ok,
2447 	     otherwise strip back to the containing structure.  */
2448 	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2449 	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2450 	      && attrs.offset >= 0)
2451 	    break;
2452 
2453 	  if (! tree_fits_uhwi_p (offset))
2454 	    {
2455 	      attrs.expr = NULL_TREE;
2456 	      break;
2457 	    }
2458 
2459 	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
2460 	  attrs.offset += tree_to_uhwi (offset);
2461 	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2462 			   / BITS_PER_UNIT);
2463 	}
2464       /* Similarly for the decl.  */
2465       else if (DECL_P (attrs.expr)
2466 	       && DECL_SIZE_UNIT (attrs.expr)
2467 	       && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2468 	       && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2469 	       && (! attrs.offset_known_p || attrs.offset >= 0))
2470 	break;
2471       else
2472 	{
2473 	  /* The widened memory access overflows the expression, which means
2474 	     that it could alias another expression.  Zap it.  */
2475 	  attrs.expr = NULL_TREE;
2476 	  break;
2477 	}
2478     }
2479 
2480   if (! attrs.expr)
2481     attrs.offset_known_p = false;
2482 
2483   /* The widened memory may alias other stuff, so zap the alias set.  */
2484   /* ??? Maybe use get_alias_set on any remaining expression.  */
2485   attrs.alias = 0;
2486   attrs.size_known_p = true;
2487   attrs.size = size;
2488   set_mem_attrs (new_rtx, &attrs);
2489   return new_rtx;
2490 }
2491 
2492 /* A fake decl that is used as the MEM_EXPR of spill slots.  */
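/* Example (sketch): a target without byte loads widening a QImode
   reference so it can load a word and mask:

     rtx word = widen_memory_access (mem, SImode, 0);

   WORD gets the SImode size and alias set 0, since the extra bytes may
   overlap neighboring objects.  */
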
2493 static GTY(()) tree spill_slot_decl;
2494 
2495 tree
2496 get_spill_slot_decl (bool force_build_p)
2497 {
2498   tree d = spill_slot_decl;
2499   rtx rd;
2500   struct mem_attrs attrs;
2501 
2502   if (d || !force_build_p)
2503     return d;
2504 
2505   d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2506 		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
2507   DECL_ARTIFICIAL (d) = 1;
2508   DECL_IGNORED_P (d) = 1;
2509   TREE_USED (d) = 1;
2510   spill_slot_decl = d;
2511 
2512   rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2513   MEM_NOTRAP_P (rd) = 1;
2514   attrs = *mode_mem_attrs[(int) BLKmode];
2515   attrs.alias = new_alias_set ();
2516   attrs.expr = d;
2517   set_mem_attrs (rd, &attrs);
2518   SET_DECL_RTL (d, rd);
2519 
2520   return d;
2521 }
2522 
2523 /* Given MEM, a result from assign_stack_local, fill in the memory
2524    attributes as appropriate for a register allocator spill slot.
2525    These slots are not aliasable by other memory.  We arrange for
2526    them all to use a single MEM_EXPR, so that the aliasing code can
2527    work properly in the case of shared spill slots.  */
2528 
2529 void
2530 set_mem_attrs_for_spill (rtx mem)
2531 {
2532   struct mem_attrs attrs;
2533   rtx addr;
2534 
2535   attrs = *get_mem_attrs (mem);
2536   attrs.expr = get_spill_slot_decl (true);
2537   attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2538   attrs.addrspace = ADDR_SPACE_GENERIC;
2539 
2540   /* We expect the incoming memory to be of the form:
2541 	(mem:MODE (plus (reg sfp) (const_int offset)))
2542      with perhaps the plus missing for offset = 0.  */
2543   addr = XEXP (mem, 0);
2544   attrs.offset_known_p = true;
2545   attrs.offset = 0;
2546   if (GET_CODE (addr) == PLUS
2547       && CONST_INT_P (XEXP (addr, 1)))
2548     attrs.offset = INTVAL (XEXP (addr, 1));
2549 
2550   set_mem_attrs (mem, &attrs);
2551   MEM_NOTRAP_P (mem) = 1;
2552 }
2553 
2554 /* Return a newly created CODE_LABEL rtx with a unique label number.  */
2555 
2556 rtx_code_label *
2557 gen_label_rtx (void)
2558 {
2559   return as_a <rtx_code_label *> (
2560 	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2561 				NULL, label_num++, NULL));
2562 }
2563 
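/* Usage sketch: labels are created detached and enter the insn stream
   only when emitted:

     rtx_code_label *done = gen_label_rtx ();
     emit_jump (done);
     ... code to be skipped ...
     emit_label (done);

   Each call consumes a fresh label_num, so every CODE_LABEL is
   distinct.  */
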
2564 /* For procedure integration.  */
2565 
2566 /* Install new pointers to the first and last insns in the chain.
2567    Also, set cur_insn_uid to one higher than the last in use.
2568    Used for an inline-procedure after copying the insn chain.  */
2569 
2570 void
2571 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2572 {
2573   rtx_insn *insn;
2574 
2575   set_first_insn (first);
2576   set_last_insn (last);
2577   cur_insn_uid = 0;
2578 
2579   if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2580     {
2581       int debug_count = 0;
2582 
2583       cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2584       cur_debug_insn_uid = 0;
2585 
2586       for (insn = first; insn; insn = NEXT_INSN (insn))
2587 	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2588 	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2589 	else
2590 	  {
2591 	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2592 	    if (DEBUG_INSN_P (insn))
2593 	      debug_count++;
2594 	  }
2595 
2596       if (debug_count)
2597 	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2598       else
2599 	cur_debug_insn_uid++;
2600     }
2601   else
2602     for (insn = first; insn; insn = NEXT_INSN (insn))
2603       cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2604 
2605   cur_insn_uid++;
2606 }
2607 
2608 /* Go through all the RTL insn bodies and copy any invalid shared
2609    structure.  This routine should only be called once.  */
2610 
2611 static void
2612 unshare_all_rtl_1 (rtx_insn *insn)
2613 {
2614   /* Unshare just about everything else.  */
2615   unshare_all_rtl_in_chain (insn);
2616 
2617   /* Make sure the addresses of stack slots found outside the insn chain
2618      (such as in the DECL_RTL of a variable) are not shared
2619      with the insn chain.
2620 
2621      This special care is necessary when the stack slot MEM does not
2622      actually appear in the insn chain.  If it does appear, its address
2623      is unshared from all else at that point.  */
2624   stack_slot_list = safe_as_a <rtx_expr_list *> (
2625 		      copy_rtx_if_shared (stack_slot_list));
2626 }
2627 
2628 /* Go through all the RTL insn bodies and copy any invalid shared
2629    structure, again.  This is a fairly expensive thing to do so it
2630    should be done sparingly.  */
2631 
2632 void
2633 unshare_all_rtl_again (rtx_insn *insn)
2634 {
2635   rtx_insn *p;
2636   tree decl;
2637 
2638   for (p = insn; p; p = NEXT_INSN (p))
2639     if (INSN_P (p))
2640       {
2641 	reset_used_flags (PATTERN (p));
2642 	reset_used_flags (REG_NOTES (p));
2643 	if (CALL_P (p))
2644 	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2645       }
2646 
2647   /* Make sure that virtual stack slots are not shared.  */
2648   set_used_decls (DECL_INITIAL (cfun->decl));
2649 
2650   /* Make sure that virtual parameters are not shared.  */
2651   for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2652     set_used_flags (DECL_RTL (decl));
2653 
2654   reset_used_flags (stack_slot_list);
2655 
2656   unshare_all_rtl_1 (insn);
2657 }
2658 
2659 unsigned int
2660 unshare_all_rtl (void)
2661 {
2662   unshare_all_rtl_1 (get_insns ());
2663   return 0;
2664 }
2665 
2666 
2667 /* Check that ORIG is not marked when it should not be and mark ORIG as
2668    in use.  Recursively do the same for subexpressions.  */
2669 
2670 static void
2671 verify_rtx_sharing (rtx orig, rtx insn)
2672 {
2673   rtx x = orig;
2674   int i;
2675   enum rtx_code code;
2676   const char *format_ptr;
2677 
2678   if (x == 0)
2679     return;
2680 
2681   code = GET_CODE (x);
2682 
2683   /* These types may be freely shared.  */
2684 
2685   switch (code)
2686     {
2687     case REG:
2688     case DEBUG_EXPR:
2689     case VALUE:
2690     CASE_CONST_ANY:
2691     case SYMBOL_REF:
2692     case LABEL_REF:
2693     case CODE_LABEL:
2694     case PC:
2695     case CC0:
2696     case RETURN:
2697     case SIMPLE_RETURN:
2698     case SCRATCH:
2699       /* SCRATCHes must be shared because they represent distinct values.  */
2700       return;
2701     case CLOBBER:
2702       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2703          clobbers or clobbers of hard registers that originated as pseudos.
2704          This is needed to allow safe register renaming.  */
2705       if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2706 	  && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2707 	return;
2708       break;
2709 
2710     case CONST:
2711       if (shared_const_p (orig))
2712 	return;
2713       break;
2714 
2715     case MEM:
2716       /* A MEM is allowed to be shared if its address is constant.  */
2717       if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2718 	  || reload_completed || reload_in_progress)
2719 	return;
2720 
2721       break;
2722 
2723     default:
2724       break;
2725     }
2726 
2727   /* This rtx may not be shared.  If it has already been seen,
2728      replace it with a copy of itself.  */
2729 #ifdef ENABLE_CHECKING
2730   if (RTX_FLAG (x, used))
2731     {
2732       error ("invalid rtl sharing found in the insn");
2733       debug_rtx (insn);
2734       error ("shared rtx");
2735       debug_rtx (x);
2736       internal_error ("internal consistency failure");
2737     }
2738 #endif
2739   gcc_assert (!RTX_FLAG (x, used));
2740 
2741   RTX_FLAG (x, used) = 1;
2742 
2743   /* Now scan the subexpressions recursively.  */
2744 
2745   format_ptr = GET_RTX_FORMAT (code);
2746 
2747   for (i = 0; i < GET_RTX_LENGTH (code); i++)
2748     {
2749       switch (*format_ptr++)
2750 	{
2751 	case 'e':
2752 	  verify_rtx_sharing (XEXP (x, i), insn);
2753 	  break;
2754 
2755 	case 'E':
2756 	  if (XVEC (x, i) != NULL)
2757 	    {
2758 	      int j;
2759 	      int len = XVECLEN (x, i);
2760 
2761 	      for (j = 0; j < len; j++)
2762 		{
2763 		  /* We allow sharing of ASM_OPERANDS inside single
2764 		  /* We allow sharing of ASM_OPERANDS inside a single
2765 		     instruction.  */
2766 		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2767 			  == ASM_OPERANDS))
2768 		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2769 		  else
2770 		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
2771 		}
2772 	    }
2773 	  break;
2774 	}
2775     }
2776   return;
2777 }
2778 
2779 /* Reset used-flags for INSN.  */
2780 
2781 static void
2782 reset_insn_used_flags (rtx insn)
2783 {
2784   gcc_assert (INSN_P (insn));
2785   reset_used_flags (PATTERN (insn));
2786   reset_used_flags (REG_NOTES (insn));
2787   if (CALL_P (insn))
2788     reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2789 }
2790 
2791 /* Go through all the RTL insn bodies and clear all the USED bits.  */
2792 
2793 static void
2794 reset_all_used_flags (void)
2795 {
2796   rtx_insn *p;
2797 
2798   for (p = get_insns (); p; p = NEXT_INSN (p))
2799     if (INSN_P (p))
2800       {
2801 	rtx pat = PATTERN (p);
2802 	if (GET_CODE (pat) != SEQUENCE)
2803 	  reset_insn_used_flags (p);
2804 	else
2805 	  {
2806 	    gcc_assert (REG_NOTES (p) == NULL);
2807 	    for (int i = 0; i < XVECLEN (pat, 0); i++)
2808 	      {
2809 		rtx insn = XVECEXP (pat, 0, i);
2810 		if (INSN_P (insn))
2811 		  reset_insn_used_flags (insn);
2812 	      }
2813 	  }
2814       }
2815 }
2816 
2817 /* Verify sharing in INSN.  */
2818 
2819 static void
2820 verify_insn_sharing (rtx insn)
2821 {
2822   gcc_assert (INSN_P (insn));
2823   verify_rtx_sharing (PATTERN (insn), insn);
2824   verify_rtx_sharing (REG_NOTES (insn), insn);
2825   if (CALL_P (insn))
2826     verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2827 }
2828 
2829 /* Go through all the RTL insn bodies and check that there is no unexpected
2830    sharing between the subexpressions.  */
2831 
2832 DEBUG_FUNCTION void
2833 verify_rtl_sharing (void)
2834 {
2835   rtx_insn *p;
2836 
2837   timevar_push (TV_VERIFY_RTL_SHARING);
2838 
2839   reset_all_used_flags ();
2840 
2841   for (p = get_insns (); p; p = NEXT_INSN (p))
2842     if (INSN_P (p))
2843       {
2844 	rtx pat = PATTERN (p);
2845 	if (GET_CODE (pat) != SEQUENCE)
2846 	  verify_insn_sharing (p);
2847 	else
2848 	  for (int i = 0; i < XVECLEN (pat, 0); i++)
2849 	    {
2850 	      rtx insn = XVECEXP (pat, 0, i);
2851 	      if (INSN_P (insn))
2852 		verify_insn_sharing (insn);
2853 	    }
2854       }
2855 
2856   reset_all_used_flags ();
2857 
2858   timevar_pop (TV_VERIFY_RTL_SHARING);
2859 }
2860 
2861 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2862    Assumes the mark bits are cleared at entry.  */
2863 
2864 void
2865 unshare_all_rtl_in_chain (rtx_insn *insn)
2866 {
2867   for (; insn; insn = NEXT_INSN (insn))
2868     if (INSN_P (insn))
2869       {
2870 	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2871 	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2872 	if (CALL_P (insn))
2873 	  CALL_INSN_FUNCTION_USAGE (insn)
2874 	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2875       }
2876 }
2877 
2878 /* Go through all virtual stack slots of a function and mark them as
2879    shared.  We never replace the DECL_RTLs themselves with a copy,
2880    but expressions mentioned in a DECL_RTL cannot be shared with
2881    expressions in the instruction stream.
2882 
2883    Note that reload may convert pseudo registers into memories in-place.
2884    Pseudo registers are always shared, but MEMs never are.  Thus if we
2885    reset the used flags on MEMs in the instruction stream, we must set
2886    them again on MEMs that appear in DECL_RTLs.  */
2887 
2888 static void
2889 set_used_decls (tree blk)
2890 {
2891   tree t;
2892 
2893   /* Mark decls.  */
2894   for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2895     if (DECL_RTL_SET_P (t))
2896       set_used_flags (DECL_RTL (t));
2897 
2898   /* Now process sub-blocks.  */
2899   for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2900     set_used_decls (t);
2901 }
2902 
2903 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2904    Recursively does the same for subexpressions.  Uses
2905    copy_rtx_if_shared_1 to reduce stack space.  */
2906 
2907 rtx
2908 copy_rtx_if_shared (rtx orig)
2909 {
2910   copy_rtx_if_shared_1 (&orig);
2911   return orig;
2912 }
2913 
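/* Sketch of the usual unsharing protocol (cf. unshare_all_rtl_again).
   The used bit doubles as the "seen before" mark, so it must be cleared
   before each walk:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   Afterwards every subexpression of the pattern is unshared, except for
   the always-shareable codes listed above.  */
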
2914 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2915    use.  Recursively does the same for subexpressions.  */
2916 
2917 static void
2918 copy_rtx_if_shared_1 (rtx *orig1)
2919 {
2920   rtx x;
2921   int i;
2922   enum rtx_code code;
2923   rtx *last_ptr;
2924   const char *format_ptr;
2925   int copied = 0;
2926   int length;
2927 
2928   /* Repeat is used to turn tail-recursion into iteration.  */
2929 repeat:
2930   x = *orig1;
2931 
2932   if (x == 0)
2933     return;
2934 
2935   code = GET_CODE (x);
2936 
2937   /* These types may be freely shared.  */
2938 
2939   switch (code)
2940     {
2941     case REG:
2942     case DEBUG_EXPR:
2943     case VALUE:
2944     CASE_CONST_ANY:
2945     case SYMBOL_REF:
2946     case LABEL_REF:
2947     case CODE_LABEL:
2948     case PC:
2949     case CC0:
2950     case RETURN:
2951     case SIMPLE_RETURN:
2952     case SCRATCH:
2953       /* SCRATCHes must be shared because they represent distinct values.  */
2954       return;
2955     case CLOBBER:
2956       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2957          clobbers or clobbers of hard registers that originated as pseudos.
2958          This is needed to allow safe register renaming.  */
2959       if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2960 	  && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2961 	return;
2962       break;
2963 
2964     case CONST:
2965       if (shared_const_p (x))
2966 	return;
2967       break;
2968 
2969     case DEBUG_INSN:
2970     case INSN:
2971     case JUMP_INSN:
2972     case CALL_INSN:
2973     case NOTE:
2974     case BARRIER:
2975       /* The chain of insns is not being copied.  */
2976       return;
2977 
2978     default:
2979       break;
2980     }
2981 
2982   /* This rtx may not be shared.  If it has already been seen,
2983      replace it with a copy of itself.  */
2984 
2985   if (RTX_FLAG (x, used))
2986     {
2987       x = shallow_copy_rtx (x);
2988       copied = 1;
2989     }
2990   RTX_FLAG (x, used) = 1;
2991 
2992   /* Now scan the subexpressions recursively.
2993      We can store any replaced subexpressions directly into X
2994      since we know X is not shared!  Any vectors in X
2995      must be copied if X was copied.  */
2996 
2997   format_ptr = GET_RTX_FORMAT (code);
2998   length = GET_RTX_LENGTH (code);
2999   last_ptr = NULL;
3000 
3001   for (i = 0; i < length; i++)
3002     {
3003       switch (*format_ptr++)
3004 	{
3005 	case 'e':
3006           if (last_ptr)
3007             copy_rtx_if_shared_1 (last_ptr);
3008 	  last_ptr = &XEXP (x, i);
3009 	  break;
3010 
3011 	case 'E':
3012 	  if (XVEC (x, i) != NULL)
3013 	    {
3014 	      int j;
3015 	      int len = XVECLEN (x, i);
3016 
3017               /* Copy the vector iff we copied the rtx and the length
3018 		 is nonzero.  */
3019 	      if (copied && len > 0)
3020 		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3021 
3022               /* Call recursively on all inside the vector.  */
3023 	      for (j = 0; j < len; j++)
3024                 {
3025 		  if (last_ptr)
3026 		    copy_rtx_if_shared_1 (last_ptr);
3027                   last_ptr = &XVECEXP (x, i, j);
3028                 }
3029 	    }
3030 	  break;
3031 	}
3032     }
3033   *orig1 = x;
3034   if (last_ptr)
3035     {
3036       orig1 = last_ptr;
3037       goto repeat;
3038     }
3039   return;
3040 }
3041 
3042 /* Set the USED bit in X and its non-shareable subparts to FLAG.  */
3043 
3044 static void
3045 mark_used_flags (rtx x, int flag)
3046 {
3047   int i, j;
3048   enum rtx_code code;
3049   const char *format_ptr;
3050   int length;
3051 
3052   /* Repeat is used to turn tail-recursion into iteration.  */
3053 repeat:
3054   if (x == 0)
3055     return;
3056 
3057   code = GET_CODE (x);
3058 
3059   /* These types may be freely shared so we needn't do any resetting
3060      for them.  */
3061 
3062   switch (code)
3063     {
3064     case REG:
3065     case DEBUG_EXPR:
3066     case VALUE:
3067     CASE_CONST_ANY:
3068     case SYMBOL_REF:
3069     case CODE_LABEL:
3070     case PC:
3071     case CC0:
3072     case RETURN:
3073     case SIMPLE_RETURN:
3074       return;
3075 
3076     case DEBUG_INSN:
3077     case INSN:
3078     case JUMP_INSN:
3079     case CALL_INSN:
3080     case NOTE:
3081     case LABEL_REF:
3082     case BARRIER:
3083       /* The chain of insns is not being copied.  */
3084       return;
3085 
3086     default:
3087       break;
3088     }
3089 
3090   RTX_FLAG (x, used) = flag;
3091 
3092   format_ptr = GET_RTX_FORMAT (code);
3093   length = GET_RTX_LENGTH (code);
3094 
3095   for (i = 0; i < length; i++)
3096     {
3097       switch (*format_ptr++)
3098 	{
3099 	case 'e':
3100           if (i == length-1)
3101             {
3102               x = XEXP (x, i);
3103 	      goto repeat;
3104             }
3105 	  mark_used_flags (XEXP (x, i), flag);
3106 	  break;
3107 
3108 	case 'E':
3109 	  for (j = 0; j < XVECLEN (x, i); j++)
3110 	    mark_used_flags (XVECEXP (x, i, j), flag);
3111 	  break;
3112 	}
3113     }
3114 }
3115 
3116 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3117    to look for shared sub-parts.  */
3118 
3119 void
3120 reset_used_flags (rtx x)
3121 {
3122   mark_used_flags (x, 0);
3123 }
3124 
3125 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3126    to look for shared sub-parts.  */
3127 
3128 void
3129 set_used_flags (rtx x)
3130 {
3131   mark_used_flags (x, 1);
3132 }
3133 
3134 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3135    Return X or the rtx for the pseudo reg the value of X was copied into.
3136    OTHER must be valid as a SET_DEST.  */
3137 
3138 rtx
3139 make_safe_from (rtx x, rtx other)
3140 {
3141   while (1)
3142     switch (GET_CODE (other))
3143       {
3144       case SUBREG:
3145 	other = SUBREG_REG (other);
3146 	break;
3147       case STRICT_LOW_PART:
3148       case SIGN_EXTEND:
3149       case ZERO_EXTEND:
3150 	other = XEXP (other, 0);
3151 	break;
3152       default:
3153 	goto done;
3154       }
3155  done:
3156   if ((MEM_P (other)
3157        && ! CONSTANT_P (x)
3158        && !REG_P (x)
3159        && GET_CODE (x) != SUBREG)
3160       || (REG_P (other)
3161 	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
3162 	      || reg_mentioned_p (other, x))))
3163     {
3164       rtx temp = gen_reg_rtx (GET_MODE (x));
3165       emit_move_insn (temp, x);
3166       return temp;
3167     }
3168   return x;
3169 }
3170 
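/* Example (sketch): before emitting a store into OTHER, protect X:

     x = make_safe_from (x, other);
     emit_move_insn (other, source);

   If OTHER is, say, a hard register mentioned in X, then X has first
   been copied into a fresh pseudo so the store cannot clobber it.  */
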
3171 /* Emission of insns (adding them to the doubly-linked list).  */
3172 
3173 /* Return the last insn emitted, even if it is in a sequence now pushed.  */
3174 
3175 rtx_insn *
3176 get_last_insn_anywhere (void)
3177 {
3178   struct sequence_stack *stack;
3179   if (get_last_insn ())
3180     return get_last_insn ();
3181   for (stack = seq_stack; stack; stack = stack->next)
3182     if (stack->last != 0)
3183       return stack->last;
3184   return 0;
3185 }
3186 
3187 /* Return the first nonnote insn emitted in the current sequence or the
3188    current function.  This routine looks inside SEQUENCEs.  */
3189 
3190 rtx_insn *
3191 get_first_nonnote_insn (void)
3192 {
3193   rtx_insn *insn = get_insns ();
3194 
3195   if (insn)
3196     {
3197       if (NOTE_P (insn))
3198 	for (insn = next_insn (insn);
3199 	     insn && NOTE_P (insn);
3200 	     insn = next_insn (insn))
3201 	  continue;
3202       else
3203 	{
3204 	  if (NONJUMP_INSN_P (insn)
3205 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
3206 	    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3207 	}
3208     }
3209 
3210   return insn;
3211 }
3212 
3213 /* Return the last nonnote insn emitted in the current sequence or the
3214    current function.  This routine looks inside SEQUENCEs.  */
3215 
3216 rtx_insn *
3217 get_last_nonnote_insn (void)
3218 {
3219   rtx_insn *insn = get_last_insn ();
3220 
3221   if (insn)
3222     {
3223       if (NOTE_P (insn))
3224 	for (insn = previous_insn (insn);
3225 	     insn && NOTE_P (insn);
3226 	     insn = previous_insn (insn))
3227 	  continue;
3228       else
3229 	{
3230 	  if (NONJUMP_INSN_P (insn))
3231 	    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3232 	      insn = seq->insn (seq->len () - 1);
3233 	}
3234     }
3235 
3236   return insn;
3237 }
3238 
3239 /* Return the number of actual (non-debug) insns emitted in this
3240    function.  */
3241 
3242 int
3243 get_max_insn_count (void)
3244 {
3245   int n = cur_insn_uid;
3246 
3247   /* The table size must be stable across -g, to avoid codegen
3248      differences due to debug insns, and not be affected by
3249      -fmin-insn-uid, to avoid excessive table size and to simplify
3250      debugging of -fcompare-debug failures.  */
3251   if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3252     n -= cur_debug_insn_uid;
3253   else
3254     n -= MIN_NONDEBUG_INSN_UID;
3255 
3256   return n;
3257 }
3258 
3259 
3260 /* Return the next insn.  If it is a SEQUENCE, return the first insn
3261    of the sequence.  */
3262 
3263 rtx_insn *
3264 next_insn (rtx_insn *insn)
3265 {
3266   if (insn)
3267     {
3268       insn = NEXT_INSN (insn);
3269       if (insn && NONJUMP_INSN_P (insn)
3270 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
3271 	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3272     }
3273 
3274   return insn;
3275 }
3276 
3277 /* Return the previous insn.  If it is a SEQUENCE, return the last insn
3278    of the sequence.  */
3279 
3280 rtx_insn *
3281 previous_insn (rtx_insn *insn)
3282 {
3283   if (insn)
3284     {
3285       insn = PREV_INSN (insn);
3286       if (insn && NONJUMP_INSN_P (insn))
3287 	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3288 	  insn = seq->insn (seq->len () - 1);
3289     }
3290 
3291   return insn;
3292 }
3293 
3294 /* Return the next insn after INSN that is not a NOTE.  This routine does not
3295    look inside SEQUENCEs.  */
3296 
3297 rtx_insn *
3298 next_nonnote_insn (rtx uncast_insn)
3299 {
3300   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3301   while (insn)
3302     {
3303       insn = NEXT_INSN (insn);
3304       if (insn == 0 || !NOTE_P (insn))
3305 	break;
3306     }
3307 
3308   return insn;
3309 }
3310 
3311 /* Return the next insn after INSN that is not a NOTE, but stop the
3312    search before we enter another basic block.  This routine does not
3313    look inside SEQUENCEs.  */
3314 
3315 rtx_insn *
3316 next_nonnote_insn_bb (rtx_insn *insn)
3317 {
3318   while (insn)
3319     {
3320       insn = NEXT_INSN (insn);
3321       if (insn == 0 || !NOTE_P (insn))
3322 	break;
3323       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3324 	return NULL;
3325     }
3326 
3327   return insn;
3328 }
3329 
3330 /* Return the previous insn before INSN that is not a NOTE.  This routine does
3331    not look inside SEQUENCEs.  */
3332 
3333 rtx_insn *
3334 prev_nonnote_insn (rtx uncast_insn)
3335 {
3336   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3337 
3338   while (insn)
3339     {
3340       insn = PREV_INSN (insn);
3341       if (insn == 0 || !NOTE_P (insn))
3342 	break;
3343     }
3344 
3345   return insn;
3346 }
3347 
3348 /* Return the previous insn before INSN that is not a NOTE, but stop
3349    the search before we enter another basic block.  This routine does
3350    not look inside SEQUENCEs.  */
3351 
3352 rtx_insn *
3353 prev_nonnote_insn_bb (rtx uncast_insn)
3354 {
3355   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3356 
3357   while (insn)
3358     {
3359       insn = PREV_INSN (insn);
3360       if (insn == 0 || !NOTE_P (insn))
3361 	break;
3362       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3363 	return NULL;
3364     }
3365 
3366   return insn;
3367 }
3368 
3369 /* Return the next insn after INSN that is not a DEBUG_INSN.  This
3370    routine does not look inside SEQUENCEs.  */
3371 
3372 rtx_insn *
3373 next_nondebug_insn (rtx uncast_insn)
3374 {
3375   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3376 
3377   while (insn)
3378     {
3379       insn = NEXT_INSN (insn);
3380       if (insn == 0 || !DEBUG_INSN_P (insn))
3381 	break;
3382     }
3383 
3384   return insn;
3385 }
3386 
3387 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3388    This routine does not look inside SEQUENCEs.  */
3389 
3390 rtx_insn *
3391 prev_nondebug_insn (rtx uncast_insn)
3392 {
3393   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3394 
3395   while (insn)
3396     {
3397       insn = PREV_INSN (insn);
3398       if (insn == 0 || !DEBUG_INSN_P (insn))
3399 	break;
3400     }
3401 
3402   return insn;
3403 }
3404 
3405 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3406    This routine does not look inside SEQUENCEs.  */
3407 
3408 rtx_insn *
3409 next_nonnote_nondebug_insn (rtx uncast_insn)
3410 {
3411   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3412 
3413   while (insn)
3414     {
3415       insn = NEXT_INSN (insn);
3416       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3417 	break;
3418     }
3419 
3420   return insn;
3421 }
3422 
3423 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3424    This routine does not look inside SEQUENCEs.  */
3425 
3426 rtx_insn *
3427 prev_nonnote_nondebug_insn (rtx uncast_insn)
3428 {
3429   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3430 
3431   while (insn)
3432     {
3433       insn = PREV_INSN (insn);
3434       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3435 	break;
3436     }
3437 
3438   return insn;
3439 }
3440 
3441 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3442    or 0, if there is none.  This routine does not look inside
3443    SEQUENCEs.  */
3444 
3445 rtx_insn *
3446 next_real_insn (rtx uncast_insn)
3447 {
3448   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3449 
3450   while (insn)
3451     {
3452       insn = NEXT_INSN (insn);
3453       if (insn == 0 || INSN_P (insn))
3454 	break;
3455     }
3456 
3457   return insn;
3458 }
3459 
3460 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3461    or 0, if there is none.  This routine does not look inside
3462    SEQUENCEs.  */
3463 
3464 rtx_insn *
3465 prev_real_insn (rtx uncast_insn)
3466 {
3467   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3468 
3469   while (insn)
3470     {
3471       insn = PREV_INSN (insn);
3472       if (insn == 0 || INSN_P (insn))
3473 	break;
3474     }
3475 
3476   return insn;
3477 }
3478 
3479 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3480    This routine does not look inside SEQUENCEs.  */
3481 
3482 rtx_call_insn *
3483 last_call_insn (void)
3484 {
3485   rtx_insn *insn;
3486 
3487   for (insn = get_last_insn ();
3488        insn && !CALL_P (insn);
3489        insn = PREV_INSN (insn))
3490     ;
3491 
3492   return safe_as_a <rtx_call_insn *> (insn);
3493 }
3494 
3495 /* Return nonzero if INSN really does something (i.e., is "active").
3496    After reload, standalone USE and CLOBBER insns do not count as
3497    active.  Used by next_active_insn and prev_active_insn below.  */
3498 
3499 int
3500 active_insn_p (const_rtx insn)
3501 {
3502   return (CALL_P (insn) || JUMP_P (insn)
3503 	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
3504 	  || (NONJUMP_INSN_P (insn)
3505 	      && (! reload_completed
3506 		  || (GET_CODE (PATTERN (insn)) != USE
3507 		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
3508 }
3509 
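/* Return the next active insn after INSN, or 0 if there is none.  This
   routine does not look inside SEQUENCEs.  */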
3510 rtx_insn *
3511 next_active_insn (rtx uncast_insn)
3512 {
3513   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3514 
3515   while (insn)
3516     {
3517       insn = NEXT_INSN (insn);
3518       if (insn == 0 || active_insn_p (insn))
3519 	break;
3520     }
3521 
3522   return insn;
3523 }
3524 
3525 /* Find the last insn before INSN that really does something.  This routine
3526    does not look inside SEQUENCEs.  After reload this also skips over
3527    standalone USE and CLOBBER insn.  */
3528 
3529 rtx_insn *
3530 prev_active_insn (rtx uncast_insn)
3531 {
3532   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3533 
3534   while (insn)
3535     {
3536       insn = PREV_INSN (insn);
3537       if (insn == 0 || active_insn_p (insn))
3538 	break;
3539     }
3540 
3541   return insn;
3542 }
3543 
3544 #ifdef HAVE_cc0
3545 /* Return the next insn that uses CC0 after INSN, which is assumed to
3546    set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3547    applied to the result of this function should yield INSN).
3548 
3549    Normally, this is simply the next insn.  However, if a REG_CC_USER note
3550    is present, it contains the insn that uses CC0.
3551 
3552    Return 0 if we can't find the insn.  */
3553 
3554 rtx_insn *
3555 next_cc0_user (rtx uncast_insn)
3556 {
3557   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3558 
3559   rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3560 
3561   if (note)
3562     return safe_as_a <rtx_insn *> (XEXP (note, 0));
3563 
3564   insn = next_nonnote_insn (insn);
3565   if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3566     insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3567 
3568   if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3569     return insn;
3570 
3571   return 0;
3572 }
3573 
3574 /* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
3575    note, it is the previous insn.  */
3576 
3577 rtx_insn *
3578 prev_cc0_setter (rtx uncast_insn)
3579 {
3580   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3581 
3582   rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3583 
3584   if (note)
3585     return safe_as_a <rtx_insn *> (XEXP (note, 0));
3586 
3587   insn = prev_nonnote_insn (insn);
3588   gcc_assert (sets_cc0_p (PATTERN (insn)));
3589 
3590   return insn;
3591 }
3592 #endif
3593 
3594 #ifdef AUTO_INC_DEC
3595 /* Return true if X contains an RTX_AUTOINC class rtx that modifies REG.  */
3596 
3597 static int
3598 find_auto_inc (const_rtx x, const_rtx reg)
3599 {
3600   subrtx_iterator::array_type array;
3601   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3602     {
3603       const_rtx x = *iter;
3604       if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3605 	  && rtx_equal_p (reg, XEXP (x, 0)))
3606 	return true;
3607     }
3608   return false;
3609 }
3610 #endif
3611 
3612 /* Increment the use counts (LABEL_NUSES) of all labels present in X.  */
3613 
3614 static void
3615 mark_label_nuses (rtx x)
3616 {
3617   enum rtx_code code;
3618   int i, j;
3619   const char *fmt;
3620 
3621   code = GET_CODE (x);
3622   if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
3623     LABEL_NUSES (LABEL_REF_LABEL (x))++;
3624 
3625   fmt = GET_RTX_FORMAT (code);
3626   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3627     {
3628       if (fmt[i] == 'e')
3629 	mark_label_nuses (XEXP (x, i));
3630       else if (fmt[i] == 'E')
3631 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3632 	  mark_label_nuses (XVECEXP (x, i, j));
3633     }
3634 }
3635 
3636 
3637 /* Try splitting insns that can be split for better scheduling.
3638    PAT is the pattern that might be split.
3639    TRIAL is the insn providing PAT.
3640    LAST is nonzero if we should return the last insn of the sequence produced.
3641 
3642    If this routine succeeds in splitting, it returns the first or last
3643    replacement insn depending on the value of LAST.  Otherwise, it
3644    returns TRIAL.  If the insn to be returned can be split, it will be.  */
3645 
3646 rtx_insn *
3647 try_split (rtx pat, rtx uncast_trial, int last)
3648 {
3649   rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
3650   rtx_insn *before = PREV_INSN (trial);
3651   rtx_insn *after = NEXT_INSN (trial);
3652   rtx note;
3653   rtx_insn *seq, *tem;
3654   int probability;
3655   rtx_insn *insn_last, *insn;
3656   int njumps = 0;
3657   rtx call_insn = NULL_RTX;
3658 
3659   /* We're not good at redistributing frame information.  */
3660   if (RTX_FRAME_RELATED_P (trial))
3661     return trial;
3662 
3663   if (any_condjump_p (trial)
3664       && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3665     split_branch_probability = XINT (note, 0);
3666   probability = split_branch_probability;
3667 
3668   seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));
3669 
3670   split_branch_probability = -1;
3671 
3672   if (!seq)
3673     return trial;
3674 
3675   /* Avoid infinite loop if any insn of the result matches
3676      the original pattern.  */
3677   insn_last = seq;
3678   while (1)
3679     {
3680       if (INSN_P (insn_last)
3681 	  && rtx_equal_p (PATTERN (insn_last), pat))
3682 	return trial;
3683       if (!NEXT_INSN (insn_last))
3684 	break;
3685       insn_last = NEXT_INSN (insn_last);
3686     }
3687 
3688   /* We will be adding the new sequence to the function.  The splitters
3689      may have introduced invalid RTL sharing, so unshare the sequence now.  */
3690   unshare_all_rtl_in_chain (seq);
3691 
3692   /* Mark labels and copy flags.  */
3693   for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3694     {
3695       if (JUMP_P (insn))
3696 	{
3697 	  if (JUMP_P (trial))
3698 	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3699 	  mark_jump_label (PATTERN (insn), insn, 0);
3700 	  njumps++;
3701 	  if (probability != -1
3702 	      && any_condjump_p (insn)
3703 	      && !find_reg_note (insn, REG_BR_PROB, 0))
3704 	    {
3705 	      /* We can preserve the REG_BR_PROB notes only if exactly
3706 		 one jump is created, otherwise the machine description
3707 		 is responsible for this step using
3708 		 split_branch_probability variable.  */
3709 	      gcc_assert (njumps == 1);
3710 	      add_int_reg_note (insn, REG_BR_PROB, probability);
3711 	    }
3712 	}
3713     }
3714 
3715   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3716      in SEQ and copy any additional information across.  */
3717   if (CALL_P (trial))
3718     {
3719       for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3720 	if (CALL_P (insn))
3721 	  {
3722 	    rtx_insn *next;
3723 	    rtx *p;
3724 
3725 	    gcc_assert (call_insn == NULL_RTX);
3726 	    call_insn = insn;
3727 
3728 	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3729 	       target may have explicitly specified.  */
3730 	    p = &CALL_INSN_FUNCTION_USAGE (insn);
3731 	    while (*p)
3732 	      p = &XEXP (*p, 1);
3733 	    *p = CALL_INSN_FUNCTION_USAGE (trial);
3734 
3735 	    /* If the old call was a sibling call, the new one must
3736 	       be too.  */
3737 	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3738 
3739 	    /* If the new call is the last instruction in the sequence,
3740 	       it will effectively replace the old call in-situ.  Otherwise
3741 	       we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3742 	       so that it comes immediately after the new call.  */
3743 	    if (NEXT_INSN (insn))
3744 	      for (next = NEXT_INSN (trial);
3745 		   next && NOTE_P (next);
3746 		   next = NEXT_INSN (next))
3747 		if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3748 		  {
3749 		    remove_insn (next);
3750 		    add_insn_after (next, insn, NULL);
3751 		    break;
3752 		  }
3753 	  }
3754     }
3755 
3756   /* Copy notes, particularly those related to the CFG.  */
3757   for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3758     {
3759       switch (REG_NOTE_KIND (note))
3760 	{
3761 	case REG_EH_REGION:
3762 	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
3763 	  break;
3764 
3765 	case REG_NORETURN:
3766 	case REG_SETJMP:
3767 	case REG_TM:
3768 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3769 	    {
3770 	      if (CALL_P (insn))
3771 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3772 	    }
3773 	  break;
3774 
3775 	case REG_NON_LOCAL_GOTO:
3776 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3777 	    {
3778 	      if (JUMP_P (insn))
3779 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3780 	    }
3781 	  break;
3782 
3783 #ifdef AUTO_INC_DEC
3784 	case REG_INC:
3785 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3786 	    {
3787 	      rtx reg = XEXP (note, 0);
3788 	      if (!FIND_REG_INC_NOTE (insn, reg)
3789 		  && find_auto_inc (PATTERN (insn), reg))
3790 		add_reg_note (insn, REG_INC, reg);
3791 	    }
3792 	  break;
3793 #endif
3794 
3795 	case REG_ARGS_SIZE:
3796 	  fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
3797 	  break;
3798 
3799 	case REG_CALL_DECL:
3800 	  gcc_assert (call_insn != NULL_RTX);
3801 	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3802 	  break;
3803 
3804 	default:
3805 	  break;
3806 	}
3807     }
3808 
3809   /* If there are LABELs inside the split insns, increment the
3810      usage counts so we don't delete the labels.  */
3811   if (INSN_P (trial))
3812     {
3813       insn = insn_last;
3814       while (insn != NULL_RTX)
3815 	{
3816 	  /* JUMP_P insns have already been "marked" above.  */
3817 	  if (NONJUMP_INSN_P (insn))
3818 	    mark_label_nuses (PATTERN (insn));
3819 
3820 	  insn = PREV_INSN (insn);
3821 	}
3822     }
3823 
3824   tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3825 
3826   delete_insn (trial);
3827 
3828   /* Recursively call try_split for each new insn created; by the
3829      time control returns here that insn will be fully split, so
3830      set LAST and continue from the insn after the one returned.
3831      We can't use next_active_insn here since AFTER may be a note.
3832      Ignore deleted insns, which can occur if not optimizing.  */
3833   for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3834     if (! tem->deleted () && INSN_P (tem))
3835       tem = try_split (PATTERN (tem), tem, 1);
3836 
3837   /* Return either the first or the last insn, depending on which was
3838      requested.  */
3839   return last
3840     ? (after ? PREV_INSN (after) : get_last_insn ())
3841     : NEXT_INSN (before);
3842 }
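
/* A sketch of the typical calling convention for try_split (INSN here is
   hypothetical): callers resume from whatever is returned, because the
   original insn may have been deleted and replaced.

     insn = try_split (PATTERN (insn), insn, 1);

   Since try_split returns TRIAL unchanged when no splitter matches, the
   caller need not distinguish the split and no-split outcomes.  */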
3843 
3844 /* Make and return an INSN rtx, initializing all its slots.
3845    Store PATTERN in the pattern slots.  */
3846 
3847 rtx_insn *
3848 make_insn_raw (rtx pattern)
3849 {
3850   rtx_insn *insn;
3851 
3852   insn = as_a <rtx_insn *> (rtx_alloc (INSN));
3853 
3854   INSN_UID (insn) = cur_insn_uid++;
3855   PATTERN (insn) = pattern;
3856   INSN_CODE (insn) = -1;
3857   REG_NOTES (insn) = NULL;
3858   INSN_LOCATION (insn) = curr_insn_location ();
3859   BLOCK_FOR_INSN (insn) = NULL;
3860 
3861 #ifdef ENABLE_RTL_CHECKING
3862   if (insn
3863       && INSN_P (insn)
3864       && (returnjump_p (insn)
3865 	  || (GET_CODE (pattern) == SET
3866 	      && SET_DEST (pattern) == pc_rtx)))
3867     {
3868       warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3869       debug_rtx (insn);
3870     }
3871 #endif
3872 
3873   return insn;
3874 }
3875 
3876 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */
3877 
3878 static rtx_insn *
3879 make_debug_insn_raw (rtx pattern)
3880 {
3881   rtx_debug_insn *insn;
3882 
3883   insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
3884   INSN_UID (insn) = cur_debug_insn_uid++;
3885   if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3886     INSN_UID (insn) = cur_insn_uid++;
3887 
3888   PATTERN (insn) = pattern;
3889   INSN_CODE (insn) = -1;
3890   REG_NOTES (insn) = NULL;
3891   INSN_LOCATION (insn) = curr_insn_location ();
3892   BLOCK_FOR_INSN (insn) = NULL;
3893 
3894   return insn;
3895 }
3896 
3897 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
3898 
3899 static rtx_insn *
3900 make_jump_insn_raw (rtx pattern)
3901 {
3902   rtx_jump_insn *insn;
3903 
3904   insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
3905   INSN_UID (insn) = cur_insn_uid++;
3906 
3907   PATTERN (insn) = pattern;
3908   INSN_CODE (insn) = -1;
3909   REG_NOTES (insn) = NULL;
3910   JUMP_LABEL (insn) = NULL;
3911   INSN_LOCATION (insn) = curr_insn_location ();
3912   BLOCK_FOR_INSN (insn) = NULL;
3913 
3914   return insn;
3915 }
3916 
3917 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
3918 
3919 static rtx_insn *
3920 make_call_insn_raw (rtx pattern)
3921 {
3922   rtx_call_insn *insn;
3923 
3924   insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
3925   INSN_UID (insn) = cur_insn_uid++;
3926 
3927   PATTERN (insn) = pattern;
3928   INSN_CODE (insn) = -1;
3929   REG_NOTES (insn) = NULL;
3930   CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3931   INSN_LOCATION (insn) = curr_insn_location ();
3932   BLOCK_FOR_INSN (insn) = NULL;
3933 
3934   return insn;
3935 }
3936 
3937 /* Like `make_insn_raw' but make a NOTE instead of an insn.  */
3938 
3939 static rtx_note *
3940 make_note_raw (enum insn_note subtype)
3941 {
3942   /* Some notes are never created this way at all.  These notes are
3943      only created by patching out insns.  */
3944   gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3945 	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3946 
3947   rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
3948   INSN_UID (note) = cur_insn_uid++;
3949   NOTE_KIND (note) = subtype;
3950   BLOCK_FOR_INSN (note) = NULL;
3951   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3952   return note;
3953 }
3954 
3955 /* Link INSN into the doubly-linked list between PREV and NEXT.
3956    INSN may be any object that can appear in the chain: INSN_P and NOTE_P
3957    objects, but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */
3958 
3959 static inline void
3960 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
3961 {
3962   SET_PREV_INSN (insn) = prev;
3963   SET_NEXT_INSN (insn) = next;
3964   if (prev != NULL)
3965     {
3966       SET_NEXT_INSN (prev) = insn;
3967       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3968 	{
3969 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
3970 	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
3971 	}
3972     }
3973   if (next != NULL)
3974     {
3975       SET_PREV_INSN (next) = insn;
3976       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3977 	{
3978 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
3979 	  SET_PREV_INSN (sequence->insn (0)) = insn;
3980 	}
3981     }
3982 
3983   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3984     {
3985       rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
3986       SET_PREV_INSN (sequence->insn (0)) = prev;
3987       SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
3988     }
3989 }
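
/* As an illustrative check of the invariants established above, assuming
   neither neighbor is a SEQUENCE (INSN, PREV and NEXT are hypothetical):

     link_insn_into_chain (insn, prev, next);
     gcc_checking_assert (PREV_INSN (insn) == prev
                          && NEXT_INSN (insn) == next
                          && (prev == NULL || NEXT_INSN (prev) == insn)
                          && (next == NULL || PREV_INSN (next) == insn));

   When a neighbor is a SEQUENCE, the same links are mirrored on the first
   or last insn inside that SEQUENCE.  */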
3990 
3991 /* Add INSN to the end of the doubly-linked list.
3992    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
3993 
3994 void
3995 add_insn (rtx_insn *insn)
3996 {
3997   rtx_insn *prev = get_last_insn ();
3998   link_insn_into_chain (insn, prev, NULL);
3999   if (NULL == get_insns ())
4000     set_first_insn (insn);
4001   set_last_insn (insn);
4002 }
4003 
4004 /* Add INSN into the doubly-linked list after insn AFTER.  */
4005 
4006 static void
4007 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4008 {
4009   rtx_insn *next = NEXT_INSN (after);
4010 
4011   gcc_assert (!optimize || !after->deleted ());
4012 
4013   link_insn_into_chain (insn, after, next);
4014 
4015   if (next == NULL)
4016     {
4017       if (get_last_insn () == after)
4018 	set_last_insn (insn);
4019       else
4020 	{
4021 	  struct sequence_stack *stack = seq_stack;
4022 	  /* Scan all pending sequences too.  */
4023 	  for (; stack; stack = stack->next)
4024 	    if (after == stack->last)
4025 	      {
4026 		stack->last = insn;
4027 		break;
4028 	      }
4029 	}
4030     }
4031 }
4032 
4033 /* Add INSN into the doubly-linked list before insn BEFORE.  */
4034 
4035 static void
4036 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4037 {
4038   rtx_insn *prev = PREV_INSN (before);
4039 
4040   gcc_assert (!optimize || !before->deleted ());
4041 
4042   link_insn_into_chain (insn, prev, before);
4043 
4044   if (prev == NULL)
4045     {
4046       if (get_insns () == before)
4047 	set_first_insn (insn);
4048       else
4049 	{
4050 	  struct sequence_stack *stack = seq_stack;
4051 	  /* Scan all pending sequences too.  */
4052 	  for (; stack; stack = stack->next)
4053 	    if (before == stack->first)
4054 	      {
4055 		stack->first = insn;
4056 		break;
4057 	      }
4058 
4059 	  gcc_assert (stack);
4060 	}
4061     }
4062 }
4063 
4064 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4065    If BB is NULL, an attempt is made to infer the bb from AFTER.
4066 
4067    This and the next function should be the only functions called
4068    to insert an insn once delay slots have been filled, since only
4069    they know how to update a SEQUENCE. */
4070 
4071 void
4072 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
4073 {
4074   rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4075   rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4076   add_insn_after_nobb (insn, after);
4077   if (!BARRIER_P (after)
4078       && !BARRIER_P (insn)
4079       && (bb = BLOCK_FOR_INSN (after)))
4080     {
4081       set_block_for_insn (insn, bb);
4082       if (INSN_P (insn))
4083 	df_insn_rescan (insn);
4084       /* Should not happen as first in the BB is always
4085 	 either NOTE or LABEL.  */
4086       if (BB_END (bb) == after
4087 	  /* Avoid clobbering of structure when creating new BB.  */
4088 	  && !BARRIER_P (insn)
4089 	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
4090 	BB_END (bb) = insn;
4091     }
4092 }
4093 
4094 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4095    If BB is NULL, an attempt is made to infer the bb from BEFORE.
4096 
4097    This and the previous function should be the only functions called
4098    to insert an insn once delay slots have been filled, since only
4099    they know how to update a SEQUENCE. */
4100 
4101 void
4102 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
4103 {
4104   rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4105   rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4106   add_insn_before_nobb (insn, before);
4107 
4108   if (!bb
4109       && !BARRIER_P (before)
4110       && !BARRIER_P (insn))
4111     bb = BLOCK_FOR_INSN (before);
4112 
4113   if (bb)
4114     {
4115       set_block_for_insn (insn, bb);
4116       if (INSN_P (insn))
4117 	df_insn_rescan (insn);
4118       /* Should not happen as first in the BB is always either NOTE or
4119 	 LABEL.  */
4120       gcc_assert (BB_HEAD (bb) != insn
4121 		  /* Avoid clobbering of structure when creating new BB.  */
4122 		  || BARRIER_P (insn)
4123 		  || NOTE_INSN_BASIC_BLOCK_P (insn));
4124     }
4125 }
4126 
4127 /* Replace INSN with a NOTE_INSN_DELETED note.  */
4128 
4129 void
4130 set_insn_deleted (rtx insn)
4131 {
4132   if (INSN_P (insn))
4133     df_insn_delete (as_a <rtx_insn *> (insn));
4134   PUT_CODE (insn, NOTE);
4135   NOTE_KIND (insn) = NOTE_INSN_DELETED;
4136 }
4137 
4138 
4139 /* Unlink INSN from the insn chain.
4140 
4141    This function knows how to handle sequences.
4142 
4143    This function does not invalidate data flow information associated with
4144    INSN (i.e. does not call df_insn_delete).  That makes this function
4145    usable for merely disconnecting an insn from the chain so that it
4146    can be re-emitted elsewhere later.
4147 
4148    To later insert INSN elsewhere in the insn chain via add_insn and
4149    similar functions, PREV_INSN and NEXT_INSN must be nullified by
4150    the caller.  Nullifying them here breaks many insn chain walks.
4151 
4152    To really delete an insn and related DF information, use delete_insn.  */
4153 
4154 void
4155 remove_insn (rtx uncast_insn)
4156 {
4157   rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
4158   rtx_insn *next = NEXT_INSN (insn);
4159   rtx_insn *prev = PREV_INSN (insn);
4160   basic_block bb;
4161 
4162   if (prev)
4163     {
4164       SET_NEXT_INSN (prev) = next;
4165       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4166 	{
4167 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4168 	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4169 	}
4170     }
4171   else if (get_insns () == insn)
4172     {
4173       if (next)
4174         SET_PREV_INSN (next) = NULL;
4175       set_first_insn (next);
4176     }
4177   else
4178     {
4179       struct sequence_stack *stack = seq_stack;
4180       /* Scan all pending sequences too.  */
4181       for (; stack; stack = stack->next)
4182 	if (insn == stack->first)
4183 	  {
4184 	    stack->first = next;
4185 	    break;
4186 	  }
4187 
4188       gcc_assert (stack);
4189     }
4190 
4191   if (next)
4192     {
4193       SET_PREV_INSN (next) = prev;
4194       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4195 	{
4196 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4197 	  SET_PREV_INSN (sequence->insn (0)) = prev;
4198 	}
4199     }
4200   else if (get_last_insn () == insn)
4201     set_last_insn (prev);
4202   else
4203     {
4204       struct sequence_stack *stack = seq_stack;
4205       /* Scan all pending sequences too.  */
4206       for (; stack; stack = stack->next)
4207 	if (insn == stack->last)
4208 	  {
4209 	    stack->last = prev;
4210 	    break;
4211 	  }
4212 
4213       gcc_assert (stack);
4214     }
4215 
4216   /* Fix up basic block boundaries, if necessary.  */
4217   if (!BARRIER_P (insn)
4218       && (bb = BLOCK_FOR_INSN (insn)))
4219     {
4220       if (BB_HEAD (bb) == insn)
4221 	{
4222 	  /* Never ever delete the basic block note without deleting whole
4223 	     basic block.  */
4224 	  gcc_assert (!NOTE_P (insn));
4225 	  BB_HEAD (bb) = next;
4226 	}
4227       if (BB_END (bb) == insn)
4228 	BB_END (bb) = prev;
4229     }
4230 }
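
/* A sketch of the disconnect-and-reinsert idiom that the comment before
   remove_insn describes (INSN and AFTER are hypothetical):

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, after, NULL);

   Nullifying both links first satisfies the requirement stated in the
   comment above.  */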
4231 
4232 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
4233 
4234 void
4235 add_function_usage_to (rtx call_insn, rtx call_fusage)
4236 {
4237   gcc_assert (call_insn && CALL_P (call_insn));
4238 
4239   /* Put the register usage information on the CALL.  If there is already
4240      some usage information, put ours at the end.  */
4241   if (CALL_INSN_FUNCTION_USAGE (call_insn))
4242     {
4243       rtx link;
4244 
4245       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4246 	   link = XEXP (link, 1))
4247 	;
4248 
4249       XEXP (link, 1) = call_fusage;
4250     }
4251   else
4252     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4253 }
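
/* For illustration, a caller usually builds CALL_FUSAGE as an EXPR_LIST
   of USE expressions and attaches it in one step; use_reg is the usual
   helper (CALL_INSN and SOME_REG are hypothetical here):

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, some_reg);
     add_function_usage_to (call_insn, call_fusage);
*/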
4254 
4255 /* Delete all insns made since FROM.
4256    FROM becomes the new last instruction.  */
4257 
4258 void
4259 delete_insns_since (rtx_insn *from)
4260 {
4261   if (from == 0)
4262     set_first_insn (0);
4263   else
4264     SET_NEXT_INSN (from) = 0;
4265   set_last_insn (from);
4266 }
4267 
4268 /* This function is deprecated; please use sequences instead.
4269 
4270    Move a consecutive bunch of insns to a different place in the chain.
4271    The insns to be moved are those between FROM and TO.
4272    They are moved to a new position after the insn AFTER.
4273    AFTER must not be FROM or TO or any insn in between.
4274 
4275    This function does not know about SEQUENCEs and hence should not be
4276    called after delay-slot filling has been done.  */
4277 
4278 void
4279 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4280 {
4281 #ifdef ENABLE_CHECKING
4282   rtx_insn *x;
4283   for (x = from; x != to; x = NEXT_INSN (x))
4284     gcc_assert (after != x);
4285   gcc_assert (after != to);
4286 #endif
4287 
4288   /* Splice this bunch out of where it is now.  */
4289   if (PREV_INSN (from))
4290     SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4291   if (NEXT_INSN (to))
4292     SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4293   if (get_last_insn () == to)
4294     set_last_insn (PREV_INSN (from));
4295   if (get_insns () == from)
4296     set_first_insn (NEXT_INSN (to));
4297 
4298   /* Make the new neighbors point to it and it to them.  */
4299   if (NEXT_INSN (after))
4300     SET_PREV_INSN (NEXT_INSN (after)) = to;
4301 
4302   SET_NEXT_INSN (to) = NEXT_INSN (after);
4303   SET_PREV_INSN (from) = after;
4304   SET_NEXT_INSN (after) = from;
4305   if (after == get_last_insn ())
4306     set_last_insn (to);
4307 }
4308 
4309 /* Same as function above, but take care to update BB boundaries.  */
4310 void
4311 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4312 {
4313   rtx_insn *prev = PREV_INSN (from);
4314   basic_block bb, bb2;
4315 
4316   reorder_insns_nobb (from, to, after);
4317 
4318   if (!BARRIER_P (after)
4319       && (bb = BLOCK_FOR_INSN (after)))
4320     {
4321       rtx_insn *x;
4322       df_set_bb_dirty (bb);
4323 
4324       if (!BARRIER_P (from)
4325 	  && (bb2 = BLOCK_FOR_INSN (from)))
4326 	{
4327 	  if (BB_END (bb2) == to)
4328 	    BB_END (bb2) = prev;
4329 	  df_set_bb_dirty (bb2);
4330 	}
4331 
4332       if (BB_END (bb) == after)
4333 	BB_END (bb) = to;
4334 
4335       for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4336 	if (!BARRIER_P (x))
4337 	  df_insn_change_bb (x, bb);
4338     }
4339 }
4340 
4341 
4342 /* Emit insn(s) of given code and pattern
4343    at a specified place within the doubly-linked list.
4344 
4345    All of the emit_foo global entry points accept an object
4346    X which is either an insn list or a PATTERN of a single
4347    instruction.
4348 
4349    There are thus a few canonical ways to generate code and
4350    emit it at a specific place in the instruction stream.  For
4351    example, consider the instruction named SPOT and the fact that
4352    we would like to emit some instructions before SPOT.  We might
4353    do it like this:
4354 
4355 	start_sequence ();
4356 	... emit the new instructions ...
4357 	insns_head = get_insns ();
4358 	end_sequence ();
4359 
4360 	emit_insn_before (insns_head, SPOT);
4361 
4362    It used to be common to generate SEQUENCE rtl instead, but that
4363    is a relic of the past which no longer occurs.  The reason is that
4364    SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
4365    generated would almost certainly die right after it was created.  */
4366 
4367 static rtx_insn *
4368 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4369                            rtx_insn *(*make_raw) (rtx))
4370 {
4371   rtx_insn *insn;
4372 
4373   gcc_assert (before);
4374 
4375   if (x == NULL_RTX)
4376     return safe_as_a <rtx_insn *> (last);
4377 
4378   switch (GET_CODE (x))
4379     {
4380     case DEBUG_INSN:
4381     case INSN:
4382     case JUMP_INSN:
4383     case CALL_INSN:
4384     case CODE_LABEL:
4385     case BARRIER:
4386     case NOTE:
4387       insn = as_a <rtx_insn *> (x);
4388       while (insn)
4389 	{
4390 	  rtx_insn *next = NEXT_INSN (insn);
4391 	  add_insn_before (insn, before, bb);
4392 	  last = insn;
4393 	  insn = next;
4394 	}
4395       break;
4396 
4397 #ifdef ENABLE_RTL_CHECKING
4398     case SEQUENCE:
4399       gcc_unreachable ();
4400       break;
4401 #endif
4402 
4403     default:
4404       last = (*make_raw) (x);
4405       add_insn_before (last, before, bb);
4406       break;
4407     }
4408 
4409   return safe_as_a <rtx_insn *> (last);
4410 }
4411 
4412 /* Make X be output before the instruction BEFORE.  */
4413 
4414 rtx_insn *
4415 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4416 {
4417   return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4418 }
4419 
4420 /* Make an instruction with body X and code JUMP_INSN
4421    and output it before the instruction BEFORE.  */
4422 
4423 rtx_insn *
4424 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4425 {
4426   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4427 				    make_jump_insn_raw);
4428 }
4429 
4430 /* Make an instruction with body X and code CALL_INSN
4431    and output it before the instruction BEFORE.  */
4432 
4433 rtx_insn *
4434 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4435 {
4436   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4437 				    make_call_insn_raw);
4438 }
4439 
4440 /* Make an instruction with body X and code DEBUG_INSN
4441    and output it before the instruction BEFORE.  */
4442 
4443 rtx_insn *
4444 emit_debug_insn_before_noloc (rtx x, rtx before)
4445 {
4446   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4447 				    make_debug_insn_raw);
4448 }
4449 
4450 /* Make an insn of code BARRIER
4451    and output it before the insn BEFORE.  */
4452 
4453 rtx_barrier *
4454 emit_barrier_before (rtx before)
4455 {
4456   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4457 
4458   INSN_UID (insn) = cur_insn_uid++;
4459 
4460   add_insn_before (insn, before, NULL);
4461   return insn;
4462 }
4463 
4464 /* Emit the label LABEL before the insn BEFORE.  */
4465 
4466 rtx_insn *
4467 emit_label_before (rtx label, rtx_insn *before)
4468 {
4469   gcc_checking_assert (INSN_UID (label) == 0);
4470   INSN_UID (label) = cur_insn_uid++;
4471   add_insn_before (label, before, NULL);
4472   return as_a <rtx_insn *> (label);
4473 }
4474 
4475 /* Helper for emit_insn_after, handles lists of instructions
4476    efficiently.  */
4477 
4478 static rtx_insn *
4479 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
4480 {
4481   rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4482   rtx_insn *last;
4483   rtx_insn *after_after;
4484   if (!bb && !BARRIER_P (after))
4485     bb = BLOCK_FOR_INSN (after);
4486 
4487   if (bb)
4488     {
4489       df_set_bb_dirty (bb);
4490       for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4491 	if (!BARRIER_P (last))
4492 	  {
4493 	    set_block_for_insn (last, bb);
4494 	    df_insn_rescan (last);
4495 	  }
4496       if (!BARRIER_P (last))
4497 	{
4498 	  set_block_for_insn (last, bb);
4499 	  df_insn_rescan (last);
4500 	}
4501       if (BB_END (bb) == after)
4502 	BB_END (bb) = last;
4503     }
4504   else
4505     for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4506       continue;
4507 
4508   after_after = NEXT_INSN (after);
4509 
4510   SET_NEXT_INSN (after) = first;
4511   SET_PREV_INSN (first) = after;
4512   SET_NEXT_INSN (last) = after_after;
4513   if (after_after)
4514     SET_PREV_INSN (after_after) = last;
4515 
4516   if (after == get_last_insn ())
4517     set_last_insn (last);
4518 
4519   return last;
4520 }
4521 
4522 static rtx_insn *
4523 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
4524 			  rtx_insn *(*make_raw)(rtx))
4525 {
4526   rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4527   rtx_insn *last = after;
4528 
4529   gcc_assert (after);
4530 
4531   if (x == NULL_RTX)
4532     return last;
4533 
4534   switch (GET_CODE (x))
4535     {
4536     case DEBUG_INSN:
4537     case INSN:
4538     case JUMP_INSN:
4539     case CALL_INSN:
4540     case CODE_LABEL:
4541     case BARRIER:
4542     case NOTE:
4543       last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4544       break;
4545 
4546 #ifdef ENABLE_RTL_CHECKING
4547     case SEQUENCE:
4548       gcc_unreachable ();
4549       break;
4550 #endif
4551 
4552     default:
4553       last = (*make_raw) (x);
4554       add_insn_after (last, after, bb);
4555       break;
4556     }
4557 
4558   return last;
4559 }
4560 
4561 /* Make X be output after the insn AFTER and set the BB of insn.  If
4562    BB is NULL, an attempt is made to infer the BB from AFTER.  */
4563 
4564 rtx_insn *
4565 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4566 {
4567   return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4568 }
4569 
4570 
4571 /* Make an insn of code JUMP_INSN with body X
4572    and output it after the insn AFTER.  */
4573 
4574 rtx_insn *
4575 emit_jump_insn_after_noloc (rtx x, rtx after)
4576 {
4577   return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4578 }
4579 
4580 /* Make an instruction with body X and code CALL_INSN
4581    and output it after the instruction AFTER.  */
4582 
4583 rtx_insn *
4584 emit_call_insn_after_noloc (rtx x, rtx after)
4585 {
4586   return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4587 }
4588 
4589 /* Make an instruction with body X and code DEBUG_INSN
4590    and output it after the instruction AFTER.  */
4591 
4592 rtx_insn *
4593 emit_debug_insn_after_noloc (rtx x, rtx after)
4594 {
4595   return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4596 }
4597 
4598 /* Make an insn of code BARRIER
4599    and output it after the insn AFTER.  */
4600 
4601 rtx_barrier *
4602 emit_barrier_after (rtx after)
4603 {
4604   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4605 
4606   INSN_UID (insn) = cur_insn_uid++;
4607 
4608   add_insn_after (insn, after, NULL);
4609   return insn;
4610 }
4611 
4612 /* Emit the label LABEL after the insn AFTER.  */
4613 
4614 rtx_insn *
4615 emit_label_after (rtx label, rtx_insn *after)
4616 {
4617   gcc_checking_assert (INSN_UID (label) == 0);
4618   INSN_UID (label) = cur_insn_uid++;
4619   add_insn_after (label, after, NULL);
4620   return as_a <rtx_insn *> (label);
4621 }
4622 
4623 /* Notes require a bit of special handling: Some notes need to have their
4624    BLOCK_FOR_INSN set, others should never have it set, and some should
4625    have it set or cleared depending on the context.  */
4626 
4627 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4628    that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
4629    caller is asked to emit a note before BB_HEAD, or after BB_END.  */
4630 
4631 static bool
4632 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4633 {
4634   switch (subtype)
4635     {
4636       /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
4637       case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4638 	return true;
4639 
4640       /* Notes for var tracking and EH region markers can appear between or
4641 	 inside basic blocks.  If the caller is emitting on the basic block
4642 	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
4643       case NOTE_INSN_VAR_LOCATION:
4644       case NOTE_INSN_CALL_ARG_LOCATION:
4645       case NOTE_INSN_EH_REGION_BEG:
4646       case NOTE_INSN_EH_REGION_END:
4647 	return on_bb_boundary_p;
4648 
4649       /* Otherwise, BLOCK_FOR_INSN must be set.  */
4650       default:
4651 	return false;
4652     }
4653 }
4654 
4655 /* Emit a note of subtype SUBTYPE after the insn AFTER.  */
4656 
4657 rtx_note *
4658 emit_note_after (enum insn_note subtype, rtx uncast_after)
4659 {
4660   rtx_insn *after = as_a <rtx_insn *> (uncast_after);
4661   rtx_note *note = make_note_raw (subtype);
4662   basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4663   bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4664 
4665   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4666     add_insn_after_nobb (note, after);
4667   else
4668     add_insn_after (note, after, bb);
4669   return note;
4670 }
4671 
4672 /* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
4673 
4674 rtx_note *
4675 emit_note_before (enum insn_note subtype, rtx uncast_before)
4676 {
4677   rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4678   rtx_note *note = make_note_raw (subtype);
4679   basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4680   bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4681 
4682   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4683     add_insn_before_nobb (note, before);
4684   else
4685     add_insn_before (note, before, bb);
4686   return note;
4687 }
4688 
4689 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4690    MAKE_RAW indicates how to turn PATTERN into a real insn.  */
4691 
4692 static rtx_insn *
4693 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
4694 			   rtx_insn *(*make_raw) (rtx))
4695 {
4696   rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4697   rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4698 
4699   if (pattern == NULL_RTX || !loc)
4700     return safe_as_a <rtx_insn *> (last);
4701 
4702   after = NEXT_INSN (after);
4703   while (1)
4704     {
4705       if (active_insn_p (after) && !INSN_LOCATION (after))
4706 	INSN_LOCATION (after) = loc;
4707       if (after == last)
4708 	break;
4709       after = NEXT_INSN (after);
4710     }
4711   return safe_as_a <rtx_insn *> (last);
4712 }
4713 
4714 /* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
4715    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
4716    any DEBUG_INSNs.  */
4717 
4718 static rtx_insn *
4719 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
4720 		    rtx_insn *(*make_raw) (rtx))
4721 {
4722   rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
4723   rtx_insn *prev = after;
4724 
4725   if (skip_debug_insns)
4726     while (DEBUG_INSN_P (prev))
4727       prev = PREV_INSN (prev);
4728 
4729   if (INSN_P (prev))
4730     return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4731 				      make_raw);
4732   else
4733     return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4734 }
4735 
4736 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4737 rtx_insn *
4738 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4739 {
4740   return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4741 }
4742 
4743 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4744 rtx_insn *
4745 emit_insn_after (rtx pattern, rtx after)
4746 {
4747   return emit_pattern_after (pattern, after, true, make_insn_raw);
4748 }
4749 
4750 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4751 rtx_insn *
4752 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4753 {
4754   return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4755 }
4756 
4757 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4758 rtx_insn *
4759 emit_jump_insn_after (rtx pattern, rtx after)
4760 {
4761   return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4762 }
4763 
4764 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4765 rtx_insn *
4766 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4767 {
4768   return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4769 }
4770 
4771 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4772 rtx_insn *
4773 emit_call_insn_after (rtx pattern, rtx after)
4774 {
4775   return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4776 }
4777 
4778 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4779 rtx_insn *
4780 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4781 {
4782   return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4783 }
4784 
4785 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4786 rtx_insn *
4787 emit_debug_insn_after (rtx pattern, rtx after)
4788 {
4789   return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4790 }
4791 
4792 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4793    MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
4794    indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4795    CALL_INSN, etc.  */
4796 
4797 static rtx_insn *
4798 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
4799 			    rtx_insn *(*make_raw) (rtx))
4800 {
4801   rtx_insn *before = as_a <rtx_insn *> (uncast_before);
4802   rtx_insn *first = PREV_INSN (before);
4803   rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4804 					      insnp ? before : NULL_RTX,
4805 					      NULL, make_raw);
4806 
4807   if (pattern == NULL_RTX || !loc)
4808     return last;
4809 
4810   if (!first)
4811     first = get_insns ();
4812   else
4813     first = NEXT_INSN (first);
4814   while (1)
4815     {
4816       if (active_insn_p (first) && !INSN_LOCATION (first))
4817 	INSN_LOCATION (first) = loc;
4818       if (first == last)
4819 	break;
4820       first = NEXT_INSN (first);
4821     }
4822   return last;
4823 }
4824 
4825 /* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
4826    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
4827    before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
4828    INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */
4829 
4830 static rtx_insn *
4831 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
4832 		     bool insnp, rtx_insn *(*make_raw) (rtx))
4833 {
4834   rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
4835   rtx_insn *next = before;
4836 
4837   if (skip_debug_insns)
4838     while (DEBUG_INSN_P (next))
4839       next = PREV_INSN (next);
4840 
4841   if (INSN_P (next))
4842     return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4843 				       insnp, make_raw);
4844   else
4845     return emit_pattern_before_noloc (pattern, before,
4846                                       insnp ? before : NULL_RTX,
4847                                       NULL, make_raw);
4848 }
4849 
4850 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4851 rtx_insn *
4852 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4853 {
4854   return emit_pattern_before_setloc (pattern, before, loc, true,
4855 				     make_insn_raw);
4856 }
4857 
4858 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
4859 rtx_insn *
4860 emit_insn_before (rtx pattern, rtx before)
4861 {
4862   return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4863 }
4864 
4865 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4866 rtx_insn *
4867 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4868 {
4869   return emit_pattern_before_setloc (pattern, before, loc, false,
4870 				     make_jump_insn_raw);
4871 }
4872 
4873 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
4874 rtx_insn *
4875 emit_jump_insn_before (rtx pattern, rtx before)
4876 {
4877   return emit_pattern_before (pattern, before, true, false,
4878 			      make_jump_insn_raw);
4879 }
4880 
4881 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4882 rtx_insn *
4883 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
4884 {
4885   return emit_pattern_before_setloc (pattern, before, loc, false,
4886 				     make_call_insn_raw);
4887 }
4888 
4889 /* Like emit_call_insn_before_noloc,
4890    but set insn_location according to BEFORE.  */
4891 rtx_insn *
4892 emit_call_insn_before (rtx pattern, rtx_insn *before)
4893 {
4894   return emit_pattern_before (pattern, before, true, false,
4895 			      make_call_insn_raw);
4896 }
4897 
4898 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4899 rtx_insn *
4900 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4901 {
4902   return emit_pattern_before_setloc (pattern, before, loc, false,
4903 				     make_debug_insn_raw);
4904 }
4905 
4906 /* Like emit_debug_insn_before_noloc,
4907    but set insn_location according to BEFORE.  */
4908 rtx_insn *
4909 emit_debug_insn_before (rtx pattern, rtx before)
4910 {
4911   return emit_pattern_before (pattern, before, false, false,
4912 			      make_debug_insn_raw);
4913 }
4914 
4915 /* Take X and emit it at the end of the doubly-linked
4916    INSN list.
4917 
4918    Returns the last insn emitted.  */
4919 
4920 rtx_insn *
4921 emit_insn (rtx x)
4922 {
4923   rtx_insn *last = get_last_insn ();
4924   rtx_insn *insn;
4925 
4926   if (x == NULL_RTX)
4927     return last;
4928 
4929   switch (GET_CODE (x))
4930     {
4931     case DEBUG_INSN:
4932     case INSN:
4933     case JUMP_INSN:
4934     case CALL_INSN:
4935     case CODE_LABEL:
4936     case BARRIER:
4937     case NOTE:
4938       insn = as_a <rtx_insn *> (x);
4939       while (insn)
4940 	{
4941 	  rtx_insn *next = NEXT_INSN (insn);
4942 	  add_insn (insn);
4943 	  last = insn;
4944 	  insn = next;
4945 	}
4946       break;
4947 
4948 #ifdef ENABLE_RTL_CHECKING
4949     case JUMP_TABLE_DATA:
4950     case SEQUENCE:
4951       gcc_unreachable ();
4952       break;
4953 #endif
4954 
4955     default:
4956       last = make_insn_raw (x);
4957       add_insn (last);
4958       break;
4959     }
4960 
4961   return last;
4962 }
4963 
4964 /* Make an insn of code DEBUG_INSN with pattern X
4965    and add it to the end of the doubly-linked list.  */
4966 
4967 rtx_insn *
4968 emit_debug_insn (rtx x)
4969 {
4970   rtx_insn *last = get_last_insn ();
4971   rtx_insn *insn;
4972 
4973   if (x == NULL_RTX)
4974     return last;
4975 
4976   switch (GET_CODE (x))
4977     {
4978     case DEBUG_INSN:
4979     case INSN:
4980     case JUMP_INSN:
4981     case CALL_INSN:
4982     case CODE_LABEL:
4983     case BARRIER:
4984     case NOTE:
4985       insn = as_a <rtx_insn *> (x);
4986       while (insn)
4987 	{
4988 	  rtx_insn *next = NEXT_INSN (insn);
4989 	  add_insn (insn);
4990 	  last = insn;
4991 	  insn = next;
4992 	}
4993       break;
4994 
4995 #ifdef ENABLE_RTL_CHECKING
4996     case JUMP_TABLE_DATA:
4997     case SEQUENCE:
4998       gcc_unreachable ();
4999       break;
5000 #endif
5001 
5002     default:
5003       last = make_debug_insn_raw (x);
5004       add_insn (last);
5005       break;
5006     }
5007 
5008   return last;
5009 }
5010 
5011 /* Make an insn of code JUMP_INSN with pattern X
5012    and add it to the end of the doubly-linked list.  */
5013 
5014 rtx_insn *
5015 emit_jump_insn (rtx x)
5016 {
5017   rtx_insn *last = NULL;
5018   rtx_insn *insn;
5019 
5020   switch (GET_CODE (x))
5021     {
5022     case DEBUG_INSN:
5023     case INSN:
5024     case JUMP_INSN:
5025     case CALL_INSN:
5026     case CODE_LABEL:
5027     case BARRIER:
5028     case NOTE:
5029       insn = as_a <rtx_insn *> (x);
5030       while (insn)
5031 	{
5032 	  rtx_insn *next = NEXT_INSN (insn);
5033 	  add_insn (insn);
5034 	  last = insn;
5035 	  insn = next;
5036 	}
5037       break;
5038 
5039 #ifdef ENABLE_RTL_CHECKING
5040     case JUMP_TABLE_DATA:
5041     case SEQUENCE:
5042       gcc_unreachable ();
5043       break;
5044 #endif
5045 
5046     default:
5047       last = make_jump_insn_raw (x);
5048       add_insn (last);
5049       break;
5050     }
5051 
5052   return last;
5053 }
5054 
5055 /* Make an insn of code CALL_INSN with pattern X
5056    and add it to the end of the doubly-linked list.  */
5057 
5058 rtx_insn *
5059 emit_call_insn (rtx x)
5060 {
5061   rtx_insn *insn;
5062 
5063   switch (GET_CODE (x))
5064     {
5065     case DEBUG_INSN:
5066     case INSN:
5067     case JUMP_INSN:
5068     case CALL_INSN:
5069     case CODE_LABEL:
5070     case BARRIER:
5071     case NOTE:
5072       insn = emit_insn (x);
5073       break;
5074 
5075 #ifdef ENABLE_RTL_CHECKING
5076     case SEQUENCE:
5077     case JUMP_TABLE_DATA:
5078       gcc_unreachable ();
5079       break;
5080 #endif
5081 
5082     default:
5083       insn = make_call_insn_raw (x);
5084       add_insn (insn);
5085       break;
5086     }
5087 
5088   return insn;
5089 }
5090 
5091 /* Add the label LABEL to the end of the doubly-linked list.  */
5092 
5093 rtx_insn *
5094 emit_label (rtx label)
5095 {
5096   gcc_checking_assert (INSN_UID (label) == 0);
5097   INSN_UID (label) = cur_insn_uid++;
5098   add_insn (as_a <rtx_insn *> (label));
5099   return as_a <rtx_insn *> (label);
5100 }
5101 
5102 /* Make an insn of code JUMP_TABLE_DATA
5103    and add it to the end of the doubly-linked list.  */
5104 
5105 rtx_jump_table_data *
5106 emit_jump_table_data (rtx table)
5107 {
5108   rtx_jump_table_data *jump_table_data =
5109     as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5110   INSN_UID (jump_table_data) = cur_insn_uid++;
5111   PATTERN (jump_table_data) = table;
5112   BLOCK_FOR_INSN (jump_table_data) = NULL;
5113   add_insn (jump_table_data);
5114   return jump_table_data;
5115 }
5116 
5117 /* Make an insn of code BARRIER
5118    and add it to the end of the doubly-linked list.  */
5119 
5120 rtx_barrier *
5121 emit_barrier (void)
5122 {
5123   rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5124   INSN_UID (barrier) = cur_insn_uid++;
5125   add_insn (barrier);
5126   return barrier;
5127 }
5128 
5129 /* Emit a copy of note ORIG.  */
5130 
5131 rtx_note *
5132 emit_note_copy (rtx_note *orig)
5133 {
5134   enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5135   rtx_note *note = make_note_raw (kind);
5136   NOTE_DATA (note) = NOTE_DATA (orig);
5137   add_insn (note);
5138   return note;
5139 }
5140 
5141 /* Make an insn of code NOTE with kind KIND
5142    and add it to the end of the doubly-linked list.  */
5143 
5144 rtx_note *
5145 emit_note (enum insn_note kind)
5146 {
5147   rtx_note *note = make_note_raw (kind);
5148   add_insn (note);
5149   return note;
5150 }
5151 
5152 /* Emit a clobber of lvalue X.  */
5153 
5154 rtx_insn *
5155 emit_clobber (rtx x)
5156 {
5157   /* CONCATs should not appear in the insn stream.  */
5158   if (GET_CODE (x) == CONCAT)
5159     {
5160       emit_clobber (XEXP (x, 0));
5161       return emit_clobber (XEXP (x, 1));
5162     }
5163   return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5164 }
5165 
5166 /* Return a sequence of insns to clobber lvalue X.  */
5167 
5168 rtx_insn *
5169 gen_clobber (rtx x)
5170 {
5171   rtx_insn *seq;
5172 
5173   start_sequence ();
5174   emit_clobber (x);
5175   seq = get_insns ();
5176   end_sequence ();
5177   return seq;
5178 }
5179 
5180 /* Emit a use of rvalue X.  */
5181 
5182 rtx_insn *
5183 emit_use (rtx x)
5184 {
5185   /* CONCATs should not appear in the insn stream.  */
5186   if (GET_CODE (x) == CONCAT)
5187     {
5188       emit_use (XEXP (x, 0));
5189       return emit_use (XEXP (x, 1));
5190     }
5191   return emit_insn (gen_rtx_USE (VOIDmode, x));
5192 }
5193 
5194 /* Return a sequence of insns to use rvalue X.  */
5195 
5196 rtx_insn *
5197 gen_use (rtx x)
5198 {
5199   rtx_insn *seq;
5200 
5201   start_sequence ();
5202   emit_use (x);
5203   seq = get_insns ();
5204   end_sequence ();
5205   return seq;
5206 }
5207 
5208 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5209    Return the set in INSN that such notes describe, or NULL if the notes
5210    have no meaning for INSN.  */
5211 
5212 rtx
5213 set_for_reg_notes (rtx insn)
5214 {
5215   rtx pat, reg;
5216 
5217   if (!INSN_P (insn))
5218     return NULL_RTX;
5219 
5220   pat = PATTERN (insn);
5221   if (GET_CODE (pat) == PARALLEL)
5222     {
5223       /* We do not use single_set because that ignores SETs of unused
5224 	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
5225 	 PARALLEL to have a single SET.  */
5226       if (multiple_sets (insn))
5227 	return NULL_RTX;
5228       pat = XVECEXP (pat, 0, 0);
5229     }
5230 
5231   if (GET_CODE (pat) != SET)
5232     return NULL_RTX;
5233 
5234   reg = SET_DEST (pat);
5235 
5236   /* Notes apply to the contents of a STRICT_LOW_PART.  */
5237   if (GET_CODE (reg) == STRICT_LOW_PART)
5238     reg = XEXP (reg, 0);
5239 
5240   /* Check that we have a register.  */
5241   if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5242     return NULL_RTX;
5243 
5244   return pat;
5245 }
5246 
5247 /* Place a note of KIND on insn INSN with DATUM as the datum.  If a
5248    note of this kind already exists, its datum is replaced with DATUM.  */
5249 
5250 rtx
5251 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5252 {
5253   rtx note = find_reg_note (insn, kind, NULL_RTX);
5254 
5255   switch (kind)
5256     {
5257     case REG_EQUAL:
5258     case REG_EQUIV:
5259       /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
5260       if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5261 	return NULL_RTX;
5262 
5263       /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5264 	 It serves no useful purpose and breaks eliminate_regs.  */
5265       if (GET_CODE (datum) == ASM_OPERANDS)
5266 	return NULL_RTX;
5267 
5268       /* Notes with side effects are dangerous.  Even if the side-effect
5269 	 initially mirrors one in PATTERN (INSN), later optimizations
5270 	 might alter the way that the final register value is calculated
5271 	 and so move or alter the side-effect in some way.  The note would
5272 	 then no longer be a valid substitution for SET_SRC.  */
5273       if (side_effects_p (datum))
5274 	return NULL_RTX;
5275       break;
5276 
5277     default:
5278       break;
5279     }
5280 
5281   if (note)
5282     XEXP (note, 0) = datum;
5283   else
5284     {
5285       add_reg_note (insn, kind, datum);
5286       note = REG_NOTES (insn);
5287     }
5288 
5289   switch (kind)
5290     {
5291     case REG_EQUAL:
5292     case REG_EQUIV:
5293       df_notes_rescan (as_a <rtx_insn *> (insn));
5294       break;
5295     default:
5296       break;
5297     }
5298 
5299   return note;
5300 }
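
/* For illustration, the common way to record that INSN computes a known
   constant (INSN and VALUE are hypothetical):

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (value));

   The call degrades gracefully, returning NULL_RTX without attaching
   anything when set_for_reg_notes finds no suitable SET.  */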
5301 
5302 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
5303 rtx
5304 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5305 {
5306   rtx set = set_for_reg_notes (insn);
5307 
5308   if (set && SET_DEST (set) == dst)
5309     return set_unique_reg_note (insn, kind, datum);
5310   return NULL_RTX;
5311 }
5312 
5313 /* Return an indication of which type of insn should have X as a body.
5314    The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */
5315 
5316 static enum rtx_code
5317 classify_insn (rtx x)
5318 {
5319   if (LABEL_P (x))
5320     return CODE_LABEL;
5321   if (GET_CODE (x) == CALL)
5322     return CALL_INSN;
5323   if (ANY_RETURN_P (x))
5324     return JUMP_INSN;
5325   if (GET_CODE (x) == SET)
5326     {
5327       if (SET_DEST (x) == pc_rtx)
5328 	return JUMP_INSN;
5329       else if (GET_CODE (SET_SRC (x)) == CALL)
5330 	return CALL_INSN;
5331       else
5332 	return INSN;
5333     }
5334   if (GET_CODE (x) == PARALLEL)
5335     {
5336       int j;
5337       for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5338 	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5339 	  return CALL_INSN;
5340 	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5341 		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5342 	  return JUMP_INSN;
5343 	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5344 		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5345 	  return CALL_INSN;
5346     }
5347   return INSN;
5348 }
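
/* Some illustrative classifications, assuming this tree's three-operand
   gen_rtx_SET (LABEL, REG, MEM, CALLEE and ARG stand for hypothetical
   operands):

     classify_insn (gen_rtx_SET (VOIDmode, pc_rtx, label))   is  JUMP_INSN
     classify_insn (gen_rtx_SET (VOIDmode, reg, mem))        is  INSN
     classify_insn (gen_rtx_CALL (VOIDmode, callee, arg))    is  CALL_INSN
*/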
5349 
5350 /* Emit the rtl pattern X as an appropriate kind of insn.
5351    If X is a label, it is simply added into the insn chain.  */
5352 
5353 rtx_insn *
5354 emit (rtx x)
5355 {
5356   enum rtx_code code = classify_insn (x);
5357 
5358   switch (code)
5359     {
5360     case CODE_LABEL:
5361       return emit_label (x);
5362     case INSN:
5363       return emit_insn (x);
5364     case JUMP_INSN:
5365       {
5366 	rtx_insn *insn = emit_jump_insn (x);
5367 	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5368 	  return emit_barrier ();
5369 	return insn;
5370       }
5371     case CALL_INSN:
5372       return emit_call_insn (x);
5373     case DEBUG_INSN:
5374       return emit_debug_insn (x);
5375     default:
5376       gcc_unreachable ();
5377     }
5378 }
5379 
5380 /* Space for free sequence stack entries.  */
5381 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5382 
5383 /* Begin emitting insns to a sequence.  If this sequence will contain
5384    something that might cause the compiler to pop arguments to function
5385    calls (because those pops have previously been deferred; see
5386    INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5387    before calling this function.  That will ensure that the deferred
5388    pops are not accidentally emitted in the middle of this sequence.  */
5389 
5390 void
5391 start_sequence (void)
5392 {
5393   struct sequence_stack *tem;
5394 
5395   if (free_sequence_stack != NULL)
5396     {
5397       tem = free_sequence_stack;
5398       free_sequence_stack = tem->next;
5399     }
5400   else
5401     tem = ggc_alloc<sequence_stack> ();
5402 
5403   tem->next = seq_stack;
5404   tem->first = get_insns ();
5405   tem->last = get_last_insn ();
5406 
5407   seq_stack = tem;
5408 
5409   set_first_insn (0);
5410   set_last_insn (0);
5411 }
5412 
5413 /* Set up the insn chain starting with FIRST as the current sequence,
5414    saving the previously current one.  See the documentation for
5415    start_sequence for more information about how to use this function.  */
5416 
5417 void
5418 push_to_sequence (rtx_insn *first)
5419 {
5420   rtx_insn *last;
5421 
5422   start_sequence ();
5423 
5424   for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5425     ;
5426 
5427   set_first_insn (first);
5428   set_last_insn (last);
5429 }
5430 
5431 /* Like push_to_sequence, but take the last insn as an argument to avoid
5432    looping through the list.  */
5433 
5434 void
5435 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5436 {
5437   start_sequence ();
5438 
5439   set_first_insn (first);
5440   set_last_insn (last);
5441 }
5442 
5443 /* Set up the outer-level insn chain
5444    as the current sequence, saving the previously current one.  */
5445 
5446 void
5447 push_topmost_sequence (void)
5448 {
5449   struct sequence_stack *stack, *top = NULL;
5450 
5451   start_sequence ();
5452 
5453   for (stack = seq_stack; stack; stack = stack->next)
5454     top = stack;
5455 
5456   set_first_insn (top->first);
5457   set_last_insn (top->last);
5458 }
5459 
5460 /* After emitting to the outer-level insn chain, update the outer-level
5461    insn chain, and restore the previous saved state.  */
5462 
5463 void
5464 pop_topmost_sequence (void)
5465 {
5466   struct sequence_stack *stack, *top = NULL;
5467 
5468   for (stack = seq_stack; stack; stack = stack->next)
5469     top = stack;
5470 
5471   top->first = get_insns ();
5472   top->last = get_last_insn ();
5473 
5474   end_sequence ();
5475 }
5476 
5477 /* After emitting to a sequence, restore previous saved state.
5478 
5479    To get the contents of the sequence just made, you must call
5480    `get_insns' *before* calling here.
5481 
5482    If the compiler might have deferred popping arguments while
5483    generating this sequence, and this sequence will not be immediately
5484    inserted into the instruction stream, use do_pending_stack_adjust
5485    before calling get_insns.  That will ensure that the deferred
5486    pops are inserted into this sequence, and not into some random
5487    location in the instruction stream.  See INHIBIT_DEFER_POP for more
5488    information about deferred popping of arguments.  */
5489 
5490 void
5491 end_sequence (void)
5492 {
5493   struct sequence_stack *tem = seq_stack;
5494 
5495   set_first_insn (tem->first);
5496   set_last_insn (tem->last);
5497   seq_stack = tem->next;
5498 
5499   memset (tem, 0, sizeof (*tem));
5500   tem->next = free_sequence_stack;
5501   free_sequence_stack = tem;
5502 }
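
/* A minimal usage sketch of the sequence machinery above (editorial
   addition); pat and where stand for a pattern and an insertion point
   built elsewhere:

     do_pending_stack_adjust ();
     start_sequence ();
     emit_insn (pat);
     rtx_insn *seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, where);

   Note that get_insns must run before end_sequence, because end_sequence
   restores the previously saved insn chain.  */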
5503 
5504 /* Return 1 if currently emitting into a sequence.  */
5505 
5506 int
5507 in_sequence_p (void)
5508 {
5509   return seq_stack != 0;
5510 }
5511 
5512 /* Put the various virtual registers into REGNO_REG_RTX.  */
5513 
5514 static void
5515 init_virtual_regs (void)
5516 {
5517   regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5518   regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5519   regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5520   regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5521   regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5522   regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5523     = virtual_preferred_stack_boundary_rtx;
5524 }
5525 
5526 
5527 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
5528 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5529 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5530 static int copy_insn_n_scratches;
5531 
5532 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5533    copied an ASM_OPERANDS.
5534    In that case, it is the original input-operand vector.  */
5535 static rtvec orig_asm_operands_vector;
5536 
5537 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5538    copied an ASM_OPERANDS.
5539    In that case, it is the copied input-operand vector.  */
5540 static rtvec copy_asm_operands_vector;
5541 
5542 /* Likewise for the constraints vector.  */
5543 static rtvec orig_asm_constraints_vector;
5544 static rtvec copy_asm_constraints_vector;
5545 
5546 /* Recursively create a new copy of an rtx for copy_insn.
5547    This function differs from copy_rtx in that it handles SCRATCHes and
5548    ASM_OPERANDs properly.
5549    Normally, this function is not used directly; use copy_insn as front end.
5550    However, you could first copy an insn pattern with copy_insn and then use
5551    this function afterwards to properly copy any REG_NOTEs containing
5552    SCRATCHes.  */
5553 
5554 rtx
5555 copy_insn_1 (rtx orig)
5556 {
5557   rtx copy;
5558   int i, j;
5559   RTX_CODE code;
5560   const char *format_ptr;
5561 
5562   if (orig == NULL)
5563     return NULL;
5564 
5565   code = GET_CODE (orig);
5566 
5567   switch (code)
5568     {
5569     case REG:
5570     case DEBUG_EXPR:
5571     CASE_CONST_ANY:
5572     case SYMBOL_REF:
5573     case CODE_LABEL:
5574     case PC:
5575     case CC0:
5576     case RETURN:
5577     case SIMPLE_RETURN:
5578       return orig;
5579     case CLOBBER:
5580       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5581          clobbers or clobbers of hard registers that originated as pseudos.
5582          This is needed to allow safe register renaming.  */
5583       if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5584 	  && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5585 	return orig;
5586       break;
5587 
5588     case SCRATCH:
5589       for (i = 0; i < copy_insn_n_scratches; i++)
5590 	if (copy_insn_scratch_in[i] == orig)
5591 	  return copy_insn_scratch_out[i];
5592       break;
5593 
5594     case CONST:
5595       if (shared_const_p (orig))
5596 	return orig;
5597       break;
5598 
5599       /* A MEM with a constant address is not sharable.  The problem is that
5600 	 the constant address may need to be reloaded.  If the mem is shared,
5601 	 then reloading one copy of this mem will cause all copies to appear
5602 	 to have been reloaded.  */
5603 
5604     default:
5605       break;
5606     }
5607 
5608   /* Copy the various flags, fields, and other information.  We assume
5609      that all fields need copying, and then clear the fields that should
5610      not be copied.  That is the sensible default behavior, and forces
5611      us to explicitly document why we are *not* copying a flag.  */
5612   copy = shallow_copy_rtx (orig);
5613 
5614   /* We do not copy the USED flag, which is used as a mark bit during
5615      walks over the RTL.  */
5616   RTX_FLAG (copy, used) = 0;
5617 
5618   /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
5619   if (INSN_P (orig))
5620     {
5621       RTX_FLAG (copy, jump) = 0;
5622       RTX_FLAG (copy, call) = 0;
5623       RTX_FLAG (copy, frame_related) = 0;
5624     }
5625 
5626   format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5627 
5628   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5629     switch (*format_ptr++)
5630       {
5631       case 'e':
5632 	if (XEXP (orig, i) != NULL)
5633 	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5634 	break;
5635 
5636       case 'E':
5637       case 'V':
5638 	if (XVEC (orig, i) == orig_asm_constraints_vector)
5639 	  XVEC (copy, i) = copy_asm_constraints_vector;
5640 	else if (XVEC (orig, i) == orig_asm_operands_vector)
5641 	  XVEC (copy, i) = copy_asm_operands_vector;
5642 	else if (XVEC (orig, i) != NULL)
5643 	  {
5644 	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5645 	    for (j = 0; j < XVECLEN (copy, i); j++)
5646 	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5647 	  }
5648 	break;
5649 
5650       case 't':
5651       case 'w':
5652       case 'i':
5653       case 's':
5654       case 'S':
5655       case 'u':
5656       case '0':
5657 	/* These are left unchanged.  */
5658 	break;
5659 
5660       default:
5661 	gcc_unreachable ();
5662       }
5663 
5664   if (code == SCRATCH)
5665     {
5666       i = copy_insn_n_scratches++;
5667       gcc_assert (i < MAX_RECOG_OPERANDS);
5668       copy_insn_scratch_in[i] = orig;
5669       copy_insn_scratch_out[i] = copy;
5670     }
5671   else if (code == ASM_OPERANDS)
5672     {
5673       orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5674       copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5675       orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5676       copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5677     }
5678 
5679   return copy;
5680 }
5681 
5682 /* Create a new copy of an rtx.
5683    This function differs from copy_rtx in that it handles SCRATCHes and
5684    ASM_OPERANDs properly.
5685    INSN doesn't really have to be a full INSN; it could be just the
5686    pattern.  */
5687 rtx
5688 copy_insn (rtx insn)
5689 {
5690   copy_insn_n_scratches = 0;
5691   orig_asm_operands_vector = 0;
5692   orig_asm_constraints_vector = 0;
5693   copy_asm_operands_vector = 0;
5694   copy_asm_constraints_vector = 0;
5695   return copy_insn_1 (insn);
5696 }
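
/* A hedged sketch of the recipe described above copy_insn_1 (editorial
   addition): a pass duplicating an insn together with a REG_NOTE whose
   contents mention a SCRATCH might do

     rtx new_pat = copy_insn (PATTERN (insn));
     rtx new_note = copy_insn_1 (XEXP (note, 0));

   so that a SCRATCH shared between the pattern and the note maps to the
   same fresh SCRATCH in both copies; insn and note are hypothetical
   locals.  */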
5697 
5698 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5699    on the assumption that INSN itself remains in its original place.  */
5700 
5701 rtx_insn *
5702 copy_delay_slot_insn (rtx_insn *insn)
5703 {
5704   /* Copy INSN with its rtx_code, all its notes, location etc.  */
5705   insn = as_a <rtx_insn *> (copy_rtx (insn));
5706   INSN_UID (insn) = cur_insn_uid++;
5707   return insn;
5708 }
5709 
5710 /* Initialize data structures and variables in this file
5711    before generating rtl for each function.  */
5712 
5713 void
5714 init_emit (void)
5715 {
5716   set_first_insn (NULL);
5717   set_last_insn (NULL);
5718   if (MIN_NONDEBUG_INSN_UID)
5719     cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5720   else
5721     cur_insn_uid = 1;
5722   cur_debug_insn_uid = 1;
5723   reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5724   first_label_num = label_num;
5725   seq_stack = NULL;
5726 
5727   /* Init the tables that describe all the pseudo regs.  */
5728 
5729   crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5730 
5731   crtl->emit.regno_pointer_align
5732     = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5733 
5734   regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5735 
5736   /* Put copies of all the hard registers into regno_reg_rtx.  */
5737   memcpy (regno_reg_rtx,
5738 	  initial_regno_reg_rtx,
5739 	  FIRST_PSEUDO_REGISTER * sizeof (rtx));
5740 
5741   /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
5742   init_virtual_regs ();
5743 
5744   /* Indicate that the virtual registers and stack locations are
5745      all pointers.  */
5746   REG_POINTER (stack_pointer_rtx) = 1;
5747   REG_POINTER (frame_pointer_rtx) = 1;
5748   REG_POINTER (hard_frame_pointer_rtx) = 1;
5749   REG_POINTER (arg_pointer_rtx) = 1;
5750 
5751   REG_POINTER (virtual_incoming_args_rtx) = 1;
5752   REG_POINTER (virtual_stack_vars_rtx) = 1;
5753   REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5754   REG_POINTER (virtual_outgoing_args_rtx) = 1;
5755   REG_POINTER (virtual_cfa_rtx) = 1;
5756 
5757 #ifdef STACK_BOUNDARY
5758   REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5759   REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5760   REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5761   REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5762 
5763   REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5764   REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5765   REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5766   REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5767   REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5768 #endif
5769 
5770 #ifdef INIT_EXPANDERS
5771   INIT_EXPANDERS;
5772 #endif
5773 }
5774 
5775 /* Generate a vector constant for mode MODE and constant value CONSTANT.  */
5776 
5777 static rtx
5778 gen_const_vector (machine_mode mode, int constant)
5779 {
5780   rtx tem;
5781   rtvec v;
5782   int units, i;
5783   machine_mode inner;
5784 
5785   units = GET_MODE_NUNITS (mode);
5786   inner = GET_MODE_INNER (mode);
5787 
5788   gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5789 
5790   v = rtvec_alloc (units);
5791 
5792   /* We need to call this function after we set the scalar const_tiny_rtx
5793      entries.  */
5794   gcc_assert (const_tiny_rtx[constant][(int) inner]);
5795 
5796   for (i = 0; i < units; ++i)
5797     RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5798 
5799   tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5800   return tem;
5801 }
5802 
5803 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared zero
5804    vector when all elements are zero, and the one vector when all are one.  */
5805 rtx
5806 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
5807 {
5808   machine_mode inner = GET_MODE_INNER (mode);
5809   int nunits = GET_MODE_NUNITS (mode);
5810   rtx x;
5811   int i;
5812 
5813   /* Check to see if all of the elements have the same value.  */
5814   x = RTVEC_ELT (v, nunits - 1);
5815   for (i = nunits - 2; i >= 0; i--)
5816     if (RTVEC_ELT (v, i) != x)
5817       break;
5818 
5819   /* If the values are all the same, check to see if we can use one of the
5820      standard constant vectors.  */
5821   if (i == -1)
5822     {
5823       if (x == CONST0_RTX (inner))
5824 	return CONST0_RTX (mode);
5825       else if (x == CONST1_RTX (inner))
5826 	return CONST1_RTX (mode);
5827       else if (x == CONSTM1_RTX (inner))
5828 	return CONSTM1_RTX (mode);
5829     }
5830 
5831   return gen_rtx_raw_CONST_VECTOR (mode, v);
5832 }
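
/* Illustrative sketch (editorial addition): building an all-zero vector
   through this function returns the shared CONST0_RTX of the mode rather
   than a fresh CONST_VECTOR, assuming the target provides V4SImode:

     rtvec v = rtvec_alloc (4);
     for (int k = 0; k < 4; k++)
       RTVEC_ELT (v, k) = const0_rtx;
     rtx zero_vec = gen_rtx_CONST_VECTOR (V4SImode, v);
     gcc_assert (zero_vec == CONST0_RTX (V4SImode));  */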
5833 
5834 /* Initialize global register information required by all functions.  */
5835 
5836 void
5837 init_emit_regs (void)
5838 {
5839   int i;
5840   machine_mode mode;
5841   mem_attrs *attrs;
5842 
5843   /* Reset register attributes.  */
5844   reg_attrs_htab->empty ();
5845 
5846   /* We need reg_raw_mode, so initialize the modes now.  */
5847   init_reg_modes_target ();
5848 
5849   /* Assign register numbers to the globally defined register rtx.  */
5850   stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5851   frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5852   hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5853   arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5854   virtual_incoming_args_rtx =
5855     gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5856   virtual_stack_vars_rtx =
5857     gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5858   virtual_stack_dynamic_rtx =
5859     gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5860   virtual_outgoing_args_rtx =
5861     gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5862   virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5863   virtual_preferred_stack_boundary_rtx =
5864     gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5865 
5866   /* Initialize RTL for commonly used hard registers.  These are
5867      copied into regno_reg_rtx as we begin to compile each function.  */
5868   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5869     initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5870 
5871 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5872   return_address_pointer_rtx
5873     = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5874 #endif
5875 
5876   pic_offset_table_rtx = NULL_RTX;
5877   if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5878     pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5879 
5880   for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5881     {
5882       mode = (machine_mode) i;
5883       attrs = ggc_cleared_alloc<mem_attrs> ();
5884       attrs->align = BITS_PER_UNIT;
5885       attrs->addrspace = ADDR_SPACE_GENERIC;
5886       if (mode != BLKmode)
5887 	{
5888 	  attrs->size_known_p = true;
5889 	  attrs->size = GET_MODE_SIZE (mode);
5890 	  if (STRICT_ALIGNMENT)
5891 	    attrs->align = GET_MODE_ALIGNMENT (mode);
5892 	}
5893       mode_mem_attrs[i] = attrs;
5894     }
5895 }
5896 
5897 /* Initialize global machine_mode variables.  */
5898 
5899 void
5900 init_derived_machine_modes (void)
5901 {
5902   byte_mode = VOIDmode;
5903   word_mode = VOIDmode;
5904 
5905   for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5906        mode != VOIDmode;
5907        mode = GET_MODE_WIDER_MODE (mode))
5908     {
5909       if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5910 	  && byte_mode == VOIDmode)
5911 	byte_mode = mode;
5912 
5913       if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5914 	  && word_mode == VOIDmode)
5915 	word_mode = mode;
5916     }
5917 
5918   ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5919 }
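
/* Worked example (editorial note): on a typical 32-bit target with
   BITS_PER_UNIT == 8 and BITS_PER_WORD == 32, the loop above picks
   byte_mode == QImode and word_mode == SImode, and with POINTER_SIZE == 32
   the final call yields ptr_mode == SImode, which matches Pmode unless the
   target defines a wider Pmode.  */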
5920 
5921 /* Create some permanent unique rtl objects shared between all functions.  */
5922 
5923 void
5924 init_emit_once (void)
5925 {
5926   int i;
5927   machine_mode mode;
5928   machine_mode double_mode;
5929 
5930   /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
5931      CONST_FIXED, and memory attribute hash tables.  */
5932   const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
5933 
5934 #if TARGET_SUPPORTS_WIDE_INT
5935   const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
5936 #endif
5937   const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
5938 
5939   const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
5940 
5941   reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
5942 
5943 #ifdef INIT_EXPANDERS
5944   /* This is to initialize {init|mark|free}_machine_status before the first
5945      call to push_function_context_to.  This is needed by the Chill front
5946      end which calls push_function_context_to before the first call to
5947      init_function_start.  */
5948   INIT_EXPANDERS;
5949 #endif
5950 
5951   /* Create the unique rtx's for certain rtx codes and operand values.  */
5952 
5953   /* Don't use gen_rtx_CONST_INT here, since in this case it would itself
5954      try to use these very variables.  */
5955   for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5956     const_int_rtx[i + MAX_SAVED_CONST_INT] =
5957       gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5958 
5959   if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5960       && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5961     const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5962   else
5963     const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5964 
5965   double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
5966 
5967   real_from_integer (&dconst0, double_mode, 0, SIGNED);
5968   real_from_integer (&dconst1, double_mode, 1, SIGNED);
5969   real_from_integer (&dconst2, double_mode, 2, SIGNED);
5970 
5971   dconstm1 = dconst1;
5972   dconstm1.sign = 1;
5973 
5974   dconsthalf = dconst1;
5975   SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5976 
5977   for (i = 0; i < 3; i++)
5978     {
5979       const REAL_VALUE_TYPE *const r =
5980 	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5981 
5982       for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5983 	   mode != VOIDmode;
5984 	   mode = GET_MODE_WIDER_MODE (mode))
5985 	const_tiny_rtx[i][(int) mode] =
5986 	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5987 
5988       for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5989 	   mode != VOIDmode;
5990 	   mode = GET_MODE_WIDER_MODE (mode))
5991 	const_tiny_rtx[i][(int) mode] =
5992 	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5993 
5994       const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5995 
5996       for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5997 	   mode != VOIDmode;
5998 	   mode = GET_MODE_WIDER_MODE (mode))
5999 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6000 
6001       for (mode = MIN_MODE_PARTIAL_INT;
6002 	   mode <= MAX_MODE_PARTIAL_INT;
6003 	   mode = (machine_mode)((int)(mode) + 1))
6004 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6005     }
6006 
6007   const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6008 
6009   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6010        mode != VOIDmode;
6011        mode = GET_MODE_WIDER_MODE (mode))
6012     const_tiny_rtx[3][(int) mode] = constm1_rtx;
6013 
6014   for (mode = MIN_MODE_PARTIAL_INT;
6015        mode <= MAX_MODE_PARTIAL_INT;
6016        mode = (machine_mode)((int)(mode) + 1))
6017     const_tiny_rtx[3][(int) mode] = constm1_rtx;
6018 
6019   for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
6020        mode != VOIDmode;
6021        mode = GET_MODE_WIDER_MODE (mode))
6022     {
6023       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6024       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6025     }
6026 
6027   for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
6028        mode != VOIDmode;
6029        mode = GET_MODE_WIDER_MODE (mode))
6030     {
6031       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6032       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6033     }
6034 
6035   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
6036        mode != VOIDmode;
6037        mode = GET_MODE_WIDER_MODE (mode))
6038     {
6039       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6040       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6041       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6042     }
6043 
6044   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
6045        mode != VOIDmode;
6046        mode = GET_MODE_WIDER_MODE (mode))
6047     {
6048       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6049       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6050     }
6051 
6052   for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
6053        mode != VOIDmode;
6054        mode = GET_MODE_WIDER_MODE (mode))
6055     {
6056       FCONST0 (mode).data.high = 0;
6057       FCONST0 (mode).data.low = 0;
6058       FCONST0 (mode).mode = mode;
6059       const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6060 				      FCONST0 (mode), mode);
6061     }
6062 
6063   for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
6064        mode != VOIDmode;
6065        mode = GET_MODE_WIDER_MODE (mode))
6066     {
6067       FCONST0 (mode).data.high = 0;
6068       FCONST0 (mode).data.low = 0;
6069       FCONST0 (mode).mode = mode;
6070       const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6071 				      FCONST0 (mode), mode);
6072     }
6073 
6074   for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
6075        mode != VOIDmode;
6076        mode = GET_MODE_WIDER_MODE (mode))
6077     {
6078       FCONST0 (mode).data.high = 0;
6079       FCONST0 (mode).data.low = 0;
6080       FCONST0 (mode).mode = mode;
6081       const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6082 				      FCONST0 (mode), mode);
6083 
6084       /* We store the value 1.  */
6085       FCONST1 (mode).data.high = 0;
6086       FCONST1 (mode).data.low = 0;
6087       FCONST1 (mode).mode = mode;
6088       FCONST1 (mode).data
6089 	= double_int_one.lshift (GET_MODE_FBIT (mode),
6090 				 HOST_BITS_PER_DOUBLE_INT,
6091 				 SIGNED_FIXED_POINT_MODE_P (mode));
6092       const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6093 				      FCONST1 (mode), mode);
6094     }
6095 
6096   for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
6097        mode != VOIDmode;
6098        mode = GET_MODE_WIDER_MODE (mode))
6099     {
6100       FCONST0 (mode).data.high = 0;
6101       FCONST0 (mode).data.low = 0;
6102       FCONST0 (mode).mode = mode;
6103       const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6104 				      FCONST0 (mode), mode);
6105 
6106       /* We store the value 1.  */
6107       FCONST1 (mode).data.high = 0;
6108       FCONST1 (mode).data.low = 0;
6109       FCONST1 (mode).mode = mode;
6110       FCONST1 (mode).data
6111 	= double_int_one.lshift (GET_MODE_FBIT (mode),
6112 				 HOST_BITS_PER_DOUBLE_INT,
6113 				 SIGNED_FIXED_POINT_MODE_P (mode));
6114       const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
6115 				      FCONST1 (mode), mode);
6116     }
6117 
6118   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
6119        mode != VOIDmode;
6120        mode = GET_MODE_WIDER_MODE (mode))
6121     {
6122       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6123     }
6124 
6125   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
6126        mode != VOIDmode;
6127        mode = GET_MODE_WIDER_MODE (mode))
6128     {
6129       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6130     }
6131 
6132   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
6133        mode != VOIDmode;
6134        mode = GET_MODE_WIDER_MODE (mode))
6135     {
6136       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6137       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6138     }
6139 
6140   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
6141        mode != VOIDmode;
6142        mode = GET_MODE_WIDER_MODE (mode))
6143     {
6144       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6145       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6146     }
6147 
6148   for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6149     if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6150       const_tiny_rtx[0][i] = const0_rtx;
6151 
6152   const_tiny_rtx[0][(int) BImode] = const0_rtx;
6153   if (STORE_FLAG_VALUE == 1)
6154     const_tiny_rtx[1][(int) BImode] = const1_rtx;
6155 
6156   for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
6157        mode != VOIDmode;
6158        mode = GET_MODE_WIDER_MODE (mode))
6159     {
6160       wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
6161       const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
6162     }
6163 
6164   pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6165   ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6166   simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6167   cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6168 }
6169 
6170 /* Produce an exact duplicate of insn INSN after AFTER.
6171    Take care to update libcall regions if present.  */
6172 
6173 rtx_insn *
6174 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6175 {
6176   rtx_insn *new_rtx;
6177   rtx link;
6178 
6179   switch (GET_CODE (insn))
6180     {
6181     case INSN:
6182       new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6183       break;
6184 
6185     case JUMP_INSN:
6186       new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6187       CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6188       break;
6189 
6190     case DEBUG_INSN:
6191       new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6192       break;
6193 
6194     case CALL_INSN:
6195       new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6196       if (CALL_INSN_FUNCTION_USAGE (insn))
6197 	CALL_INSN_FUNCTION_USAGE (new_rtx)
6198 	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6199       SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6200       RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6201       RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6202       RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6203 	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6204       break;
6205 
6206     default:
6207       gcc_unreachable ();
6208     }
6209 
6210   /* Update LABEL_NUSES.  */
6211   mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6212 
6213   INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6214 
6215   /* If the old insn is frame related, then so is the new one.  This is
6216      primarily needed for IA-64 unwind info which marks epilogue insns,
6217      which may be duplicated by the basic block reordering code.  */
6218   RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6219 
6220   /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6221      will make them.  REG_LABEL_TARGETs are created there too, but are
6222      supposed to be sticky, so we copy them.  */
6223   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6224     if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6225       {
6226 	if (GET_CODE (link) == EXPR_LIST)
6227 	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
6228 			copy_insn_1 (XEXP (link, 0)));
6229 	else
6230 	  add_shallow_copy_of_reg_note (new_rtx, link);
6231       }
6232 
6233   INSN_CODE (new_rtx) = INSN_CODE (insn);
6234   return new_rtx;
6235 }
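
/* Hedged usage sketch (editorial addition): basic-block duplication code
   can clone a complete insn, notes included, with

     rtx_insn *copy = emit_copy_of_insn_after (insn, after);

   where insn and after are hypothetical insns already in the chain; the
   copy inherits the location, frame-related state, and call flags set up
   above.  */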
6236 
6237 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6238 rtx
6239 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6240 {
6241   if (hard_reg_clobbers[mode][regno])
6242     return hard_reg_clobbers[mode][regno];
6243   else
6244     return (hard_reg_clobbers[mode][regno] =
6245 	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6246 }
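
/* Illustrative sketch (editorial addition): because the table above caches
   one CLOBBER per (mode, regno) pair, repeated requests yield the same
   shared rtx, assuming register 0 is a valid hard register for word_mode:

     rtx c1 = gen_hard_reg_clobber (word_mode, 0);
     rtx c2 = gen_hard_reg_clobber (word_mode, 0);
     gcc_assert (c1 == c2);  */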
6247 
6248 location_t prologue_location;
6249 location_t epilogue_location;
6250 
6251 /* Hold the current and last location information, so that the data
6252    structures are built lazily, only when instructions at a given place
6253    are actually needed.  */
6254 static location_t curr_location;
6255 
6256 /* Allocate the insn location data structure.  */
6257 void
6258 insn_locations_init (void)
6259 {
6260   prologue_location = epilogue_location = 0;
6261   curr_location = UNKNOWN_LOCATION;
6262 }
6263 
6264 /* At the end of emit stage, clear current location.  */
6265 void
6266 insn_locations_finalize (void)
6267 {
6268   epilogue_location = curr_location;
6269   curr_location = UNKNOWN_LOCATION;
6270 }
6271 
6272 /* Set current location.  */
6273 void
6274 set_curr_insn_location (location_t location)
6275 {
6276   curr_location = location;
6277 }
6278 
6279 /* Get current location.  */
6280 location_t
6281 curr_insn_location (void)
6282 {
6283   return curr_location;
6284 }
6285 
6286 /* Return the lexical scope block that INSN belongs to.  */
6287 tree
6288 insn_scope (const rtx_insn *insn)
6289 {
6290   return LOCATION_BLOCK (INSN_LOCATION (insn));
6291 }
6292 
6293 /* Return line number of the statement that produced this insn.  */
6294 int
6295 insn_line (const rtx_insn *insn)
6296 {
6297   return LOCATION_LINE (INSN_LOCATION (insn));
6298 }
6299 
6300 /* Return source file of the statement that produced this insn.  */
6301 const char *
6302 insn_file (const rtx_insn *insn)
6303 {
6304   return LOCATION_FILE (INSN_LOCATION (insn));
6305 }
6306 
6307 /* Return expanded location of the statement that produced this insn.  */
6308 expanded_location
6309 insn_location (const rtx_insn *insn)
6310 {
6311   return expand_location (INSN_LOCATION (insn));
6312 }
6313 
6314 /* Return true if memory model MODEL requires a pre-operation (release-style)
6315    barrier or a post-operation (acquire-style) barrier.  While not universal,
6316    this function matches the behavior of several targets.  */
6317 
6318 bool
6319 need_atomic_barrier_p (enum memmodel model, bool pre)
6320 {
6321   switch (model & MEMMODEL_MASK)
6322     {
6323     case MEMMODEL_RELAXED:
6324     case MEMMODEL_CONSUME:
6325       return false;
6326     case MEMMODEL_RELEASE:
6327     case MEMMODEL_SYNC_RELEASE:
6328       return pre;
6329     case MEMMODEL_ACQUIRE:
6330     case MEMMODEL_SYNC_ACQUIRE:
6331       return !pre;
6332     case MEMMODEL_ACQ_REL:
6333     case MEMMODEL_SEQ_CST:
6334     case MEMMODEL_SYNC_SEQ_CST:
6335       return true;
6336     default:
6337       gcc_unreachable ();
6338     }
6339 }
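
/* Hedged usage sketch (editorial addition): a target expander for an
   atomic store might consult the predicate above as

     if (need_atomic_barrier_p (model, true))
       expand_mem_thread_fence (model);
     emit_move_insn (mem, val);
     if (need_atomic_barrier_p (model, false))
       expand_mem_thread_fence (model);

   where mem, val, and model are hypothetical operands; a MEMMODEL_RELEASE
   store then emits only the pre-operation fence, while MEMMODEL_SEQ_CST
   emits both.  */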
6340 
6341 #include "gt-emit-rtl.h"
6342