xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/emit-rtl.c (revision 8feb0f0b7eaff0608f8350bbfa3098827b4bb91b)
1 /* Emit RTL for the GCC expander.
2    Copyright (C) 1987-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 
21 /* Middle-to-low level generation of rtx code and insns.
22 
23    This file contains support functions for creating rtl expressions
24    and manipulating them in the doubly-linked chain of insns.
25 
26    The patterns of the insns are created by machine-dependent
27    routines in insn-emit.c, which is generated automatically from
28    the machine description.  These routines make the individual rtx's
29    of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
30    which are automatically generated from rtl.def; what is machine
31    dependent is the kind of rtx's they make and what arguments they
32    use.  */
33 
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "memmodel.h"
38 #include "backend.h"
39 #include "target.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "df.h"
43 #include "tm_p.h"
44 #include "stringpool.h"
45 #include "insn-config.h"
46 #include "regs.h"
47 #include "emit-rtl.h"
48 #include "recog.h"
49 #include "diagnostic-core.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "varasm.h"
53 #include "cfgrtl.h"
54 #include "tree-eh.h"
55 #include "explow.h"
56 #include "expr.h"
57 #include "builtins.h"
58 #include "rtl-iter.h"
59 #include "stor-layout.h"
60 #include "opts.h"
61 #include "predict.h"
62 #include "rtx-vector-builder.h"
63 #include "gimple.h"
64 #include "gimple-ssa.h"
65 #include "gimplify.h"
66 
67 struct target_rtl default_target_rtl;
68 #if SWITCHABLE_TARGET
69 struct target_rtl *this_target_rtl = &default_target_rtl;
70 #endif
71 
72 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
73 
74 /* Commonly used modes.  */
75 
76 scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
77 scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
78 scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
79 
80 /* Datastructures maintained for currently processed function in RTL form.  */
81 
82 struct rtl_data x_rtl;
83 
84 /* Indexed by pseudo register number, gives the rtx for that pseudo.
85    Allocated in parallel with regno_pointer_align.
86    FIXME: We could put it into emit_status struct, but gengtype is not able to deal
87    with length attribute nested in top level structures.  */
88 
89 rtx * regno_reg_rtx;
90 
91 /* This is *not* reset after each function.  It gives each CODE_LABEL
92    in the entire compilation a unique label number.  */
93 
94 static GTY(()) int label_num = 1;
95 
96 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
97    the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
98    record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
99    is set only for MODE_INT and MODE_VECTOR_INT modes.  */
100 
101 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
102 
103 rtx const_true_rtx;
104 
105 REAL_VALUE_TYPE dconst0;
106 REAL_VALUE_TYPE dconst1;
107 REAL_VALUE_TYPE dconst2;
108 REAL_VALUE_TYPE dconstm1;
109 REAL_VALUE_TYPE dconsthalf;
110 
111 /* Record fixed-point constant 0 and 1.  */
112 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
113 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
114 
115 /* We make one copy of (const_int C) where C is in
116    [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
117    to save space during the compilation and simplify comparisons of
118    integers.  */
119 
120 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
121 
122 /* Standard pieces of rtx, to be substituted directly into things.  */
123 rtx pc_rtx;
124 rtx ret_rtx;
125 rtx simple_return_rtx;
126 rtx cc0_rtx;
127 
128 /* Marker used for denoting an INSN, which should never be accessed (i.e.,
129    this pointer should normally never be dereferenced), but is required to be
130    distinct from NULL_RTX.  Currently used by peephole2 pass.  */
131 rtx_insn *invalid_insn_rtx;
132 
133 /* A hash table storing CONST_INTs whose absolute value is greater
134    than MAX_SAVED_CONST_INT.  */
135 
136 struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
137 {
138   typedef HOST_WIDE_INT compare_type;
139 
140   static hashval_t hash (rtx i);
141   static bool equal (rtx i, HOST_WIDE_INT h);
142 };
143 
144 static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
145 
146 struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
147 {
148   static hashval_t hash (rtx x);
149   static bool equal (rtx x, rtx y);
150 };
151 
152 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
153 
154 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
155 {
156   typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
157 
158   static hashval_t hash (rtx x);
159   static bool equal (rtx x, const compare_type &y);
160 };
161 
162 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
163 
164 /* A hash table storing register attribute structures.  */
165 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
166 {
167   static hashval_t hash (reg_attrs *x);
168   static bool equal (reg_attrs *a, reg_attrs *b);
169 };
170 
171 static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
172 
173 /* A hash table storing all CONST_DOUBLEs.  */
174 struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
175 {
176   static hashval_t hash (rtx x);
177   static bool equal (rtx x, rtx y);
178 };
179 
180 static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
181 
182 /* A hash table storing all CONST_FIXEDs.  */
183 struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
184 {
185   static hashval_t hash (rtx x);
186   static bool equal (rtx x, rtx y);
187 };
188 
189 static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
190 
191 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
192 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
193 #define first_label_num (crtl->emit.x_first_label_num)
194 
195 static void set_used_decls (tree);
196 static void mark_label_nuses (rtx);
197 #if TARGET_SUPPORTS_WIDE_INT
198 static rtx lookup_const_wide_int (rtx);
199 #endif
200 static rtx lookup_const_double (rtx);
201 static rtx lookup_const_fixed (rtx);
202 static rtx gen_const_vector (machine_mode, int);
203 static void copy_rtx_if_shared_1 (rtx *orig);
204 
205 /* Probability of the conditional branch currently proceeded by try_split.  */
206 profile_probability split_branch_probability;
207 
208 /* Returns a hash code for X (which is a really a CONST_INT).  */
209 
210 hashval_t
hash(rtx x)211 const_int_hasher::hash (rtx x)
212 {
213   return (hashval_t) INTVAL (x);
214 }
215 
216 /* Returns nonzero if the value represented by X (which is really a
217    CONST_INT) is the same as that given by Y (which is really a
218    HOST_WIDE_INT *).  */
219 
220 bool
equal(rtx x,HOST_WIDE_INT y)221 const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
222 {
223   return (INTVAL (x) == y);
224 }
225 
226 #if TARGET_SUPPORTS_WIDE_INT
227 /* Returns a hash code for X (which is a really a CONST_WIDE_INT).  */
228 
229 hashval_t
hash(rtx x)230 const_wide_int_hasher::hash (rtx x)
231 {
232   int i;
233   unsigned HOST_WIDE_INT hash = 0;
234   const_rtx xr = x;
235 
236   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
237     hash += CONST_WIDE_INT_ELT (xr, i);
238 
239   return (hashval_t) hash;
240 }
241 
242 /* Returns nonzero if the value represented by X (which is really a
243    CONST_WIDE_INT) is the same as that given by Y (which is really a
244    CONST_WIDE_INT).  */
245 
246 bool
equal(rtx x,rtx y)247 const_wide_int_hasher::equal (rtx x, rtx y)
248 {
249   int i;
250   const_rtx xr = x;
251   const_rtx yr = y;
252   if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
253     return false;
254 
255   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
256     if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
257       return false;
258 
259   return true;
260 }
261 #endif
262 
263 /* Returns a hash code for CONST_POLY_INT X.  */
264 
265 hashval_t
hash(rtx x)266 const_poly_int_hasher::hash (rtx x)
267 {
268   inchash::hash h;
269   h.add_int (GET_MODE (x));
270   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
271     h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
272   return h.end ();
273 }
274 
275 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */
276 
277 bool
equal(rtx x,const compare_type & y)278 const_poly_int_hasher::equal (rtx x, const compare_type &y)
279 {
280   if (GET_MODE (x) != y.first)
281     return false;
282   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
283     if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
284       return false;
285   return true;
286 }
287 
288 /* Returns a hash code for X (which is really a CONST_DOUBLE).  */
289 hashval_t
hash(rtx x)290 const_double_hasher::hash (rtx x)
291 {
292   const_rtx const value = x;
293   hashval_t h;
294 
295   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
296     h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
297   else
298     {
299       h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
300       /* MODE is used in the comparison, so it should be in the hash.  */
301       h ^= GET_MODE (value);
302     }
303   return h;
304 }
305 
306 /* Returns nonzero if the value represented by X (really a ...)
307    is the same as that represented by Y (really a ...) */
308 bool
equal(rtx x,rtx y)309 const_double_hasher::equal (rtx x, rtx y)
310 {
311   const_rtx const a = x, b = y;
312 
313   if (GET_MODE (a) != GET_MODE (b))
314     return 0;
315   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
316     return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
317 	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
318   else
319     return real_identical (CONST_DOUBLE_REAL_VALUE (a),
320 			   CONST_DOUBLE_REAL_VALUE (b));
321 }
322 
323 /* Returns a hash code for X (which is really a CONST_FIXED).  */
324 
325 hashval_t
hash(rtx x)326 const_fixed_hasher::hash (rtx x)
327 {
328   const_rtx const value = x;
329   hashval_t h;
330 
331   h = fixed_hash (CONST_FIXED_VALUE (value));
332   /* MODE is used in the comparison, so it should be in the hash.  */
333   h ^= GET_MODE (value);
334   return h;
335 }
336 
337 /* Returns nonzero if the value represented by X is the same as that
338    represented by Y.  */
339 
340 bool
equal(rtx x,rtx y)341 const_fixed_hasher::equal (rtx x, rtx y)
342 {
343   const_rtx const a = x, b = y;
344 
345   if (GET_MODE (a) != GET_MODE (b))
346     return 0;
347   return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
348 }
349 
350 /* Return true if the given memory attributes are equal.  */
351 
352 bool
mem_attrs_eq_p(const class mem_attrs * p,const class mem_attrs * q)353 mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
354 {
355   if (p == q)
356     return true;
357   if (!p || !q)
358     return false;
359   return (p->alias == q->alias
360 	  && p->offset_known_p == q->offset_known_p
361 	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
362 	  && p->size_known_p == q->size_known_p
363 	  && (!p->size_known_p || known_eq (p->size, q->size))
364 	  && p->align == q->align
365 	  && p->addrspace == q->addrspace
366 	  && (p->expr == q->expr
367 	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
368 		  && operand_equal_p (p->expr, q->expr, 0))));
369 }
370 
371 /* Set MEM's memory attributes so that they are the same as ATTRS.  */
372 
373 static void
set_mem_attrs(rtx mem,mem_attrs * attrs)374 set_mem_attrs (rtx mem, mem_attrs *attrs)
375 {
376   /* If everything is the default, we can just clear the attributes.  */
377   if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
378     {
379       MEM_ATTRS (mem) = 0;
380       return;
381     }
382 
383   if (!MEM_ATTRS (mem)
384       || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
385     {
386       MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
387       memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
388     }
389 }
390 
391 /* Returns a hash code for X (which is a really a reg_attrs *).  */
392 
393 hashval_t
hash(reg_attrs * x)394 reg_attr_hasher::hash (reg_attrs *x)
395 {
396   const reg_attrs *const p = x;
397 
398   inchash::hash h;
399   h.add_ptr (p->decl);
400   h.add_poly_hwi (p->offset);
401   return h.end ();
402 }
403 
404 /* Returns nonzero if the value represented by X  is the same as that given by
405    Y.  */
406 
407 bool
equal(reg_attrs * x,reg_attrs * y)408 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
409 {
410   const reg_attrs *const p = x;
411   const reg_attrs *const q = y;
412 
413   return (p->decl == q->decl && known_eq (p->offset, q->offset));
414 }
415 /* Allocate a new reg_attrs structure and insert it into the hash table if
416    one identical to it is not already in the table.  We are doing this for
417    MEM of mode MODE.  */
418 
419 static reg_attrs *
get_reg_attrs(tree decl,poly_int64 offset)420 get_reg_attrs (tree decl, poly_int64 offset)
421 {
422   reg_attrs attrs;
423 
424   /* If everything is the default, we can just return zero.  */
425   if (decl == 0 && known_eq (offset, 0))
426     return 0;
427 
428   attrs.decl = decl;
429   attrs.offset = offset;
430 
431   reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
432   if (*slot == 0)
433     {
434       *slot = ggc_alloc<reg_attrs> ();
435       memcpy (*slot, &attrs, sizeof (reg_attrs));
436     }
437 
438   return *slot;
439 }
440 
441 
442 #if !HAVE_blockage
443 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
444    and to block register equivalences to be seen across this insn.  */
445 
446 rtx
gen_blockage(void)447 gen_blockage (void)
448 {
449   rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
450   MEM_VOLATILE_P (x) = true;
451   return x;
452 }
453 #endif
454 
455 
456 /* Set the mode and register number of X to MODE and REGNO.  */
457 
458 void
set_mode_and_regno(rtx x,machine_mode mode,unsigned int regno)459 set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
460 {
461   unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
462 			? hard_regno_nregs (regno, mode)
463 			: 1);
464   PUT_MODE_RAW (x, mode);
465   set_regno_raw (x, regno, nregs);
466 }
467 
468 /* Initialize a fresh REG rtx with mode MODE and register REGNO.  */
469 
470 rtx
init_raw_REG(rtx x,machine_mode mode,unsigned int regno)471 init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
472 {
473   set_mode_and_regno (x, mode, regno);
474   REG_ATTRS (x) = NULL;
475   ORIGINAL_REGNO (x) = regno;
476   return x;
477 }
478 
479 /* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
480    don't attempt to share with the various global pieces of rtl (such as
481    frame_pointer_rtx).  */
482 
483 rtx
gen_raw_REG(machine_mode mode,unsigned int regno)484 gen_raw_REG (machine_mode mode, unsigned int regno)
485 {
486   rtx x = rtx_alloc (REG MEM_STAT_INFO);
487   init_raw_REG (x, mode, regno);
488   return x;
489 }
490 
491 /* There are some RTL codes that require special attention; the generation
492    functions do the raw handling.  If you add to this list, modify
493    special_rtx in gengenrtl.c as well.  */
494 
495 rtx_expr_list *
gen_rtx_EXPR_LIST(machine_mode mode,rtx expr,rtx expr_list)496 gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
497 {
498   return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
499 						 expr_list));
500 }
501 
502 rtx_insn_list *
gen_rtx_INSN_LIST(machine_mode mode,rtx insn,rtx insn_list)503 gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
504 {
505   return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
506 						 insn_list));
507 }
508 
509 rtx_insn *
gen_rtx_INSN(machine_mode mode,rtx_insn * prev_insn,rtx_insn * next_insn,basic_block bb,rtx pattern,int location,int code,rtx reg_notes)510 gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
511 	      basic_block bb, rtx pattern, int location, int code,
512 	      rtx reg_notes)
513 {
514   return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
515 						 prev_insn, next_insn,
516 						 bb, pattern, location, code,
517 						 reg_notes));
518 }
519 
520 rtx
gen_rtx_CONST_INT(machine_mode mode ATTRIBUTE_UNUSED,HOST_WIDE_INT arg)521 gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
522 {
523   if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
524     return const_int_rtx[arg + MAX_SAVED_CONST_INT];
525 
526 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
527   if (const_true_rtx && arg == STORE_FLAG_VALUE)
528     return const_true_rtx;
529 #endif
530 
531   /* Look up the CONST_INT in the hash table.  */
532   rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
533 						   INSERT);
534   if (*slot == 0)
535     *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
536 
537   return *slot;
538 }
539 
540 rtx
gen_int_mode(poly_int64 c,machine_mode mode)541 gen_int_mode (poly_int64 c, machine_mode mode)
542 {
543   c = trunc_int_for_mode (c, mode);
544   if (c.is_constant ())
545     return GEN_INT (c.coeffs[0]);
546   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
547   return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
548 }
549 
550 /* CONST_DOUBLEs might be created from pairs of integers, or from
551    REAL_VALUE_TYPEs.  Also, their length is known only at run time,
552    so we cannot use gen_rtx_raw_CONST_DOUBLE.  */
553 
554 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
555    hash table.  If so, return its counterpart; otherwise add it
556    to the hash table and return it.  */
557 static rtx
lookup_const_double(rtx real)558 lookup_const_double (rtx real)
559 {
560   rtx *slot = const_double_htab->find_slot (real, INSERT);
561   if (*slot == 0)
562     *slot = real;
563 
564   return *slot;
565 }
566 
567 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
568    VALUE in mode MODE.  */
569 rtx
const_double_from_real_value(REAL_VALUE_TYPE value,machine_mode mode)570 const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
571 {
572   rtx real = rtx_alloc (CONST_DOUBLE);
573   PUT_MODE (real, mode);
574 
575   real->u.rv = value;
576 
577   return lookup_const_double (real);
578 }
579 
580 /* Determine whether FIXED, a CONST_FIXED, already exists in the
581    hash table.  If so, return its counterpart; otherwise add it
582    to the hash table and return it.  */
583 
584 static rtx
lookup_const_fixed(rtx fixed)585 lookup_const_fixed (rtx fixed)
586 {
587   rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
588   if (*slot == 0)
589     *slot = fixed;
590 
591   return *slot;
592 }
593 
594 /* Return a CONST_FIXED rtx for a fixed-point value specified by
595    VALUE in mode MODE.  */
596 
597 rtx
const_fixed_from_fixed_value(FIXED_VALUE_TYPE value,machine_mode mode)598 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
599 {
600   rtx fixed = rtx_alloc (CONST_FIXED);
601   PUT_MODE (fixed, mode);
602 
603   fixed->u.fv = value;
604 
605   return lookup_const_fixed (fixed);
606 }
607 
608 #if TARGET_SUPPORTS_WIDE_INT == 0
609 /* Constructs double_int from rtx CST.  */
610 
611 double_int
rtx_to_double_int(const_rtx cst)612 rtx_to_double_int (const_rtx cst)
613 {
614   double_int r;
615 
616   if (CONST_INT_P (cst))
617       r = double_int::from_shwi (INTVAL (cst));
618   else if (CONST_DOUBLE_AS_INT_P (cst))
619     {
620       r.low = CONST_DOUBLE_LOW (cst);
621       r.high = CONST_DOUBLE_HIGH (cst);
622     }
623   else
624     gcc_unreachable ();
625 
626   return r;
627 }
628 #endif
629 
630 #if TARGET_SUPPORTS_WIDE_INT
631 /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
632    If so, return its counterpart; otherwise add it to the hash table and
633    return it.  */
634 
635 static rtx
lookup_const_wide_int(rtx wint)636 lookup_const_wide_int (rtx wint)
637 {
638   rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
639   if (*slot == 0)
640     *slot = wint;
641 
642   return *slot;
643 }
644 #endif
645 
646 /* Return an rtx constant for V, given that the constant has mode MODE.
647    The returned rtx will be a CONST_INT if V fits, otherwise it will be
648    a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
649    (if TARGET_SUPPORTS_WIDE_INT).  */
650 
651 static rtx
immed_wide_int_const_1(const wide_int_ref & v,machine_mode mode)652 immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
653 {
654   unsigned int len = v.get_len ();
655   /* Not scalar_int_mode because we also allow pointer bound modes.  */
656   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
657 
658   /* Allow truncation but not extension since we do not know if the
659      number is signed or unsigned.  */
660   gcc_assert (prec <= v.get_precision ());
661 
662   if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
663     return gen_int_mode (v.elt (0), mode);
664 
665 #if TARGET_SUPPORTS_WIDE_INT
666   {
667     unsigned int i;
668     rtx value;
669     unsigned int blocks_needed
670       = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
671 
672     if (len > blocks_needed)
673       len = blocks_needed;
674 
675     value = const_wide_int_alloc (len);
676 
677     /* It is so tempting to just put the mode in here.  Must control
678        myself ... */
679     PUT_MODE (value, VOIDmode);
680     CWI_PUT_NUM_ELEM (value, len);
681 
682     for (i = 0; i < len; i++)
683       CONST_WIDE_INT_ELT (value, i) = v.elt (i);
684 
685     return lookup_const_wide_int (value);
686   }
687 #else
688   return immed_double_const (v.elt (0), v.elt (1), mode);
689 #endif
690 }
691 
692 #if TARGET_SUPPORTS_WIDE_INT == 0
693 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
694    of ints: I0 is the low-order word and I1 is the high-order word.
695    For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
696    implied upper bits are copies of the high bit of i1.  The value
697    itself is neither signed nor unsigned.  Do not use this routine for
698    non-integer modes; convert to REAL_VALUE_TYPE and use
699    const_double_from_real_value.  */
700 
701 rtx
immed_double_const(HOST_WIDE_INT i0,HOST_WIDE_INT i1,machine_mode mode)702 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
703 {
704   rtx value;
705   unsigned int i;
706 
707   /* There are the following cases (note that there are no modes with
708      HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
709 
710      1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
711 	gen_int_mode.
712      2) If the value of the integer fits into HOST_WIDE_INT anyway
713         (i.e., i1 consists only from copies of the sign bit, and sign
714 	of i0 and i1 are the same), then we return a CONST_INT for i0.
715      3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
716   scalar_mode smode;
717   if (is_a <scalar_mode> (mode, &smode)
718       && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
719     return gen_int_mode (i0, mode);
720 
721   /* If this integer fits in one word, return a CONST_INT.  */
722   if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
723     return GEN_INT (i0);
724 
725   /* We use VOIDmode for integers.  */
726   value = rtx_alloc (CONST_DOUBLE);
727   PUT_MODE (value, VOIDmode);
728 
729   CONST_DOUBLE_LOW (value) = i0;
730   CONST_DOUBLE_HIGH (value) = i1;
731 
732   for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
733     XWINT (value, i) = 0;
734 
735   return lookup_const_double (value);
736 }
737 #endif
738 
739 /* Return an rtx representation of C in mode MODE.  */
740 
741 rtx
immed_wide_int_const(const poly_wide_int_ref & c,machine_mode mode)742 immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
743 {
744   if (c.is_constant ())
745     return immed_wide_int_const_1 (c.coeffs[0], mode);
746 
747   /* Not scalar_int_mode because we also allow pointer bound modes.  */
748   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
749 
750   /* Allow truncation but not extension since we do not know if the
751      number is signed or unsigned.  */
752   gcc_assert (prec <= c.coeffs[0].get_precision ());
753   poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
754 
755   /* See whether we already have an rtx for this constant.  */
756   inchash::hash h;
757   h.add_int (mode);
758   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
759     h.add_wide_int (newc.coeffs[i]);
760   const_poly_int_hasher::compare_type typed_value (mode, newc);
761   rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
762 							h.end (), INSERT);
763   rtx x = *slot;
764   if (x)
765     return x;
766 
767   /* Create a new rtx.  There's a choice to be made here between installing
768      the actual mode of the rtx or leaving it as VOIDmode (for consistency
769      with CONST_INT).  In practice the handling of the codes is different
770      enough that we get no benefit from using VOIDmode, and various places
771      assume that VOIDmode implies CONST_INT.  Using the real mode seems like
772      the right long-term direction anyway.  */
773   typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
774   size_t extra_size = twi::extra_size (prec);
775   x = rtx_alloc_v (CONST_POLY_INT,
776 		   sizeof (struct const_poly_int_def) + extra_size);
777   PUT_MODE (x, mode);
778   CONST_POLY_INT_COEFFS (x).set_precision (prec);
779   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
780     CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
781 
782   *slot = x;
783   return x;
784 }
785 
786 rtx
gen_rtx_REG(machine_mode mode,unsigned int regno)787 gen_rtx_REG (machine_mode mode, unsigned int regno)
788 {
789   /* In case the MD file explicitly references the frame pointer, have
790      all such references point to the same frame pointer.  This is
791      used during frame pointer elimination to distinguish the explicit
792      references to these registers from pseudos that happened to be
793      assigned to them.
794 
795      If we have eliminated the frame pointer or arg pointer, we will
796      be using it as a normal register, for example as a spill
797      register.  In such cases, we might be accessing it in a mode that
798      is not Pmode and therefore cannot use the pre-allocated rtx.
799 
800      Also don't do this when we are making new REGs in reload, since
801      we don't want to get confused with the real pointers.  */
802 
803   if (mode == Pmode && !reload_in_progress && !lra_in_progress)
804     {
805       if (regno == FRAME_POINTER_REGNUM
806 	  && (!reload_completed || frame_pointer_needed))
807 	return frame_pointer_rtx;
808 
809       if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
810 	  && regno == HARD_FRAME_POINTER_REGNUM
811 	  && (!reload_completed || frame_pointer_needed))
812 	return hard_frame_pointer_rtx;
813 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
814       if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
815 	  && regno == ARG_POINTER_REGNUM)
816 	return arg_pointer_rtx;
817 #endif
818 #ifdef RETURN_ADDRESS_POINTER_REGNUM
819       if (regno == RETURN_ADDRESS_POINTER_REGNUM)
820 	return return_address_pointer_rtx;
821 #endif
822       if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
823 	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
824 	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
825 	return pic_offset_table_rtx;
826       if (regno == STACK_POINTER_REGNUM)
827 	return stack_pointer_rtx;
828     }
829 
830 #if 0
831   /* If the per-function register table has been set up, try to re-use
832      an existing entry in that table to avoid useless generation of RTL.
833 
834      This code is disabled for now until we can fix the various backends
835      which depend on having non-shared hard registers in some cases.   Long
836      term we want to re-enable this code as it can significantly cut down
837      on the amount of useless RTL that gets generated.
838 
839      We'll also need to fix some code that runs after reload that wants to
840      set ORIGINAL_REGNO.  */
841 
842   if (cfun
843       && cfun->emit
844       && regno_reg_rtx
845       && regno < FIRST_PSEUDO_REGISTER
846       && reg_raw_mode[regno] == mode)
847     return regno_reg_rtx[regno];
848 #endif
849 
850   return gen_raw_REG (mode, regno);
851 }
852 
853 rtx
gen_rtx_MEM(machine_mode mode,rtx addr)854 gen_rtx_MEM (machine_mode mode, rtx addr)
855 {
856   rtx rt = gen_rtx_raw_MEM (mode, addr);
857 
858   /* This field is not cleared by the mere allocation of the rtx, so
859      we clear it here.  */
860   MEM_ATTRS (rt) = 0;
861 
862   return rt;
863 }
864 
865 /* Generate a memory referring to non-trapping constant memory.  */
866 
867 rtx
gen_const_mem(machine_mode mode,rtx addr)868 gen_const_mem (machine_mode mode, rtx addr)
869 {
870   rtx mem = gen_rtx_MEM (mode, addr);
871   MEM_READONLY_P (mem) = 1;
872   MEM_NOTRAP_P (mem) = 1;
873   return mem;
874 }
875 
876 /* Generate a MEM referring to fixed portions of the frame, e.g., register
877    save areas.  */
878 
879 rtx
gen_frame_mem(machine_mode mode,rtx addr)880 gen_frame_mem (machine_mode mode, rtx addr)
881 {
882   rtx mem = gen_rtx_MEM (mode, addr);
883   MEM_NOTRAP_P (mem) = 1;
884   set_mem_alias_set (mem, get_frame_alias_set ());
885   return mem;
886 }
887 
888 /* Generate a MEM referring to a temporary use of the stack, not part
889     of the fixed stack frame.  For example, something which is pushed
890     by a target splitter.  */
891 rtx
gen_tmp_stack_mem(machine_mode mode,rtx addr)892 gen_tmp_stack_mem (machine_mode mode, rtx addr)
893 {
894   rtx mem = gen_rtx_MEM (mode, addr);
895   MEM_NOTRAP_P (mem) = 1;
896   if (!cfun->calls_alloca)
897     set_mem_alias_set (mem, get_frame_alias_set ());
898   return mem;
899 }
900 
901 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
902    this construct would be valid, and false otherwise.  */
903 
904 bool
validate_subreg(machine_mode omode,machine_mode imode,const_rtx reg,poly_uint64 offset)905 validate_subreg (machine_mode omode, machine_mode imode,
906 		 const_rtx reg, poly_uint64 offset)
907 {
908   poly_uint64 isize = GET_MODE_SIZE (imode);
909   poly_uint64 osize = GET_MODE_SIZE (omode);
910 
911   /* The sizes must be ordered, so that we know whether the subreg
912      is partial, paradoxical or complete.  */
913   if (!ordered_p (isize, osize))
914     return false;
915 
916   /* All subregs must be aligned.  */
917   if (!multiple_p (offset, osize))
918     return false;
919 
920   /* The subreg offset cannot be outside the inner object.  */
921   if (maybe_ge (offset, isize))
922     return false;
923 
924   poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);
925 
926   /* ??? This should not be here.  Temporarily continue to allow word_mode
927      subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
928      Generally, backends are doing something sketchy but it'll take time to
929      fix them all.  */
930   if (omode == word_mode)
931     ;
932   /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
933      is the culprit here, and not the backends.  */
934   else if (known_ge (osize, regsize) && known_ge (isize, osize))
935     ;
936   /* Allow component subregs of complex and vector.  Though given the below
937      extraction rules, it's not always clear what that means.  */
938   else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
939 	   && GET_MODE_INNER (imode) == omode)
940     ;
941   /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
942      i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
943      represent this.  It's questionable if this ought to be represented at
944      all -- why can't this all be hidden in post-reload splitters that make
945      arbitrarily mode changes to the registers themselves.  */
946   else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
947     ;
948   /* Subregs involving floating point modes are not allowed to
949      change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
950      (subreg:SI (reg:DF) 0) isn't.  */
951   else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
952     {
953       if (! (known_eq (isize, osize)
954 	     /* LRA can use subreg to store a floating point value in
955 		an integer mode.  Although the floating point and the
956 		integer modes need the same number of hard registers,
957 		the size of floating point mode can be less than the
958 		integer mode.  LRA also uses subregs for a register
959 		should be used in different mode in on insn.  */
960 	     || lra_in_progress))
961 	return false;
962     }
963 
964   /* Paradoxical subregs must have offset zero.  */
965   if (maybe_gt (osize, isize))
966     return known_eq (offset, 0U);
967 
968   /* This is a normal subreg.  Verify that the offset is representable.  */
969 
970   /* For hard registers, we already have most of these rules collected in
971      subreg_offset_representable_p.  */
972   if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
973     {
974       unsigned int regno = REGNO (reg);
975 
976       if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
977 	  && GET_MODE_INNER (imode) == omode)
978 	;
979       else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
980 	return false;
981 
982       return subreg_offset_representable_p (regno, imode, offset, omode);
983     }
984 
985   /* The outer size must be ordered wrt the register size, otherwise
986      we wouldn't know at compile time how many registers the outer
987      mode occupies.  */
988   if (!ordered_p (osize, regsize))
989     return false;
990 
991   /* For pseudo registers, we want most of the same checks.  Namely:
992 
993      Assume that the pseudo register will be allocated to hard registers
994      that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
995      the remainder must correspond to the lowpart of the containing hard
996      register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
997      otherwise it is at the lowest offset.
998 
999      Given that we've already checked the mode and offset alignment,
1000      we only have to check subblock subregs here.  */
1001   if (maybe_lt (osize, regsize)
1002       && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
1003     {
1004       /* It is invalid for the target to pick a register size for a mode
1005 	 that isn't ordered wrt to the size of that mode.  */
1006       poly_uint64 block_size = ordered_min (isize, regsize);
1007       unsigned int start_reg;
1008       poly_uint64 offset_within_reg;
1009       if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
1010 	  || (BYTES_BIG_ENDIAN
1011 	      ? maybe_ne (offset_within_reg, block_size - osize)
1012 	      : maybe_ne (offset_within_reg, 0U)))
1013 	return false;
1014     }
1015   return true;
1016 }
1017 
1018 rtx
gen_rtx_SUBREG(machine_mode mode,rtx reg,poly_uint64 offset)1019 gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
1020 {
1021   gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
1022   return gen_rtx_raw_SUBREG (mode, reg, offset);
1023 }
1024 
1025 /* Generate a SUBREG representing the least-significant part of REG if MODE
1026    is smaller than mode of REG, otherwise paradoxical SUBREG.  */
1027 
1028 rtx
gen_lowpart_SUBREG(machine_mode mode,rtx reg)1029 gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1030 {
1031   machine_mode inmode;
1032 
1033   inmode = GET_MODE (reg);
1034   if (inmode == VOIDmode)
1035     inmode = mode;
1036   return gen_rtx_SUBREG (mode, reg,
1037 			 subreg_lowpart_offset (mode, inmode));
1038 }
1039 
1040 rtx
gen_rtx_VAR_LOCATION(machine_mode mode,tree decl,rtx loc,enum var_init_status status)1041 gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1042 		      enum var_init_status status)
1043 {
1044   rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1045   PAT_VAR_LOCATION_STATUS (x) = status;
1046   return x;
1047 }
1048 
1049 
1050 /* Create an rtvec and stores within it the RTXen passed in the arguments.  */
1051 
1052 rtvec
gen_rtvec(int n,...)1053 gen_rtvec (int n, ...)
1054 {
1055   int i;
1056   rtvec rt_val;
1057   va_list p;
1058 
1059   va_start (p, n);
1060 
1061   /* Don't allocate an empty rtvec...  */
1062   if (n == 0)
1063     {
1064       va_end (p);
1065       return NULL_RTVEC;
1066     }
1067 
1068   rt_val = rtvec_alloc (n);
1069 
1070   for (i = 0; i < n; i++)
1071     rt_val->elem[i] = va_arg (p, rtx);
1072 
1073   va_end (p);
1074   return rt_val;
1075 }
1076 
1077 rtvec
gen_rtvec_v(int n,rtx * argp)1078 gen_rtvec_v (int n, rtx *argp)
1079 {
1080   int i;
1081   rtvec rt_val;
1082 
1083   /* Don't allocate an empty rtvec...  */
1084   if (n == 0)
1085     return NULL_RTVEC;
1086 
1087   rt_val = rtvec_alloc (n);
1088 
1089   for (i = 0; i < n; i++)
1090     rt_val->elem[i] = *argp++;
1091 
1092   return rt_val;
1093 }
1094 
1095 rtvec
gen_rtvec_v(int n,rtx_insn ** argp)1096 gen_rtvec_v (int n, rtx_insn **argp)
1097 {
1098   int i;
1099   rtvec rt_val;
1100 
1101   /* Don't allocate an empty rtvec...  */
1102   if (n == 0)
1103     return NULL_RTVEC;
1104 
1105   rt_val = rtvec_alloc (n);
1106 
1107   for (i = 0; i < n; i++)
1108     rt_val->elem[i] = *argp++;
1109 
1110   return rt_val;
1111 }
1112 
1113 
1114 /* Return the number of bytes between the start of an OUTER_MODE
1115    in-memory value and the start of an INNER_MODE in-memory value,
1116    given that the former is a lowpart of the latter.  It may be a
1117    paradoxical lowpart, in which case the offset will be negative
1118    on big-endian targets.  */
1119 
1120 poly_int64
byte_lowpart_offset(machine_mode outer_mode,machine_mode inner_mode)1121 byte_lowpart_offset (machine_mode outer_mode,
1122 		     machine_mode inner_mode)
1123 {
1124   if (paradoxical_subreg_p (outer_mode, inner_mode))
1125     return -subreg_lowpart_offset (inner_mode, outer_mode);
1126   else
1127     return subreg_lowpart_offset (outer_mode, inner_mode);
1128 }
1129 
1130 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1131    from address X.  For paradoxical big-endian subregs this is a
1132    negative value, otherwise it's the same as OFFSET.  */
1133 
1134 poly_int64
subreg_memory_offset(machine_mode outer_mode,machine_mode inner_mode,poly_uint64 offset)1135 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1136 		      poly_uint64 offset)
1137 {
1138   if (paradoxical_subreg_p (outer_mode, inner_mode))
1139     {
1140       gcc_assert (known_eq (offset, 0U));
1141       return -subreg_lowpart_offset (inner_mode, outer_mode);
1142     }
1143   return offset;
1144 }
1145 
1146 /* As above, but return the offset that existing subreg X would have
1147    if SUBREG_REG (X) were stored in memory.  The only significant thing
1148    about the current SUBREG_REG is its mode.  */
1149 
1150 poly_int64
subreg_memory_offset(const_rtx x)1151 subreg_memory_offset (const_rtx x)
1152 {
1153   return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1154 			       SUBREG_BYTE (x));
1155 }
1156 
1157 /* Generate a REG rtx for a new pseudo register of mode MODE.
1158    This pseudo is assigned the next sequential register number.  */
1159 
1160 rtx
gen_reg_rtx(machine_mode mode)1161 gen_reg_rtx (machine_mode mode)
1162 {
1163   rtx val;
1164   unsigned int align = GET_MODE_ALIGNMENT (mode);
1165 
1166   gcc_assert (can_create_pseudo_p ());
1167 
1168   /* If a virtual register with bigger mode alignment is generated,
1169      increase stack alignment estimation because it might be spilled
1170      to stack later.  */
1171   if (SUPPORTS_STACK_ALIGNMENT
1172       && crtl->stack_alignment_estimated < align
1173       && !crtl->stack_realign_processed)
1174     {
1175       unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
1176       if (crtl->stack_alignment_estimated < min_align)
1177 	crtl->stack_alignment_estimated = min_align;
1178     }
1179 
1180   if (generating_concat_p
1181       && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
1182 	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
1183     {
1184       /* For complex modes, don't make a single pseudo.
1185 	 Instead, make a CONCAT of two pseudos.
1186 	 This allows noncontiguous allocation of the real and imaginary parts,
1187 	 which makes much better code.  Besides, allocating DCmode
1188 	 pseudos overstrains reload on some machines like the 386.  */
1189       rtx realpart, imagpart;
1190       machine_mode partmode = GET_MODE_INNER (mode);
1191 
1192       realpart = gen_reg_rtx (partmode);
1193       imagpart = gen_reg_rtx (partmode);
1194       return gen_rtx_CONCAT (mode, realpart, imagpart);
1195     }
1196 
1197   /* Do not call gen_reg_rtx with uninitialized crtl.  */
1198   gcc_assert (crtl->emit.regno_pointer_align_length);
1199 
1200   crtl->emit.ensure_regno_capacity ();
1201   gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
1202 
1203   val = gen_raw_REG (mode, reg_rtx_no);
1204   regno_reg_rtx[reg_rtx_no++] = val;
1205   return val;
1206 }
1207 
1208 /* Make sure m_regno_pointer_align, and regno_reg_rtx are large
1209    enough to have elements in the range 0 <= idx <= reg_rtx_no.  */
1210 
1211 void
ensure_regno_capacity()1212 emit_status::ensure_regno_capacity ()
1213 {
1214   int old_size = regno_pointer_align_length;
1215 
1216   if (reg_rtx_no < old_size)
1217     return;
1218 
1219   int new_size = old_size * 2;
1220   while (reg_rtx_no >= new_size)
1221     new_size *= 2;
1222 
1223   char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
1224   memset (tmp + old_size, 0, new_size - old_size);
1225   regno_pointer_align = (unsigned char *) tmp;
1226 
1227   rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
1228   memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
1229   regno_reg_rtx = new1;
1230 
1231   crtl->emit.regno_pointer_align_length = new_size;
1232 }
1233 
1234 /* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */
1235 
1236 bool
reg_is_parm_p(rtx reg)1237 reg_is_parm_p (rtx reg)
1238 {
1239   tree decl;
1240 
1241   gcc_assert (REG_P (reg));
1242   decl = REG_EXPR (reg);
1243   return (decl && TREE_CODE (decl) == PARM_DECL);
1244 }
1245 
1246 /* Update NEW with the same attributes as REG, but with OFFSET added
1247    to the REG_OFFSET.  */
1248 
1249 static void
update_reg_offset(rtx new_rtx,rtx reg,poly_int64 offset)1250 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1251 {
1252   REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1253 				       REG_OFFSET (reg) + offset);
1254 }
1255 
1256 /* Generate a register with same attributes as REG, but with OFFSET
1257    added to the REG_OFFSET.  */
1258 
1259 rtx
gen_rtx_REG_offset(rtx reg,machine_mode mode,unsigned int regno,poly_int64 offset)1260 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1261 		    poly_int64 offset)
1262 {
1263   rtx new_rtx = gen_rtx_REG (mode, regno);
1264 
1265   update_reg_offset (new_rtx, reg, offset);
1266   return new_rtx;
1267 }
1268 
1269 /* Generate a new pseudo-register with the same attributes as REG, but
1270    with OFFSET added to the REG_OFFSET.  */
1271 
1272 rtx
gen_reg_rtx_offset(rtx reg,machine_mode mode,int offset)1273 gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1274 {
1275   rtx new_rtx = gen_reg_rtx (mode);
1276 
1277   update_reg_offset (new_rtx, reg, offset);
1278   return new_rtx;
1279 }
1280 
1281 /* Adjust REG in-place so that it has mode MODE.  It is assumed that the
1282    new register is a (possibly paradoxical) lowpart of the old one.  */
1283 
1284 void
adjust_reg_mode(rtx reg,machine_mode mode)1285 adjust_reg_mode (rtx reg, machine_mode mode)
1286 {
1287   update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1288   PUT_MODE (reg, mode);
1289 }
1290 
1291 /* Copy REG's attributes from X, if X has any attributes.  If REG and X
1292    have different modes, REG is a (possibly paradoxical) lowpart of X.  */
1293 
1294 void
set_reg_attrs_from_value(rtx reg,rtx x)1295 set_reg_attrs_from_value (rtx reg, rtx x)
1296 {
1297   poly_int64 offset;
1298   bool can_be_reg_pointer = true;
1299 
1300   /* Don't call mark_reg_pointer for incompatible pointer sign
1301      extension.  */
1302   while (GET_CODE (x) == SIGN_EXTEND
1303 	 || GET_CODE (x) == ZERO_EXTEND
1304 	 || GET_CODE (x) == TRUNCATE
1305 	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
1306     {
1307 #if defined(POINTERS_EXTEND_UNSIGNED)
1308       if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
1309 	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
1310 	   || (paradoxical_subreg_p (x)
1311 	       && ! (SUBREG_PROMOTED_VAR_P (x)
1312 		     && SUBREG_CHECK_PROMOTED_SIGN (x,
1313 						    POINTERS_EXTEND_UNSIGNED))))
1314 	  && !targetm.have_ptr_extend ())
1315 	can_be_reg_pointer = false;
1316 #endif
1317       x = XEXP (x, 0);
1318     }
1319 
1320   /* Hard registers can be reused for multiple purposes within the same
1321      function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
1322      on them is wrong.  */
1323   if (HARD_REGISTER_P (reg))
1324     return;
1325 
1326   offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
1327   if (MEM_P (x))
1328     {
1329       if (MEM_OFFSET_KNOWN_P (x))
1330 	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
1331 					 MEM_OFFSET (x) + offset);
1332       if (can_be_reg_pointer && MEM_POINTER (x))
1333 	mark_reg_pointer (reg, 0);
1334     }
1335   else if (REG_P (x))
1336     {
1337       if (REG_ATTRS (x))
1338 	update_reg_offset (reg, x, offset);
1339       if (can_be_reg_pointer && REG_POINTER (x))
1340 	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
1341     }
1342 }
1343 
1344 /* Generate a REG rtx for a new pseudo register, copying the mode
1345    and attributes from X.  */
1346 
1347 rtx
gen_reg_rtx_and_attrs(rtx x)1348 gen_reg_rtx_and_attrs (rtx x)
1349 {
1350   rtx reg = gen_reg_rtx (GET_MODE (x));
1351   set_reg_attrs_from_value (reg, x);
1352   return reg;
1353 }
1354 
1355 /* Set the register attributes for registers contained in PARM_RTX.
1356    Use needed values from memory attributes of MEM.  */
1357 
1358 void
set_reg_attrs_for_parm(rtx parm_rtx,rtx mem)1359 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1360 {
1361   if (REG_P (parm_rtx))
1362     set_reg_attrs_from_value (parm_rtx, mem);
1363   else if (GET_CODE (parm_rtx) == PARALLEL)
1364     {
1365       /* Check for a NULL entry in the first slot, used to indicate that the
1366 	 parameter goes both on the stack and in registers.  */
1367       int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1368       for (; i < XVECLEN (parm_rtx, 0); i++)
1369 	{
1370 	  rtx x = XVECEXP (parm_rtx, 0, i);
1371 	  if (REG_P (XEXP (x, 0)))
1372 	    REG_ATTRS (XEXP (x, 0))
1373 	      = get_reg_attrs (MEM_EXPR (mem),
1374 			       INTVAL (XEXP (x, 1)));
1375 	}
1376     }
1377 }
1378 
1379 /* Set the REG_ATTRS for registers in value X, given that X represents
1380    decl T.  */
1381 
1382 void
set_reg_attrs_for_decl_rtl(tree t,rtx x)1383 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1384 {
1385   if (!t)
1386     return;
1387   tree tdecl = t;
1388   if (GET_CODE (x) == SUBREG)
1389     {
1390       gcc_assert (subreg_lowpart_p (x));
1391       x = SUBREG_REG (x);
1392     }
1393   if (REG_P (x))
1394     REG_ATTRS (x)
1395       = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1396 					       DECL_P (tdecl)
1397 					       ? DECL_MODE (tdecl)
1398 					       : TYPE_MODE (TREE_TYPE (tdecl))));
1399   if (GET_CODE (x) == CONCAT)
1400     {
1401       if (REG_P (XEXP (x, 0)))
1402         REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1403       if (REG_P (XEXP (x, 1)))
1404 	REG_ATTRS (XEXP (x, 1))
1405 	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1406     }
1407   if (GET_CODE (x) == PARALLEL)
1408     {
1409       int i, start;
1410 
1411       /* Check for a NULL entry, used to indicate that the parameter goes
1412 	 both on the stack and in registers.  */
1413       if (XEXP (XVECEXP (x, 0, 0), 0))
1414 	start = 0;
1415       else
1416 	start = 1;
1417 
1418       for (i = start; i < XVECLEN (x, 0); i++)
1419 	{
1420 	  rtx y = XVECEXP (x, 0, i);
1421 	  if (REG_P (XEXP (y, 0)))
1422 	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1423 	}
1424     }
1425 }
1426 
1427 /* Assign the RTX X to declaration T.  */
1428 
1429 void
set_decl_rtl(tree t,rtx x)1430 set_decl_rtl (tree t, rtx x)
1431 {
1432   DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1433   if (x)
1434     set_reg_attrs_for_decl_rtl (t, x);
1435 }
1436 
1437 /* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
1438    if the ABI requires the parameter to be passed by reference.  */
1439 
1440 void
set_decl_incoming_rtl(tree t,rtx x,bool by_reference_p)1441 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1442 {
1443   DECL_INCOMING_RTL (t) = x;
1444   if (x && !by_reference_p)
1445     set_reg_attrs_for_decl_rtl (t, x);
1446 }
1447 
1448 /* Identify REG (which may be a CONCAT) as a user register.  */
1449 
1450 void
mark_user_reg(rtx reg)1451 mark_user_reg (rtx reg)
1452 {
1453   if (GET_CODE (reg) == CONCAT)
1454     {
1455       REG_USERVAR_P (XEXP (reg, 0)) = 1;
1456       REG_USERVAR_P (XEXP (reg, 1)) = 1;
1457     }
1458   else
1459     {
1460       gcc_assert (REG_P (reg));
1461       REG_USERVAR_P (reg) = 1;
1462     }
1463 }
1464 
1465 /* Identify REG as a probable pointer register and show its alignment
1466    as ALIGN, if nonzero.  */
1467 
1468 void
mark_reg_pointer(rtx reg,int align)1469 mark_reg_pointer (rtx reg, int align)
1470 {
1471   if (! REG_POINTER (reg))
1472     {
1473       REG_POINTER (reg) = 1;
1474 
1475       if (align)
1476 	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1477     }
1478   else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1479     /* We can no-longer be sure just how aligned this pointer is.  */
1480     REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1481 }
1482 
1483 /* Return 1 plus largest pseudo reg number used in the current function.  */
1484 
1485 int
max_reg_num(void)1486 max_reg_num (void)
1487 {
1488   return reg_rtx_no;
1489 }
1490 
1491 /* Return 1 + the largest label number used so far in the current function.  */
1492 
1493 int
max_label_num(void)1494 max_label_num (void)
1495 {
1496   return label_num;
1497 }
1498 
1499 /* Return first label number used in this function (if any were used).  */
1500 
1501 int
get_first_label_num(void)1502 get_first_label_num (void)
1503 {
1504   return first_label_num;
1505 }
1506 
1507 /* If the rtx for label was created during the expansion of a nested
1508    function, then first_label_num won't include this label number.
1509    Fix this now so that array indices work later.  */
1510 
1511 void
maybe_set_first_label_num(rtx_code_label * x)1512 maybe_set_first_label_num (rtx_code_label *x)
1513 {
1514   if (CODE_LABEL_NUMBER (x) < first_label_num)
1515     first_label_num = CODE_LABEL_NUMBER (x);
1516 }
1517 
1518 /* For use by the RTL function loader, when mingling with normal
1519    functions.
1520    Ensure that label_num is greater than the label num of X, to avoid
1521    duplicate labels in the generated assembler.  */
1522 
1523 void
maybe_set_max_label_num(rtx_code_label * x)1524 maybe_set_max_label_num (rtx_code_label *x)
1525 {
1526   if (CODE_LABEL_NUMBER (x) >= label_num)
1527     label_num = CODE_LABEL_NUMBER (x) + 1;
1528 }
1529 
1530 
1531 /* Return a value representing some low-order bits of X, where the number
1532    of low-order bits is given by MODE.  Note that no conversion is done
1533    between floating-point and fixed-point values, rather, the bit
1534    representation is returned.
1535 
1536    This function handles the cases in common between gen_lowpart, below,
1537    and two variants in cse.c and combine.c.  These are the cases that can
1538    be safely handled at all points in the compilation.
1539 
1540    If this is not a case we can handle, return 0.  */
1541 
1542 rtx
gen_lowpart_common(machine_mode mode,rtx x)1543 gen_lowpart_common (machine_mode mode, rtx x)
1544 {
1545   poly_uint64 msize = GET_MODE_SIZE (mode);
1546   machine_mode innermode;
1547 
1548   /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1549      so we have to make one up.  Yuk.  */
1550   innermode = GET_MODE (x);
1551   if (CONST_INT_P (x)
1552       && known_le (msize * BITS_PER_UNIT,
1553 		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
1554     innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1555   else if (innermode == VOIDmode)
1556     innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
1557 
1558   gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1559 
1560   if (innermode == mode)
1561     return x;
1562 
1563   /* The size of the outer and inner modes must be ordered.  */
1564   poly_uint64 xsize = GET_MODE_SIZE (innermode);
1565   if (!ordered_p (msize, xsize))
1566     return 0;
1567 
1568   if (SCALAR_FLOAT_MODE_P (mode))
1569     {
1570       /* Don't allow paradoxical FLOAT_MODE subregs.  */
1571       if (maybe_gt (msize, xsize))
1572 	return 0;
1573     }
1574   else
1575     {
1576       /* MODE must occupy no more of the underlying registers than X.  */
1577       poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
1578       unsigned int mregs, xregs;
1579       if (!can_div_away_from_zero_p (msize, regsize, &mregs)
1580 	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
1581 	  || mregs > xregs)
1582 	return 0;
1583     }
1584 
1585   scalar_int_mode int_mode, int_innermode, from_mode;
1586   if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1587       && is_a <scalar_int_mode> (mode, &int_mode)
1588       && is_a <scalar_int_mode> (innermode, &int_innermode)
1589       && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
1590     {
1591       /* If we are getting the low-order part of something that has been
1592 	 sign- or zero-extended, we can either just use the object being
1593 	 extended or make a narrower extension.  If we want an even smaller
1594 	 piece than the size of the object being extended, call ourselves
1595 	 recursively.
1596 
1597 	 This case is used mostly by combine and cse.  */
1598 
1599       if (from_mode == int_mode)
1600 	return XEXP (x, 0);
1601       else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1602 	return gen_lowpart_common (int_mode, XEXP (x, 0));
1603       else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1604 	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
1605     }
1606   else if (GET_CODE (x) == SUBREG || REG_P (x)
1607 	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1608 	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1609 	   || CONST_POLY_INT_P (x))
1610     return lowpart_subreg (mode, x, innermode);
1611 
1612   /* Otherwise, we can't do this.  */
1613   return 0;
1614 }
1615 
1616 rtx
gen_highpart(machine_mode mode,rtx x)1617 gen_highpart (machine_mode mode, rtx x)
1618 {
1619   poly_uint64 msize = GET_MODE_SIZE (mode);
1620   rtx result;
1621 
1622   /* This case loses if X is a subreg.  To catch bugs early,
1623      complain if an invalid MODE is used even in other cases.  */
1624   gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1625 	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
1626 
1627   result = simplify_gen_subreg (mode, x, GET_MODE (x),
1628 				subreg_highpart_offset (mode, GET_MODE (x)));
1629   gcc_assert (result);
1630 
1631   /* simplify_gen_subreg is not guaranteed to return a valid operand for
1632      the target if we have a MEM.  gen_highpart must return a valid operand,
1633      emitting code if necessary to do so.  */
1634   if (MEM_P (result))
1635     {
1636       result = validize_mem (result);
1637       gcc_assert (result);
1638     }
1639 
1640   return result;
1641 }
1642 
1643 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1644    be VOIDmode constant.  */
1645 rtx
gen_highpart_mode(machine_mode outermode,machine_mode innermode,rtx exp)1646 gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1647 {
1648   if (GET_MODE (exp) != VOIDmode)
1649     {
1650       gcc_assert (GET_MODE (exp) == innermode);
1651       return gen_highpart (outermode, exp);
1652     }
1653   return simplify_gen_subreg (outermode, exp, innermode,
1654 			      subreg_highpart_offset (outermode, innermode));
1655 }
1656 
1657 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1658    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
1659 
1660 poly_uint64
subreg_size_lowpart_offset(poly_uint64 outer_bytes,poly_uint64 inner_bytes)1661 subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1662 {
1663   gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1664   if (maybe_gt (outer_bytes, inner_bytes))
1665     /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
1666     return 0;
1667 
1668   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1669     return inner_bytes - outer_bytes;
1670   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1671     return 0;
1672   else
1673     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1674 }
1675 
1676 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1677    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
1678 
1679 poly_uint64
subreg_size_highpart_offset(poly_uint64 outer_bytes,poly_uint64 inner_bytes)1680 subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1681 {
1682   gcc_assert (known_ge (inner_bytes, outer_bytes));
1683 
1684   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1685     return 0;
1686   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1687     return inner_bytes - outer_bytes;
1688   else
1689     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1690 					(inner_bytes - outer_bytes)
1691 					* BITS_PER_UNIT);
1692 }
1693 
1694 /* Return 1 iff X, assumed to be a SUBREG,
1695    refers to the least significant part of its containing reg.
1696    If X is not a SUBREG, always return 1 (it is its own low part!).  */
1697 
1698 int
subreg_lowpart_p(const_rtx x)1699 subreg_lowpart_p (const_rtx x)
1700 {
1701   if (GET_CODE (x) != SUBREG)
1702     return 1;
1703   else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1704     return 0;
1705 
1706   return known_eq (subreg_lowpart_offset (GET_MODE (x),
1707 					  GET_MODE (SUBREG_REG (x))),
1708 		   SUBREG_BYTE (x));
1709 }
1710 
1711 /* Return subword OFFSET of operand OP.
1712    The word number, OFFSET, is interpreted as the word number starting
1713    at the low-order address.  OFFSET 0 is the low-order word if not
1714    WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1715 
1716    If we cannot extract the required word, we return zero.  Otherwise,
1717    an rtx corresponding to the requested word will be returned.
1718 
1719    VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
1720    reload has completed, a valid address will always be returned.  After
1721    reload, if a valid address cannot be returned, we return zero.
1722 
1723    If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1724    it is the responsibility of the caller.
1725 
1726    MODE is the mode of OP in case it is a CONST_INT.
1727 
1728    ??? This is still rather broken for some cases.  The problem for the
1729    moment is that all callers of this thing provide no 'goal mode' to
1730    tell us to work with.  This exists because all callers were written
1731    in a word based SUBREG world.
1732    Now use of this function can be deprecated by simplify_subreg in most
1733    cases.
1734  */
1735 
1736 rtx
operand_subword(rtx op,poly_uint64 offset,int validate_address,machine_mode mode)1737 operand_subword (rtx op, poly_uint64 offset, int validate_address,
1738 		 machine_mode mode)
1739 {
1740   if (mode == VOIDmode)
1741     mode = GET_MODE (op);
1742 
1743   gcc_assert (mode != VOIDmode);
1744 
1745   /* If OP is narrower than a word, fail.  */
1746   if (mode != BLKmode
1747       && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1748     return 0;
1749 
1750   /* If we want a word outside OP, return zero.  */
1751   if (mode != BLKmode
1752       && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
1753     return const0_rtx;
1754 
1755   /* Form a new MEM at the requested address.  */
1756   if (MEM_P (op))
1757     {
1758       rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1759 
1760       if (! validate_address)
1761 	return new_rtx;
1762 
1763       else if (reload_completed)
1764 	{
1765 	  if (! strict_memory_address_addr_space_p (word_mode,
1766 						    XEXP (new_rtx, 0),
1767 						    MEM_ADDR_SPACE (op)))
1768 	    return 0;
1769 	}
1770       else
1771 	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1772     }
1773 
1774   /* Rest can be handled by simplify_subreg.  */
1775   return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1776 }
1777 
1778 /* Similar to `operand_subword', but never return 0.  If we can't
1779    extract the required subword, put OP into a register and try again.
1780    The second attempt must succeed.  We always validate the address in
1781    this case.
1782 
1783    MODE is the mode of OP, in case it is CONST_INT.  */
1784 
1785 rtx
operand_subword_force(rtx op,poly_uint64 offset,machine_mode mode)1786 operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
1787 {
1788   rtx result = operand_subword (op, offset, 1, mode);
1789 
1790   if (result)
1791     return result;
1792 
1793   if (mode != BLKmode && mode != VOIDmode)
1794     {
1795       /* If this is a register which cannot be accessed by words, copy it
1796 	 to a pseudo register.  */
1797       if (REG_P (op))
1798 	op = copy_to_reg (op);
1799       else
1800 	op = force_reg (mode, op);
1801     }
1802 
1803   result = operand_subword (op, offset, 1, mode);
1804   gcc_assert (result);
1805 
1806   return result;
1807 }
1808 
mem_attrs()1809 mem_attrs::mem_attrs ()
1810   : expr (NULL_TREE),
1811     offset (0),
1812     size (0),
1813     alias (0),
1814     align (0),
1815     addrspace (ADDR_SPACE_GENERIC),
1816     offset_known_p (false),
1817     size_known_p (false)
1818 {}
1819 
1820 /* Returns 1 if both MEM_EXPR can be considered equal
1821    and 0 otherwise.  */
1822 
1823 int
mem_expr_equal_p(const_tree expr1,const_tree expr2)1824 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1825 {
1826   if (expr1 == expr2)
1827     return 1;
1828 
1829   if (! expr1 || ! expr2)
1830     return 0;
1831 
1832   if (TREE_CODE (expr1) != TREE_CODE (expr2))
1833     return 0;
1834 
1835   return operand_equal_p (expr1, expr2, 0);
1836 }
1837 
1838 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1839    bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1840    -1 if not known.  */
1841 
1842 int
get_mem_align_offset(rtx mem,unsigned int align)1843 get_mem_align_offset (rtx mem, unsigned int align)
1844 {
1845   tree expr;
1846   poly_uint64 offset;
1847 
1848   /* This function can't use
1849      if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1850 	 || (MAX (MEM_ALIGN (mem),
1851 	          MAX (align, get_object_alignment (MEM_EXPR (mem))))
1852 	     < align))
1853        return -1;
1854      else
1855        return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1856      for two reasons:
1857      - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1858        for <variable>.  get_inner_reference doesn't handle it and
1859        even if it did, the alignment in that case needs to be determined
1860        from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1861      - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1862        isn't sufficiently aligned, the object it is in might be.  */
1863   gcc_assert (MEM_P (mem));
1864   expr = MEM_EXPR (mem);
1865   if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1866     return -1;
1867 
1868   offset = MEM_OFFSET (mem);
1869   if (DECL_P (expr))
1870     {
1871       if (DECL_ALIGN (expr) < align)
1872 	return -1;
1873     }
1874   else if (INDIRECT_REF_P (expr))
1875     {
1876       if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1877 	return -1;
1878     }
1879   else if (TREE_CODE (expr) == COMPONENT_REF)
1880     {
1881       while (1)
1882 	{
1883 	  tree inner = TREE_OPERAND (expr, 0);
1884 	  tree field = TREE_OPERAND (expr, 1);
1885 	  tree byte_offset = component_ref_field_offset (expr);
1886 	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1887 
1888 	  poly_uint64 suboffset;
1889 	  if (!byte_offset
1890 	      || !poly_int_tree_p (byte_offset, &suboffset)
1891 	      || !tree_fits_uhwi_p (bit_offset))
1892 	    return -1;
1893 
1894 	  offset += suboffset;
1895 	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
1896 
1897 	  if (inner == NULL_TREE)
1898 	    {
1899 	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1900 		  < (unsigned int) align)
1901 		return -1;
1902 	      break;
1903 	    }
1904 	  else if (DECL_P (inner))
1905 	    {
1906 	      if (DECL_ALIGN (inner) < align)
1907 		return -1;
1908 	      break;
1909 	    }
1910 	  else if (TREE_CODE (inner) != COMPONENT_REF)
1911 	    return -1;
1912 	  expr = inner;
1913 	}
1914     }
1915   else
1916     return -1;
1917 
1918   HOST_WIDE_INT misalign;
1919   if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1920     return -1;
1921   return misalign;
1922 }
1923 
1924 /* Given REF (a MEM) and T, either the type of X or the expression
1925    corresponding to REF, set the memory attributes.  OBJECTP is nonzero
1926    if we are making a new object of this type.  BITPOS is nonzero if
1927    there is an offset outstanding on T that will be applied later.  */
1928 
1929 void
set_mem_attributes_minus_bitpos(rtx ref,tree t,int objectp,poly_int64 bitpos)1930 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1931 				 poly_int64 bitpos)
1932 {
1933   poly_int64 apply_bitpos = 0;
1934   tree type;
1935   class mem_attrs attrs, *defattrs, *refattrs;
1936   addr_space_t as;
1937 
1938   /* It can happen that type_for_mode was given a mode for which there
1939      is no language-level type.  In which case it returns NULL, which
1940      we can see here.  */
1941   if (t == NULL_TREE)
1942     return;
1943 
1944   type = TYPE_P (t) ? t : TREE_TYPE (t);
1945   if (type == error_mark_node)
1946     return;
1947 
1948   /* If we have already set DECL_RTL = ref, get_alias_set will get the
1949      wrong answer, as it assumes that DECL_RTL already has the right alias
1950      info.  Callers should not set DECL_RTL until after the call to
1951      set_mem_attributes.  */
1952   gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1953 
1954   /* Get the alias set from the expression or type (perhaps using a
1955      front-end routine) and use it.  */
1956   attrs.alias = get_alias_set (t);
1957 
1958   MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1959   MEM_POINTER (ref) = POINTER_TYPE_P (type);
1960 
1961   /* Default values from pre-existing memory attributes if present.  */
1962   refattrs = MEM_ATTRS (ref);
1963   if (refattrs)
1964     {
1965       /* ??? Can this ever happen?  Calling this routine on a MEM that
1966 	 already carries memory attributes should probably be invalid.  */
1967       attrs.expr = refattrs->expr;
1968       attrs.offset_known_p = refattrs->offset_known_p;
1969       attrs.offset = refattrs->offset;
1970       attrs.size_known_p = refattrs->size_known_p;
1971       attrs.size = refattrs->size;
1972       attrs.align = refattrs->align;
1973     }
1974 
1975   /* Otherwise, default values from the mode of the MEM reference.  */
1976   else
1977     {
1978       defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1979       gcc_assert (!defattrs->expr);
1980       gcc_assert (!defattrs->offset_known_p);
1981 
1982       /* Respect mode size.  */
1983       attrs.size_known_p = defattrs->size_known_p;
1984       attrs.size = defattrs->size;
1985       /* ??? Is this really necessary?  We probably should always get
1986 	 the size from the type below.  */
1987 
1988       /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1989          if T is an object, always compute the object alignment below.  */
1990       if (TYPE_P (t))
1991 	attrs.align = defattrs->align;
1992       else
1993 	attrs.align = BITS_PER_UNIT;
1994       /* ??? If T is a type, respecting mode alignment may *also* be wrong
1995 	 e.g. if the type carries an alignment attribute.  Should we be
1996 	 able to simply always use TYPE_ALIGN?  */
1997     }
1998 
1999   /* We can set the alignment from the type if we are making an object or if
2000      this is an INDIRECT_REF.  */
2001   if (objectp || TREE_CODE (t) == INDIRECT_REF)
2002     attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
2003 
2004   /* If the size is known, we can set that.  */
2005   tree new_size = TYPE_SIZE_UNIT (type);
2006 
2007   /* The address-space is that of the type.  */
2008   as = TYPE_ADDR_SPACE (type);
2009 
2010   /* If T is not a type, we may be able to deduce some more information about
2011      the expression.  */
2012   if (! TYPE_P (t))
2013     {
2014       tree base;
2015 
2016       if (TREE_THIS_VOLATILE (t))
2017 	MEM_VOLATILE_P (ref) = 1;
2018 
2019       /* Now remove any conversions: they don't change what the underlying
2020 	 object is.  Likewise for SAVE_EXPR.  */
2021       while (CONVERT_EXPR_P (t)
2022 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
2023 	     || TREE_CODE (t) == SAVE_EXPR)
2024 	t = TREE_OPERAND (t, 0);
2025 
2026       /* Note whether this expression can trap.  */
2027       MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2028 
2029       base = get_base_address (t);
2030       if (base)
2031 	{
2032 	  if (DECL_P (base)
2033 	      && TREE_READONLY (base)
2034 	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2035 	      && !TREE_THIS_VOLATILE (base))
2036 	    MEM_READONLY_P (ref) = 1;
2037 
2038 	  /* Mark static const strings readonly as well.  */
2039 	  if (TREE_CODE (base) == STRING_CST
2040 	      && TREE_READONLY (base)
2041 	      && TREE_STATIC (base))
2042 	    MEM_READONLY_P (ref) = 1;
2043 
2044 	  /* Address-space information is on the base object.  */
2045 	  if (TREE_CODE (base) == MEM_REF
2046 	      || TREE_CODE (base) == TARGET_MEM_REF)
2047 	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2048 								      0))));
2049 	  else
2050 	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2051 	}
2052 
2053       /* If this expression uses it's parent's alias set, mark it such
2054 	 that we won't change it.  */
2055       if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2056 	MEM_KEEP_ALIAS_SET_P (ref) = 1;
2057 
2058       /* If this is a decl, set the attributes of the MEM from it.  */
2059       if (DECL_P (t))
2060 	{
2061 	  attrs.expr = t;
2062 	  attrs.offset_known_p = true;
2063 	  attrs.offset = 0;
2064 	  apply_bitpos = bitpos;
2065 	  new_size = DECL_SIZE_UNIT (t);
2066 	}
2067 
2068       /* ???  If we end up with a constant or a descriptor do not
2069 	 record a MEM_EXPR.  */
2070       else if (CONSTANT_CLASS_P (t)
2071 	       || TREE_CODE (t) == CONSTRUCTOR)
2072 	;
2073 
2074       /* If this is a field reference, record it.  */
2075       else if (TREE_CODE (t) == COMPONENT_REF)
2076 	{
2077 	  attrs.expr = t;
2078 	  attrs.offset_known_p = true;
2079 	  attrs.offset = 0;
2080 	  apply_bitpos = bitpos;
2081 	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2082 	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2083 	}
2084 
2085       /* Else record it.  */
2086       else
2087 	{
2088 	  gcc_assert (handled_component_p (t)
2089 		      || TREE_CODE (t) == MEM_REF
2090 		      || TREE_CODE (t) == TARGET_MEM_REF);
2091 	  attrs.expr = t;
2092 	  attrs.offset_known_p = true;
2093 	  attrs.offset = 0;
2094 	  apply_bitpos = bitpos;
2095 	}
2096 
2097       /* If this is a reference based on a partitioned decl replace the
2098 	 base with a MEM_REF of the pointer representative we created
2099 	 during stack slot partitioning.  */
2100       if (attrs.expr
2101 	  && VAR_P (base)
2102 	  && ! is_global_var (base)
2103 	  && cfun->gimple_df->decls_to_pointers != NULL)
2104 	{
2105 	  tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
2106 	  if (namep)
2107 	    {
2108 	      attrs.expr = unshare_expr (attrs.expr);
2109 	      tree *orig_base = &attrs.expr;
2110 	      while (handled_component_p (*orig_base))
2111 		orig_base = &TREE_OPERAND (*orig_base, 0);
2112 	      tree aptrt = reference_alias_ptr_type (*orig_base);
2113 	      *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
2114 				   build_int_cst (aptrt, 0));
2115 	    }
2116 	}
2117 
2118       /* Compute the alignment.  */
2119       unsigned int obj_align;
2120       unsigned HOST_WIDE_INT obj_bitpos;
2121       get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2122       unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2123       if (diff_align != 0)
2124 	obj_align = MIN (obj_align, diff_align);
2125       attrs.align = MAX (attrs.align, obj_align);
2126     }
2127 
2128   poly_uint64 const_size;
2129   if (poly_int_tree_p (new_size, &const_size))
2130     {
2131       attrs.size_known_p = true;
2132       attrs.size = const_size;
2133     }
2134 
2135   /* If we modified OFFSET based on T, then subtract the outstanding
2136      bit position offset.  Similarly, increase the size of the accessed
2137      object to contain the negative offset.  */
2138   if (maybe_ne (apply_bitpos, 0))
2139     {
2140       gcc_assert (attrs.offset_known_p);
2141       poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2142       attrs.offset -= bytepos;
2143       if (attrs.size_known_p)
2144 	attrs.size += bytepos;
2145     }
2146 
2147   /* Now set the attributes we computed above.  */
2148   attrs.addrspace = as;
2149   set_mem_attrs (ref, &attrs);
2150 }
2151 
2152 void
set_mem_attributes(rtx ref,tree t,int objectp)2153 set_mem_attributes (rtx ref, tree t, int objectp)
2154 {
2155   set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2156 }
2157 
2158 /* Set the alias set of MEM to SET.  */
2159 
2160 void
set_mem_alias_set(rtx mem,alias_set_type set)2161 set_mem_alias_set (rtx mem, alias_set_type set)
2162 {
2163   /* If the new and old alias sets don't conflict, something is wrong.  */
2164   gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2165   mem_attrs attrs (*get_mem_attrs (mem));
2166   attrs.alias = set;
2167   set_mem_attrs (mem, &attrs);
2168 }
2169 
2170 /* Set the address space of MEM to ADDRSPACE (target-defined).  */
2171 
2172 void
set_mem_addr_space(rtx mem,addr_space_t addrspace)2173 set_mem_addr_space (rtx mem, addr_space_t addrspace)
2174 {
2175   mem_attrs attrs (*get_mem_attrs (mem));
2176   attrs.addrspace = addrspace;
2177   set_mem_attrs (mem, &attrs);
2178 }
2179 
2180 /* Set the alignment of MEM to ALIGN bits.  */
2181 
2182 void
set_mem_align(rtx mem,unsigned int align)2183 set_mem_align (rtx mem, unsigned int align)
2184 {
2185   mem_attrs attrs (*get_mem_attrs (mem));
2186   attrs.align = align;
2187   set_mem_attrs (mem, &attrs);
2188 }
2189 
2190 /* Set the expr for MEM to EXPR.  */
2191 
2192 void
set_mem_expr(rtx mem,tree expr)2193 set_mem_expr (rtx mem, tree expr)
2194 {
2195   mem_attrs attrs (*get_mem_attrs (mem));
2196   attrs.expr = expr;
2197   set_mem_attrs (mem, &attrs);
2198 }
2199 
2200 /* Set the offset of MEM to OFFSET.  */
2201 
2202 void
set_mem_offset(rtx mem,poly_int64 offset)2203 set_mem_offset (rtx mem, poly_int64 offset)
2204 {
2205   mem_attrs attrs (*get_mem_attrs (mem));
2206   attrs.offset_known_p = true;
2207   attrs.offset = offset;
2208   set_mem_attrs (mem, &attrs);
2209 }
2210 
2211 /* Clear the offset of MEM.  */
2212 
2213 void
clear_mem_offset(rtx mem)2214 clear_mem_offset (rtx mem)
2215 {
2216   mem_attrs attrs (*get_mem_attrs (mem));
2217   attrs.offset_known_p = false;
2218   set_mem_attrs (mem, &attrs);
2219 }
2220 
2221 /* Set the size of MEM to SIZE.  */
2222 
2223 void
set_mem_size(rtx mem,poly_int64 size)2224 set_mem_size (rtx mem, poly_int64 size)
2225 {
2226   mem_attrs attrs (*get_mem_attrs (mem));
2227   attrs.size_known_p = true;
2228   attrs.size = size;
2229   set_mem_attrs (mem, &attrs);
2230 }
2231 
2232 /* Clear the size of MEM.  */
2233 
2234 void
clear_mem_size(rtx mem)2235 clear_mem_size (rtx mem)
2236 {
2237   mem_attrs attrs (*get_mem_attrs (mem));
2238   attrs.size_known_p = false;
2239   set_mem_attrs (mem, &attrs);
2240 }
2241 
2242 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2243    and its address changed to ADDR.  (VOIDmode means don't change the mode.
2244    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
2245    returned memory location is required to be valid.  INPLACE is true if any
2246    changes can be made directly to MEMREF or false if MEMREF must be treated
2247    as immutable.
2248 
2249    The memory attributes are not changed.  */
2250 
2251 static rtx
change_address_1(rtx memref,machine_mode mode,rtx addr,int validate,bool inplace)2252 change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2253 		  bool inplace)
2254 {
2255   addr_space_t as;
2256   rtx new_rtx;
2257 
2258   gcc_assert (MEM_P (memref));
2259   as = MEM_ADDR_SPACE (memref);
2260   if (mode == VOIDmode)
2261     mode = GET_MODE (memref);
2262   if (addr == 0)
2263     addr = XEXP (memref, 0);
2264   if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2265       && (!validate || memory_address_addr_space_p (mode, addr, as)))
2266     return memref;
2267 
2268   /* Don't validate address for LRA.  LRA can make the address valid
2269      by itself in most efficient way.  */
2270   if (validate && !lra_in_progress)
2271     {
2272       if (reload_in_progress || reload_completed)
2273 	gcc_assert (memory_address_addr_space_p (mode, addr, as));
2274       else
2275 	addr = memory_address_addr_space (mode, addr, as);
2276     }
2277 
2278   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2279     return memref;
2280 
2281   if (inplace)
2282     {
2283       XEXP (memref, 0) = addr;
2284       return memref;
2285     }
2286 
2287   new_rtx = gen_rtx_MEM (mode, addr);
2288   MEM_COPY_ATTRIBUTES (new_rtx, memref);
2289   return new_rtx;
2290 }
2291 
2292 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2293    way we are changing MEMREF, so we only preserve the alias set.  */
2294 
2295 rtx
change_address(rtx memref,machine_mode mode,rtx addr)2296 change_address (rtx memref, machine_mode mode, rtx addr)
2297 {
2298   rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2299   machine_mode mmode = GET_MODE (new_rtx);
2300   class mem_attrs *defattrs;
2301 
2302   mem_attrs attrs (*get_mem_attrs (memref));
2303   defattrs = mode_mem_attrs[(int) mmode];
2304   attrs.expr = NULL_TREE;
2305   attrs.offset_known_p = false;
2306   attrs.size_known_p = defattrs->size_known_p;
2307   attrs.size = defattrs->size;
2308   attrs.align = defattrs->align;
2309 
2310   /* If there are no changes, just return the original memory reference.  */
2311   if (new_rtx == memref)
2312     {
2313       if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2314 	return new_rtx;
2315 
2316       new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2317       MEM_COPY_ATTRIBUTES (new_rtx, memref);
2318     }
2319 
2320   set_mem_attrs (new_rtx, &attrs);
2321   return new_rtx;
2322 }
2323 
2324 /* Return a memory reference like MEMREF, but with its mode changed
2325    to MODE and its address offset by OFFSET bytes.  If VALIDATE is
2326    nonzero, the memory address is forced to be valid.
2327    If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2328    and the caller is responsible for adjusting MEMREF base register.
2329    If ADJUST_OBJECT is zero, the underlying object associated with the
2330    memory reference is left unchanged and the caller is responsible for
2331    dealing with it.  Otherwise, if the new memory reference is outside
2332    the underlying object, even partially, then the object is dropped.
2333    SIZE, if nonzero, is the size of an access in cases where MODE
2334    has no inherent size.  */
2335 
2336 rtx
adjust_address_1(rtx memref,machine_mode mode,poly_int64 offset,int validate,int adjust_address,int adjust_object,poly_int64 size)2337 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
2338 		  int validate, int adjust_address, int adjust_object,
2339 		  poly_int64 size)
2340 {
2341   rtx addr = XEXP (memref, 0);
2342   rtx new_rtx;
2343   scalar_int_mode address_mode;
2344   class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
2345   unsigned HOST_WIDE_INT max_align;
2346 #ifdef POINTERS_EXTEND_UNSIGNED
2347   scalar_int_mode pointer_mode
2348     = targetm.addr_space.pointer_mode (attrs.addrspace);
2349 #endif
2350 
2351   /* VOIDmode means no mode change for change_address_1.  */
2352   if (mode == VOIDmode)
2353     mode = GET_MODE (memref);
2354 
2355   /* Take the size of non-BLKmode accesses from the mode.  */
2356   defattrs = mode_mem_attrs[(int) mode];
2357   if (defattrs->size_known_p)
2358     size = defattrs->size;
2359 
2360   /* If there are no changes, just return the original memory reference.  */
2361   if (mode == GET_MODE (memref)
2362       && known_eq (offset, 0)
2363       && (known_eq (size, 0)
2364 	  || (attrs.size_known_p && known_eq (attrs.size, size)))
2365       && (!validate || memory_address_addr_space_p (mode, addr,
2366 						    attrs.addrspace)))
2367     return memref;
2368 
2369   /* ??? Prefer to create garbage instead of creating shared rtl.
2370      This may happen even if offset is nonzero -- consider
2371      (plus (plus reg reg) const_int) -- so do this always.  */
2372   addr = copy_rtx (addr);
2373 
2374   /* Convert a possibly large offset to a signed value within the
2375      range of the target address space.  */
2376   address_mode = get_address_mode (memref);
2377   offset = trunc_int_for_mode (offset, address_mode);
2378 
2379   if (adjust_address)
2380     {
2381       /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2382 	 object, we can merge it into the LO_SUM.  */
2383       if (GET_MODE (memref) != BLKmode
2384 	  && GET_CODE (addr) == LO_SUM
2385 	  && known_in_range_p (offset,
2386 			       0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2387 				   / BITS_PER_UNIT)))
2388 	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2389 			       plus_constant (address_mode,
2390 					      XEXP (addr, 1), offset));
2391 #ifdef POINTERS_EXTEND_UNSIGNED
2392       /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2393 	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
2394 	 the fact that pointers are not allowed to overflow.  */
2395       else if (POINTERS_EXTEND_UNSIGNED > 0
2396 	       && GET_CODE (addr) == ZERO_EXTEND
2397 	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
2398 	       && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
2399 	addr = gen_rtx_ZERO_EXTEND (address_mode,
2400 				    plus_constant (pointer_mode,
2401 						   XEXP (addr, 0), offset));
2402 #endif
2403       else
2404 	addr = plus_constant (address_mode, addr, offset);
2405     }
2406 
2407   new_rtx = change_address_1 (memref, mode, addr, validate, false);
2408 
2409   /* If the address is a REG, change_address_1 rightfully returns memref,
2410      but this would destroy memref's MEM_ATTRS.  */
2411   if (new_rtx == memref && maybe_ne (offset, 0))
2412     new_rtx = copy_rtx (new_rtx);
2413 
2414   /* Conservatively drop the object if we don't know where we start from.  */
2415   if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2416     {
2417       attrs.expr = NULL_TREE;
2418       attrs.alias = 0;
2419     }
2420 
2421   /* Compute the new values of the memory attributes due to this adjustment.
2422      We add the offsets and update the alignment.  */
2423   if (attrs.offset_known_p)
2424     {
2425       attrs.offset += offset;
2426 
2427       /* Drop the object if the new left end is not within its bounds.  */
2428       if (adjust_object && maybe_lt (attrs.offset, 0))
2429 	{
2430 	  attrs.expr = NULL_TREE;
2431 	  attrs.alias = 0;
2432 	}
2433     }
2434 
2435   /* Compute the new alignment by taking the MIN of the alignment and the
2436      lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2437      if zero.  */
2438   if (maybe_ne (offset, 0))
2439     {
2440       max_align = known_alignment (offset) * BITS_PER_UNIT;
2441       attrs.align = MIN (attrs.align, max_align);
2442     }
2443 
2444   if (maybe_ne (size, 0))
2445     {
2446       /* Drop the object if the new right end is not within its bounds.  */
2447       if (adjust_object && maybe_gt (offset + size, attrs.size))
2448 	{
2449 	  attrs.expr = NULL_TREE;
2450 	  attrs.alias = 0;
2451 	}
2452       attrs.size_known_p = true;
2453       attrs.size = size;
2454     }
2455   else if (attrs.size_known_p)
2456     {
2457       gcc_assert (!adjust_object);
2458       attrs.size -= offset;
2459       /* ??? The store_by_pieces machinery generates negative sizes,
2460 	 so don't assert for that here.  */
2461     }
2462 
2463   set_mem_attrs (new_rtx, &attrs);
2464 
2465   return new_rtx;
2466 }
2467 
2468 /* Return a memory reference like MEMREF, but with its mode changed
2469    to MODE and its address changed to ADDR, which is assumed to be
2470    MEMREF offset by OFFSET bytes.  If VALIDATE is
2471    nonzero, the memory address is forced to be valid.  */
2472 
2473 rtx
adjust_automodify_address_1(rtx memref,machine_mode mode,rtx addr,poly_int64 offset,int validate)2474 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2475 			     poly_int64 offset, int validate)
2476 {
2477   memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2478   return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2479 }
2480 
2481 /* Return a memory reference like MEMREF, but whose address is changed by
2482    adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
2483    known to be in OFFSET (possibly 1).  */
2484 
2485 rtx
offset_address(rtx memref,rtx offset,unsigned HOST_WIDE_INT pow2)2486 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2487 {
2488   rtx new_rtx, addr = XEXP (memref, 0);
2489   machine_mode address_mode;
2490   class mem_attrs *defattrs;
2491 
2492   mem_attrs attrs (*get_mem_attrs (memref));
2493   address_mode = get_address_mode (memref);
2494   new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2495 
2496   /* At this point we don't know _why_ the address is invalid.  It
2497      could have secondary memory references, multiplies or anything.
2498 
2499      However, if we did go and rearrange things, we can wind up not
2500      being able to recognize the magic around pic_offset_table_rtx.
2501      This stuff is fragile, and is yet another example of why it is
2502      bad to expose PIC machinery too early.  */
2503   if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2504 				     attrs.addrspace)
2505       && GET_CODE (addr) == PLUS
2506       && XEXP (addr, 0) == pic_offset_table_rtx)
2507     {
2508       addr = force_reg (GET_MODE (addr), addr);
2509       new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2510     }
2511 
2512   update_temp_slot_address (XEXP (memref, 0), new_rtx);
2513   new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
2514 
2515   /* If there are no changes, just return the original memory reference.  */
2516   if (new_rtx == memref)
2517     return new_rtx;
2518 
2519   /* Update the alignment to reflect the offset.  Reset the offset, which
2520      we don't know.  */
2521   defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2522   attrs.offset_known_p = false;
2523   attrs.size_known_p = defattrs->size_known_p;
2524   attrs.size = defattrs->size;
2525   attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2526   set_mem_attrs (new_rtx, &attrs);
2527   return new_rtx;
2528 }
2529 
2530 /* Return a memory reference like MEMREF, but with its address changed to
2531    ADDR.  The caller is asserting that the actual piece of memory pointed
2532    to is the same, just the form of the address is being changed, such as
2533    by putting something into a register.  INPLACE is true if any changes
2534    can be made directly to MEMREF or false if MEMREF must be treated as
2535    immutable.  */
2536 
2537 rtx
replace_equiv_address(rtx memref,rtx addr,bool inplace)2538 replace_equiv_address (rtx memref, rtx addr, bool inplace)
2539 {
2540   /* change_address_1 copies the memory attribute structure without change
2541      and that's exactly what we want here.  */
2542   update_temp_slot_address (XEXP (memref, 0), addr);
2543   return change_address_1 (memref, VOIDmode, addr, 1, inplace);
2544 }
2545 
2546 /* Likewise, but the reference is not required to be valid.  */
2547 
2548 rtx
replace_equiv_address_nv(rtx memref,rtx addr,bool inplace)2549 replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
2550 {
2551   return change_address_1 (memref, VOIDmode, addr, 0, inplace);
2552 }
2553 
2554 /* Return a memory reference like MEMREF, but with its mode widened to
2555    MODE and offset by OFFSET.  This would be used by targets that e.g.
2556    cannot issue QImode memory operations and have to use SImode memory
2557    operations plus masking logic.  */
2558 
2559 rtx
widen_memory_access(rtx memref,machine_mode mode,poly_int64 offset)2560 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
2561 {
2562   rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2563   poly_uint64 size = GET_MODE_SIZE (mode);
2564 
2565   /* If there are no changes, just return the original memory reference.  */
2566   if (new_rtx == memref)
2567     return new_rtx;
2568 
2569   mem_attrs attrs (*get_mem_attrs (new_rtx));
2570 
2571   /* If we don't know what offset we were at within the expression, then
2572      we can't know if we've overstepped the bounds.  */
2573   if (! attrs.offset_known_p)
2574     attrs.expr = NULL_TREE;
2575 
2576   while (attrs.expr)
2577     {
2578       if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2579 	{
2580 	  tree field = TREE_OPERAND (attrs.expr, 1);
2581 	  tree offset = component_ref_field_offset (attrs.expr);
2582 
2583 	  if (! DECL_SIZE_UNIT (field))
2584 	    {
2585 	      attrs.expr = NULL_TREE;
2586 	      break;
2587 	    }
2588 
2589 	  /* Is the field at least as large as the access?  If so, ok,
2590 	     otherwise strip back to the containing structure.  */
2591 	  if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2592 	      && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2593 	      && known_ge (attrs.offset, 0))
2594 	    break;
2595 
2596 	  poly_uint64 suboffset;
2597 	  if (!poly_int_tree_p (offset, &suboffset))
2598 	    {
2599 	      attrs.expr = NULL_TREE;
2600 	      break;
2601 	    }
2602 
2603 	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
2604 	  attrs.offset += suboffset;
2605 	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
2606 			   / BITS_PER_UNIT);
2607 	}
2608       /* Similarly for the decl.  */
2609       else if (DECL_P (attrs.expr)
2610 	       && DECL_SIZE_UNIT (attrs.expr)
2611 	       && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2612 	       && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2613 			   size)
2614 	       && known_ge (attrs.offset, 0))
2615 	break;
2616       else
2617 	{
2618 	  /* The widened memory access overflows the expression, which means
2619 	     that it could alias another expression.  Zap it.  */
2620 	  attrs.expr = NULL_TREE;
2621 	  break;
2622 	}
2623     }
2624 
2625   if (! attrs.expr)
2626     attrs.offset_known_p = false;
2627 
2628   /* The widened memory may alias other stuff, so zap the alias set.  */
2629   /* ??? Maybe use get_alias_set on any remaining expression.  */
2630   attrs.alias = 0;
2631   attrs.size_known_p = true;
2632   attrs.size = size;
2633   set_mem_attrs (new_rtx, &attrs);
2634   return new_rtx;
2635 }
2636 
2637 /* A fake decl that is used as the MEM_EXPR of spill slots.  */
2638 static GTY(()) tree spill_slot_decl;
2639 
2640 tree
get_spill_slot_decl(bool force_build_p)2641 get_spill_slot_decl (bool force_build_p)
2642 {
2643   tree d = spill_slot_decl;
2644   rtx rd;
2645 
2646   if (d || !force_build_p)
2647     return d;
2648 
2649   d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2650 		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
2651   DECL_ARTIFICIAL (d) = 1;
2652   DECL_IGNORED_P (d) = 1;
2653   TREE_USED (d) = 1;
2654   spill_slot_decl = d;
2655 
2656   rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2657   MEM_NOTRAP_P (rd) = 1;
2658   mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2659   attrs.alias = new_alias_set ();
2660   attrs.expr = d;
2661   set_mem_attrs (rd, &attrs);
2662   SET_DECL_RTL (d, rd);
2663 
2664   return d;
2665 }
2666 
2667 /* Given MEM, a result from assign_stack_local, fill in the memory
2668    attributes as appropriate for a register allocator spill slot.
2669    These slots are not aliasable by other memory.  We arrange for
2670    them all to use a single MEM_EXPR, so that the aliasing code can
2671    work properly in the case of shared spill slots.  */
2672 
2673 void
set_mem_attrs_for_spill(rtx mem)2674 set_mem_attrs_for_spill (rtx mem)
2675 {
2676   rtx addr;
2677 
2678   mem_attrs attrs (*get_mem_attrs (mem));
2679   attrs.expr = get_spill_slot_decl (true);
2680   attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2681   attrs.addrspace = ADDR_SPACE_GENERIC;
2682 
2683   /* We expect the incoming memory to be of the form:
2684 	(mem:MODE (plus (reg sfp) (const_int offset)))
2685      with perhaps the plus missing for offset = 0.  */
2686   addr = XEXP (mem, 0);
2687   attrs.offset_known_p = true;
2688   strip_offset (addr, &attrs.offset);
2689 
2690   set_mem_attrs (mem, &attrs);
2691   MEM_NOTRAP_P (mem) = 1;
2692 }
2693 
2694 /* Return a newly created CODE_LABEL rtx with a unique label number.  */
2695 
2696 rtx_code_label *
gen_label_rtx(void)2697 gen_label_rtx (void)
2698 {
2699   return as_a <rtx_code_label *> (
2700 	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2701 				NULL, label_num++, NULL));
2702 }
2703 
2704 /* For procedure integration.  */
2705 
2706 /* Install new pointers to the first and last insns in the chain.
2707    Also, set cur_insn_uid to one higher than the last in use.
2708    Used for an inline-procedure after copying the insn chain.  */
2709 
2710 void
set_new_first_and_last_insn(rtx_insn * first,rtx_insn * last)2711 set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2712 {
2713   rtx_insn *insn;
2714 
2715   set_first_insn (first);
2716   set_last_insn (last);
2717   cur_insn_uid = 0;
2718 
2719   if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
2720     {
2721       int debug_count = 0;
2722 
2723       cur_insn_uid = param_min_nondebug_insn_uid - 1;
2724       cur_debug_insn_uid = 0;
2725 
2726       for (insn = first; insn; insn = NEXT_INSN (insn))
2727 	if (INSN_UID (insn) < param_min_nondebug_insn_uid)
2728 	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2729 	else
2730 	  {
2731 	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2732 	    if (DEBUG_INSN_P (insn))
2733 	      debug_count++;
2734 	  }
2735 
2736       if (debug_count)
2737 	cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
2738       else
2739 	cur_debug_insn_uid++;
2740     }
2741   else
2742     for (insn = first; insn; insn = NEXT_INSN (insn))
2743       cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2744 
2745   cur_insn_uid++;
2746 }
2747 
2748 /* Go through all the RTL insn bodies and copy any invalid shared
2749    structure.  This routine should only be called once.  */
2750 
2751 static void
unshare_all_rtl_1(rtx_insn * insn)2752 unshare_all_rtl_1 (rtx_insn *insn)
2753 {
2754   /* Unshare just about everything else.  */
2755   unshare_all_rtl_in_chain (insn);
2756 
2757   /* Make sure the addresses of stack slots found outside the insn chain
2758      (such as, in DECL_RTL of a variable) are not shared
2759      with the insn chain.
2760 
2761      This special care is necessary when the stack slot MEM does not
2762      actually appear in the insn chain.  If it does appear, its address
2763      is unshared from all else at that point.  */
2764   unsigned int i;
2765   rtx temp;
2766   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2767     (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2768 }
2769 
2770 /* Go through all the RTL insn bodies and copy any invalid shared
2771    structure, again.  This is a fairly expensive thing to do so it
2772    should be done sparingly.  */
2773 
2774 void
unshare_all_rtl_again(rtx_insn * insn)2775 unshare_all_rtl_again (rtx_insn *insn)
2776 {
2777   rtx_insn *p;
2778   tree decl;
2779 
2780   for (p = insn; p; p = NEXT_INSN (p))
2781     if (INSN_P (p))
2782       {
2783 	reset_used_flags (PATTERN (p));
2784 	reset_used_flags (REG_NOTES (p));
2785 	if (CALL_P (p))
2786 	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2787       }
2788 
2789   /* Make sure that virtual stack slots are not shared.  */
2790   set_used_decls (DECL_INITIAL (cfun->decl));
2791 
2792   /* Make sure that virtual parameters are not shared.  */
2793   for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2794     set_used_flags (DECL_RTL (decl));
2795 
2796   rtx temp;
2797   unsigned int i;
2798   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2799     reset_used_flags (temp);
2800 
2801   unshare_all_rtl_1 (insn);
2802 }
2803 
2804 unsigned int
unshare_all_rtl(void)2805 unshare_all_rtl (void)
2806 {
2807   unshare_all_rtl_1 (get_insns ());
2808 
2809   for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2810     {
2811       if (DECL_RTL_SET_P (decl))
2812 	SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2813       DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2814     }
2815 
2816   return 0;
2817 }
2818 
2819 
2820 /* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2821    Recursively does the same for subexpressions.  */
2822 
2823 static void
verify_rtx_sharing(rtx orig,rtx insn)2824 verify_rtx_sharing (rtx orig, rtx insn)
2825 {
2826   rtx x = orig;
2827   int i;
2828   enum rtx_code code;
2829   const char *format_ptr;
2830 
2831   if (x == 0)
2832     return;
2833 
2834   code = GET_CODE (x);
2835 
2836   /* These types may be freely shared.  */
2837 
2838   switch (code)
2839     {
2840     case REG:
2841     case DEBUG_EXPR:
2842     case VALUE:
2843     CASE_CONST_ANY:
2844     case SYMBOL_REF:
2845     case LABEL_REF:
2846     case CODE_LABEL:
2847     case PC:
2848     case CC0:
2849     case RETURN:
2850     case SIMPLE_RETURN:
2851     case SCRATCH:
2852       /* SCRATCH must be shared because they represent distinct values.  */
2853       return;
2854     case CLOBBER:
2855       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2856          clobbers or clobbers of hard registers that originated as pseudos.
2857          This is needed to allow safe register renaming.  */
2858       if (REG_P (XEXP (x, 0))
2859 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
2860 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
2861 	return;
2862       break;
2863 
2864     case CONST:
2865       if (shared_const_p (orig))
2866 	return;
2867       break;
2868 
2869     case MEM:
2870       /* A MEM is allowed to be shared if its address is constant.  */
2871       if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2872 	  || reload_completed || reload_in_progress)
2873 	return;
2874 
2875       break;
2876 
2877     default:
2878       break;
2879     }
2880 
2881   /* This rtx may not be shared.  If it has already been seen,
2882      replace it with a copy of itself.  */
2883   if (flag_checking && RTX_FLAG (x, used))
2884     {
2885       error ("invalid rtl sharing found in the insn");
2886       debug_rtx (insn);
2887       error ("shared rtx");
2888       debug_rtx (x);
2889       internal_error ("internal consistency failure");
2890     }
2891   gcc_assert (!RTX_FLAG (x, used));
2892 
2893   RTX_FLAG (x, used) = 1;
2894 
2895   /* Now scan the subexpressions recursively.  */
2896 
2897   format_ptr = GET_RTX_FORMAT (code);
2898 
2899   for (i = 0; i < GET_RTX_LENGTH (code); i++)
2900     {
2901       switch (*format_ptr++)
2902 	{
2903 	case 'e':
2904 	  verify_rtx_sharing (XEXP (x, i), insn);
2905 	  break;
2906 
2907 	case 'E':
2908 	  if (XVEC (x, i) != NULL)
2909 	    {
2910 	      int j;
2911 	      int len = XVECLEN (x, i);
2912 
2913 	      for (j = 0; j < len; j++)
2914 		{
2915 		  /* We allow sharing of ASM_OPERANDS inside single
2916 		     instruction.  */
2917 		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2918 		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2919 			  == ASM_OPERANDS))
2920 		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2921 		  else
2922 		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
2923 		}
2924 	    }
2925 	  break;
2926 	}
2927     }
2928   return;
2929 }
2930 
2931 /* Reset used-flags for INSN.  */
2932 
2933 static void
reset_insn_used_flags(rtx insn)2934 reset_insn_used_flags (rtx insn)
2935 {
2936   gcc_assert (INSN_P (insn));
2937   reset_used_flags (PATTERN (insn));
2938   reset_used_flags (REG_NOTES (insn));
2939   if (CALL_P (insn))
2940     reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2941 }
2942 
2943 /* Go through all the RTL insn bodies and clear all the USED bits.  */
2944 
2945 static void
reset_all_used_flags(void)2946 reset_all_used_flags (void)
2947 {
2948   rtx_insn *p;
2949 
2950   for (p = get_insns (); p; p = NEXT_INSN (p))
2951     if (INSN_P (p))
2952       {
2953 	rtx pat = PATTERN (p);
2954 	if (GET_CODE (pat) != SEQUENCE)
2955 	  reset_insn_used_flags (p);
2956 	else
2957 	  {
2958 	    gcc_assert (REG_NOTES (p) == NULL);
2959 	    for (int i = 0; i < XVECLEN (pat, 0); i++)
2960 	      {
2961 		rtx insn = XVECEXP (pat, 0, i);
2962 		if (INSN_P (insn))
2963 		  reset_insn_used_flags (insn);
2964 	      }
2965 	  }
2966       }
2967 }
2968 
2969 /* Verify sharing in INSN.  */
2970 
2971 static void
verify_insn_sharing(rtx insn)2972 verify_insn_sharing (rtx insn)
2973 {
2974   gcc_assert (INSN_P (insn));
2975   verify_rtx_sharing (PATTERN (insn), insn);
2976   verify_rtx_sharing (REG_NOTES (insn), insn);
2977   if (CALL_P (insn))
2978     verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
2979 }
2980 
2981 /* Go through all the RTL insn bodies and check that there is no unexpected
2982    sharing in between the subexpressions.  */
2983 
2984 DEBUG_FUNCTION void
verify_rtl_sharing(void)2985 verify_rtl_sharing (void)
2986 {
2987   rtx_insn *p;
2988 
2989   timevar_push (TV_VERIFY_RTL_SHARING);
2990 
2991   reset_all_used_flags ();
2992 
2993   for (p = get_insns (); p; p = NEXT_INSN (p))
2994     if (INSN_P (p))
2995       {
2996 	rtx pat = PATTERN (p);
2997 	if (GET_CODE (pat) != SEQUENCE)
2998 	  verify_insn_sharing (p);
2999 	else
3000 	  for (int i = 0; i < XVECLEN (pat, 0); i++)
3001 	      {
3002 		rtx insn = XVECEXP (pat, 0, i);
3003 		if (INSN_P (insn))
3004 		  verify_insn_sharing (insn);
3005 	      }
3006       }
3007 
3008   reset_all_used_flags ();
3009 
3010   timevar_pop (TV_VERIFY_RTL_SHARING);
3011 }
3012 
3013 /* Go through all the RTL insn bodies and copy any invalid shared structure.
3014    Assumes the mark bits are cleared at entry.  */
3015 
3016 void
unshare_all_rtl_in_chain(rtx_insn * insn)3017 unshare_all_rtl_in_chain (rtx_insn *insn)
3018 {
3019   for (; insn; insn = NEXT_INSN (insn))
3020     if (INSN_P (insn))
3021       {
3022 	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3023 	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3024 	if (CALL_P (insn))
3025 	  CALL_INSN_FUNCTION_USAGE (insn)
3026 	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3027       }
3028 }
3029 
3030 /* Go through all virtual stack slots of a function and mark them as
3031    shared.  We never replace the DECL_RTLs themselves with a copy,
3032    but expressions mentioned into a DECL_RTL cannot be shared with
3033    expressions in the instruction stream.
3034 
3035    Note that reload may convert pseudo registers into memories in-place.
3036    Pseudo registers are always shared, but MEMs never are.  Thus if we
3037    reset the used flags on MEMs in the instruction stream, we must set
3038    them again on MEMs that appear in DECL_RTLs.  */
3039 
3040 static void
set_used_decls(tree blk)3041 set_used_decls (tree blk)
3042 {
3043   tree t;
3044 
3045   /* Mark decls.  */
3046   for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3047     if (DECL_RTL_SET_P (t))
3048       set_used_flags (DECL_RTL (t));
3049 
3050   /* Now process sub-blocks.  */
3051   for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3052     set_used_decls (t);
3053 }
3054 
3055 /* Mark ORIG as in use, and return a copy of it if it was already in use.
3056    Recursively does the same for subexpressions.  Uses
3057    copy_rtx_if_shared_1 to reduce stack space.  */
3058 
3059 rtx
copy_rtx_if_shared(rtx orig)3060 copy_rtx_if_shared (rtx orig)
3061 {
3062   copy_rtx_if_shared_1 (&orig);
3063   return orig;
3064 }
3065 
3066 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
3067    use.  Recursively does the same for subexpressions.  */
3068 
3069 static void
copy_rtx_if_shared_1(rtx * orig1)3070 copy_rtx_if_shared_1 (rtx *orig1)
3071 {
3072   rtx x;
3073   int i;
3074   enum rtx_code code;
3075   rtx *last_ptr;
3076   const char *format_ptr;
3077   int copied = 0;
3078   int length;
3079 
3080   /* Repeat is used to turn tail-recursion into iteration.  */
3081 repeat:
3082   x = *orig1;
3083 
3084   if (x == 0)
3085     return;
3086 
3087   code = GET_CODE (x);
3088 
3089   /* These types may be freely shared.  */
3090 
3091   switch (code)
3092     {
3093     case REG:
3094     case DEBUG_EXPR:
3095     case VALUE:
3096     CASE_CONST_ANY:
3097     case SYMBOL_REF:
3098     case LABEL_REF:
3099     case CODE_LABEL:
3100     case PC:
3101     case CC0:
3102     case RETURN:
3103     case SIMPLE_RETURN:
3104     case SCRATCH:
3105       /* SCRATCH must be shared because they represent distinct values.  */
3106       return;
3107     case CLOBBER:
3108       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
3109          clobbers or clobbers of hard registers that originated as pseudos.
3110          This is needed to allow safe register renaming.  */
3111       if (REG_P (XEXP (x, 0))
3112 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
3113 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
3114 	return;
3115       break;
3116 
3117     case CONST:
3118       if (shared_const_p (x))
3119 	return;
3120       break;
3121 
3122     case DEBUG_INSN:
3123     case INSN:
3124     case JUMP_INSN:
3125     case CALL_INSN:
3126     case NOTE:
3127     case BARRIER:
3128       /* The chain of insns is not being copied.  */
3129       return;
3130 
3131     default:
3132       break;
3133     }
3134 
3135   /* This rtx may not be shared.  If it has already been seen,
3136      replace it with a copy of itself.  */
3137 
3138   if (RTX_FLAG (x, used))
3139     {
3140       x = shallow_copy_rtx (x);
3141       copied = 1;
3142     }
3143   RTX_FLAG (x, used) = 1;
3144 
3145   /* Now scan the subexpressions recursively.
3146      We can store any replaced subexpressions directly into X
3147      since we know X is not shared!  Any vectors in X
3148      must be copied if X was copied.  */
3149 
3150   format_ptr = GET_RTX_FORMAT (code);
3151   length = GET_RTX_LENGTH (code);
3152   last_ptr = NULL;
3153 
3154   for (i = 0; i < length; i++)
3155     {
3156       switch (*format_ptr++)
3157 	{
3158 	case 'e':
3159           if (last_ptr)
3160             copy_rtx_if_shared_1 (last_ptr);
3161 	  last_ptr = &XEXP (x, i);
3162 	  break;
3163 
3164 	case 'E':
3165 	  if (XVEC (x, i) != NULL)
3166 	    {
3167 	      int j;
3168 	      int len = XVECLEN (x, i);
3169 
3170               /* Copy the vector iff I copied the rtx and the length
3171 		 is nonzero.  */
3172 	      if (copied && len > 0)
3173 		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
3174 
3175               /* Call recursively on all inside the vector.  */
3176 	      for (j = 0; j < len; j++)
3177                 {
3178 		  if (last_ptr)
3179 		    copy_rtx_if_shared_1 (last_ptr);
3180                   last_ptr = &XVECEXP (x, i, j);
3181                 }
3182 	    }
3183 	  break;
3184 	}
3185     }
3186   *orig1 = x;
3187   if (last_ptr)
3188     {
3189       orig1 = last_ptr;
3190       goto repeat;
3191     }
3192   return;
3193 }
3194 
3195 /* Set the USED bit in X and its non-shareable subparts to FLAG.  */
3196 
3197 static void
mark_used_flags(rtx x,int flag)3198 mark_used_flags (rtx x, int flag)
3199 {
3200   int i, j;
3201   enum rtx_code code;
3202   const char *format_ptr;
3203   int length;
3204 
3205   /* Repeat is used to turn tail-recursion into iteration.  */
3206 repeat:
3207   if (x == 0)
3208     return;
3209 
3210   code = GET_CODE (x);
3211 
3212   /* These types may be freely shared so we needn't do any resetting
3213      for them.  */
3214 
3215   switch (code)
3216     {
3217     case REG:
3218     case DEBUG_EXPR:
3219     case VALUE:
3220     CASE_CONST_ANY:
3221     case SYMBOL_REF:
3222     case CODE_LABEL:
3223     case PC:
3224     case CC0:
3225     case RETURN:
3226     case SIMPLE_RETURN:
3227       return;
3228 
3229     case DEBUG_INSN:
3230     case INSN:
3231     case JUMP_INSN:
3232     case CALL_INSN:
3233     case NOTE:
3234     case LABEL_REF:
3235     case BARRIER:
3236       /* The chain of insns is not being copied.  */
3237       return;
3238 
3239     default:
3240       break;
3241     }
3242 
3243   RTX_FLAG (x, used) = flag;
3244 
3245   format_ptr = GET_RTX_FORMAT (code);
3246   length = GET_RTX_LENGTH (code);
3247 
3248   for (i = 0; i < length; i++)
3249     {
3250       switch (*format_ptr++)
3251 	{
3252 	case 'e':
3253           if (i == length-1)
3254             {
3255               x = XEXP (x, i);
3256 	      goto repeat;
3257             }
3258 	  mark_used_flags (XEXP (x, i), flag);
3259 	  break;
3260 
3261 	case 'E':
3262 	  for (j = 0; j < XVECLEN (x, i); j++)
3263 	    mark_used_flags (XVECEXP (x, i, j), flag);
3264 	  break;
3265 	}
3266     }
3267 }
3268 
3269 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3270    to look for shared sub-parts.  */
3271 
3272 void
reset_used_flags(rtx x)3273 reset_used_flags (rtx x)
3274 {
3275   mark_used_flags (x, 0);
3276 }
3277 
3278 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3279    to look for shared sub-parts.  */
3280 
3281 void
set_used_flags(rtx x)3282 set_used_flags (rtx x)
3283 {
3284   mark_used_flags (x, 1);
3285 }
3286 
3287 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3288    Return X or the rtx for the pseudo reg the value of X was copied into.
3289    OTHER must be valid as a SET_DEST.  */
3290 
3291 rtx
make_safe_from(rtx x,rtx other)3292 make_safe_from (rtx x, rtx other)
3293 {
3294   while (1)
3295     switch (GET_CODE (other))
3296       {
3297       case SUBREG:
3298 	other = SUBREG_REG (other);
3299 	break;
3300       case STRICT_LOW_PART:
3301       case SIGN_EXTEND:
3302       case ZERO_EXTEND:
3303 	other = XEXP (other, 0);
3304 	break;
3305       default:
3306 	goto done;
3307       }
3308  done:
3309   if ((MEM_P (other)
3310        && ! CONSTANT_P (x)
3311        && !REG_P (x)
3312        && GET_CODE (x) != SUBREG)
3313       || (REG_P (other)
3314 	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
3315 	      || reg_mentioned_p (other, x))))
3316     {
3317       rtx temp = gen_reg_rtx (GET_MODE (x));
3318       emit_move_insn (temp, x);
3319       return temp;
3320     }
3321   return x;
3322 }
3323 
3324 /* Emission of insns (adding them to the doubly-linked list).  */
3325 
3326 /* Return the last insn emitted, even if it is in a sequence now pushed.  */
3327 
3328 rtx_insn *
get_last_insn_anywhere(void)3329 get_last_insn_anywhere (void)
3330 {
3331   struct sequence_stack *seq;
3332   for (seq = get_current_sequence (); seq; seq = seq->next)
3333     if (seq->last != 0)
3334       return seq->last;
3335   return 0;
3336 }
3337 
3338 /* Return the first nonnote insn emitted in current sequence or current
3339    function.  This routine looks inside SEQUENCEs.  */
3340 
3341 rtx_insn *
get_first_nonnote_insn(void)3342 get_first_nonnote_insn (void)
3343 {
3344   rtx_insn *insn = get_insns ();
3345 
3346   if (insn)
3347     {
3348       if (NOTE_P (insn))
3349 	for (insn = next_insn (insn);
3350 	     insn && NOTE_P (insn);
3351 	     insn = next_insn (insn))
3352 	  continue;
3353       else
3354 	{
3355 	  if (NONJUMP_INSN_P (insn)
3356 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
3357 	    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3358 	}
3359     }
3360 
3361   return insn;
3362 }
3363 
3364 /* Return the last nonnote insn emitted in current sequence or current
3365    function.  This routine looks inside SEQUENCEs.  */
3366 
3367 rtx_insn *
get_last_nonnote_insn(void)3368 get_last_nonnote_insn (void)
3369 {
3370   rtx_insn *insn = get_last_insn ();
3371 
3372   if (insn)
3373     {
3374       if (NOTE_P (insn))
3375 	for (insn = previous_insn (insn);
3376 	     insn && NOTE_P (insn);
3377 	     insn = previous_insn (insn))
3378 	  continue;
3379       else
3380 	{
3381 	  if (NONJUMP_INSN_P (insn))
3382 	    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3383 	      insn = seq->insn (seq->len () - 1);
3384 	}
3385     }
3386 
3387   return insn;
3388 }
3389 
3390 /* Return the number of actual (non-debug) insns emitted in this
3391    function.  */
3392 
3393 int
get_max_insn_count(void)3394 get_max_insn_count (void)
3395 {
3396   int n = cur_insn_uid;
3397 
3398   /* The table size must be stable across -g, to avoid codegen
3399      differences due to debug insns, and not be affected by
3400      -fmin-insn-uid, to avoid excessive table size and to simplify
3401      debugging of -fcompare-debug failures.  */
3402   if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
3403     n -= cur_debug_insn_uid;
3404   else
3405     n -= param_min_nondebug_insn_uid;
3406 
3407   return n;
3408 }
3409 
3410 
3411 /* Return the next insn.  If it is a SEQUENCE, return the first insn
3412    of the sequence.  */
3413 
3414 rtx_insn *
next_insn(rtx_insn * insn)3415 next_insn (rtx_insn *insn)
3416 {
3417   if (insn)
3418     {
3419       insn = NEXT_INSN (insn);
3420       if (insn && NONJUMP_INSN_P (insn)
3421 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
3422 	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3423     }
3424 
3425   return insn;
3426 }
3427 
3428 /* Return the previous insn.  If it is a SEQUENCE, return the last insn
3429    of the sequence.  */
3430 
3431 rtx_insn *
previous_insn(rtx_insn * insn)3432 previous_insn (rtx_insn *insn)
3433 {
3434   if (insn)
3435     {
3436       insn = PREV_INSN (insn);
3437       if (insn && NONJUMP_INSN_P (insn))
3438 	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3439 	  insn = seq->insn (seq->len () - 1);
3440     }
3441 
3442   return insn;
3443 }
3444 
3445 /* Return the next insn after INSN that is not a NOTE.  This routine does not
3446    look inside SEQUENCEs.  */
3447 
3448 rtx_insn *
next_nonnote_insn(rtx_insn * insn)3449 next_nonnote_insn (rtx_insn *insn)
3450 {
3451   while (insn)
3452     {
3453       insn = NEXT_INSN (insn);
3454       if (insn == 0 || !NOTE_P (insn))
3455 	break;
3456     }
3457 
3458   return insn;
3459 }
3460 
3461 /* Return the next insn after INSN that is not a DEBUG_INSN.  This
3462    routine does not look inside SEQUENCEs.  */
3463 
3464 rtx_insn *
next_nondebug_insn(rtx_insn * insn)3465 next_nondebug_insn (rtx_insn *insn)
3466 {
3467   while (insn)
3468     {
3469       insn = NEXT_INSN (insn);
3470       if (insn == 0 || !DEBUG_INSN_P (insn))
3471 	break;
3472     }
3473 
3474   return insn;
3475 }
3476 
3477 /* Return the previous insn before INSN that is not a NOTE.  This routine does
3478    not look inside SEQUENCEs.  */
3479 
3480 rtx_insn *
prev_nonnote_insn(rtx_insn * insn)3481 prev_nonnote_insn (rtx_insn *insn)
3482 {
3483   while (insn)
3484     {
3485       insn = PREV_INSN (insn);
3486       if (insn == 0 || !NOTE_P (insn))
3487 	break;
3488     }
3489 
3490   return insn;
3491 }
3492 
3493 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3494    This routine does not look inside SEQUENCEs.  */
3495 
3496 rtx_insn *
prev_nondebug_insn(rtx_insn * insn)3497 prev_nondebug_insn (rtx_insn *insn)
3498 {
3499   while (insn)
3500     {
3501       insn = PREV_INSN (insn);
3502       if (insn == 0 || !DEBUG_INSN_P (insn))
3503 	break;
3504     }
3505 
3506   return insn;
3507 }
3508 
3509 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3510    This routine does not look inside SEQUENCEs.  */
3511 
3512 rtx_insn *
next_nonnote_nondebug_insn(rtx_insn * insn)3513 next_nonnote_nondebug_insn (rtx_insn *insn)
3514 {
3515   while (insn)
3516     {
3517       insn = NEXT_INSN (insn);
3518       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3519 	break;
3520     }
3521 
3522   return insn;
3523 }
3524 
3525 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3526    but stop the search before we enter another basic block.  This
3527    routine does not look inside SEQUENCEs.  */
3528 
3529 rtx_insn *
next_nonnote_nondebug_insn_bb(rtx_insn * insn)3530 next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3531 {
3532   while (insn)
3533     {
3534       insn = NEXT_INSN (insn);
3535       if (insn == 0)
3536 	break;
3537       if (DEBUG_INSN_P (insn))
3538 	continue;
3539       if (!NOTE_P (insn))
3540 	break;
3541       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3542 	return NULL;
3543     }
3544 
3545   return insn;
3546 }
3547 
3548 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3549    This routine does not look inside SEQUENCEs.  */
3550 
3551 rtx_insn *
prev_nonnote_nondebug_insn(rtx_insn * insn)3552 prev_nonnote_nondebug_insn (rtx_insn *insn)
3553 {
3554   while (insn)
3555     {
3556       insn = PREV_INSN (insn);
3557       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3558 	break;
3559     }
3560 
3561   return insn;
3562 }
3563 
3564 /* Return the previous insn before INSN that is not a NOTE nor
3565    DEBUG_INSN, but stop the search before we enter another basic
3566    block.  This routine does not look inside SEQUENCEs.  */
3567 
3568 rtx_insn *
prev_nonnote_nondebug_insn_bb(rtx_insn * insn)3569 prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3570 {
3571   while (insn)
3572     {
3573       insn = PREV_INSN (insn);
3574       if (insn == 0)
3575 	break;
3576       if (DEBUG_INSN_P (insn))
3577 	continue;
3578       if (!NOTE_P (insn))
3579 	break;
3580       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3581 	return NULL;
3582     }
3583 
3584   return insn;
3585 }
3586 
3587 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3588    or 0, if there is none.  This routine does not look inside
3589    SEQUENCEs.  */
3590 
3591 rtx_insn *
next_real_insn(rtx_insn * insn)3592 next_real_insn (rtx_insn *insn)
3593 {
3594   while (insn)
3595     {
3596       insn = NEXT_INSN (insn);
3597       if (insn == 0 || INSN_P (insn))
3598 	break;
3599     }
3600 
3601   return insn;
3602 }
3603 
3604 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3605    or 0, if there is none.  This routine does not look inside
3606    SEQUENCEs.  */
3607 
3608 rtx_insn *
prev_real_insn(rtx_insn * insn)3609 prev_real_insn (rtx_insn *insn)
3610 {
3611   while (insn)
3612     {
3613       insn = PREV_INSN (insn);
3614       if (insn == 0 || INSN_P (insn))
3615 	break;
3616     }
3617 
3618   return insn;
3619 }
3620 
3621 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3622    or 0, if there is none.  This routine does not look inside
3623    SEQUENCEs.  */
3624 
3625 rtx_insn *
next_real_nondebug_insn(rtx uncast_insn)3626 next_real_nondebug_insn (rtx uncast_insn)
3627 {
3628   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3629 
3630   while (insn)
3631     {
3632       insn = NEXT_INSN (insn);
3633       if (insn == 0 || NONDEBUG_INSN_P (insn))
3634 	break;
3635     }
3636 
3637   return insn;
3638 }
3639 
3640 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3641    or 0, if there is none.  This routine does not look inside
3642    SEQUENCEs.  */
3643 
3644 rtx_insn *
prev_real_nondebug_insn(rtx_insn * insn)3645 prev_real_nondebug_insn (rtx_insn *insn)
3646 {
3647   while (insn)
3648     {
3649       insn = PREV_INSN (insn);
3650       if (insn == 0 || NONDEBUG_INSN_P (insn))
3651 	break;
3652     }
3653 
3654   return insn;
3655 }
3656 
3657 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3658    This routine does not look inside SEQUENCEs.  */
3659 
3660 rtx_call_insn *
last_call_insn(void)3661 last_call_insn (void)
3662 {
3663   rtx_insn *insn;
3664 
3665   for (insn = get_last_insn ();
3666        insn && !CALL_P (insn);
3667        insn = PREV_INSN (insn))
3668     ;
3669 
3670   return safe_as_a <rtx_call_insn *> (insn);
3671 }
3672 
3673 /* Find the next insn after INSN that really does something.  This routine
3674    does not look inside SEQUENCEs.  After reload this also skips over
3675    standalone USE and CLOBBER insn.  */
3676 
3677 int
active_insn_p(const rtx_insn * insn)3678 active_insn_p (const rtx_insn *insn)
3679 {
3680   return (CALL_P (insn) || JUMP_P (insn)
3681 	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
3682 	  || (NONJUMP_INSN_P (insn)
3683 	      && (! reload_completed
3684 		  || (GET_CODE (PATTERN (insn)) != USE
3685 		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
3686 }
3687 
3688 rtx_insn *
next_active_insn(rtx_insn * insn)3689 next_active_insn (rtx_insn *insn)
3690 {
3691   while (insn)
3692     {
3693       insn = NEXT_INSN (insn);
3694       if (insn == 0 || active_insn_p (insn))
3695 	break;
3696     }
3697 
3698   return insn;
3699 }
3700 
3701 /* Find the last insn before INSN that really does something.  This routine
3702    does not look inside SEQUENCEs.  After reload this also skips over
3703    standalone USE and CLOBBER insn.  */
3704 
3705 rtx_insn *
prev_active_insn(rtx_insn * insn)3706 prev_active_insn (rtx_insn *insn)
3707 {
3708   while (insn)
3709     {
3710       insn = PREV_INSN (insn);
3711       if (insn == 0 || active_insn_p (insn))
3712 	break;
3713     }
3714 
3715   return insn;
3716 }
3717 
3718 /* Return the next insn that uses CC0 after INSN, which is assumed to
3719    set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3720    applied to the result of this function should yield INSN).
3721 
3722    Normally, this is simply the next insn.  However, if a REG_CC_USER note
3723    is present, it contains the insn that uses CC0.
3724 
3725    Return 0 if we can't find the insn.  */
3726 
3727 rtx_insn *
next_cc0_user(rtx_insn * insn)3728 next_cc0_user (rtx_insn *insn)
3729 {
3730   rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3731 
3732   if (note)
3733     return safe_as_a <rtx_insn *> (XEXP (note, 0));
3734 
3735   insn = next_nonnote_insn (insn);
3736   if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3737     insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3738 
3739   if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3740     return insn;
3741 
3742   return 0;
3743 }
3744 
3745 /* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
3746    note, it is the previous insn.  */
3747 
3748 rtx_insn *
prev_cc0_setter(rtx_insn * insn)3749 prev_cc0_setter (rtx_insn *insn)
3750 {
3751   rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3752 
3753   if (note)
3754     return safe_as_a <rtx_insn *> (XEXP (note, 0));
3755 
3756   insn = prev_nonnote_insn (insn);
3757   gcc_assert (sets_cc0_p (PATTERN (insn)));
3758 
3759   return insn;
3760 }
3761 
3762 /* Find a RTX_AUTOINC class rtx which matches DATA.  */
3763 
3764 static int
find_auto_inc(const_rtx x,const_rtx reg)3765 find_auto_inc (const_rtx x, const_rtx reg)
3766 {
3767   subrtx_iterator::array_type array;
3768   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3769     {
3770       const_rtx x = *iter;
3771       if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3772 	  && rtx_equal_p (reg, XEXP (x, 0)))
3773 	return true;
3774     }
3775   return false;
3776 }
3777 
3778 /* Increment the label uses for all labels present in rtx.  */
3779 
3780 static void
mark_label_nuses(rtx x)3781 mark_label_nuses (rtx x)
3782 {
3783   enum rtx_code code;
3784   int i, j;
3785   const char *fmt;
3786 
3787   code = GET_CODE (x);
3788   if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3789     LABEL_NUSES (label_ref_label (x))++;
3790 
3791   fmt = GET_RTX_FORMAT (code);
3792   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3793     {
3794       if (fmt[i] == 'e')
3795 	mark_label_nuses (XEXP (x, i));
3796       else if (fmt[i] == 'E')
3797 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3798 	  mark_label_nuses (XVECEXP (x, i, j));
3799     }
3800 }
3801 
3802 
3803 /* Try splitting insns that can be split for better scheduling.
3804    PAT is the pattern which might split.
3805    TRIAL is the insn providing PAT.
3806    LAST is nonzero if we should return the last insn of the sequence produced.
3807 
3808    If this routine succeeds in splitting, it returns the first or last
3809    replacement insn depending on the value of LAST.  Otherwise, it
3810    returns TRIAL.  If the insn to be returned can be split, it will be.  */
3811 
3812 rtx_insn *
try_split(rtx pat,rtx_insn * trial,int last)3813 try_split (rtx pat, rtx_insn *trial, int last)
3814 {
3815   rtx_insn *before, *after;
3816   rtx note;
3817   rtx_insn *seq, *tem;
3818   profile_probability probability;
3819   rtx_insn *insn_last, *insn;
3820   int njumps = 0;
3821   rtx_insn *call_insn = NULL;
3822 
3823   /* We're not good at redistributing frame information.  */
3824   if (RTX_FRAME_RELATED_P (trial))
3825     return trial;
3826 
3827   if (any_condjump_p (trial)
3828       && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3829     split_branch_probability
3830       = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3831   else
3832     split_branch_probability = profile_probability::uninitialized ();
3833 
3834   probability = split_branch_probability;
3835 
3836   seq = split_insns (pat, trial);
3837 
3838   split_branch_probability = profile_probability::uninitialized ();
3839 
3840   if (!seq)
3841     return trial;
3842 
3843   /* Avoid infinite loop if any insn of the result matches
3844      the original pattern.  */
3845   insn_last = seq;
3846   while (1)
3847     {
3848       if (INSN_P (insn_last)
3849 	  && rtx_equal_p (PATTERN (insn_last), pat))
3850 	return trial;
3851       if (!NEXT_INSN (insn_last))
3852 	break;
3853       insn_last = NEXT_INSN (insn_last);
3854     }
3855 
3856   /* We will be adding the new sequence to the function.  The splitters
3857      may have introduced invalid RTL sharing, so unshare the sequence now.  */
3858   unshare_all_rtl_in_chain (seq);
3859 
3860   /* Mark labels and copy flags.  */
3861   for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3862     {
3863       if (JUMP_P (insn))
3864 	{
3865 	  if (JUMP_P (trial))
3866 	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
3867 	  mark_jump_label (PATTERN (insn), insn, 0);
3868 	  njumps++;
3869 	  if (probability.initialized_p ()
3870 	      && any_condjump_p (insn)
3871 	      && !find_reg_note (insn, REG_BR_PROB, 0))
3872 	    {
3873 	      /* We can preserve the REG_BR_PROB notes only if exactly
3874 		 one jump is created, otherwise the machine description
3875 		 is responsible for this step using
3876 		 split_branch_probability variable.  */
3877 	      gcc_assert (njumps == 1);
3878 	      add_reg_br_prob_note (insn, probability);
3879 	    }
3880 	}
3881     }
3882 
3883   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3884      in SEQ and copy any additional information across.  */
3885   if (CALL_P (trial))
3886     {
3887       for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3888 	if (CALL_P (insn))
3889 	  {
3890 	    gcc_assert (call_insn == NULL_RTX);
3891 	    call_insn = insn;
3892 
3893 	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3894 	       target may have explicitly specified.  */
3895 	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3896 	    while (*p)
3897 	      p = &XEXP (*p, 1);
3898 	    *p = CALL_INSN_FUNCTION_USAGE (trial);
3899 
3900 	    /* If the old call was a sibling call, the new one must
3901 	       be too.  */
3902 	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3903 	  }
3904     }
3905 
3906   /* Copy notes, particularly those related to the CFG.  */
3907   for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3908     {
3909       switch (REG_NOTE_KIND (note))
3910 	{
3911 	case REG_EH_REGION:
3912 	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
3913 	  break;
3914 
3915 	case REG_NORETURN:
3916 	case REG_SETJMP:
3917 	case REG_TM:
3918 	case REG_CALL_NOCF_CHECK:
3919 	case REG_CALL_ARG_LOCATION:
3920 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3921 	    {
3922 	      if (CALL_P (insn))
3923 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3924 	    }
3925 	  break;
3926 
3927 	case REG_NON_LOCAL_GOTO:
3928 	case REG_LABEL_TARGET:
3929 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3930 	    {
3931 	      if (JUMP_P (insn))
3932 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3933 	    }
3934 	  break;
3935 
3936 	case REG_INC:
3937 	  if (!AUTO_INC_DEC)
3938 	    break;
3939 
3940 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3941 	    {
3942 	      rtx reg = XEXP (note, 0);
3943 	      if (!FIND_REG_INC_NOTE (insn, reg)
3944 		  && find_auto_inc (PATTERN (insn), reg))
3945 		add_reg_note (insn, REG_INC, reg);
3946 	    }
3947 	  break;
3948 
3949 	case REG_ARGS_SIZE:
3950 	  fixup_args_size_notes (NULL, insn_last, get_args_size (note));
3951 	  break;
3952 
3953 	case REG_CALL_DECL:
3954 	  gcc_assert (call_insn != NULL_RTX);
3955 	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3956 	  break;
3957 
3958 	default:
3959 	  break;
3960 	}
3961     }
3962 
3963   /* If there are LABELS inside the split insns increment the
3964      usage count so we don't delete the label.  */
3965   if (INSN_P (trial))
3966     {
3967       insn = insn_last;
3968       while (insn != NULL_RTX)
3969 	{
3970 	  /* JUMP_P insns have already been "marked" above.  */
3971 	  if (NONJUMP_INSN_P (insn))
3972 	    mark_label_nuses (PATTERN (insn));
3973 
3974 	  insn = PREV_INSN (insn);
3975 	}
3976     }
3977 
3978   before = PREV_INSN (trial);
3979   after = NEXT_INSN (trial);
3980 
3981   emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3982 
3983   delete_insn (trial);
3984 
3985   /* Recursively call try_split for each new insn created; by the
3986      time control returns here that insn will be fully split, so
3987      set LAST and continue from the insn after the one returned.
3988      We can't use next_active_insn here since AFTER may be a note.
3989      Ignore deleted insns, which can be occur if not optimizing.  */
3990   for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3991     if (! tem->deleted () && INSN_P (tem))
3992       tem = try_split (PATTERN (tem), tem, 1);
3993 
3994   /* Return either the first or the last insn, depending on which was
3995      requested.  */
3996   return last
3997     ? (after ? PREV_INSN (after) : get_last_insn ())
3998     : NEXT_INSN (before);
3999 }
4000 
4001 /* Make and return an INSN rtx, initializing all its slots.
4002    Store PATTERN in the pattern slots.  */
4003 
4004 rtx_insn *
make_insn_raw(rtx pattern)4005 make_insn_raw (rtx pattern)
4006 {
4007   rtx_insn *insn;
4008 
4009   insn = as_a <rtx_insn *> (rtx_alloc (INSN));
4010 
4011   INSN_UID (insn) = cur_insn_uid++;
4012   PATTERN (insn) = pattern;
4013   INSN_CODE (insn) = -1;
4014   REG_NOTES (insn) = NULL;
4015   INSN_LOCATION (insn) = curr_insn_location ();
4016   BLOCK_FOR_INSN (insn) = NULL;
4017 
4018 #ifdef ENABLE_RTL_CHECKING
4019   if (insn
4020       && INSN_P (insn)
4021       && (returnjump_p (insn)
4022 	  || (GET_CODE (insn) == SET
4023 	      && SET_DEST (insn) == pc_rtx)))
4024     {
4025       warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
4026       debug_rtx (insn);
4027     }
4028 #endif
4029 
4030   return insn;
4031 }
4032 
4033 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */
4034 
4035 static rtx_insn *
make_debug_insn_raw(rtx pattern)4036 make_debug_insn_raw (rtx pattern)
4037 {
4038   rtx_debug_insn *insn;
4039 
4040   insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4041   INSN_UID (insn) = cur_debug_insn_uid++;
4042   if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
4043     INSN_UID (insn) = cur_insn_uid++;
4044 
4045   PATTERN (insn) = pattern;
4046   INSN_CODE (insn) = -1;
4047   REG_NOTES (insn) = NULL;
4048   INSN_LOCATION (insn) = curr_insn_location ();
4049   BLOCK_FOR_INSN (insn) = NULL;
4050 
4051   return insn;
4052 }
4053 
4054 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
4055 
4056 static rtx_insn *
make_jump_insn_raw(rtx pattern)4057 make_jump_insn_raw (rtx pattern)
4058 {
4059   rtx_jump_insn *insn;
4060 
4061   insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4062   INSN_UID (insn) = cur_insn_uid++;
4063 
4064   PATTERN (insn) = pattern;
4065   INSN_CODE (insn) = -1;
4066   REG_NOTES (insn) = NULL;
4067   JUMP_LABEL (insn) = NULL;
4068   INSN_LOCATION (insn) = curr_insn_location ();
4069   BLOCK_FOR_INSN (insn) = NULL;
4070 
4071   return insn;
4072 }
4073 
4074 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
4075 
4076 static rtx_insn *
make_call_insn_raw(rtx pattern)4077 make_call_insn_raw (rtx pattern)
4078 {
4079   rtx_call_insn *insn;
4080 
4081   insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4082   INSN_UID (insn) = cur_insn_uid++;
4083 
4084   PATTERN (insn) = pattern;
4085   INSN_CODE (insn) = -1;
4086   REG_NOTES (insn) = NULL;
4087   CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4088   INSN_LOCATION (insn) = curr_insn_location ();
4089   BLOCK_FOR_INSN (insn) = NULL;
4090 
4091   return insn;
4092 }
4093 
4094 /* Like `make_insn_raw' but make a NOTE instead of an insn.  */
4095 
4096 static rtx_note *
make_note_raw(enum insn_note subtype)4097 make_note_raw (enum insn_note subtype)
4098 {
4099   /* Some notes are never created this way at all.  These notes are
4100      only created by patching out insns.  */
4101   gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4102 	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4103 
4104   rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4105   INSN_UID (note) = cur_insn_uid++;
4106   NOTE_KIND (note) = subtype;
4107   BLOCK_FOR_INSN (note) = NULL;
4108   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4109   return note;
4110 }
4111 
4112 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
4113    INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
4114    but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */
4115 
4116 static inline void
link_insn_into_chain(rtx_insn * insn,rtx_insn * prev,rtx_insn * next)4117 link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4118 {
4119   SET_PREV_INSN (insn) = prev;
4120   SET_NEXT_INSN (insn) = next;
4121   if (prev != NULL)
4122     {
4123       SET_NEXT_INSN (prev) = insn;
4124       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4125 	{
4126 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4127 	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
4128 	}
4129     }
4130   if (next != NULL)
4131     {
4132       SET_PREV_INSN (next) = insn;
4133       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4134 	{
4135 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4136 	  SET_PREV_INSN (sequence->insn (0)) = insn;
4137 	}
4138     }
4139 
4140   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4141     {
4142       rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
4143       SET_PREV_INSN (sequence->insn (0)) = prev;
4144       SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4145     }
4146 }
4147 
4148 /* Add INSN to the end of the doubly-linked list.
4149    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
4150 
4151 void
add_insn(rtx_insn * insn)4152 add_insn (rtx_insn *insn)
4153 {
4154   rtx_insn *prev = get_last_insn ();
4155   link_insn_into_chain (insn, prev, NULL);
4156   if (get_insns () == NULL)
4157     set_first_insn (insn);
4158   set_last_insn (insn);
4159 }
4160 
4161 /* Add INSN into the doubly-linked list after insn AFTER.  */
4162 
4163 static void
add_insn_after_nobb(rtx_insn * insn,rtx_insn * after)4164 add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
4165 {
4166   rtx_insn *next = NEXT_INSN (after);
4167 
4168   gcc_assert (!optimize || !after->deleted ());
4169 
4170   link_insn_into_chain (insn, after, next);
4171 
4172   if (next == NULL)
4173     {
4174       struct sequence_stack *seq;
4175 
4176       for (seq = get_current_sequence (); seq; seq = seq->next)
4177 	if (after == seq->last)
4178 	  {
4179 	    seq->last = insn;
4180 	    break;
4181 	  }
4182     }
4183 }
4184 
4185 /* Add INSN into the doubly-linked list before insn BEFORE.  */
4186 
4187 static void
add_insn_before_nobb(rtx_insn * insn,rtx_insn * before)4188 add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
4189 {
4190   rtx_insn *prev = PREV_INSN (before);
4191 
4192   gcc_assert (!optimize || !before->deleted ());
4193 
4194   link_insn_into_chain (insn, prev, before);
4195 
4196   if (prev == NULL)
4197     {
4198       struct sequence_stack *seq;
4199 
4200       for (seq = get_current_sequence (); seq; seq = seq->next)
4201 	if (before == seq->first)
4202 	  {
4203 	    seq->first = insn;
4204 	    break;
4205 	  }
4206 
4207       gcc_assert (seq);
4208     }
4209 }
4210 
4211 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4212    If BB is NULL, an attempt is made to infer the bb from before.
4213 
4214    This and the next function should be the only functions called
4215    to insert an insn once delay slots have been filled since only
4216    they know how to update a SEQUENCE. */
4217 
4218 void
add_insn_after(rtx_insn * insn,rtx_insn * after,basic_block bb)4219 add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
4220 {
4221   add_insn_after_nobb (insn, after);
4222   if (!BARRIER_P (after)
4223       && !BARRIER_P (insn)
4224       && (bb = BLOCK_FOR_INSN (after)))
4225     {
4226       set_block_for_insn (insn, bb);
4227       if (INSN_P (insn))
4228 	df_insn_rescan (insn);
4229       /* Should not happen as first in the BB is always
4230 	 either NOTE or LABEL.  */
4231       if (BB_END (bb) == after
4232 	  /* Avoid clobbering of structure when creating new BB.  */
4233 	  && !BARRIER_P (insn)
4234 	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
4235 	BB_END (bb) = insn;
4236     }
4237 }
4238 
4239 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4240    If BB is NULL, an attempt is made to infer the bb from before.
4241 
4242    This and the previous function should be the only functions called
4243    to insert an insn once delay slots have been filled since only
4244    they know how to update a SEQUENCE. */
4245 
4246 void
add_insn_before(rtx_insn * insn,rtx_insn * before,basic_block bb)4247 add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
4248 {
4249   add_insn_before_nobb (insn, before);
4250 
4251   if (!bb
4252       && !BARRIER_P (before)
4253       && !BARRIER_P (insn))
4254     bb = BLOCK_FOR_INSN (before);
4255 
4256   if (bb)
4257     {
4258       set_block_for_insn (insn, bb);
4259       if (INSN_P (insn))
4260 	df_insn_rescan (insn);
4261       /* Should not happen as first in the BB is always either NOTE or
4262 	 LABEL.  */
4263       gcc_assert (BB_HEAD (bb) != insn
4264 		  /* Avoid clobbering of structure when creating new BB.  */
4265 		  || BARRIER_P (insn)
4266 		  || NOTE_INSN_BASIC_BLOCK_P (insn));
4267     }
4268 }
4269 
4270 /* Replace insn with an deleted instruction note.  */
4271 
4272 void
set_insn_deleted(rtx_insn * insn)4273 set_insn_deleted (rtx_insn *insn)
4274 {
4275   if (INSN_P (insn))
4276     df_insn_delete (insn);
4277   PUT_CODE (insn, NOTE);
4278   NOTE_KIND (insn) = NOTE_INSN_DELETED;
4279 }
4280 
4281 
4282 /* Unlink INSN from the insn chain.
4283 
4284    This function knows how to handle sequences.
4285 
4286    This function does not invalidate data flow information associated with
4287    INSN (i.e. does not call df_insn_delete).  That makes this function
4288    usable for only disconnecting an insn from the chain, and re-emit it
4289    elsewhere later.
4290 
4291    To later insert INSN elsewhere in the insn chain via add_insn and
4292    similar functions, PREV_INSN and NEXT_INSN must be nullified by
4293    the caller.  Nullifying them here breaks many insn chain walks.
4294 
4295    To really delete an insn and related DF information, use delete_insn.  */
4296 
4297 void
remove_insn(rtx_insn * insn)4298 remove_insn (rtx_insn *insn)
4299 {
4300   rtx_insn *next = NEXT_INSN (insn);
4301   rtx_insn *prev = PREV_INSN (insn);
4302   basic_block bb;
4303 
4304   if (prev)
4305     {
4306       SET_NEXT_INSN (prev) = next;
4307       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4308 	{
4309 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
4310 	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
4311 	}
4312     }
4313   else
4314     {
4315       struct sequence_stack *seq;
4316 
4317       for (seq = get_current_sequence (); seq; seq = seq->next)
4318 	if (insn == seq->first)
4319 	  {
4320 	    seq->first = next;
4321 	    break;
4322 	  }
4323 
4324       gcc_assert (seq);
4325     }
4326 
4327   if (next)
4328     {
4329       SET_PREV_INSN (next) = prev;
4330       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4331 	{
4332 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
4333 	  SET_PREV_INSN (sequence->insn (0)) = prev;
4334 	}
4335     }
4336   else
4337     {
4338       struct sequence_stack *seq;
4339 
4340       for (seq = get_current_sequence (); seq; seq = seq->next)
4341 	if (insn == seq->last)
4342 	  {
4343 	    seq->last = prev;
4344 	    break;
4345 	  }
4346 
4347       gcc_assert (seq);
4348     }
4349 
4350   /* Fix up basic block boundaries, if necessary.  */
4351   if (!BARRIER_P (insn)
4352       && (bb = BLOCK_FOR_INSN (insn)))
4353     {
4354       if (BB_HEAD (bb) == insn)
4355 	{
4356 	  /* Never ever delete the basic block note without deleting whole
4357 	     basic block.  */
4358 	  gcc_assert (!NOTE_P (insn));
4359 	  BB_HEAD (bb) = next;
4360 	}
4361       if (BB_END (bb) == insn)
4362 	BB_END (bb) = prev;
4363     }
4364 }
4365 
4366 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
4367 
4368 void
add_function_usage_to(rtx call_insn,rtx call_fusage)4369 add_function_usage_to (rtx call_insn, rtx call_fusage)
4370 {
4371   gcc_assert (call_insn && CALL_P (call_insn));
4372 
4373   /* Put the register usage information on the CALL.  If there is already
4374      some usage information, put ours at the end.  */
4375   if (CALL_INSN_FUNCTION_USAGE (call_insn))
4376     {
4377       rtx link;
4378 
4379       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4380 	   link = XEXP (link, 1))
4381 	;
4382 
4383       XEXP (link, 1) = call_fusage;
4384     }
4385   else
4386     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4387 }
4388 
4389 /* Delete all insns made since FROM.
4390    FROM becomes the new last instruction.  */
4391 
4392 void
delete_insns_since(rtx_insn * from)4393 delete_insns_since (rtx_insn *from)
4394 {
4395   if (from == 0)
4396     set_first_insn (0);
4397   else
4398     SET_NEXT_INSN (from) = 0;
4399   set_last_insn (from);
4400 }
4401 
4402 /* This function is deprecated, please use sequences instead.
4403 
4404    Move a consecutive bunch of insns to a different place in the chain.
4405    The insns to be moved are those between FROM and TO.
4406    They are moved to a new position after the insn AFTER.
4407    AFTER must not be FROM or TO or any insn in between.
4408 
4409    This function does not know about SEQUENCEs and hence should not be
4410    called after delay-slot filling has been done.  */
4411 
4412 void
reorder_insns_nobb(rtx_insn * from,rtx_insn * to,rtx_insn * after)4413 reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4414 {
4415   if (flag_checking)
4416     {
4417       for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
4418 	gcc_assert (after != x);
4419       gcc_assert (after != to);
4420     }
4421 
4422   /* Splice this bunch out of where it is now.  */
4423   if (PREV_INSN (from))
4424     SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4425   if (NEXT_INSN (to))
4426     SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4427   if (get_last_insn () == to)
4428     set_last_insn (PREV_INSN (from));
4429   if (get_insns () == from)
4430     set_first_insn (NEXT_INSN (to));
4431 
4432   /* Make the new neighbors point to it and it to them.  */
4433   if (NEXT_INSN (after))
4434     SET_PREV_INSN (NEXT_INSN (after)) = to;
4435 
4436   SET_NEXT_INSN (to) = NEXT_INSN (after);
4437   SET_PREV_INSN (from) = after;
4438   SET_NEXT_INSN (after) = from;
4439   if (after == get_last_insn ())
4440     set_last_insn (to);
4441 }
4442 
4443 /* Same as function above, but take care to update BB boundaries.  */
4444 void
reorder_insns(rtx_insn * from,rtx_insn * to,rtx_insn * after)4445 reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
4446 {
4447   rtx_insn *prev = PREV_INSN (from);
4448   basic_block bb, bb2;
4449 
4450   reorder_insns_nobb (from, to, after);
4451 
4452   if (!BARRIER_P (after)
4453       && (bb = BLOCK_FOR_INSN (after)))
4454     {
4455       rtx_insn *x;
4456       df_set_bb_dirty (bb);
4457 
4458       if (!BARRIER_P (from)
4459 	  && (bb2 = BLOCK_FOR_INSN (from)))
4460 	{
4461 	  if (BB_END (bb2) == to)
4462 	    BB_END (bb2) = prev;
4463 	  df_set_bb_dirty (bb2);
4464 	}
4465 
4466       if (BB_END (bb) == after)
4467 	BB_END (bb) = to;
4468 
4469       for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4470 	if (!BARRIER_P (x))
4471 	  df_insn_change_bb (x, bb);
4472     }
4473 }
4474 
4475 
4476 /* Emit insn(s) of given code and pattern
4477    at a specified place within the doubly-linked list.
4478 
4479    All of the emit_foo global entry points accept an object
4480    X which is either an insn list or a PATTERN of a single
4481    instruction.
4482 
4483    There are thus a few canonical ways to generate code and
4484    emit it at a specific place in the instruction stream.  For
4485    example, consider the instruction named SPOT and the fact that
4486    we would like to emit some instructions before SPOT.  We might
4487    do it like this:
4488 
4489 	start_sequence ();
4490 	... emit the new instructions ...
4491 	insns_head = get_insns ();
4492 	end_sequence ();
4493 
4494 	emit_insn_before (insns_head, SPOT);
4495 
4496    It used to be common to generate SEQUENCE rtl instead, but that
4497    is a relic of the past which no longer occurs.  The reason is that
4498    SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4499    generated would almost certainly die right after it was created.  */
4500 
4501 static rtx_insn *
emit_pattern_before_noloc(rtx x,rtx_insn * before,rtx_insn * last,basic_block bb,rtx_insn * (* make_raw)(rtx))4502 emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
4503 			   basic_block bb,
4504                            rtx_insn *(*make_raw) (rtx))
4505 {
4506   rtx_insn *insn;
4507 
4508   gcc_assert (before);
4509 
4510   if (x == NULL_RTX)
4511     return last;
4512 
4513   switch (GET_CODE (x))
4514     {
4515     case DEBUG_INSN:
4516     case INSN:
4517     case JUMP_INSN:
4518     case CALL_INSN:
4519     case CODE_LABEL:
4520     case BARRIER:
4521     case NOTE:
4522       insn = as_a <rtx_insn *> (x);
4523       while (insn)
4524 	{
4525 	  rtx_insn *next = NEXT_INSN (insn);
4526 	  add_insn_before (insn, before, bb);
4527 	  last = insn;
4528 	  insn = next;
4529 	}
4530       break;
4531 
4532 #ifdef ENABLE_RTL_CHECKING
4533     case SEQUENCE:
4534       gcc_unreachable ();
4535       break;
4536 #endif
4537 
4538     default:
4539       last = (*make_raw) (x);
4540       add_insn_before (last, before, bb);
4541       break;
4542     }
4543 
4544   return last;
4545 }
4546 
4547 /* Make X be output before the instruction BEFORE.  */
4548 
4549 rtx_insn *
emit_insn_before_noloc(rtx x,rtx_insn * before,basic_block bb)4550 emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
4551 {
4552   return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4553 }
4554 
4555 /* Make an instruction with body X and code JUMP_INSN
4556    and output it before the instruction BEFORE.  */
4557 
4558 rtx_jump_insn *
emit_jump_insn_before_noloc(rtx x,rtx_insn * before)4559 emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
4560 {
4561   return as_a <rtx_jump_insn *> (
4562 		emit_pattern_before_noloc (x, before, NULL, NULL,
4563 					   make_jump_insn_raw));
4564 }
4565 
4566 /* Make an instruction with body X and code CALL_INSN
4567    and output it before the instruction BEFORE.  */
4568 
4569 rtx_insn *
emit_call_insn_before_noloc(rtx x,rtx_insn * before)4570 emit_call_insn_before_noloc (rtx x, rtx_insn *before)
4571 {
4572   return emit_pattern_before_noloc (x, before, NULL, NULL,
4573 				    make_call_insn_raw);
4574 }
4575 
4576 /* Make an instruction with body X and code DEBUG_INSN
4577    and output it before the instruction BEFORE.  */
4578 
4579 rtx_insn *
emit_debug_insn_before_noloc(rtx x,rtx_insn * before)4580 emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
4581 {
4582   return emit_pattern_before_noloc (x, before, NULL, NULL,
4583 				    make_debug_insn_raw);
4584 }
4585 
4586 /* Make an insn of code BARRIER
4587    and output it before the insn BEFORE.  */
4588 
4589 rtx_barrier *
emit_barrier_before(rtx_insn * before)4590 emit_barrier_before (rtx_insn *before)
4591 {
4592   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4593 
4594   INSN_UID (insn) = cur_insn_uid++;
4595 
4596   add_insn_before (insn, before, NULL);
4597   return insn;
4598 }
4599 
4600 /* Emit the label LABEL before the insn BEFORE.  */
4601 
4602 rtx_code_label *
emit_label_before(rtx_code_label * label,rtx_insn * before)4603 emit_label_before (rtx_code_label *label, rtx_insn *before)
4604 {
4605   gcc_checking_assert (INSN_UID (label) == 0);
4606   INSN_UID (label) = cur_insn_uid++;
4607   add_insn_before (label, before, NULL);
4608   return label;
4609 }
4610 
4611 /* Helper for emit_insn_after, handles lists of instructions
4612    efficiently.  */
4613 
4614 static rtx_insn *
emit_insn_after_1(rtx_insn * first,rtx_insn * after,basic_block bb)4615 emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
4616 {
4617   rtx_insn *last;
4618   rtx_insn *after_after;
4619   if (!bb && !BARRIER_P (after))
4620     bb = BLOCK_FOR_INSN (after);
4621 
4622   if (bb)
4623     {
4624       df_set_bb_dirty (bb);
4625       for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4626 	if (!BARRIER_P (last))
4627 	  {
4628 	    set_block_for_insn (last, bb);
4629 	    df_insn_rescan (last);
4630 	  }
4631       if (!BARRIER_P (last))
4632 	{
4633 	  set_block_for_insn (last, bb);
4634 	  df_insn_rescan (last);
4635 	}
4636       if (BB_END (bb) == after)
4637 	BB_END (bb) = last;
4638     }
4639   else
4640     for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4641       continue;
4642 
4643   after_after = NEXT_INSN (after);
4644 
4645   SET_NEXT_INSN (after) = first;
4646   SET_PREV_INSN (first) = after;
4647   SET_NEXT_INSN (last) = after_after;
4648   if (after_after)
4649     SET_PREV_INSN (after_after) = last;
4650 
4651   if (after == get_last_insn ())
4652     set_last_insn (last);
4653 
4654   return last;
4655 }
4656 
4657 static rtx_insn *
emit_pattern_after_noloc(rtx x,rtx_insn * after,basic_block bb,rtx_insn * (* make_raw)(rtx))4658 emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
4659 			  rtx_insn *(*make_raw)(rtx))
4660 {
4661   rtx_insn *last = after;
4662 
4663   gcc_assert (after);
4664 
4665   if (x == NULL_RTX)
4666     return last;
4667 
4668   switch (GET_CODE (x))
4669     {
4670     case DEBUG_INSN:
4671     case INSN:
4672     case JUMP_INSN:
4673     case CALL_INSN:
4674     case CODE_LABEL:
4675     case BARRIER:
4676     case NOTE:
4677       last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
4678       break;
4679 
4680 #ifdef ENABLE_RTL_CHECKING
4681     case SEQUENCE:
4682       gcc_unreachable ();
4683       break;
4684 #endif
4685 
4686     default:
4687       last = (*make_raw) (x);
4688       add_insn_after (last, after, bb);
4689       break;
4690     }
4691 
4692   return last;
4693 }
4694 
4695 /* Make X be output after the insn AFTER and set the BB of insn.  If
4696    BB is NULL, an attempt is made to infer the BB from AFTER.  */
4697 
4698 rtx_insn *
emit_insn_after_noloc(rtx x,rtx_insn * after,basic_block bb)4699 emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
4700 {
4701   return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4702 }
4703 
4704 
4705 /* Make an insn of code JUMP_INSN with body X
4706    and output it after the insn AFTER.  */
4707 
4708 rtx_jump_insn *
emit_jump_insn_after_noloc(rtx x,rtx_insn * after)4709 emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
4710 {
4711   return as_a <rtx_jump_insn *> (
4712 		emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
4713 }
4714 
4715 /* Make an instruction with body X and code CALL_INSN
4716    and output it after the instruction AFTER.  */
4717 
4718 rtx_insn *
emit_call_insn_after_noloc(rtx x,rtx_insn * after)4719 emit_call_insn_after_noloc (rtx x, rtx_insn *after)
4720 {
4721   return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4722 }
4723 
4724 /* Make an instruction with body X and code CALL_INSN
4725    and output it after the instruction AFTER.  */
4726 
4727 rtx_insn *
emit_debug_insn_after_noloc(rtx x,rtx_insn * after)4728 emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
4729 {
4730   return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4731 }
4732 
4733 /* Make an insn of code BARRIER
4734    and output it after the insn AFTER.  */
4735 
4736 rtx_barrier *
emit_barrier_after(rtx_insn * after)4737 emit_barrier_after (rtx_insn *after)
4738 {
4739   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4740 
4741   INSN_UID (insn) = cur_insn_uid++;
4742 
4743   add_insn_after (insn, after, NULL);
4744   return insn;
4745 }
4746 
4747 /* Emit the label LABEL after the insn AFTER.  */
4748 
4749 rtx_insn *
emit_label_after(rtx_insn * label,rtx_insn * after)4750 emit_label_after (rtx_insn *label, rtx_insn *after)
4751 {
4752   gcc_checking_assert (INSN_UID (label) == 0);
4753   INSN_UID (label) = cur_insn_uid++;
4754   add_insn_after (label, after, NULL);
4755   return label;
4756 }
4757 
4758 /* Notes require a bit of special handling: Some notes need to have their
4759    BLOCK_FOR_INSN set, others should never have it set, and some should
4760    have it set or clear depending on the context.   */
4761 
4762 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4763    that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
4764    caller is asked to emit a note before BB_HEAD, or after BB_END.  */
4765 
4766 static bool
note_outside_basic_block_p(enum insn_note subtype,bool on_bb_boundary_p)4767 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4768 {
4769   switch (subtype)
4770     {
4771       /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
4772       case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4773 	return true;
4774 
4775       /* Notes for var tracking and EH region markers can appear between or
4776 	 inside basic blocks.  If the caller is emitting on the basic block
4777 	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
4778       case NOTE_INSN_VAR_LOCATION:
4779       case NOTE_INSN_EH_REGION_BEG:
4780       case NOTE_INSN_EH_REGION_END:
4781 	return on_bb_boundary_p;
4782 
4783       /* Otherwise, BLOCK_FOR_INSN must be set.  */
4784       default:
4785 	return false;
4786     }
4787 }
4788 
4789 /* Emit a note of subtype SUBTYPE after the insn AFTER.  */
4790 
4791 rtx_note *
emit_note_after(enum insn_note subtype,rtx_insn * after)4792 emit_note_after (enum insn_note subtype, rtx_insn *after)
4793 {
4794   rtx_note *note = make_note_raw (subtype);
4795   basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4796   bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4797 
4798   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4799     add_insn_after_nobb (note, after);
4800   else
4801     add_insn_after (note, after, bb);
4802   return note;
4803 }
4804 
4805 /* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
4806 
4807 rtx_note *
emit_note_before(enum insn_note subtype,rtx_insn * before)4808 emit_note_before (enum insn_note subtype, rtx_insn *before)
4809 {
4810   rtx_note *note = make_note_raw (subtype);
4811   basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4812   bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4813 
4814   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4815     add_insn_before_nobb (note, before);
4816   else
4817     add_insn_before (note, before, bb);
4818   return note;
4819 }
4820 
4821 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4822    MAKE_RAW indicates how to turn PATTERN into a real insn.  */
4823 
4824 static rtx_insn *
emit_pattern_after_setloc(rtx pattern,rtx_insn * after,location_t loc,rtx_insn * (* make_raw)(rtx))4825 emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
4826 			   rtx_insn *(*make_raw) (rtx))
4827 {
4828   rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4829 
4830   if (pattern == NULL_RTX || !loc)
4831     return last;
4832 
4833   after = NEXT_INSN (after);
4834   while (1)
4835     {
4836       if (active_insn_p (after)
4837 	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
4838 	  && !INSN_LOCATION (after))
4839 	INSN_LOCATION (after) = loc;
4840       if (after == last)
4841 	break;
4842       after = NEXT_INSN (after);
4843     }
4844   return last;
4845 }
4846 
4847 /* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
4848    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
4849    any DEBUG_INSNs.  */
4850 
4851 static rtx_insn *
emit_pattern_after(rtx pattern,rtx_insn * after,bool skip_debug_insns,rtx_insn * (* make_raw)(rtx))4852 emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
4853 		    rtx_insn *(*make_raw) (rtx))
4854 {
4855   rtx_insn *prev = after;
4856 
4857   if (skip_debug_insns)
4858     while (DEBUG_INSN_P (prev))
4859       prev = PREV_INSN (prev);
4860 
4861   if (INSN_P (prev))
4862     return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4863 				      make_raw);
4864   else
4865     return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4866 }
4867 
4868 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4869 rtx_insn *
emit_insn_after_setloc(rtx pattern,rtx_insn * after,location_t loc)4870 emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4871 {
4872   return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4873 }
4874 
4875 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4876 rtx_insn *
emit_insn_after(rtx pattern,rtx_insn * after)4877 emit_insn_after (rtx pattern, rtx_insn *after)
4878 {
4879   return emit_pattern_after (pattern, after, true, make_insn_raw);
4880 }
4881 
4882 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4883 rtx_jump_insn *
emit_jump_insn_after_setloc(rtx pattern,rtx_insn * after,location_t loc)4884 emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4885 {
4886   return as_a <rtx_jump_insn *> (
4887 	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4888 }
4889 
4890 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4891 rtx_jump_insn *
emit_jump_insn_after(rtx pattern,rtx_insn * after)4892 emit_jump_insn_after (rtx pattern, rtx_insn *after)
4893 {
4894   return as_a <rtx_jump_insn *> (
4895 	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4896 }
4897 
4898 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4899 rtx_insn *
emit_call_insn_after_setloc(rtx pattern,rtx_insn * after,location_t loc)4900 emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4901 {
4902   return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4903 }
4904 
4905 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4906 rtx_insn *
emit_call_insn_after(rtx pattern,rtx_insn * after)4907 emit_call_insn_after (rtx pattern, rtx_insn *after)
4908 {
4909   return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4910 }
4911 
4912 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4913 rtx_insn *
emit_debug_insn_after_setloc(rtx pattern,rtx_insn * after,location_t loc)4914 emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
4915 {
4916   return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4917 }
4918 
4919 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4920 rtx_insn *
emit_debug_insn_after(rtx pattern,rtx_insn * after)4921 emit_debug_insn_after (rtx pattern, rtx_insn *after)
4922 {
4923   return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4924 }
4925 
4926 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4927    MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
4928    indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4929    CALL_INSN, etc.  */
4930 
4931 static rtx_insn *
emit_pattern_before_setloc(rtx pattern,rtx_insn * before,location_t loc,bool insnp,rtx_insn * (* make_raw)(rtx))4932 emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
4933 			    bool insnp, rtx_insn *(*make_raw) (rtx))
4934 {
4935   rtx_insn *first = PREV_INSN (before);
4936   rtx_insn *last = emit_pattern_before_noloc (pattern, before,
4937 					      insnp ? before : NULL,
4938 					      NULL, make_raw);
4939 
4940   if (pattern == NULL_RTX || !loc)
4941     return last;
4942 
4943   if (!first)
4944     first = get_insns ();
4945   else
4946     first = NEXT_INSN (first);
4947   while (1)
4948     {
4949       if (active_insn_p (first)
4950 	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
4951 	  && !INSN_LOCATION (first))
4952 	INSN_LOCATION (first) = loc;
4953       if (first == last)
4954 	break;
4955       first = NEXT_INSN (first);
4956     }
4957   return last;
4958 }
4959 
4960 /* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
4961    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
4962    before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
4963    INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */
4964 
4965 static rtx_insn *
emit_pattern_before(rtx pattern,rtx_insn * before,bool skip_debug_insns,bool insnp,rtx_insn * (* make_raw)(rtx))4966 emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
4967 		     bool insnp, rtx_insn *(*make_raw) (rtx))
4968 {
4969   rtx_insn *next = before;
4970 
4971   if (skip_debug_insns)
4972     while (DEBUG_INSN_P (next))
4973       next = PREV_INSN (next);
4974 
4975   if (INSN_P (next))
4976     return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4977 				       insnp, make_raw);
4978   else
4979     return emit_pattern_before_noloc (pattern, before,
4980 				      insnp ? before : NULL,
4981                                       NULL, make_raw);
4982 }
4983 
4984 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
4985 rtx_insn *
emit_insn_before_setloc(rtx pattern,rtx_insn * before,location_t loc)4986 emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
4987 {
4988   return emit_pattern_before_setloc (pattern, before, loc, true,
4989 				     make_insn_raw);
4990 }
4991 
4992 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
4993 rtx_insn *
emit_insn_before(rtx pattern,rtx_insn * before)4994 emit_insn_before (rtx pattern, rtx_insn *before)
4995 {
4996   return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4997 }
4998 
4999 /* like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
5000 rtx_jump_insn *
emit_jump_insn_before_setloc(rtx pattern,rtx_insn * before,location_t loc)5001 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5002 {
5003   return as_a <rtx_jump_insn *> (
5004 	emit_pattern_before_setloc (pattern, before, loc, false,
5005 				    make_jump_insn_raw));
5006 }
5007 
5008 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
5009 rtx_jump_insn *
emit_jump_insn_before(rtx pattern,rtx_insn * before)5010 emit_jump_insn_before (rtx pattern, rtx_insn *before)
5011 {
5012   return as_a <rtx_jump_insn *> (
5013 	emit_pattern_before (pattern, before, true, false,
5014 			     make_jump_insn_raw));
5015 }
5016 
5017 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
5018 rtx_insn *
emit_call_insn_before_setloc(rtx pattern,rtx_insn * before,location_t loc)5019 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5020 {
5021   return emit_pattern_before_setloc (pattern, before, loc, false,
5022 				     make_call_insn_raw);
5023 }
5024 
5025 /* Like emit_call_insn_before_noloc,
5026    but set insn_location according to BEFORE.  */
5027 rtx_insn *
emit_call_insn_before(rtx pattern,rtx_insn * before)5028 emit_call_insn_before (rtx pattern, rtx_insn *before)
5029 {
5030   return emit_pattern_before (pattern, before, true, false,
5031 			      make_call_insn_raw);
5032 }
5033 
5034 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
5035 rtx_insn *
emit_debug_insn_before_setloc(rtx pattern,rtx_insn * before,location_t loc)5036 emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
5037 {
5038   return emit_pattern_before_setloc (pattern, before, loc, false,
5039 				     make_debug_insn_raw);
5040 }
5041 
5042 /* Like emit_debug_insn_before_noloc,
5043    but set insn_location according to BEFORE.  */
5044 rtx_insn *
emit_debug_insn_before(rtx pattern,rtx_insn * before)5045 emit_debug_insn_before (rtx pattern, rtx_insn *before)
5046 {
5047   return emit_pattern_before (pattern, before, false, false,
5048 			      make_debug_insn_raw);
5049 }
5050 
5051 /* Take X and emit it at the end of the doubly-linked
5052    INSN list.
5053 
5054    Returns the last insn emitted.  */
5055 
5056 rtx_insn *
emit_insn(rtx x)5057 emit_insn (rtx x)
5058 {
5059   rtx_insn *last = get_last_insn ();
5060   rtx_insn *insn;
5061 
5062   if (x == NULL_RTX)
5063     return last;
5064 
5065   switch (GET_CODE (x))
5066     {
5067     case DEBUG_INSN:
5068     case INSN:
5069     case JUMP_INSN:
5070     case CALL_INSN:
5071     case CODE_LABEL:
5072     case BARRIER:
5073     case NOTE:
5074       insn = as_a <rtx_insn *> (x);
5075       while (insn)
5076 	{
5077 	  rtx_insn *next = NEXT_INSN (insn);
5078 	  add_insn (insn);
5079 	  last = insn;
5080 	  insn = next;
5081 	}
5082       break;
5083 
5084 #ifdef ENABLE_RTL_CHECKING
5085     case JUMP_TABLE_DATA:
5086     case SEQUENCE:
5087       gcc_unreachable ();
5088       break;
5089 #endif
5090 
5091     default:
5092       last = make_insn_raw (x);
5093       add_insn (last);
5094       break;
5095     }
5096 
5097   return last;
5098 }
5099 
5100 /* Make an insn of code DEBUG_INSN with pattern X
5101    and add it to the end of the doubly-linked list.  */
5102 
5103 rtx_insn *
emit_debug_insn(rtx x)5104 emit_debug_insn (rtx x)
5105 {
5106   rtx_insn *last = get_last_insn ();
5107   rtx_insn *insn;
5108 
5109   if (x == NULL_RTX)
5110     return last;
5111 
5112   switch (GET_CODE (x))
5113     {
5114     case DEBUG_INSN:
5115     case INSN:
5116     case JUMP_INSN:
5117     case CALL_INSN:
5118     case CODE_LABEL:
5119     case BARRIER:
5120     case NOTE:
5121       insn = as_a <rtx_insn *> (x);
5122       while (insn)
5123 	{
5124 	  rtx_insn *next = NEXT_INSN (insn);
5125 	  add_insn (insn);
5126 	  last = insn;
5127 	  insn = next;
5128 	}
5129       break;
5130 
5131 #ifdef ENABLE_RTL_CHECKING
5132     case JUMP_TABLE_DATA:
5133     case SEQUENCE:
5134       gcc_unreachable ();
5135       break;
5136 #endif
5137 
5138     default:
5139       last = make_debug_insn_raw (x);
5140       add_insn (last);
5141       break;
5142     }
5143 
5144   return last;
5145 }
5146 
5147 /* Make an insn of code JUMP_INSN with pattern X
5148    and add it to the end of the doubly-linked list.  */
5149 
5150 rtx_insn *
emit_jump_insn(rtx x)5151 emit_jump_insn (rtx x)
5152 {
5153   rtx_insn *last = NULL;
5154   rtx_insn *insn;
5155 
5156   switch (GET_CODE (x))
5157     {
5158     case DEBUG_INSN:
5159     case INSN:
5160     case JUMP_INSN:
5161     case CALL_INSN:
5162     case CODE_LABEL:
5163     case BARRIER:
5164     case NOTE:
5165       insn = as_a <rtx_insn *> (x);
5166       while (insn)
5167 	{
5168 	  rtx_insn *next = NEXT_INSN (insn);
5169 	  add_insn (insn);
5170 	  last = insn;
5171 	  insn = next;
5172 	}
5173       break;
5174 
5175 #ifdef ENABLE_RTL_CHECKING
5176     case JUMP_TABLE_DATA:
5177     case SEQUENCE:
5178       gcc_unreachable ();
5179       break;
5180 #endif
5181 
5182     default:
5183       last = make_jump_insn_raw (x);
5184       add_insn (last);
5185       break;
5186     }
5187 
5188   return last;
5189 }
5190 
5191 /* Make an insn of code CALL_INSN with pattern X
5192    and add it to the end of the doubly-linked list.  */
5193 
5194 rtx_insn *
emit_call_insn(rtx x)5195 emit_call_insn (rtx x)
5196 {
5197   rtx_insn *insn;
5198 
5199   switch (GET_CODE (x))
5200     {
5201     case DEBUG_INSN:
5202     case INSN:
5203     case JUMP_INSN:
5204     case CALL_INSN:
5205     case CODE_LABEL:
5206     case BARRIER:
5207     case NOTE:
5208       insn = emit_insn (x);
5209       break;
5210 
5211 #ifdef ENABLE_RTL_CHECKING
5212     case SEQUENCE:
5213     case JUMP_TABLE_DATA:
5214       gcc_unreachable ();
5215       break;
5216 #endif
5217 
5218     default:
5219       insn = make_call_insn_raw (x);
5220       add_insn (insn);
5221       break;
5222     }
5223 
5224   return insn;
5225 }
5226 
5227 /* Add the label LABEL to the end of the doubly-linked list.  */
5228 
5229 rtx_code_label *
emit_label(rtx uncast_label)5230 emit_label (rtx uncast_label)
5231 {
5232   rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
5233 
5234   gcc_checking_assert (INSN_UID (label) == 0);
5235   INSN_UID (label) = cur_insn_uid++;
5236   add_insn (label);
5237   return label;
5238 }
5239 
5240 /* Make an insn of code JUMP_TABLE_DATA
5241    and add it to the end of the doubly-linked list.  */
5242 
5243 rtx_jump_table_data *
emit_jump_table_data(rtx table)5244 emit_jump_table_data (rtx table)
5245 {
5246   rtx_jump_table_data *jump_table_data =
5247     as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5248   INSN_UID (jump_table_data) = cur_insn_uid++;
5249   PATTERN (jump_table_data) = table;
5250   BLOCK_FOR_INSN (jump_table_data) = NULL;
5251   add_insn (jump_table_data);
5252   return jump_table_data;
5253 }
5254 
5255 /* Make an insn of code BARRIER
5256    and add it to the end of the doubly-linked list.  */
5257 
5258 rtx_barrier *
emit_barrier(void)5259 emit_barrier (void)
5260 {
5261   rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5262   INSN_UID (barrier) = cur_insn_uid++;
5263   add_insn (barrier);
5264   return barrier;
5265 }
5266 
5267 /* Emit a copy of note ORIG.  */
5268 
5269 rtx_note *
emit_note_copy(rtx_note * orig)5270 emit_note_copy (rtx_note *orig)
5271 {
5272   enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5273   rtx_note *note = make_note_raw (kind);
5274   NOTE_DATA (note) = NOTE_DATA (orig);
5275   add_insn (note);
5276   return note;
5277 }
5278 
5279 /* Make an insn of code NOTE or type NOTE_NO
5280    and add it to the end of the doubly-linked list.  */
5281 
5282 rtx_note *
emit_note(enum insn_note kind)5283 emit_note (enum insn_note kind)
5284 {
5285   rtx_note *note = make_note_raw (kind);
5286   add_insn (note);
5287   return note;
5288 }
5289 
5290 /* Emit a clobber of lvalue X.  */
5291 
5292 rtx_insn *
emit_clobber(rtx x)5293 emit_clobber (rtx x)
5294 {
5295   /* CONCATs should not appear in the insn stream.  */
5296   if (GET_CODE (x) == CONCAT)
5297     {
5298       emit_clobber (XEXP (x, 0));
5299       return emit_clobber (XEXP (x, 1));
5300     }
5301   return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5302 }
5303 
5304 /* Return a sequence of insns to clobber lvalue X.  */
5305 
5306 rtx_insn *
gen_clobber(rtx x)5307 gen_clobber (rtx x)
5308 {
5309   rtx_insn *seq;
5310 
5311   start_sequence ();
5312   emit_clobber (x);
5313   seq = get_insns ();
5314   end_sequence ();
5315   return seq;
5316 }
5317 
5318 /* Emit a use of rvalue X.  */
5319 
5320 rtx_insn *
emit_use(rtx x)5321 emit_use (rtx x)
5322 {
5323   /* CONCATs should not appear in the insn stream.  */
5324   if (GET_CODE (x) == CONCAT)
5325     {
5326       emit_use (XEXP (x, 0));
5327       return emit_use (XEXP (x, 1));
5328     }
5329   return emit_insn (gen_rtx_USE (VOIDmode, x));
5330 }
5331 
5332 /* Return a sequence of insns to use rvalue X.  */
5333 
5334 rtx_insn *
gen_use(rtx x)5335 gen_use (rtx x)
5336 {
5337   rtx_insn *seq;
5338 
5339   start_sequence ();
5340   emit_use (x);
5341   seq = get_insns ();
5342   end_sequence ();
5343   return seq;
5344 }
5345 
5346 /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
5347    Return the set in INSN that such notes describe, or NULL if the notes
5348    have no meaning for INSN.  */
5349 
5350 rtx
set_for_reg_notes(rtx insn)5351 set_for_reg_notes (rtx insn)
5352 {
5353   rtx pat, reg;
5354 
5355   if (!INSN_P (insn))
5356     return NULL_RTX;
5357 
5358   pat = PATTERN (insn);
5359   if (GET_CODE (pat) == PARALLEL)
5360     {
5361       /* We do not use single_set because that ignores SETs of unused
5362 	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
5363 	 PARALLEL to have a single SET.  */
5364       if (multiple_sets (insn))
5365 	return NULL_RTX;
5366       pat = XVECEXP (pat, 0, 0);
5367     }
5368 
5369   if (GET_CODE (pat) != SET)
5370     return NULL_RTX;
5371 
5372   reg = SET_DEST (pat);
5373 
5374   /* Notes apply to the contents of a STRICT_LOW_PART.  */
5375   if (GET_CODE (reg) == STRICT_LOW_PART
5376       || GET_CODE (reg) == ZERO_EXTRACT)
5377     reg = XEXP (reg, 0);
5378 
5379   /* Check that we have a register.  */
5380   if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
5381     return NULL_RTX;
5382 
5383   return pat;
5384 }
5385 
5386 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5387    note of this type already exists, remove it first.  */
5388 
5389 rtx
set_unique_reg_note(rtx insn,enum reg_note kind,rtx datum)5390 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5391 {
5392   rtx note = find_reg_note (insn, kind, NULL_RTX);
5393 
5394   switch (kind)
5395     {
5396     case REG_EQUAL:
5397     case REG_EQUIV:
5398       /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
5399       if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
5400 	return NULL_RTX;
5401 
5402       /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5403 	 It serves no useful purpose and breaks eliminate_regs.  */
5404       if (GET_CODE (datum) == ASM_OPERANDS)
5405 	return NULL_RTX;
5406 
5407       /* Notes with side effects are dangerous.  Even if the side-effect
5408 	 initially mirrors one in PATTERN (INSN), later optimizations
5409 	 might alter the way that the final register value is calculated
5410 	 and so move or alter the side-effect in some way.  The note would
5411 	 then no longer be a valid substitution for SET_SRC.  */
5412       if (side_effects_p (datum))
5413 	return NULL_RTX;
5414       break;
5415 
5416     default:
5417       break;
5418     }
5419 
5420   if (note)
5421     XEXP (note, 0) = datum;
5422   else
5423     {
5424       add_reg_note (insn, kind, datum);
5425       note = REG_NOTES (insn);
5426     }
5427 
5428   switch (kind)
5429     {
5430     case REG_EQUAL:
5431     case REG_EQUIV:
5432       df_notes_rescan (as_a <rtx_insn *> (insn));
5433       break;
5434     default:
5435       break;
5436     }
5437 
5438   return note;
5439 }
5440 
5441 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
5442 rtx
set_dst_reg_note(rtx insn,enum reg_note kind,rtx datum,rtx dst)5443 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5444 {
5445   rtx set = set_for_reg_notes (insn);
5446 
5447   if (set && SET_DEST (set) == dst)
5448     return set_unique_reg_note (insn, kind, datum);
5449   return NULL_RTX;
5450 }
5451 
5452 /* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
5453    following barrier if the instruction needs one and if ALLOW_BARRIER_P
5454    is true.
5455 
5456    If X is a label, it is simply added into the insn chain.  */
5457 
5458 rtx_insn *
emit(rtx x,bool allow_barrier_p)5459 emit (rtx x, bool allow_barrier_p)
5460 {
5461   enum rtx_code code = classify_insn (x);
5462 
5463   switch (code)
5464     {
5465     case CODE_LABEL:
5466       return emit_label (x);
5467     case INSN:
5468       return emit_insn (x);
5469     case  JUMP_INSN:
5470       {
5471 	rtx_insn *insn = emit_jump_insn (x);
5472 	if (allow_barrier_p
5473 	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
5474 	  return emit_barrier ();
5475 	return insn;
5476       }
5477     case CALL_INSN:
5478       return emit_call_insn (x);
5479     case DEBUG_INSN:
5480       return emit_debug_insn (x);
5481     default:
5482       gcc_unreachable ();
5483     }
5484 }
5485 
5486 /* Space for free sequence stack entries.  */
5487 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5488 
5489 /* Begin emitting insns to a sequence.  If this sequence will contain
5490    something that might cause the compiler to pop arguments to function
5491    calls (because those pops have previously been deferred; see
5492    INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5493    before calling this function.  That will ensure that the deferred
5494    pops are not accidentally emitted in the middle of this sequence.  */
5495 
5496 void
start_sequence(void)5497 start_sequence (void)
5498 {
5499   struct sequence_stack *tem;
5500 
5501   if (free_sequence_stack != NULL)
5502     {
5503       tem = free_sequence_stack;
5504       free_sequence_stack = tem->next;
5505     }
5506   else
5507     tem = ggc_alloc<sequence_stack> ();
5508 
5509   tem->next = get_current_sequence ()->next;
5510   tem->first = get_insns ();
5511   tem->last = get_last_insn ();
5512   get_current_sequence ()->next = tem;
5513 
5514   set_first_insn (0);
5515   set_last_insn (0);
5516 }
5517 
5518 /* Set up the insn chain starting with FIRST as the current sequence,
5519    saving the previously current one.  See the documentation for
5520    start_sequence for more information about how to use this function.  */
5521 
5522 void
push_to_sequence(rtx_insn * first)5523 push_to_sequence (rtx_insn *first)
5524 {
5525   rtx_insn *last;
5526 
5527   start_sequence ();
5528 
5529   for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5530     ;
5531 
5532   set_first_insn (first);
5533   set_last_insn (last);
5534 }
5535 
5536 /* Like push_to_sequence, but take the last insn as an argument to avoid
5537    looping through the list.  */
5538 
5539 void
push_to_sequence2(rtx_insn * first,rtx_insn * last)5540 push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5541 {
5542   start_sequence ();
5543 
5544   set_first_insn (first);
5545   set_last_insn (last);
5546 }
5547 
5548 /* Set up the outer-level insn chain
5549    as the current sequence, saving the previously current one.  */
5550 
5551 void
push_topmost_sequence(void)5552 push_topmost_sequence (void)
5553 {
5554   struct sequence_stack *top;
5555 
5556   start_sequence ();
5557 
5558   top = get_topmost_sequence ();
5559   set_first_insn (top->first);
5560   set_last_insn (top->last);
5561 }
5562 
5563 /* After emitting to the outer-level insn chain, update the outer-level
5564    insn chain, and restore the previous saved state.  */
5565 
5566 void
pop_topmost_sequence(void)5567 pop_topmost_sequence (void)
5568 {
5569   struct sequence_stack *top;
5570 
5571   top = get_topmost_sequence ();
5572   top->first = get_insns ();
5573   top->last = get_last_insn ();
5574 
5575   end_sequence ();
5576 }
5577 
5578 /* After emitting to a sequence, restore previous saved state.
5579 
5580    To get the contents of the sequence just made, you must call
5581    `get_insns' *before* calling here.
5582 
5583    If the compiler might have deferred popping arguments while
5584    generating this sequence, and this sequence will not be immediately
5585    inserted into the instruction stream, use do_pending_stack_adjust
5586    before calling get_insns.  That will ensure that the deferred
5587    pops are inserted into this sequence, and not into some random
5588    location in the instruction stream.  See INHIBIT_DEFER_POP for more
5589    information about deferred popping of arguments.  */
5590 
5591 void
end_sequence(void)5592 end_sequence (void)
5593 {
5594   struct sequence_stack *tem = get_current_sequence ()->next;
5595 
5596   set_first_insn (tem->first);
5597   set_last_insn (tem->last);
5598   get_current_sequence ()->next = tem->next;
5599 
5600   memset (tem, 0, sizeof (*tem));
5601   tem->next = free_sequence_stack;
5602   free_sequence_stack = tem;
5603 }
5604 
5605 /* Return 1 if currently emitting into a sequence.  */
5606 
5607 int
in_sequence_p(void)5608 in_sequence_p (void)
5609 {
5610   return get_current_sequence ()->next != 0;
5611 }
5612 
5613 /* Put the various virtual registers into REGNO_REG_RTX.  */
5614 
5615 static void
init_virtual_regs(void)5616 init_virtual_regs (void)
5617 {
5618   regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5619   regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5620   regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5621   regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5622   regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5623   regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5624     = virtual_preferred_stack_boundary_rtx;
5625 }
5626 
5627 
5628 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
5629 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5630 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5631 static int copy_insn_n_scratches;
5632 
5633 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5634    copied an ASM_OPERANDS.
5635    In that case, it is the original input-operand vector.  */
5636 static rtvec orig_asm_operands_vector;
5637 
5638 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5639    copied an ASM_OPERANDS.
5640    In that case, it is the copied input-operand vector.  */
5641 static rtvec copy_asm_operands_vector;
5642 
5643 /* Likewise for the constraints vector.  */
5644 static rtvec orig_asm_constraints_vector;
5645 static rtvec copy_asm_constraints_vector;
5646 
5647 /* Recursively create a new copy of an rtx for copy_insn.
5648    This function differs from copy_rtx in that it handles SCRATCHes and
5649    ASM_OPERANDs properly.
5650    Normally, this function is not used directly; use copy_insn as front end.
5651    However, you could first copy an insn pattern with copy_insn and then use
5652    this function afterwards to properly copy any REG_NOTEs containing
5653    SCRATCHes.  */
5654 
5655 rtx
copy_insn_1(rtx orig)5656 copy_insn_1 (rtx orig)
5657 {
5658   rtx copy;
5659   int i, j;
5660   RTX_CODE code;
5661   const char *format_ptr;
5662 
5663   if (orig == NULL)
5664     return NULL;
5665 
5666   code = GET_CODE (orig);
5667 
5668   switch (code)
5669     {
5670     case REG:
5671     case DEBUG_EXPR:
5672     CASE_CONST_ANY:
5673     case SYMBOL_REF:
5674     case CODE_LABEL:
5675     case PC:
5676     case CC0:
5677     case RETURN:
5678     case SIMPLE_RETURN:
5679       return orig;
5680     case CLOBBER:
5681       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5682          clobbers or clobbers of hard registers that originated as pseudos.
5683          This is needed to allow safe register renaming.  */
5684       if (REG_P (XEXP (orig, 0))
5685 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
5686 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
5687 	return orig;
5688       break;
5689 
5690     case SCRATCH:
5691       for (i = 0; i < copy_insn_n_scratches; i++)
5692 	if (copy_insn_scratch_in[i] == orig)
5693 	  return copy_insn_scratch_out[i];
5694       break;
5695 
5696     case CONST:
5697       if (shared_const_p (orig))
5698 	return orig;
5699       break;
5700 
5701       /* A MEM with a constant address is not sharable.  The problem is that
5702 	 the constant address may need to be reloaded.  If the mem is shared,
5703 	 then reloading one copy of this mem will cause all copies to appear
5704 	 to have been reloaded.  */
5705 
5706     default:
5707       break;
5708     }
5709 
5710   /* Copy the various flags, fields, and other information.  We assume
5711      that all fields need copying, and then clear the fields that should
5712      not be copied.  That is the sensible default behavior, and forces
5713      us to explicitly document why we are *not* copying a flag.  */
5714   copy = shallow_copy_rtx (orig);
5715 
5716   /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
5717   if (INSN_P (orig))
5718     {
5719       RTX_FLAG (copy, jump) = 0;
5720       RTX_FLAG (copy, call) = 0;
5721       RTX_FLAG (copy, frame_related) = 0;
5722     }
5723 
5724   format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5725 
5726   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5727     switch (*format_ptr++)
5728       {
5729       case 'e':
5730 	if (XEXP (orig, i) != NULL)
5731 	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5732 	break;
5733 
5734       case 'E':
5735       case 'V':
5736 	if (XVEC (orig, i) == orig_asm_constraints_vector)
5737 	  XVEC (copy, i) = copy_asm_constraints_vector;
5738 	else if (XVEC (orig, i) == orig_asm_operands_vector)
5739 	  XVEC (copy, i) = copy_asm_operands_vector;
5740 	else if (XVEC (orig, i) != NULL)
5741 	  {
5742 	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5743 	    for (j = 0; j < XVECLEN (copy, i); j++)
5744 	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5745 	  }
5746 	break;
5747 
5748       case 't':
5749       case 'w':
5750       case 'i':
5751       case 'p':
5752       case 's':
5753       case 'S':
5754       case 'u':
5755       case '0':
5756 	/* These are left unchanged.  */
5757 	break;
5758 
5759       default:
5760 	gcc_unreachable ();
5761       }
5762 
5763   if (code == SCRATCH)
5764     {
5765       i = copy_insn_n_scratches++;
5766       gcc_assert (i < MAX_RECOG_OPERANDS);
5767       copy_insn_scratch_in[i] = orig;
5768       copy_insn_scratch_out[i] = copy;
5769     }
5770   else if (code == ASM_OPERANDS)
5771     {
5772       orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5773       copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5774       orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5775       copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5776     }
5777 
5778   return copy;
5779 }
5780 
5781 /* Create a new copy of an rtx.
5782    This function differs from copy_rtx in that it handles SCRATCHes and
5783    ASM_OPERANDs properly.
5784    INSN doesn't really have to be a full INSN; it could be just the
5785    pattern.  */
5786 rtx
copy_insn(rtx insn)5787 copy_insn (rtx insn)
5788 {
5789   copy_insn_n_scratches = 0;
5790   orig_asm_operands_vector = 0;
5791   orig_asm_constraints_vector = 0;
5792   copy_asm_operands_vector = 0;
5793   copy_asm_constraints_vector = 0;
5794   return copy_insn_1 (insn);
5795 }
5796 
5797 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5798    on that assumption that INSN itself remains in its original place.  */
5799 
5800 rtx_insn *
copy_delay_slot_insn(rtx_insn * insn)5801 copy_delay_slot_insn (rtx_insn *insn)
5802 {
5803   /* Copy INSN with its rtx_code, all its notes, location etc.  */
5804   insn = as_a <rtx_insn *> (copy_rtx (insn));
5805   INSN_UID (insn) = cur_insn_uid++;
5806   return insn;
5807 }
5808 
5809 /* Initialize data structures and variables in this file
5810    before generating rtl for each function.  */
5811 
5812 void
init_emit(void)5813 init_emit (void)
5814 {
5815   set_first_insn (NULL);
5816   set_last_insn (NULL);
5817   if (param_min_nondebug_insn_uid)
5818     cur_insn_uid = param_min_nondebug_insn_uid;
5819   else
5820     cur_insn_uid = 1;
5821   cur_debug_insn_uid = 1;
5822   reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5823   first_label_num = label_num;
5824   get_current_sequence ()->next = NULL;
5825 
5826   /* Init the tables that describe all the pseudo regs.  */
5827 
5828   crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5829 
5830   crtl->emit.regno_pointer_align
5831     = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5832 
5833   regno_reg_rtx
5834     = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
5835 
5836   /* Put copies of all the hard registers into regno_reg_rtx.  */
5837   memcpy (regno_reg_rtx,
5838 	  initial_regno_reg_rtx,
5839 	  FIRST_PSEUDO_REGISTER * sizeof (rtx));
5840 
5841   /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
5842   init_virtual_regs ();
5843 
5844   /* Indicate that the virtual registers and stack locations are
5845      all pointers.  */
5846   REG_POINTER (stack_pointer_rtx) = 1;
5847   REG_POINTER (frame_pointer_rtx) = 1;
5848   REG_POINTER (hard_frame_pointer_rtx) = 1;
5849   REG_POINTER (arg_pointer_rtx) = 1;
5850 
5851   REG_POINTER (virtual_incoming_args_rtx) = 1;
5852   REG_POINTER (virtual_stack_vars_rtx) = 1;
5853   REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5854   REG_POINTER (virtual_outgoing_args_rtx) = 1;
5855   REG_POINTER (virtual_cfa_rtx) = 1;
5856 
5857 #ifdef STACK_BOUNDARY
5858   REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5859   REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5860   REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5861   REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5862 
5863   REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5864   REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5865   REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5866   REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5867 
5868   REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5869 #endif
5870 
5871 #ifdef INIT_EXPANDERS
5872   INIT_EXPANDERS;
5873 #endif
5874 }
5875 
5876 /* Return the value of element I of CONST_VECTOR X as a wide_int.  */
5877 
5878 wide_int
const_vector_int_elt(const_rtx x,unsigned int i)5879 const_vector_int_elt (const_rtx x, unsigned int i)
5880 {
5881   /* First handle elements that are directly encoded.  */
5882   machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5883   if (i < (unsigned int) XVECLEN (x, 0))
5884     return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5885 
5886   /* Identify the pattern that contains element I and work out the index of
5887      the last encoded element for that pattern.  */
5888   unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5889   unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5890   unsigned int count = i / npatterns;
5891   unsigned int pattern = i % npatterns;
5892   unsigned int final_i = encoded_nelts - npatterns + pattern;
5893 
5894   /* If there are no steps, the final encoded value is the right one.  */
5895   if (!CONST_VECTOR_STEPPED_P (x))
5896     return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5897 
5898   /* Otherwise work out the value from the last two encoded elements.  */
5899   rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5900   rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5901   wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5902 			   rtx_mode_t (v1, elt_mode));
5903   return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
5904 }
5905 
5906 /* Return the value of element I of CONST_VECTOR X.  */
5907 
5908 rtx
const_vector_elt(const_rtx x,unsigned int i)5909 const_vector_elt (const_rtx x, unsigned int i)
5910 {
5911   /* First handle elements that are directly encoded.  */
5912   if (i < (unsigned int) XVECLEN (x, 0))
5913     return CONST_VECTOR_ENCODED_ELT (x, i);
5914 
5915   /* If there are no steps, the final encoded value is the right one.  */
5916   if (!CONST_VECTOR_STEPPED_P (x))
5917     {
5918       /* Identify the pattern that contains element I and work out the index of
5919 	 the last encoded element for that pattern.  */
5920       unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5921       unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5922       unsigned int pattern = i % npatterns;
5923       unsigned int final_i = encoded_nelts - npatterns + pattern;
5924       return CONST_VECTOR_ENCODED_ELT (x, final_i);
5925     }
5926 
5927   /* Otherwise work out the value from the last two encoded elements.  */
5928   return immed_wide_int_const (const_vector_int_elt (x, i),
5929 			       GET_MODE_INNER (GET_MODE (x)));
5930 }
5931 
5932 /* Return true if X is a valid element for a CONST_VECTOR of the given
5933   mode.  */
5934 
5935 bool
valid_for_const_vector_p(machine_mode,rtx x)5936 valid_for_const_vector_p (machine_mode, rtx x)
5937 {
5938   return (CONST_SCALAR_INT_P (x)
5939 	  || CONST_POLY_INT_P (x)
5940 	  || CONST_DOUBLE_AS_FLOAT_P (x)
5941 	  || CONST_FIXED_P (x));
5942 }
5943 
5944 /* Generate a vector constant of mode MODE in which every element has
5945    value ELT.  */
5946 
5947 rtx
gen_const_vec_duplicate(machine_mode mode,rtx elt)5948 gen_const_vec_duplicate (machine_mode mode, rtx elt)
5949 {
5950   rtx_vector_builder builder (mode, 1, 1);
5951   builder.quick_push (elt);
5952   return builder.build ();
5953 }
5954 
5955 /* Return a vector rtx of mode MODE in which every element has value X.
5956    The result will be a constant if X is constant.  */
5957 
5958 rtx
gen_vec_duplicate(machine_mode mode,rtx x)5959 gen_vec_duplicate (machine_mode mode, rtx x)
5960 {
5961   if (valid_for_const_vector_p (mode, x))
5962     return gen_const_vec_duplicate (mode, x);
5963   return gen_rtx_VEC_DUPLICATE (mode, x);
5964 }
5965 
5966 /* A subroutine of const_vec_series_p that handles the case in which:
5967 
5968      (GET_CODE (X) == CONST_VECTOR
5969       && CONST_VECTOR_NPATTERNS (X) == 1
5970       && !CONST_VECTOR_DUPLICATE_P (X))
5971 
5972    is known to hold.  */
5973 
5974 bool
const_vec_series_p_1(const_rtx x,rtx * base_out,rtx * step_out)5975 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
5976 {
5977   /* Stepped sequences are only defined for integers, to avoid specifying
5978      rounding behavior.  */
5979   if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
5980     return false;
5981 
5982   /* A non-duplicated vector with two elements can always be seen as a
5983      series with a nonzero step.  Longer vectors must have a stepped
5984      encoding.  */
5985   if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
5986       && !CONST_VECTOR_STEPPED_P (x))
5987     return false;
5988 
5989   /* Calculate the step between the first and second elements.  */
5990   scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
5991   rtx base = CONST_VECTOR_ELT (x, 0);
5992   rtx step = simplify_binary_operation (MINUS, inner,
5993 					CONST_VECTOR_ENCODED_ELT (x, 1), base);
5994   if (rtx_equal_p (step, CONST0_RTX (inner)))
5995     return false;
5996 
5997   /* If we have a stepped encoding, check that the step between the
5998      second and third elements is the same as STEP.  */
5999   if (CONST_VECTOR_STEPPED_P (x))
6000     {
6001       rtx diff = simplify_binary_operation (MINUS, inner,
6002 					    CONST_VECTOR_ENCODED_ELT (x, 2),
6003 					    CONST_VECTOR_ENCODED_ELT (x, 1));
6004       if (!rtx_equal_p (step, diff))
6005 	return false;
6006     }
6007 
6008   *base_out = base;
6009   *step_out = step;
6010   return true;
6011 }
6012 
6013 /* Generate a vector constant of mode MODE in which element I has
6014    the value BASE + I * STEP.  */
6015 
6016 rtx
gen_const_vec_series(machine_mode mode,rtx base,rtx step)6017 gen_const_vec_series (machine_mode mode, rtx base, rtx step)
6018 {
6019   gcc_assert (valid_for_const_vector_p (mode, base)
6020 	      && valid_for_const_vector_p (mode, step));
6021 
6022   rtx_vector_builder builder (mode, 1, 3);
6023   builder.quick_push (base);
6024   for (int i = 1; i < 3; ++i)
6025     builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6026 					     builder[i - 1], step));
6027   return builder.build ();
6028 }
6029 
6030 /* Generate a vector of mode MODE in which element I has the value
6031    BASE + I * STEP.  The result will be a constant if BASE and STEP
6032    are both constants.  */
6033 
6034 rtx
gen_vec_series(machine_mode mode,rtx base,rtx step)6035 gen_vec_series (machine_mode mode, rtx base, rtx step)
6036 {
6037   if (step == const0_rtx)
6038     return gen_vec_duplicate (mode, base);
6039   if (valid_for_const_vector_p (mode, base)
6040       && valid_for_const_vector_p (mode, step))
6041     return gen_const_vec_series (mode, base, step);
6042   return gen_rtx_VEC_SERIES (mode, base, step);
6043 }
6044 
6045 /* Generate a new vector constant for mode MODE and constant value
6046    CONSTANT.  */
6047 
6048 static rtx
gen_const_vector(machine_mode mode,int constant)6049 gen_const_vector (machine_mode mode, int constant)
6050 {
6051   machine_mode inner = GET_MODE_INNER (mode);
6052 
6053   gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6054 
6055   rtx el = const_tiny_rtx[constant][(int) inner];
6056   gcc_assert (el);
6057 
6058   return gen_const_vec_duplicate (mode, el);
6059 }
6060 
6061 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
6062    all elements are zero, and the one vector when all elements are one.  */
6063 rtx
gen_rtx_CONST_VECTOR(machine_mode mode,rtvec v)6064 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6065 {
6066   gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
6067 
6068   /* If the values are all the same, check to see if we can use one of the
6069      standard constant vectors.  */
6070   if (rtvec_all_equal_p (v))
6071     return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6072 
6073   unsigned int nunits = GET_NUM_ELEM (v);
6074   rtx_vector_builder builder (mode, nunits, 1);
6075   for (unsigned int i = 0; i < nunits; ++i)
6076     builder.quick_push (RTVEC_ELT (v, i));
6077   return builder.build (v);
6078 }
6079 
6080 /* Initialise global register information required by all functions.  */
6081 
6082 void
init_emit_regs(void)6083 init_emit_regs (void)
6084 {
6085   int i;
6086   machine_mode mode;
6087   mem_attrs *attrs;
6088 
6089   /* Reset register attributes */
6090   reg_attrs_htab->empty ();
6091 
6092   /* We need reg_raw_mode, so initialize the modes now.  */
6093   init_reg_modes_target ();
6094 
6095   /* Assign register numbers to the globally defined register rtx.  */
6096   stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
6097   frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
6098   hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
6099   arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
6100   virtual_incoming_args_rtx =
6101     gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
6102   virtual_stack_vars_rtx =
6103     gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
6104   virtual_stack_dynamic_rtx =
6105     gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
6106   virtual_outgoing_args_rtx =
6107     gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
6108   virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
6109   virtual_preferred_stack_boundary_rtx =
6110     gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
6111 
6112   /* Initialize RTL for commonly used hard registers.  These are
6113      copied into regno_reg_rtx as we begin to compile each function.  */
6114   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6115     initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
6116 
6117 #ifdef RETURN_ADDRESS_POINTER_REGNUM
6118   return_address_pointer_rtx
6119     = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
6120 #endif
6121 
6122   pic_offset_table_rtx = NULL_RTX;
6123   if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6124     pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
6125 
6126   for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
6127     {
6128       mode = (machine_mode) i;
6129       attrs = ggc_cleared_alloc<mem_attrs> ();
6130       attrs->align = BITS_PER_UNIT;
6131       attrs->addrspace = ADDR_SPACE_GENERIC;
6132       if (mode != BLKmode && mode != VOIDmode)
6133 	{
6134 	  attrs->size_known_p = true;
6135 	  attrs->size = GET_MODE_SIZE (mode);
6136 	  if (STRICT_ALIGNMENT)
6137 	    attrs->align = GET_MODE_ALIGNMENT (mode);
6138 	}
6139       mode_mem_attrs[i] = attrs;
6140     }
6141 
6142   split_branch_probability = profile_probability::uninitialized ();
6143 }
6144 
6145 /* Initialize global machine_mode variables.  */
6146 
6147 void
init_derived_machine_modes(void)6148 init_derived_machine_modes (void)
6149 {
6150   opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6151   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
6152     {
6153       scalar_int_mode mode = mode_iter.require ();
6154 
6155       if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6156 	  && !opt_byte_mode.exists ())
6157 	opt_byte_mode = mode;
6158 
6159       if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6160 	  && !opt_word_mode.exists ())
6161 	opt_word_mode = mode;
6162     }
6163 
6164   byte_mode = opt_byte_mode.require ();
6165   word_mode = opt_word_mode.require ();
6166   ptr_mode = as_a <scalar_int_mode>
6167     (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
6168 }
6169 
6170 /* Create some permanent unique rtl objects shared between all functions.  */
6171 
6172 void
init_emit_once(void)6173 init_emit_once (void)
6174 {
6175   int i;
6176   machine_mode mode;
6177   scalar_float_mode double_mode;
6178   opt_scalar_mode smode_iter;
6179 
6180   /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
6181      CONST_FIXED, and memory attribute hash tables.  */
6182   const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
6183 
6184 #if TARGET_SUPPORTS_WIDE_INT
6185   const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
6186 #endif
6187   const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
6188 
6189   if (NUM_POLY_INT_COEFFS > 1)
6190     const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6191 
6192   const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
6193 
6194   reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
6195 
6196 #ifdef INIT_EXPANDERS
6197   /* This is to initialize {init|mark|free}_machine_status before the first
6198      call to push_function_context_to.  This is needed by the Chill front
6199      end which calls push_function_context_to before the first call to
6200      init_function_start.  */
6201   INIT_EXPANDERS;
6202 #endif
6203 
6204   /* Create the unique rtx's for certain rtx codes and operand values.  */
6205 
6206   /* Process stack-limiting command-line options.  */
6207   if (opt_fstack_limit_symbol_arg != NULL)
6208     stack_limit_rtx
6209       = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
6210   if (opt_fstack_limit_register_no >= 0)
6211     stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
6212 
6213   /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
6214      tries to use these variables.  */
6215   for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
6216     const_int_rtx[i + MAX_SAVED_CONST_INT] =
6217       gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
6218 
6219   if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
6220       && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
6221     const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
6222   else
6223     const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
6224 
6225   double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
6226 
6227   real_from_integer (&dconst0, double_mode, 0, SIGNED);
6228   real_from_integer (&dconst1, double_mode, 1, SIGNED);
6229   real_from_integer (&dconst2, double_mode, 2, SIGNED);
6230 
6231   dconstm1 = dconst1;
6232   dconstm1.sign = 1;
6233 
6234   dconsthalf = dconst1;
6235   SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
6236 
6237   for (i = 0; i < 3; i++)
6238     {
6239       const REAL_VALUE_TYPE *const r =
6240 	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
6241 
6242       FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
6243 	const_tiny_rtx[i][(int) mode] =
6244 	  const_double_from_real_value (*r, mode);
6245 
6246       FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
6247 	const_tiny_rtx[i][(int) mode] =
6248 	  const_double_from_real_value (*r, mode);
6249 
6250       const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
6251 
6252       FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6253 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6254 
6255       for (mode = MIN_MODE_PARTIAL_INT;
6256 	   mode <= MAX_MODE_PARTIAL_INT;
6257 	   mode = (machine_mode)((int)(mode) + 1))
6258 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
6259     }
6260 
6261   const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
6262 
6263   FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
6264     const_tiny_rtx[3][(int) mode] = constm1_rtx;
6265 
6266   /* For BImode, 1 and -1 are unsigned and signed interpretations
6267      of the same value.  */
6268   const_tiny_rtx[0][(int) BImode] = const0_rtx;
6269   const_tiny_rtx[1][(int) BImode] = const_true_rtx;
6270   const_tiny_rtx[3][(int) BImode] = const_true_rtx;
6271 
6272   for (mode = MIN_MODE_PARTIAL_INT;
6273        mode <= MAX_MODE_PARTIAL_INT;
6274        mode = (machine_mode)((int)(mode) + 1))
6275     const_tiny_rtx[3][(int) mode] = constm1_rtx;
6276 
6277   FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
6278     {
6279       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6280       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6281     }
6282 
6283   FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
6284     {
6285       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
6286       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
6287     }
6288 
6289   /* As for BImode, "all 1" and "all -1" are unsigned and signed
6290      interpretations of the same value.  */
6291   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
6292     {
6293       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6294       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6295       const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
6296     }
6297 
6298   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
6299     {
6300       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6301       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6302       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6303     }
6304 
6305   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
6306     {
6307       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6308       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6309     }
6310 
6311   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
6312     {
6313       scalar_mode smode = smode_iter.require ();
6314       FCONST0 (smode).data.high = 0;
6315       FCONST0 (smode).data.low = 0;
6316       FCONST0 (smode).mode = smode;
6317       const_tiny_rtx[0][(int) smode]
6318 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6319     }
6320 
6321   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
6322     {
6323       scalar_mode smode = smode_iter.require ();
6324       FCONST0 (smode).data.high = 0;
6325       FCONST0 (smode).data.low = 0;
6326       FCONST0 (smode).mode = smode;
6327       const_tiny_rtx[0][(int) smode]
6328 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6329     }
6330 
6331   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
6332     {
6333       scalar_mode smode = smode_iter.require ();
6334       FCONST0 (smode).data.high = 0;
6335       FCONST0 (smode).data.low = 0;
6336       FCONST0 (smode).mode = smode;
6337       const_tiny_rtx[0][(int) smode]
6338 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6339 
6340       /* We store the value 1.  */
6341       FCONST1 (smode).data.high = 0;
6342       FCONST1 (smode).data.low = 0;
6343       FCONST1 (smode).mode = smode;
6344       FCONST1 (smode).data
6345 	= double_int_one.lshift (GET_MODE_FBIT (smode),
6346 				 HOST_BITS_PER_DOUBLE_INT,
6347 				 SIGNED_FIXED_POINT_MODE_P (smode));
6348       const_tiny_rtx[1][(int) smode]
6349 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6350     }
6351 
6352   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
6353     {
6354       scalar_mode smode = smode_iter.require ();
6355       FCONST0 (smode).data.high = 0;
6356       FCONST0 (smode).data.low = 0;
6357       FCONST0 (smode).mode = smode;
6358       const_tiny_rtx[0][(int) smode]
6359 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
6360 
6361       /* We store the value 1.  */
6362       FCONST1 (smode).data.high = 0;
6363       FCONST1 (smode).data.low = 0;
6364       FCONST1 (smode).mode = smode;
6365       FCONST1 (smode).data
6366 	= double_int_one.lshift (GET_MODE_FBIT (smode),
6367 				 HOST_BITS_PER_DOUBLE_INT,
6368 				 SIGNED_FIXED_POINT_MODE_P (smode));
6369       const_tiny_rtx[1][(int) smode]
6370 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
6371     }
6372 
6373   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
6374     {
6375       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6376     }
6377 
6378   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
6379     {
6380       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6381     }
6382 
6383   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
6384     {
6385       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6386       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6387     }
6388 
6389   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
6390     {
6391       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6392       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
6393     }
6394 
6395   for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
6396     if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
6397       const_tiny_rtx[0][i] = const0_rtx;
6398 
6399   pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
6400   ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
6401   simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
6402   cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
6403   invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
6404 				   /*prev_insn=*/NULL,
6405 				   /*next_insn=*/NULL,
6406 				   /*bb=*/NULL,
6407 				   /*pattern=*/NULL_RTX,
6408 				   /*location=*/-1,
6409 				   CODE_FOR_nothing,
6410 				   /*reg_notes=*/NULL_RTX);
6411 }
6412 
6413 /* Produce exact duplicate of insn INSN after AFTER.
6414    Care updating of libcall regions if present.  */
6415 
6416 rtx_insn *
emit_copy_of_insn_after(rtx_insn * insn,rtx_insn * after)6417 emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
6418 {
6419   rtx_insn *new_rtx;
6420   rtx link;
6421 
6422   switch (GET_CODE (insn))
6423     {
6424     case INSN:
6425       new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
6426       break;
6427 
6428     case JUMP_INSN:
6429       new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
6430       CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
6431       break;
6432 
6433     case DEBUG_INSN:
6434       new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
6435       break;
6436 
6437     case CALL_INSN:
6438       new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6439       if (CALL_INSN_FUNCTION_USAGE (insn))
6440 	CALL_INSN_FUNCTION_USAGE (new_rtx)
6441 	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6442       SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6443       RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6444       RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6445       RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6446 	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6447       break;
6448 
6449     default:
6450       gcc_unreachable ();
6451     }
6452 
6453   /* Update LABEL_NUSES.  */
6454   mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6455 
6456   INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6457 
6458   /* If the old insn is frame related, then so is the new one.  This is
6459      primarily needed for IA-64 unwind info which marks epilogue insns,
6460      which may be duplicated by the basic block reordering code.  */
6461   RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6462 
6463   /* Locate the end of existing REG_NOTES in NEW_RTX.  */
6464   rtx *ptail = &REG_NOTES (new_rtx);
6465   while (*ptail != NULL_RTX)
6466     ptail = &XEXP (*ptail, 1);
6467 
6468   /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6469      will make them.  REG_LABEL_TARGETs are created there too, but are
6470      supposed to be sticky, so we copy them.  */
6471   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6472     if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6473       {
6474 	*ptail = duplicate_reg_note (link);
6475 	ptail = &XEXP (*ptail, 1);
6476       }
6477 
6478   INSN_CODE (new_rtx) = INSN_CODE (insn);
6479   return new_rtx;
6480 }
6481 
6482 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6483 rtx
gen_hard_reg_clobber(machine_mode mode,unsigned int regno)6484 gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6485 {
6486   if (hard_reg_clobbers[mode][regno])
6487     return hard_reg_clobbers[mode][regno];
6488   else
6489     return (hard_reg_clobbers[mode][regno] =
6490 	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6491 }
6492 
6493 location_t prologue_location;
6494 location_t epilogue_location;
6495 
6496 /* Hold current location information and last location information, so the
6497    datastructures are built lazily only when some instructions in given
6498    place are needed.  */
6499 static location_t curr_location;
6500 
6501 /* Allocate insn location datastructure.  */
6502 void
insn_locations_init(void)6503 insn_locations_init (void)
6504 {
6505   prologue_location = epilogue_location = 0;
6506   curr_location = UNKNOWN_LOCATION;
6507 }
6508 
6509 /* At the end of emit stage, clear current location.  */
6510 void
insn_locations_finalize(void)6511 insn_locations_finalize (void)
6512 {
6513   epilogue_location = curr_location;
6514   curr_location = UNKNOWN_LOCATION;
6515 }
6516 
6517 /* Set current location.  */
6518 void
set_curr_insn_location(location_t location)6519 set_curr_insn_location (location_t location)
6520 {
6521   curr_location = location;
6522 }
6523 
6524 /* Get current location.  */
6525 location_t
curr_insn_location(void)6526 curr_insn_location (void)
6527 {
6528   return curr_location;
6529 }
6530 
6531 /* Set the location of the insn chain starting at INSN to LOC.  */
6532 void
set_insn_locations(rtx_insn * insn,location_t loc)6533 set_insn_locations (rtx_insn *insn, location_t loc)
6534 {
6535   while (insn)
6536     {
6537       if (INSN_P (insn))
6538 	INSN_LOCATION (insn) = loc;
6539       insn = NEXT_INSN (insn);
6540     }
6541 }
6542 
6543 /* Return lexical scope block insn belongs to.  */
6544 tree
insn_scope(const rtx_insn * insn)6545 insn_scope (const rtx_insn *insn)
6546 {
6547   return LOCATION_BLOCK (INSN_LOCATION (insn));
6548 }
6549 
6550 /* Return line number of the statement that produced this insn.  */
6551 int
insn_line(const rtx_insn * insn)6552 insn_line (const rtx_insn *insn)
6553 {
6554   return LOCATION_LINE (INSN_LOCATION (insn));
6555 }
6556 
6557 /* Return source file of the statement that produced this insn.  */
6558 const char *
insn_file(const rtx_insn * insn)6559 insn_file (const rtx_insn *insn)
6560 {
6561   return LOCATION_FILE (INSN_LOCATION (insn));
6562 }
6563 
6564 /* Return expanded location of the statement that produced this insn.  */
6565 expanded_location
insn_location(const rtx_insn * insn)6566 insn_location (const rtx_insn *insn)
6567 {
6568   return expand_location (INSN_LOCATION (insn));
6569 }
6570 
6571 /* Return true if memory model MODEL requires a pre-operation (release-style)
6572    barrier or a post-operation (acquire-style) barrier.  While not universal,
6573    this function matches behavior of several targets.  */
6574 
6575 bool
need_atomic_barrier_p(enum memmodel model,bool pre)6576 need_atomic_barrier_p (enum memmodel model, bool pre)
6577 {
6578   switch (model & MEMMODEL_BASE_MASK)
6579     {
6580     case MEMMODEL_RELAXED:
6581     case MEMMODEL_CONSUME:
6582       return false;
6583     case MEMMODEL_RELEASE:
6584       return pre;
6585     case MEMMODEL_ACQUIRE:
6586       return !pre;
6587     case MEMMODEL_ACQ_REL:
6588     case MEMMODEL_SEQ_CST:
6589       return true;
6590     default:
6591       gcc_unreachable ();
6592     }
6593 }
6594 
6595 /* Return a constant shift amount for shifting a value of mode MODE
6596    by VALUE bits.  */
6597 
6598 rtx
gen_int_shift_amount(machine_mode,poly_int64 value)6599 gen_int_shift_amount (machine_mode, poly_int64 value)
6600 {
6601   /* Use a 64-bit mode, to avoid any truncation.
6602 
6603      ??? Perhaps this should be automatically derived from the .md files
6604      instead, or perhaps have a target hook.  */
6605   scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6606 				? DImode
6607 				: int_mode_for_size (64, 0).require ());
6608   return gen_int_mode (value, shift_mode);
6609 }
6610 
6611 /* Initialize fields of rtl_data related to stack alignment.  */
6612 
6613 void
init_stack_alignment()6614 rtl_data::init_stack_alignment ()
6615 {
6616   stack_alignment_needed = STACK_BOUNDARY;
6617   max_used_stack_slot_alignment = STACK_BOUNDARY;
6618   stack_alignment_estimated = 0;
6619   preferred_stack_boundary = STACK_BOUNDARY;
6620 }
6621 
6622 
6623 #include "gt-emit-rtl.h"
6624