xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cfgexpand.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "hard-reg-set.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "wide-int.h"
34 #include "inchash.h"
35 #include "tree.h"
36 #include "fold-const.h"
37 #include "stringpool.h"
38 #include "varasm.h"
39 #include "stor-layout.h"
40 #include "stmt.h"
41 #include "print-tree.h"
42 #include "tm_p.h"
43 #include "predict.h"
44 #include "hashtab.h"
45 #include "function.h"
46 #include "dominance.h"
47 #include "cfg.h"
48 #include "cfgrtl.h"
49 #include "cfganal.h"
50 #include "cfgbuild.h"
51 #include "cfgcleanup.h"
52 #include "basic-block.h"
53 #include "insn-codes.h"
54 #include "optabs.h"
55 #include "flags.h"
56 #include "statistics.h"
57 #include "real.h"
58 #include "fixed-value.h"
59 #include "insn-config.h"
60 #include "expmed.h"
61 #include "dojump.h"
62 #include "explow.h"
63 #include "calls.h"
64 #include "emit-rtl.h"
65 #include "expr.h"
66 #include "langhooks.h"
67 #include "bitmap.h"
68 #include "tree-ssa-alias.h"
69 #include "internal-fn.h"
70 #include "tree-eh.h"
71 #include "gimple-expr.h"
72 #include "is-a.h"
73 #include "gimple.h"
74 #include "gimple-iterator.h"
75 #include "gimple-walk.h"
76 #include "gimple-ssa.h"
77 #include "hash-map.h"
78 #include "plugin-api.h"
79 #include "ipa-ref.h"
80 #include "cgraph.h"
81 #include "tree-cfg.h"
82 #include "tree-phinodes.h"
83 #include "ssa-iterators.h"
84 #include "tree-ssanames.h"
85 #include "tree-dfa.h"
86 #include "tree-ssa.h"
87 #include "tree-pass.h"
88 #include "except.h"
89 #include "diagnostic.h"
90 #include "gimple-pretty-print.h"
91 #include "toplev.h"
92 #include "debug.h"
93 #include "params.h"
94 #include "tree-inline.h"
95 #include "value-prof.h"
96 #include "target.h"
97 #include "tree-ssa-live.h"
98 #include "tree-outof-ssa.h"
99 #include "sbitmap.h"
100 #include "cfgloop.h"
101 #include "regs.h" /* For reg_renumber.  */
102 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
103 #include "asan.h"
104 #include "tree-ssa-address.h"
105 #include "recog.h"
106 #include "output.h"
107 #include "builtins.h"
108 #include "tree-chkp.h"
109 #include "rtl-chkp.h"
110 
111 /* Some systems use __main in a way incompatible with its use in gcc; in these
112    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
113    give the same symbol without quotes for an alternative entry point.  You
114    must define both, or neither.  */
115 #ifndef NAME__MAIN
116 #define NAME__MAIN "__main"
117 #endif
118 
119 /* This variable holds information helping the rewriting of SSA trees
120    into RTL.  */
121 struct ssaexpand SA;
122 
123 /* This variable holds the currently expanded gimple statement for purposes
124    of communicating the profile info to the builtin expanders.  */
125 gimple currently_expanding_gimple_stmt;
126 
127 static rtx expand_debug_expr (tree);
128 
129 /* Return an expression tree corresponding to the RHS of GIMPLE
130    statement STMT.  */
131 
132 tree
133 gimple_assign_rhs_to_tree (gimple stmt)
134 {
135   tree t;
136   enum gimple_rhs_class grhs_class;
137 
138   grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
139 
140   if (grhs_class == GIMPLE_TERNARY_RHS)
141     t = build3 (gimple_assign_rhs_code (stmt),
142 		TREE_TYPE (gimple_assign_lhs (stmt)),
143 		gimple_assign_rhs1 (stmt),
144 		gimple_assign_rhs2 (stmt),
145 		gimple_assign_rhs3 (stmt));
146   else if (grhs_class == GIMPLE_BINARY_RHS)
147     t = build2 (gimple_assign_rhs_code (stmt),
148 		TREE_TYPE (gimple_assign_lhs (stmt)),
149 		gimple_assign_rhs1 (stmt),
150 		gimple_assign_rhs2 (stmt));
151   else if (grhs_class == GIMPLE_UNARY_RHS)
152     t = build1 (gimple_assign_rhs_code (stmt),
153 		TREE_TYPE (gimple_assign_lhs (stmt)),
154 		gimple_assign_rhs1 (stmt));
155   else if (grhs_class == GIMPLE_SINGLE_RHS)
156     {
157       t = gimple_assign_rhs1 (stmt);
158       /* Avoid modifying this tree in place below.  */
159       if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
160 	   && gimple_location (stmt) != EXPR_LOCATION (t))
161 	  || (gimple_block (stmt)
162 	      && currently_expanding_to_rtl
163 	      && EXPR_P (t)))
164 	t = copy_node (t);
165     }
166   else
167     gcc_unreachable ();
168 
169   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
170     SET_EXPR_LOCATION (t, gimple_location (stmt));
171 
172   return t;
173 }
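/* Illustrative sketch (not part of the pass itself): for a GIMPLE assignment
   "x = y + z", gimple_expr_code is PLUS_EXPR, which classifies as
   GIMPLE_BINARY_RHS, so the function above reduces to
   build2 (PLUS_EXPR, TREE_TYPE (x), y, z).  */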
174 
175 
176 #ifndef STACK_ALIGNMENT_NEEDED
177 #define STACK_ALIGNMENT_NEEDED 1
178 #endif
179 
180 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
181 
182 /* Associate declaration T with storage space X.  If T is not an
183    SSA name, this is exactly SET_DECL_RTL; otherwise associate the
184    partition of T with X.  */
185 static inline void
186 set_rtl (tree t, rtx x)
187 {
188   if (TREE_CODE (t) == SSA_NAME)
189     {
190       SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
191       if (x && !MEM_P (x))
192 	set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
193       /* For the benefit of debug information at -O0 (where vartracking
194          doesn't run) record the place also in the base DECL if it's
195 	 a normal variable (not a parameter).  */
196       if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
197 	{
198 	  tree var = SSA_NAME_VAR (t);
199 	  /* If we don't yet have something recorded, just record it now.  */
200 	  if (!DECL_RTL_SET_P (var))
201 	    SET_DECL_RTL (var, x);
202 	  /* If we have it set already to "multiple places" don't
203 	     change this.  */
204 	  else if (DECL_RTL (var) == pc_rtx)
205 	    ;
206 	  /* If we have something recorded and it's not the same place
207 	     as we want to record now, we have multiple partitions for the
208 	     same base variable, with different places.  We can't just
209 	     randomly choose one, hence we have to say that we don't know.
210 	     This only happens with optimization, and there var-tracking
211 	     will figure out the right thing.  */
212 	  else if (DECL_RTL (var) != x)
213 	    SET_DECL_RTL (var, pc_rtx);
214 	}
215     }
216   else
217     SET_DECL_RTL (t, x);
218 }
219 
220 /* This structure holds data relevant to one variable that will be
221    placed in a stack slot.  */
222 struct stack_var
223 {
224   /* The variable.  */
225   tree decl;
226 
227   /* Initially, the size of the variable.  Later, the size of the partition,
228      if this variable becomes its partition's representative.  */
229   HOST_WIDE_INT size;
230 
231   /* The *byte* alignment required for this variable.  Or, as with the
232      size, the alignment of this partition.  */
233   unsigned int alignb;
234 
235   /* The partition representative.  */
236   size_t representative;
237 
238   /* The next stack variable in the partition, or EOC.  */
239   size_t next;
240 
241   /* The numbers of conflicting stack variables.  */
242   bitmap conflicts;
243 };
244 
245 #define EOC  ((size_t)-1)
246 
247 /* We have an array of such objects while deciding allocation.  */
248 static struct stack_var *stack_vars;
249 static size_t stack_vars_alloc;
250 static size_t stack_vars_num;
251 static hash_map<tree, size_t> *decl_to_stack_part;
252 
253 /* Conflict bitmaps go on this obstack.  This allows us to destroy
254    all of them in one big sweep.  */
255 static bitmap_obstack stack_var_bitmap_obstack;
256 
257 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
258    is non-decreasing.  */
259 static size_t *stack_vars_sorted;
260 
261 /* The phase of the stack frame.  This is the known misalignment of
262    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
263    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
264 static int frame_phase;
265 
266 /* Used during expand_used_vars to remember if we saw any decls for
267    which we'd like to enable stack smashing protection.  */
268 static bool has_protected_decls;
269 
270 /* Used during expand_used_vars.  Remember if we saw a character buffer
271    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
272 static bool has_short_buffer;
273 
274 /* Compute the byte alignment to use for DECL.  Ignore alignment
275    requirements we can't satisfy given the expected stack boundary alignment.  */
276 
277 static unsigned int
278 align_local_variable (tree decl)
279 {
280   unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
281   DECL_ALIGN (decl) = align;
282   return align / BITS_PER_UNIT;
283 }
284 
285 /* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
286    down otherwise.  Return the aligned BASE value.  */
287 
288 static inline unsigned HOST_WIDE_INT
289 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
290 {
291   return align_up ? (base + align - 1) & -align : base & -align;
292 }
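/* Worked example (illustrative): with ALIGN == 16, -ALIGN is the mask ~15,
   so align_base (37, 16, true) == 48 and align_base (37, 16, false) == 32.  */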
293 
294 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
295    Return the frame offset.  */
296 
297 static HOST_WIDE_INT
298 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
299 {
300   HOST_WIDE_INT offset, new_frame_offset;
301 
302   if (FRAME_GROWS_DOWNWARD)
303     {
304       new_frame_offset
305 	= align_base (frame_offset - frame_phase - size,
306 		      align, false) + frame_phase;
307       offset = new_frame_offset;
308     }
309   else
310     {
311       new_frame_offset
312 	= align_base (frame_offset - frame_phase, align, true) + frame_phase;
313       offset = new_frame_offset;
314       new_frame_offset += size;
315     }
316   frame_offset = new_frame_offset;
317 
318   if (frame_offset_overflow (frame_offset, cfun->decl))
319     frame_offset = offset = 0;
320 
321   return offset;
322 }
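/* Worked example (illustrative, assuming frame_phase == 0 and
   FRAME_GROWS_DOWNWARD): with frame_offset == -20, a request for 8 bytes at
   8-byte alignment rounds -28 down to -32, returns -32 and leaves
   frame_offset at -32.  */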
323 
324 /* Accumulate DECL into STACK_VARS.  */
325 
326 static void
327 add_stack_var (tree decl)
328 {
329   struct stack_var *v;
330 
331   if (stack_vars_num >= stack_vars_alloc)
332     {
333       if (stack_vars_alloc)
334 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
335       else
336 	stack_vars_alloc = 32;
337       stack_vars
338 	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
339     }
340   if (!decl_to_stack_part)
341     decl_to_stack_part = new hash_map<tree, size_t>;
342 
343   v = &stack_vars[stack_vars_num];
344   decl_to_stack_part->put (decl, stack_vars_num);
345 
346   v->decl = decl;
347   v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
348   /* Ensure that all variables have size, so that &a != &b for any two
349      variables that are simultaneously live.  */
350   if (v->size == 0)
351     v->size = 1;
352   v->alignb = align_local_variable (SSAVAR (decl));
353   /* An alignment of zero can mightily confuse us later.  */
354   gcc_assert (v->alignb != 0);
355 
356   /* All variables are initially in their own partition.  */
357   v->representative = stack_vars_num;
358   v->next = EOC;
359 
360   /* All variables initially conflict with no other.  */
361   v->conflicts = NULL;
362 
363   /* Ensure that this decl doesn't get put onto the list twice.  */
364   set_rtl (decl, pc_rtx);
365 
366   stack_vars_num++;
367 }
368 
369 /* Make the decls associated with LUIDs X and Y conflict.  */
370 
371 static void
372 add_stack_var_conflict (size_t x, size_t y)
373 {
374   struct stack_var *a = &stack_vars[x];
375   struct stack_var *b = &stack_vars[y];
376   if (!a->conflicts)
377     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
378   if (!b->conflicts)
379     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
380   bitmap_set_bit (a->conflicts, y);
381   bitmap_set_bit (b->conflicts, x);
382 }
383 
384 /* Check whether the decls associated with LUIDs X and Y conflict.  */
385 
386 static bool
387 stack_var_conflict_p (size_t x, size_t y)
388 {
389   struct stack_var *a = &stack_vars[x];
390   struct stack_var *b = &stack_vars[y];
391   if (x == y)
392     return false;
393   /* Partitions containing an SSA name result from gimple registers
394      with things like unsupported modes.  They are top-level and
395      hence conflict with everything else.  */
396   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
397     return true;
398 
399   if (!a->conflicts || !b->conflicts)
400     return false;
401   return bitmap_bit_p (a->conflicts, y);
402 }
403 
404 /* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched
405    by add_stack_var, enter its partition number into bitmap DATA.  */
406 
407 static bool
408 visit_op (gimple, tree op, tree, void *data)
409 {
410   bitmap active = (bitmap)data;
411   op = get_base_address (op);
412   if (op
413       && DECL_P (op)
414       && DECL_RTL_IF_SET (op) == pc_rtx)
415     {
416       size_t *v = decl_to_stack_part->get (op);
417       if (v)
418 	bitmap_set_bit (active, *v);
419     }
420   return false;
421 }
422 
423 /* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched
424    by add_stack_var, record conflicts between it and all currently active
425    other partitions from bitmap DATA.  */
426 
427 static bool
428 visit_conflict (gimple, tree op, tree, void *data)
429 {
430   bitmap active = (bitmap)data;
431   op = get_base_address (op);
432   if (op
433       && DECL_P (op)
434       && DECL_RTL_IF_SET (op) == pc_rtx)
435     {
436       size_t *v = decl_to_stack_part->get (op);
437       if (v && bitmap_set_bit (active, *v))
438 	{
439 	  size_t num = *v;
440 	  bitmap_iterator bi;
441 	  unsigned i;
442 	  gcc_assert (num < stack_vars_num);
443 	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
444 	    add_stack_var_conflict (num, i);
445 	}
446     }
447   return false;
448 }
449 
450 /* Helper routine for add_scope_conflicts, calculating the active partitions
451    at the end of BB, leaving the result in WORK.  We're called to generate
452    conflicts when FOR_CONFLICT is true, otherwise we're just tracking
453    liveness.  */
454 
455 static void
456 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
457 {
458   edge e;
459   edge_iterator ei;
460   gimple_stmt_iterator gsi;
461   walk_stmt_load_store_addr_fn visit;
462 
463   bitmap_clear (work);
464   FOR_EACH_EDGE (e, ei, bb->preds)
465     bitmap_ior_into (work, (bitmap)e->src->aux);
466 
467   visit = visit_op;
468 
469   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
470     {
471       gimple stmt = gsi_stmt (gsi);
472       walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
473     }
474   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
475     {
476       gimple stmt = gsi_stmt (gsi);
477 
478       if (gimple_clobber_p (stmt))
479 	{
480 	  tree lhs = gimple_assign_lhs (stmt);
481 	  size_t *v;
482 	  /* Nested function lowering might introduce LHSs
483 	     that are COMPONENT_REFs.  */
484 	  if (TREE_CODE (lhs) != VAR_DECL)
485 	    continue;
486 	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
487 	      && (v = decl_to_stack_part->get (lhs)))
488 	    bitmap_clear_bit (work, *v);
489 	}
490       else if (!is_gimple_debug (stmt))
491 	{
492 	  if (for_conflict
493 	      && visit == visit_op)
494 	    {
495 	      /* If this is the first real instruction in this BB we need
496 	         to add conflicts for everything live at this point now.
497 		 Unlike classical liveness for named objects we can't
498 		 rely on seeing a def/use of the names we're interested in.
499 		 There might merely be indirect loads/stores.  We'd not add any
500 		 conflicts for such partitions.  */
501 	      bitmap_iterator bi;
502 	      unsigned i;
503 	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
504 		{
505 		  struct stack_var *a = &stack_vars[i];
506 		  if (!a->conflicts)
507 		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
508 		  bitmap_ior_into (a->conflicts, work);
509 		}
510 	      visit = visit_conflict;
511 	    }
512 	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
513 	}
514     }
515 }
516 
517 /* Generate stack partition conflicts between all partitions that are
518    simultaneously live.  */
519 
520 static void
521 add_scope_conflicts (void)
522 {
523   basic_block bb;
524   bool changed;
525   bitmap work = BITMAP_ALLOC (NULL);
526   int *rpo;
527   int n_bbs;
528 
529   /* We approximate the live range of a stack variable by taking the first
530      mention of its name as starting point(s), and by the end-of-scope
531      death clobber added by gimplify as ending point(s) of the range.
532      This over-approximates in cases where we, for instance, moved an
533      address-taken operation upward without also moving a dereference
534      of it upward.  But it is conservatively correct, as a variable can
535      never hold values before its name is mentioned at least once.
536 
537      We then do a mostly classical bitmap liveness algorithm.  */
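  /* Informally (illustrative), the per-block set propagated below is roughly

       active (BB) = union over preds P of active (P)
                     + partitions first mentioned in BB
                     - partitions clobbered in BB

     each block's accumulated set (bb->aux) only ever grows, and the loop
     iterates in reverse post-order until no bitmap changes.  */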
538 
539   FOR_ALL_BB_FN (bb, cfun)
540     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
541 
542   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
543   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
544 
545   changed = true;
546   while (changed)
547     {
548       int i;
549       changed = false;
550       for (i = 0; i < n_bbs; i++)
551 	{
552 	  bitmap active;
553 	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
554 	  active = (bitmap)bb->aux;
555 	  add_scope_conflicts_1 (bb, work, false);
556 	  if (bitmap_ior_into (active, work))
557 	    changed = true;
558 	}
559     }
560 
561   FOR_EACH_BB_FN (bb, cfun)
562     add_scope_conflicts_1 (bb, work, true);
563 
564   free (rpo);
565   BITMAP_FREE (work);
566   FOR_ALL_BB_FN (bb, cfun)
567     BITMAP_FREE (bb->aux);
568 }
569 
570 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
571    sorting an array of indices by the properties of the object.  */
572 
573 static int
574 stack_var_cmp (const void *a, const void *b)
575 {
576   size_t ia = *(const size_t *)a;
577   size_t ib = *(const size_t *)b;
578   unsigned int aligna = stack_vars[ia].alignb;
579   unsigned int alignb = stack_vars[ib].alignb;
580   HOST_WIDE_INT sizea = stack_vars[ia].size;
581   HOST_WIDE_INT sizeb = stack_vars[ib].size;
582   tree decla = stack_vars[ia].decl;
583   tree declb = stack_vars[ib].decl;
584   bool largea, largeb;
585   unsigned int uida, uidb;
586 
587   /* Primary compare on "large" alignment.  Large comes first.  */
588   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
589   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
590   if (largea != largeb)
591     return (int)largeb - (int)largea;
592 
593   /* Secondary compare on size, decreasing.  */
594   if (sizea > sizeb)
595     return -1;
596   if (sizea < sizeb)
597     return 1;
598 
599   /* Tertiary compare on true alignment, decreasing.  */
600   if (aligna < alignb)
601     return -1;
602   if (aligna > alignb)
603     return 1;
604 
605   /* Final compare on ID for sort stability, increasing.
606      Two SSA names are compared by their version, SSA names come before
607      non-SSA names, and two normal decls are compared by their DECL_UID.  */
608   if (TREE_CODE (decla) == SSA_NAME)
609     {
610       if (TREE_CODE (declb) == SSA_NAME)
611 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
612       else
613 	return -1;
614     }
615   else if (TREE_CODE (declb) == SSA_NAME)
616     return 1;
617   else
618     uida = DECL_UID (decla), uidb = DECL_UID (declb);
619   if (uida < uidb)
620     return 1;
621   if (uida > uidb)
622     return -1;
623   return 0;
624 }
625 
626 struct part_traits : default_hashmap_traits
627 {
628   template<typename T>
629     static bool
630     is_deleted (T &e)
631     { return e.m_value == reinterpret_cast<void *> (1); }
632 
633   template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }
634   template<typename T>
635     static void
636     mark_deleted (T &e)
637     { e.m_value = reinterpret_cast<T> (1); }
638 
639   template<typename T>
640     static void
641     mark_empty (T &e)
642       { e.m_value = NULL; }
643 };
644 
645 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
646 
647 /* If the points-to solution *PT points to variables that are in a partition
648    together with other variables add all partition members to the pointed-to
649    variables bitmap.  */
650 
651 static void
652 add_partitioned_vars_to_ptset (struct pt_solution *pt,
653 			       part_hashmap *decls_to_partitions,
654 			       hash_set<bitmap> *visited, bitmap temp)
655 {
656   bitmap_iterator bi;
657   unsigned i;
658   bitmap *part;
659 
660   if (pt->anything
661       || pt->vars == NULL
662       /* The pointed-to vars bitmap is shared, it is enough to
663 	 visit it once.  */
664       || visited->add (pt->vars))
665     return;
666 
667   bitmap_clear (temp);
668 
669   /* By using a temporary bitmap to store all members of the partitions
670      we have to add we make sure to visit each of the partitions only
671      once.  */
672   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
673     if ((!temp
674 	 || !bitmap_bit_p (temp, i))
675 	&& (part = decls_to_partitions->get (i)))
676       bitmap_ior_into (temp, *part);
677   if (!bitmap_empty_p (temp))
678     bitmap_ior_into (pt->vars, temp);
679 }
680 
681 /* Update points-to sets based on partition info, so we can use them on RTL.
682    The bitmaps representing stack partitions will be saved until expand,
683    where partitioned decls used as bases in memory expressions will be
684    rewritten.  */
685 
686 static void
687 update_alias_info_with_stack_vars (void)
688 {
689   part_hashmap *decls_to_partitions = NULL;
690   size_t i, j;
691   tree var = NULL_TREE;
692 
693   for (i = 0; i < stack_vars_num; i++)
694     {
695       bitmap part = NULL;
696       tree name;
697       struct ptr_info_def *pi;
698 
699       /* Not interested in partitions with a single variable.  */
700       if (stack_vars[i].representative != i
701           || stack_vars[i].next == EOC)
702         continue;
703 
704       if (!decls_to_partitions)
705 	{
706 	  decls_to_partitions = new part_hashmap;
707 	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
708 	}
709 
710       /* Create an SSA_NAME that points to the partition for use
711          as base during alias-oracle queries on RTL for bases that
712 	 have been partitioned.  */
713       if (var == NULL_TREE)
714 	var = create_tmp_var (ptr_type_node);
715       name = make_ssa_name (var);
716 
717       /* Create bitmaps representing partitions.  They will be used for
718          points-to sets later, so use GGC alloc.  */
719       part = BITMAP_GGC_ALLOC ();
720       for (j = i; j != EOC; j = stack_vars[j].next)
721 	{
722 	  tree decl = stack_vars[j].decl;
723 	  unsigned int uid = DECL_PT_UID (decl);
724 	  bitmap_set_bit (part, uid);
725 	  decls_to_partitions->put (uid, part);
726 	  cfun->gimple_df->decls_to_pointers->put (decl, name);
727 	  if (TREE_ADDRESSABLE (decl))
728 	    TREE_ADDRESSABLE (name) = 1;
729 	}
730 
731       /* Make the SSA name point to all partition members.  */
732       pi = get_ptr_info (name);
733       pt_solution_set (&pi->pt, part, false);
734     }
735 
736   /* Make all points-to sets that contain one member of a partition
737      contain all members of the partition.  */
738   if (decls_to_partitions)
739     {
740       unsigned i;
741       hash_set<bitmap> visited;
742       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
743 
744       for (i = 1; i < num_ssa_names; i++)
745 	{
746 	  tree name = ssa_name (i);
747 	  struct ptr_info_def *pi;
748 
749 	  if (name
750 	      && POINTER_TYPE_P (TREE_TYPE (name))
751 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
752 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
753 					   &visited, temp);
754 	}
755 
756       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
757 				     decls_to_partitions, &visited, temp);
758 
759       delete decls_to_partitions;
760       BITMAP_FREE (temp);
761     }
762 }
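/* For illustration: if decls A and B end up in the same partition and some
   pointer's points-to set contains A, the code above widens that set to also
   contain B, so later RTL alias queries on the shared stack slot stay
   conservative.  */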
763 
764 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
765    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
766    Merge them into a single partition A.  */
767 
768 static void
769 union_stack_vars (size_t a, size_t b)
770 {
771   struct stack_var *vb = &stack_vars[b];
772   bitmap_iterator bi;
773   unsigned u;
774 
775   gcc_assert (stack_vars[b].next == EOC);
776    /* Add B to A's partition.  */
777   stack_vars[b].next = stack_vars[a].next;
778   stack_vars[b].representative = a;
779   stack_vars[a].next = b;
780 
781   /* Update the required alignment of partition A to account for B.  */
782   if (stack_vars[a].alignb < stack_vars[b].alignb)
783     stack_vars[a].alignb = stack_vars[b].alignb;
784 
785   /* Update the interference graph and merge the conflicts.  */
786   if (vb->conflicts)
787     {
788       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
789 	add_stack_var_conflict (a, stack_vars[u].representative);
790       BITMAP_FREE (vb->conflicts);
791     }
792 }
793 
794 /* A subroutine of expand_used_vars.  Binpack the variables into
795    partitions constrained by the interference graph.  The overall
796    algorithm used is as follows:
797 
798 	Sort the objects by size in descending order.
799 	For each object A {
800 	  S = size(A)
801 	  O = 0
802 	  loop {
803 	    Look for the largest non-conflicting object B with size <= S.
804 	    UNION (A, B)
805 	  }
806 	}
807 */
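/* Worked example (illustrative, ignoring the alignment and ASAN restrictions
   checked below): with representatives of sizes 40, 32, 16 and 8 bytes where
   only the 40- and 32-byte objects conflict, the 16- and 8-byte objects are
   merged into the 40-byte partition and the 32-byte object stays in a
   partition of its own.  */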
808 
809 static void
810 partition_stack_vars (void)
811 {
812   size_t si, sj, n = stack_vars_num;
813 
814   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
815   for (si = 0; si < n; ++si)
816     stack_vars_sorted[si] = si;
817 
818   if (n == 1)
819     return;
820 
821   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
822 
823   for (si = 0; si < n; ++si)
824     {
825       size_t i = stack_vars_sorted[si];
826       unsigned int ialign = stack_vars[i].alignb;
827       HOST_WIDE_INT isize = stack_vars[i].size;
828 
829       /* Ignore objects that aren't partition representatives. If we
830          see a var that is not a partition representative, it must
831          have been merged earlier.  */
832       if (stack_vars[i].representative != i)
833         continue;
834 
835       for (sj = si + 1; sj < n; ++sj)
836 	{
837 	  size_t j = stack_vars_sorted[sj];
838 	  unsigned int jalign = stack_vars[j].alignb;
839 	  HOST_WIDE_INT jsize = stack_vars[j].size;
840 
841 	  /* Ignore objects that aren't partition representatives.  */
842 	  if (stack_vars[j].representative != j)
843 	    continue;
844 
845 	  /* Do not mix objects of "small" (supported) alignment
846 	     and "large" (unsupported) alignment.  */
847 	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
848 	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
849 	    break;
850 
851 	  /* For Address Sanitizer do not mix objects with different
852 	     sizes, as the shorter vars wouldn't be adequately protected.
853 	     Don't do that for "large" (unsupported) alignment objects,
854 	     those aren't protected anyway.  */
855 	  if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
856 	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
857 	    break;
858 
859 	  /* Ignore conflicting objects.  */
860 	  if (stack_var_conflict_p (i, j))
861 	    continue;
862 
863 	  /* UNION the objects, making J a member of I's partition.  */
864 	  union_stack_vars (i, j);
865 	}
866     }
867 
868   update_alias_info_with_stack_vars ();
869 }
870 
871 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
872 
873 static void
874 dump_stack_var_partition (void)
875 {
876   size_t si, i, j, n = stack_vars_num;
877 
878   for (si = 0; si < n; ++si)
879     {
880       i = stack_vars_sorted[si];
881 
882       /* Skip variables that aren't partition representatives, for now.  */
883       if (stack_vars[i].representative != i)
884 	continue;
885 
886       fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
887 	       " align %u\n", (unsigned long) i, stack_vars[i].size,
888 	       stack_vars[i].alignb);
889 
890       for (j = i; j != EOC; j = stack_vars[j].next)
891 	{
892 	  fputc ('\t', dump_file);
893 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
894 	}
895       fputc ('\n', dump_file);
896     }
897 }
898 
899 /* Assign rtl to DECL at BASE + OFFSET.  */
900 
901 static void
902 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
903 			 HOST_WIDE_INT offset)
904 {
905   unsigned align;
906   rtx x;
907 
908   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
909   gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
910 
911   x = plus_constant (Pmode, base, offset);
912   x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
913 
914   if (TREE_CODE (decl) != SSA_NAME)
915     {
916       /* Set the alignment we actually gave this decl if it isn't an SSA name.
917          If it is, we generate stack slots only accidentally, so it isn't as
918 	 important; we'll simply use the alignment that is already set.  */
919       if (base == virtual_stack_vars_rtx)
920 	offset -= frame_phase;
921       align = offset & -offset;
922       align *= BITS_PER_UNIT;
923       if (align == 0 || align > base_align)
924 	align = base_align;
925 
926       /* One would think that we could assert that we're not decreasing
927 	 alignment here, but (at least) the i386 port does exactly this
928 	 via the MINIMUM_ALIGNMENT hook.  */
929 
930       DECL_ALIGN (decl) = align;
931       DECL_USER_ALIGN (decl) = 0;
932     }
933 
934   set_mem_attributes (x, SSAVAR (decl), true);
935   set_rtl (decl, x);
936 }
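/* Note (illustrative): the "offset & -offset" computation above extracts the
   lowest set bit of the offset; e.g. an offset of -24 from the frame base
   yields 8 bytes, i.e. a provable 64-bit alignment, unless capped by
   BASE_ALIGN.  */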
937 
938 struct stack_vars_data
939 {
940   /* Vector of offset pairs, always end of some padding followed
941      by start of the padding that needs Address Sanitizer protection.
942      The vector is in reversed order; the highest offset pairs come first.  */
943   vec<HOST_WIDE_INT> asan_vec;
944 
945   /* Vector of partition representative decls in between the paddings.  */
946   vec<tree> asan_decl_vec;
947 
948   /* Base pseudo register for Address Sanitizer protected automatic vars.  */
949   rtx asan_base;
950 
951   /* Alignment needed for the Address Sanitizer protected automatic vars.  */
952   unsigned int asan_alignb;
953 };
954 
955 /* A subroutine of expand_used_vars.  Give each partition representative
956    a unique location within the stack frame.  Update each partition member
957    with that location.  */
958 
959 static void
960 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
961 {
962   size_t si, i, j, n = stack_vars_num;
963   HOST_WIDE_INT large_size = 0, large_alloc = 0;
964   rtx large_base = NULL;
965   unsigned large_align = 0;
966   tree decl;
967 
968   /* Determine if there are any variables requiring "large" alignment.
969      Since these are dynamically allocated, we only process these if
970      no predicate is involved.  */
971   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
972   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
973     {
974       /* Find the total size of these variables.  */
975       for (si = 0; si < n; ++si)
976 	{
977 	  unsigned alignb;
978 
979 	  i = stack_vars_sorted[si];
980 	  alignb = stack_vars[i].alignb;
981 
982 	  /* All "large" alignment decls come before all "small" alignment
983 	     decls, but "large" alignment decls are not sorted based on
984 	     their alignment.  Increase large_align to track the largest
985 	     required alignment.  */
986 	  if ((alignb * BITS_PER_UNIT) > large_align)
987 	    large_align = alignb * BITS_PER_UNIT;
988 
989 	  /* Stop when we get to the first decl with "small" alignment.  */
990 	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
991 	    break;
992 
993 	  /* Skip variables that aren't partition representatives.  */
994 	  if (stack_vars[i].representative != i)
995 	    continue;
996 
997 	  /* Skip variables that have already had rtl assigned.  See also
998 	     add_stack_var where we perpetrate this pc_rtx hack.  */
999 	  decl = stack_vars[i].decl;
1000 	  if ((TREE_CODE (decl) == SSA_NAME
1001 	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
1002 	      : DECL_RTL (decl)) != pc_rtx)
1003 	    continue;
1004 
1005 	  large_size += alignb - 1;
1006 	  large_size &= -(HOST_WIDE_INT)alignb;
1007 	  large_size += stack_vars[i].size;
1008 	}
1009 
1010       /* If there were any, allocate space.  */
1011       if (large_size > 0)
1012 	large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
1013 						   large_align, true);
1014     }
1015 
1016   for (si = 0; si < n; ++si)
1017     {
1018       rtx base;
1019       unsigned base_align, alignb;
1020       HOST_WIDE_INT offset;
1021 
1022       i = stack_vars_sorted[si];
1023 
1024       /* Skip variables that aren't partition representatives, for now.  */
1025       if (stack_vars[i].representative != i)
1026 	continue;
1027 
1028       /* Skip variables that have already had rtl assigned.  See also
1029 	 add_stack_var where we perpetrate this pc_rtx hack.  */
1030       decl = stack_vars[i].decl;
1031       if ((TREE_CODE (decl) == SSA_NAME
1032 	   ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
1033 	   : DECL_RTL (decl)) != pc_rtx)
1034 	continue;
1035 
1036       /* Check the predicate to see whether this variable should be
1037 	 allocated in this pass.  */
1038       if (pred && !pred (i))
1039 	continue;
1040 
1041       alignb = stack_vars[i].alignb;
1042       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1043 	{
1044 	  base = virtual_stack_vars_rtx;
1045 	  if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
1046 	    {
1047 	      HOST_WIDE_INT prev_offset
1048 		= align_base (frame_offset,
1049 			      MAX (alignb, ASAN_RED_ZONE_SIZE),
1050 			      FRAME_GROWS_DOWNWARD);
1051 	      tree repr_decl = NULL_TREE;
1052 	      offset
1053 		= alloc_stack_frame_space (stack_vars[i].size
1054 					   + ASAN_RED_ZONE_SIZE,
1055 					   MAX (alignb, ASAN_RED_ZONE_SIZE));
1056 
1057 	      data->asan_vec.safe_push (prev_offset);
1058 	      data->asan_vec.safe_push (offset + stack_vars[i].size);
1059 	      /* Find best representative of the partition.
1060 		 Prefer those with DECL_NAME, even better
1061 		 satisfying asan_protect_stack_decl predicate.  */
1062 	      for (j = i; j != EOC; j = stack_vars[j].next)
1063 		if (asan_protect_stack_decl (stack_vars[j].decl)
1064 		    && DECL_NAME (stack_vars[j].decl))
1065 		  {
1066 		    repr_decl = stack_vars[j].decl;
1067 		    break;
1068 		  }
1069 		else if (repr_decl == NULL_TREE
1070 			 && DECL_P (stack_vars[j].decl)
1071 			 && DECL_NAME (stack_vars[j].decl))
1072 		  repr_decl = stack_vars[j].decl;
1073 	      if (repr_decl == NULL_TREE)
1074 		repr_decl = stack_vars[i].decl;
1075 	      data->asan_decl_vec.safe_push (repr_decl);
1076 	      data->asan_alignb = MAX (data->asan_alignb, alignb);
1077 	      if (data->asan_base == NULL)
1078 		data->asan_base = gen_reg_rtx (Pmode);
1079 	      base = data->asan_base;
1080 
1081 	      if (!STRICT_ALIGNMENT)
1082 		base_align = crtl->max_used_stack_slot_alignment;
1083 	      else
1084 		base_align = MAX (crtl->max_used_stack_slot_alignment,
1085 				  GET_MODE_ALIGNMENT (SImode)
1086 				  << ASAN_SHADOW_SHIFT);
1087 	    }
1088 	  else
1089 	    {
1090 	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1091 	      base_align = crtl->max_used_stack_slot_alignment;
1092 	    }
1093 	}
1094       else
1095 	{
1096 	  /* Large alignment is only processed in the last pass.  */
1097 	  if (pred)
1098 	    continue;
1099 	  gcc_assert (large_base != NULL);
1100 
1101 	  large_alloc += alignb - 1;
1102 	  large_alloc &= -(HOST_WIDE_INT)alignb;
1103 	  offset = large_alloc;
1104 	  large_alloc += stack_vars[i].size;
1105 
1106 	  base = large_base;
1107 	  base_align = large_align;
1108 	}
1109 
1110       /* Create rtl for each variable based on their location within the
1111 	 partition.  */
1112       for (j = i; j != EOC; j = stack_vars[j].next)
1113 	{
1114 	  expand_one_stack_var_at (stack_vars[j].decl,
1115 				   base, base_align,
1116 				   offset);
1117 	}
1118     }
1119 
1120   gcc_assert (large_alloc == large_size);
1121 }
1122 
1123 /* Take into account all sizes of partitions and reset DECL_RTLs.  */
1124 static HOST_WIDE_INT
1125 account_stack_vars (void)
1126 {
1127   size_t si, j, i, n = stack_vars_num;
1128   HOST_WIDE_INT size = 0;
1129 
1130   for (si = 0; si < n; ++si)
1131     {
1132       i = stack_vars_sorted[si];
1133 
1134       /* Skip variables that aren't partition representatives, for now.  */
1135       if (stack_vars[i].representative != i)
1136 	continue;
1137 
1138       size += stack_vars[i].size;
1139       for (j = i; j != EOC; j = stack_vars[j].next)
1140 	set_rtl (stack_vars[j].decl, NULL);
1141     }
1142   return size;
1143 }
1144 
1145 /* A subroutine of expand_one_var.  Called to immediately assign rtl
1146    to a variable to be allocated in the stack frame.  */
1147 
1148 static void
1149 expand_one_stack_var (tree var)
1150 {
1151   HOST_WIDE_INT size, offset;
1152   unsigned byte_align;
1153 
1154   size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
1155   byte_align = align_local_variable (SSAVAR (var));
1156 
1157   /* We handle highly aligned variables in expand_stack_vars.  */
1158   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1159 
1160   offset = alloc_stack_frame_space (size, byte_align);
1161 
1162   expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1163 			   crtl->max_used_stack_slot_alignment, offset);
1164 }
1165 
1166 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1167    that will reside in a hard register.  */
1168 
1169 static void
1170 expand_one_hard_reg_var (tree var)
1171 {
1172   rest_of_decl_compilation (var, 0, 0);
1173 }
1174 
1175 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1176    that will reside in a pseudo register.  */
1177 
1178 static void
1179 expand_one_register_var (tree var)
1180 {
1181   tree decl = SSAVAR (var);
1182   tree type = TREE_TYPE (decl);
1183   machine_mode reg_mode = promote_decl_mode (decl, NULL);
1184   rtx x = gen_reg_rtx (reg_mode);
1185 
1186   set_rtl (var, x);
1187 
1188   /* Note if the object is a user variable.  */
1189   if (!DECL_ARTIFICIAL (decl))
1190     mark_user_reg (x);
1191 
1192   if (POINTER_TYPE_P (type))
1193     mark_reg_pointer (x, get_pointer_alignment (var));
1194 }
1195 
1196 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1197    has some associated error, e.g. its type is error-mark.  We just need
1198    to pick something that won't crash the rest of the compiler.  */
1199 
1200 static void
1201 expand_one_error_var (tree var)
1202 {
1203   machine_mode mode = DECL_MODE (var);
1204   rtx x;
1205 
1206   if (mode == BLKmode)
1207     x = gen_rtx_MEM (BLKmode, const0_rtx);
1208   else if (mode == VOIDmode)
1209     x = const0_rtx;
1210   else
1211     x = gen_reg_rtx (mode);
1212 
1213   SET_DECL_RTL (var, x);
1214 }
1215 
1216 /* A subroutine of expand_one_var.  VAR is a variable that will be
1217    allocated to the local stack frame.  Return true if we wish to
1218    add VAR to STACK_VARS so that it will be coalesced with other
1219    variables.  Return false to allocate VAR immediately.
1220 
1221    This function is used to reduce the number of variables considered
1222    for coalescing, which reduces the size of the quadratic problem.  */
1223 
1224 static bool
1225 defer_stack_allocation (tree var, bool toplevel)
1226 {
1227   /* Whether the variable is small enough for immediate allocation not to be
1228      a problem with regard to the frame size.  */
1229   bool smallish
1230     = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
1231        < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1232 
1233   /* If stack protection is enabled, *all* stack variables must be deferred,
1234      so that we can re-order the strings to the top of the frame.
1235      Similarly for Address Sanitizer.  */
1236   if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
1237     return true;
1238 
1239   /* We handle "large" alignment via dynamic allocation.  We want to handle
1240      this extra complication in only one place, so defer them.  */
1241   if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1242     return true;
1243 
1244   /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1245      might be detached from their block and appear at toplevel when we reach
1246      here.  We want to coalesce them with variables from other blocks when
1247      the immediate contribution to the frame size would be noticeable.  */
1248   if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
1249     return true;
1250 
1251   /* Variables declared in the outermost scope automatically conflict
1252      with every other variable.  The only reason to want to defer them
1253      at all is that, after sorting, we can more efficiently pack
1254      small variables in the stack frame.  Continue to defer at -O2.  */
1255   if (toplevel && optimize < 2)
1256     return false;
1257 
1258   /* Without optimization, *most* variables are allocated from the
1259      stack, which makes the quadratic problem large exactly when we
1260      want compilation to proceed as quickly as possible.  On the
1261      other hand, we don't want the function's stack frame size to
1262      get completely out of hand.  So we avoid adding scalars and
1263      "small" aggregates to the list at all.  */
1264   if (optimize == 0 && smallish)
1265     return false;
1266 
1267   return true;
1268 }
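/* For illustration: with -fstack-protector or -fsanitize=address every local
   is deferred so the frame can be re-ordered, whereas at -O0 without those
   options a variable below the stack-sharing size threshold is allocated
   immediately to keep the partitioning problem small.  */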
1269 
1270 /* A subroutine of expand_used_vars.  Expand one variable according to
1271    its flavor.  Variables to be placed on the stack are not actually
1272    expanded yet, merely recorded.
1273    When REALLY_EXPAND is false, only add stack values to be allocated.
1274    Return stack usage this variable is supposed to take.
1275 */
1276 
1277 static HOST_WIDE_INT
1278 expand_one_var (tree var, bool toplevel, bool really_expand)
1279 {
1280   unsigned int align = BITS_PER_UNIT;
1281   tree origvar = var;
1282 
1283   var = SSAVAR (var);
1284 
1285   if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1286     {
1287       /* Because we don't know if VAR will be in a register or on the stack,
1288 	 we conservatively assume it will be on the stack even if VAR is
1289 	 eventually put into a register after the RA pass.  For non-automatic
1290 	 variables, which won't be on stack, we collect alignment of
1291 	 type and ignore user specified alignment.  Similarly for
1292 	 SSA_NAMEs for which use_register_for_decl returns true.  */
1293       if (TREE_STATIC (var)
1294 	  || DECL_EXTERNAL (var)
1295 	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1296 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1297 				   TYPE_MODE (TREE_TYPE (var)),
1298 				   TYPE_ALIGN (TREE_TYPE (var)));
1299       else if (DECL_HAS_VALUE_EXPR_P (var)
1300 	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1301 	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1302 	   or variables which were assigned a stack slot already by
1303 	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1304 	   adjusted to match the offset chosen for it.  */
1305 	align = crtl->stack_alignment_estimated;
1306       else
1307 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1308 
1309       /* If the variable alignment is very large we'll dynamically allocate
1310 	 it, which means that in-frame portion is just a pointer.  */
1311       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1312 	align = POINTER_SIZE;
1313     }
1314 
1315   if (SUPPORTS_STACK_ALIGNMENT
1316       && crtl->stack_alignment_estimated < align)
1317     {
1318       /* stack_alignment_estimated shouldn't change after stack
1319          realign decision has been made.  */
1320       gcc_assert (!crtl->stack_realign_processed);
1321       crtl->stack_alignment_estimated = align;
1322     }
1323 
1324   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1325      So here we only make sure stack_alignment_needed >= align.  */
1326   if (crtl->stack_alignment_needed < align)
1327     crtl->stack_alignment_needed = align;
1328   if (crtl->max_used_stack_slot_alignment < align)
1329     crtl->max_used_stack_slot_alignment = align;
1330 
1331   if (TREE_CODE (origvar) == SSA_NAME)
1332     {
1333       gcc_assert (TREE_CODE (var) != VAR_DECL
1334 		  || (!DECL_EXTERNAL (var)
1335 		      && !DECL_HAS_VALUE_EXPR_P (var)
1336 		      && !TREE_STATIC (var)
1337 		      && TREE_TYPE (var) != error_mark_node
1338 		      && !DECL_HARD_REGISTER (var)
1339 		      && really_expand));
1340     }
1341   if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1342     ;
1343   else if (DECL_EXTERNAL (var))
1344     ;
1345   else if (DECL_HAS_VALUE_EXPR_P (var))
1346     ;
1347   else if (TREE_STATIC (var))
1348     ;
1349   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1350     ;
1351   else if (TREE_TYPE (var) == error_mark_node)
1352     {
1353       if (really_expand)
1354         expand_one_error_var (var);
1355     }
1356   else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1357     {
1358       if (really_expand)
1359 	{
1360 	  expand_one_hard_reg_var (var);
1361 	  if (!DECL_HARD_REGISTER (var))
1362 	    /* Invalid register specification.  */
1363 	    expand_one_error_var (var);
1364 	}
1365     }
1366   else if (use_register_for_decl (var))
1367     {
1368       if (really_expand)
1369         expand_one_register_var (origvar);
1370     }
1371   else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1372     {
1373       /* Reject variables which cover more than half of the address-space.  */
1374       if (really_expand)
1375 	{
1376 	  error ("size of variable %q+D is too large", var);
1377 	  expand_one_error_var (var);
1378 	}
1379     }
1380   else if (defer_stack_allocation (var, toplevel))
1381     add_stack_var (origvar);
1382   else
1383     {
1384       if (really_expand)
1385         expand_one_stack_var (origvar);
1386       return tree_to_uhwi (DECL_SIZE_UNIT (var));
1387     }
1388   return 0;
1389 }
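/* For illustration: a local whose DECL_ALIGN exceeds
   MAX_SUPPORTED_STACK_ALIGNMENT only contributes POINTER_SIZE to the
   alignment bookkeeping above, since it will be allocated dynamically and
   only a pointer to it lives in the frame.  */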
1390 
1391 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1392    expanding variables.  Those variables that can be put into registers
1393    are allocated pseudos; those that can't are put on the stack.
1394 
1395    TOPLEVEL is true if this is the outermost BLOCK.  */
1396 
1397 static void
1398 expand_used_vars_for_block (tree block, bool toplevel)
1399 {
1400   tree t;
1401 
1402   /* Expand all variables at this level.  */
1403   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1404     if (TREE_USED (t)
1405         && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1406 	    || !DECL_NONSHAREABLE (t)))
1407       expand_one_var (t, toplevel, true);
1408 
1409   /* Expand all variables at containing levels.  */
1410   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1411     expand_used_vars_for_block (t, false);
1412 }
1413 
1414 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1415    and clear TREE_USED on all local variables.  */
1416 
1417 static void
1418 clear_tree_used (tree block)
1419 {
1420   tree t;
1421 
1422   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1423     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1424     if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1425 	|| !DECL_NONSHAREABLE (t))
1426       TREE_USED (t) = 0;
1427 
1428   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1429     clear_tree_used (t);
1430 }
1431 
1432 enum {
1433   SPCT_FLAG_DEFAULT = 1,
1434   SPCT_FLAG_ALL = 2,
1435   SPCT_FLAG_STRONG = 3,
1436   SPCT_FLAG_EXPLICIT = 4
1437 };
1438 
1439 /* Examine TYPE and determine a bit mask of the following features.  */
1440 
1441 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1442 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1443 #define SPCT_HAS_ARRAY			4
1444 #define SPCT_HAS_AGGREGATE		8
1445 
1446 static unsigned int
1447 stack_protect_classify_type (tree type)
1448 {
1449   unsigned int ret = 0;
1450   tree t;
1451 
1452   switch (TREE_CODE (type))
1453     {
1454     case ARRAY_TYPE:
1455       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1456       if (t == char_type_node
1457 	  || t == signed_char_type_node
1458 	  || t == unsigned_char_type_node)
1459 	{
1460 	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1461 	  unsigned HOST_WIDE_INT len;
1462 
1463 	  if (!TYPE_SIZE_UNIT (type)
1464 	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1465 	    len = max;
1466 	  else
1467 	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1468 
1469 	  if (len == 0)
1470 	    ret = SPCT_HAS_ARRAY;
1471 	  else if (len < max)
1472 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1473 	  else
1474 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1475 	}
1476       else
1477 	ret = SPCT_HAS_ARRAY;
1478       break;
1479 
1480     case UNION_TYPE:
1481     case QUAL_UNION_TYPE:
1482     case RECORD_TYPE:
1483       ret = SPCT_HAS_AGGREGATE;
1484       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1485 	if (TREE_CODE (t) == FIELD_DECL)
1486 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1487       break;
1488 
1489     default:
1490       break;
1491     }
1492 
1493   return ret;
1494 }
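/* Worked example (illustrative, assuming the default
   --param ssp-buffer-size=8): "char buf[4]" classifies as
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, "char buf[64]" as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, and "int v[4]" as just
   SPCT_HAS_ARRAY.  */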
1495 
1496 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1497    part of the local stack frame.  Remember if we ever return nonzero for
1498    any variable in this function.  The return value is the phase number in
1499    which the variable should be allocated.  */
1500 
1501 static int
1502 stack_protect_decl_phase (tree decl)
1503 {
1504   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1505   int ret = 0;
1506 
1507   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1508     has_short_buffer = true;
1509 
1510   if (flag_stack_protect == SPCT_FLAG_ALL
1511       || flag_stack_protect == SPCT_FLAG_STRONG
1512       || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1513 	  && lookup_attribute ("stack_protect",
1514 			       DECL_ATTRIBUTES (current_function_decl))))
1515     {
1516       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1517 	  && !(bits & SPCT_HAS_AGGREGATE))
1518 	ret = 1;
1519       else if (bits & SPCT_HAS_ARRAY)
1520 	ret = 2;
1521     }
1522   else
1523     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1524 
1525   if (ret)
1526     has_protected_decls = true;
1527 
1528   return ret;
1529 }
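/* For illustration: with -fstack-protector-all or -fstack-protector-strong a
   bare char buffer lands in phase 1 (closest to the guard), a struct
   containing an array in phase 2, and a plain scalar in phase 0; with plain
   -fstack-protector only decls containing a large char array are segregated
   (phase 1).  */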
1530 
1531 /* Two helper routines that check for phase 1 and phase 2.  These are used
1532    as callbacks for expand_stack_vars.  */
1533 
1534 static bool
1535 stack_protect_decl_phase_1 (size_t i)
1536 {
1537   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1538 }
1539 
1540 static bool
1541 stack_protect_decl_phase_2 (size_t i)
1542 {
1543   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1544 }
1545 
1546 /* A helper function that checks for the asan phase (with the stack
1547    protector it is phase 3).  This is used as a callback for expand_stack_vars.
1548    Returns true if any of the vars in the partition need to be protected.  */
1549 
1550 static bool
1551 asan_decl_phase_3 (size_t i)
1552 {
1553   while (i != EOC)
1554     {
1555       if (asan_protect_stack_decl (stack_vars[i].decl))
1556 	return true;
1557       i = stack_vars[i].next;
1558     }
1559   return false;
1560 }
1561 
1562 /* Ensure that variables in different stack protection phases conflict
1563    so that they are not merged and share the same stack slot.  */
1564 
1565 static void
1566 add_stack_protection_conflicts (void)
1567 {
1568   size_t i, j, n = stack_vars_num;
1569   unsigned char *phase;
1570 
1571   phase = XNEWVEC (unsigned char, n);
1572   for (i = 0; i < n; ++i)
1573     phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1574 
1575   for (i = 0; i < n; ++i)
1576     {
1577       unsigned char ph_i = phase[i];
1578       for (j = i + 1; j < n; ++j)
1579 	if (ph_i != phase[j])
1580 	  add_stack_var_conflict (i, j);
1581     }
1582 
1583   XDELETEVEC (phase);
1584 }
1585 
1586 /* Create a decl for the guard at the top of the stack frame.  */
1587 
1588 static void
1589 create_stack_guard (void)
1590 {
1591   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1592 			   VAR_DECL, NULL, ptr_type_node);
1593   TREE_THIS_VOLATILE (guard) = 1;
1594   TREE_USED (guard) = 1;
1595   expand_one_stack_var (guard);
1596   crtl->stack_protect_guard = guard;
1597 }
1598 
1599 /* Prepare for expanding variables.  */
1600 static void
1601 init_vars_expansion (void)
1602 {
1603   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1604   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1605 
1606   /* A map from decl to stack partition.  */
1607   decl_to_stack_part = new hash_map<tree, size_t>;
1608 
1609   /* Initialize local stack smashing state.  */
1610   has_protected_decls = false;
1611   has_short_buffer = false;
1612 }
1613 
1614 /* Free up stack variable graph data.  */
1615 static void
1616 fini_vars_expansion (void)
1617 {
1618   bitmap_obstack_release (&stack_var_bitmap_obstack);
1619   if (stack_vars)
1620     XDELETEVEC (stack_vars);
1621   if (stack_vars_sorted)
1622     XDELETEVEC (stack_vars_sorted);
1623   stack_vars = NULL;
1624   stack_vars_sorted = NULL;
1625   stack_vars_alloc = stack_vars_num = 0;
1626   delete decl_to_stack_part;
1627   decl_to_stack_part = NULL;
1628 }
1629 
1630 /* Make a fair guess for the size of the stack frame of the function
1631    in NODE.  This doesn't have to be exact, the result is only used in
1632    the inline heuristics.  So we don't want to run the full stack var
1633    packing algorithm (which is quadratic in the number of stack vars).
1634    Instead, we calculate the total size of all stack vars.  This turns
1635    out to be a pretty fair estimate -- packing of stack vars doesn't
1636    happen very often.  */
1637 
1638 HOST_WIDE_INT
1639 estimated_stack_frame_size (struct cgraph_node *node)
1640 {
1641   HOST_WIDE_INT size = 0;
1642   size_t i;
1643   tree var;
1644   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1645 
1646   push_cfun (fn);
1647 
1648   init_vars_expansion ();
1649 
1650   FOR_EACH_LOCAL_DECL (fn, i, var)
1651     if (auto_var_in_fn_p (var, fn->decl))
1652       size += expand_one_var (var, true, false);
1653 
1654   if (stack_vars_num > 0)
1655     {
1656       /* Fake sorting the stack vars for account_stack_vars ().  */
1657       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1658       for (i = 0; i < stack_vars_num; ++i)
1659 	stack_vars_sorted[i] = i;
1660       size += account_stack_vars ();
1661     }
1662 
1663   fini_vars_expansion ();
1664   pop_cfun ();
1665   return size;
1666 }
1667 
1668 /* Helper routine to check if a record or union contains an array field. */
1669 
1670 static int
1671 record_or_union_type_has_array_p (const_tree tree_type)
1672 {
1673   tree fields = TYPE_FIELDS (tree_type);
1674   tree f;
1675 
1676   for (f = fields; f; f = DECL_CHAIN (f))
1677     if (TREE_CODE (f) == FIELD_DECL)
1678       {
1679 	tree field_type = TREE_TYPE (f);
1680 	if (RECORD_OR_UNION_TYPE_P (field_type)
1681 	    && record_or_union_type_has_array_p (field_type))
1682 	  return 1;
1683 	if (TREE_CODE (field_type) == ARRAY_TYPE)
1684 	  return 1;
1685       }
1686   return 0;
1687 }
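
/* Illustrative example (type and field names made up): for
     struct example_outer { int a; struct { char b[4]; } inner; };
   the walk above reports an array, because the nested record's field B
   has ARRAY_TYPE.  */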
1688 
1689 /* Check if the current function has local referenced variables that
1690    have their addresses taken, contain an array, or are arrays.  */
1691 
1692 static bool
1693 stack_protect_decl_p ()
1694 {
1695   unsigned i;
1696   tree var;
1697 
1698   FOR_EACH_LOCAL_DECL (cfun, i, var)
1699     if (!is_global_var (var))
1700       {
1701 	tree var_type = TREE_TYPE (var);
1702 	if (TREE_CODE (var) == VAR_DECL
1703 	    && (TREE_CODE (var_type) == ARRAY_TYPE
1704 		|| TREE_ADDRESSABLE (var)
1705 		|| (RECORD_OR_UNION_TYPE_P (var_type)
1706 		    && record_or_union_type_has_array_p (var_type))))
1707 	  return true;
1708       }
1709   return false;
1710 }
1711 
1712 /* Check if the current function has calls that use a return slot.  */
1713 
1714 static bool
1715 stack_protect_return_slot_p ()
1716 {
1717   basic_block bb;
1718 
1719   FOR_ALL_BB_FN (bb, cfun)
1720     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1721 	 !gsi_end_p (gsi); gsi_next (&gsi))
1722       {
1723 	gimple stmt = gsi_stmt (gsi);
1724 	/* This assumes that calls to internal-only functions never
1725 	   use a return slot.  */
1726 	if (is_gimple_call (stmt)
1727 	    && !gimple_call_internal_p (stmt)
1728 	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
1729 				  gimple_call_fndecl (stmt)))
1730 	  return true;
1731       }
1732   return false;
1733 }
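
/* Hedged source-level illustration (not built as part of GCC; all names
   below are made up) of what the two checks above look for under
   -fstack-protector-strong: a local array or an address-taken local
   requests a guard, while a function using only scalar locals kept in
   registers does not trip stack_protect_decl_p.  */
#if 0
extern void example_use_ptr (int *);

static char
example_local_array (void)		/* ARRAY_TYPE local -> guard  */
{
  char buf[16];
  buf[0] = 0;
  return buf[0];
}

static int
example_addressable (void)		/* TREE_ADDRESSABLE local -> guard  */
{
  int x = 0;
  example_use_ptr (&x);
  return x;
}

static int
example_plain (int a, int b)		/* no guard from these checks  */
{
  return a + b;
}
#endif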
1734 
1735 /* Expand all variables used in the function.  */
1736 
1737 static rtx_insn *
1738 expand_used_vars (void)
1739 {
1740   tree var, outer_block = DECL_INITIAL (current_function_decl);
1741   vec<tree> maybe_local_decls = vNULL;
1742   rtx_insn *var_end_seq = NULL;
1743   unsigned i;
1744   unsigned len;
1745   bool gen_stack_protect_signal = false;
1746 
1747   /* Compute the phase of the stack frame for this function.  */
1748   {
1749     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1750     int off = STARTING_FRAME_OFFSET % align;
1751     frame_phase = off ? align - off : 0;
1752   }
1753 
1754   /* Set TREE_USED on all variables in the local_decls.  */
1755   FOR_EACH_LOCAL_DECL (cfun, i, var)
1756     TREE_USED (var) = 1;
1757   /* Clear TREE_USED on all variables associated with a block scope.  */
1758   clear_tree_used (DECL_INITIAL (current_function_decl));
1759 
1760   init_vars_expansion ();
1761 
1762   if (targetm.use_pseudo_pic_reg ())
1763     pic_offset_table_rtx = gen_reg_rtx (Pmode);
1764 
1765   hash_map<tree, tree> ssa_name_decls;
1766   for (i = 0; i < SA.map->num_partitions; i++)
1767     {
1768       tree var = partition_to_var (SA.map, i);
1769 
1770       gcc_assert (!virtual_operand_p (var));
1771 
1772       /* Assign decls to each SSA name partition, share decls for partitions
1773          we could have coalesced (those with the same type).  */
1774       if (SSA_NAME_VAR (var) == NULL_TREE)
1775 	{
1776 	  tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
1777 	  if (!*slot)
1778 	    *slot = create_tmp_reg (TREE_TYPE (var));
1779 	  replace_ssa_name_symbol (var, *slot);
1780 	}
1781 
1782       /* Always allocate space for partitions based on VAR_DECLs.  But for
1783 	 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1784 	 debug info, there is no need to do so if optimization is disabled
1785 	 because all the SSA_NAMEs based on these DECLs have been coalesced
1786 	 into a single partition, which is thus assigned the canonical RTL
1787 	 location of the DECLs.  If in_lto_p, we can't rely on optimize;
1788 	 a function could be compiled with -O1 -flto first and only the
1789 	 link performed at -O0.  */
1790       if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1791 	expand_one_var (var, true, true);
1792       else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
1793 	{
1794 	  /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
1795 	     contain the default def (representing the parm or result itself)
1796 	     we don't do anything here.  But those which don't contain the
1797 	     default def (representing a temporary based on the parm/result)
1798 	     we need to allocate space just like for normal VAR_DECLs.  */
1799 	  if (!bitmap_bit_p (SA.partition_has_default_def, i))
1800 	    {
1801 	      expand_one_var (var, true, true);
1802 	      gcc_assert (SA.partition_to_pseudo[i]);
1803 	    }
1804 	}
1805     }
1806 
1807   if (flag_stack_protect == SPCT_FLAG_STRONG)
1808       gen_stack_protect_signal
1809 	= stack_protect_decl_p () || stack_protect_return_slot_p ();
1810 
1811   /* At this point all variables on the local_decls with TREE_USED
1812      set are not associated with any block scope.  Lay them out.  */
1813 
1814   len = vec_safe_length (cfun->local_decls);
1815   FOR_EACH_LOCAL_DECL (cfun, i, var)
1816     {
1817       bool expand_now = false;
1818 
1819       /* Expanded above already.  */
1820       if (is_gimple_reg (var))
1821 	{
1822 	  TREE_USED (var) = 0;
1823 	  goto next;
1824 	}
1825       /* We didn't set a block for static or extern because it's hard
1826 	 to tell the difference between a global variable (re)declared
1827 	 in a local scope, and one that's really declared there to
1828 	 begin with.  And it doesn't really matter much, since we're
1829 	 not giving them stack space.  Expand them now.  */
1830       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1831 	expand_now = true;
1832 
1833       /* Expand variables not associated with any block now.  Those created by
1834 	 the optimizers could be live anywhere in the function.  Those that
1835 	 could possibly have been scoped originally and detached from their
1836 	 block will have their allocation deferred so we coalesce them with
1837 	 others when optimization is enabled.  */
1838       else if (TREE_USED (var))
1839 	expand_now = true;
1840 
1841       /* Finally, mark all variables on the list as used.  We'll use
1842 	 this in a moment when we expand those associated with scopes.  */
1843       TREE_USED (var) = 1;
1844 
1845       if (expand_now)
1846 	expand_one_var (var, true, true);
1847 
1848     next:
1849       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1850 	{
1851 	  rtx rtl = DECL_RTL_IF_SET (var);
1852 
1853 	  /* Keep artificial non-ignored vars in cfun->local_decls
1854 	     chain until instantiate_decls.  */
1855 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1856 	    add_local_decl (cfun, var);
1857 	  else if (rtl == NULL_RTX)
1858 	    /* If rtl isn't set yet, which can happen e.g. with
1859 	       -fstack-protector, retry before returning from this
1860 	       function.  */
1861 	    maybe_local_decls.safe_push (var);
1862 	}
1863     }
1864 
1865   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1866 
1867      +-----------------+-----------------+
1868      | ...processed... | ...duplicates...|
1869      +-----------------+-----------------+
1870                        ^
1871 		       +-- LEN points here.
1872 
1873      We just want the duplicates, as those are the artificial
1874      non-ignored vars that we want to keep until instantiate_decls.
1875      Move them down and truncate the array.  */
1876   if (!vec_safe_is_empty (cfun->local_decls))
1877     cfun->local_decls->block_remove (0, len);
1878 
1879   /* At this point, all variables within the block tree with TREE_USED
1880      set are actually used by the optimized function.  Lay them out.  */
1881   expand_used_vars_for_block (outer_block, true);
1882 
1883   if (stack_vars_num > 0)
1884     {
1885       add_scope_conflicts ();
1886 
1887       /* If stack protection is enabled, we don't share space between
1888 	 vulnerable data and non-vulnerable data.  */
1889       if (flag_stack_protect != 0
1890 	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
1891 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1892 		  && lookup_attribute ("stack_protect",
1893 				       DECL_ATTRIBUTES (current_function_decl)))))
1894 	add_stack_protection_conflicts ();
1895 
1896       /* Now that we have collected all stack variables, and have computed a
1897 	 minimal interference graph, attempt to save some stack space.  */
1898       partition_stack_vars ();
1899       if (dump_file)
1900 	dump_stack_var_partition ();
1901     }
1902 
1903   switch (flag_stack_protect)
1904     {
1905     case SPCT_FLAG_ALL:
1906       create_stack_guard ();
1907       break;
1908 
1909     case SPCT_FLAG_STRONG:
1910       if (gen_stack_protect_signal
1911 	  || cfun->calls_alloca || has_protected_decls
1912 	  || lookup_attribute ("stack_protect",
1913 			       DECL_ATTRIBUTES (current_function_decl)))
1914 	create_stack_guard ();
1915       break;
1916 
1917     case SPCT_FLAG_DEFAULT:
1918       if (cfun->calls_alloca || has_protected_decls
1919 	  || lookup_attribute ("stack_protect",
1920 			       DECL_ATTRIBUTES (current_function_decl)))
1921 	create_stack_guard ();
1922       break;
1923 
1924     case SPCT_FLAG_EXPLICIT:
1925       if (lookup_attribute ("stack_protect",
1926 			    DECL_ATTRIBUTES (current_function_decl)))
1927 	create_stack_guard ();
1928       break;
1929     default:
1930       ;
1931     }
1932 
1933   /* Assign rtl to each variable based on these partitions.  */
1934   if (stack_vars_num > 0)
1935     {
1936       struct stack_vars_data data;
1937 
1938       data.asan_vec = vNULL;
1939       data.asan_decl_vec = vNULL;
1940       data.asan_base = NULL_RTX;
1941       data.asan_alignb = 0;
1942 
1943       /* Reorder decls to be protected by iterating over the variables
1944 	 array multiple times, and allocating out of each phase in turn.  */
1945       /* ??? We could probably integrate this into the qsort we did
1946 	 earlier, such that we naturally see these variables first,
1947 	 and thus naturally allocate things in the right order.  */
1948       if (has_protected_decls)
1949 	{
1950 	  /* Phase 1 contains only character arrays.  */
1951 	  expand_stack_vars (stack_protect_decl_phase_1, &data);
1952 
1953 	  /* Phase 2 contains other kinds of arrays.  */
1954 	  if (flag_stack_protect == SPCT_FLAG_ALL
1955 	      || flag_stack_protect == SPCT_FLAG_STRONG
1956 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1957 		  && lookup_attribute ("stack_protect",
1958 				       DECL_ATTRIBUTES (current_function_decl))))
1959 	    expand_stack_vars (stack_protect_decl_phase_2, &data);
1960 	}
1961 
1962       if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
1963 	/* Phase 3, any partitions that need asan protection
1964 	   in addition to phase 1 and 2.  */
1965 	expand_stack_vars (asan_decl_phase_3, &data);
1966 
1967       if (!data.asan_vec.is_empty ())
1968 	{
1969 	  HOST_WIDE_INT prev_offset = frame_offset;
1970 	  HOST_WIDE_INT offset, sz, redzonesz;
1971 	  redzonesz = ASAN_RED_ZONE_SIZE;
1972 	  sz = data.asan_vec[0] - prev_offset;
1973 	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
1974 	      && data.asan_alignb <= 4096
1975 	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
1976 	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
1977 			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
1978 	  offset
1979 	    = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
1980 	  data.asan_vec.safe_push (prev_offset);
1981 	  data.asan_vec.safe_push (offset);
1982 	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
1983 	  if (STRICT_ALIGNMENT)
1984 	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1985 				      << ASAN_SHADOW_SHIFT)
1986 				     / BITS_PER_UNIT, 1);
1987 
1988 	  var_end_seq
1989 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
1990 					  data.asan_base,
1991 					  data.asan_alignb,
1992 					  data.asan_vec.address (),
1993 					  data.asan_decl_vec.address (),
1994 					  data.asan_vec.length ());
1995 	}
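
      /* Worked example of the red-zone rounding above, assuming
	 ASAN_RED_ZONE_SIZE is 32 bytes: with sz == 40 and
	 data.asan_alignb == 64, (40 + 32 + 63) & ~63 == 128, so
	 redzonesz == 88; sz + redzonesz is then a multiple of the
	 required alignment and still leaves at least 32 bytes of red
	 zone after the variables.  */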
1996 
1997       expand_stack_vars (NULL, &data);
1998 
1999       data.asan_vec.release ();
2000       data.asan_decl_vec.release ();
2001     }
2002 
2003   fini_vars_expansion ();
2004 
2005   /* If there were any artificial non-ignored vars without rtl
2006      found earlier, see if deferred stack allocation hasn't assigned
2007      rtl to them.  */
2008   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2009     {
2010       rtx rtl = DECL_RTL_IF_SET (var);
2011 
2012       /* Keep artificial non-ignored vars in cfun->local_decls
2013 	 chain until instantiate_decls.  */
2014       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2015 	add_local_decl (cfun, var);
2016     }
2017   maybe_local_decls.release ();
2018 
2019   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
2020   if (STACK_ALIGNMENT_NEEDED)
2021     {
2022       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2023       if (!FRAME_GROWS_DOWNWARD)
2024 	frame_offset += align - 1;
2025       frame_offset &= -align;
2026     }
2027 
2028   return var_end_seq;
2029 }
2030 
2031 
2032 /* If we need to produce a detailed dump, print the tree representation
2033    for STMT to the dump file.  SINCE is the last RTX after which the RTL
2034    generated for STMT should have been appended.  */
2035 
2036 static void
2037 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
2038 {
2039   if (dump_file && (dump_flags & TDF_DETAILS))
2040     {
2041       fprintf (dump_file, "\n;; ");
2042       print_gimple_stmt (dump_file, stmt, 0,
2043 			 TDF_SLIM | (dump_flags & TDF_LINENO));
2044       fprintf (dump_file, "\n");
2045 
2046       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2047     }
2048 }
2049 
2050 /* Maps the blocks that do not contain tree labels to rtx labels.  */
2051 
2052 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2053 
2054 /* Returns the label_rtx expression for a label starting basic block BB.  */
2055 
2056 static rtx
2057 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2058 {
2059   gimple_stmt_iterator gsi;
2060   tree lab;
2061 
2062   if (bb->flags & BB_RTL)
2063     return block_label (bb);
2064 
2065   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2066   if (elt)
2067     return *elt;
2068 
2069   /* Find the tree label if it is present.  */
2070 
2071   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2072     {
2073       glabel *lab_stmt;
2074 
2075       lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2076       if (!lab_stmt)
2077 	break;
2078 
2079       lab = gimple_label_label (lab_stmt);
2080       if (DECL_NONLOCAL (lab))
2081 	break;
2082 
2083       return label_rtx (lab);
2084     }
2085 
2086   rtx_code_label *l = gen_label_rtx ();
2087   lab_rtx_for_bb->put (bb, l);
2088   return l;
2089 }
2090 
2091 
2092 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
2093    of a basic block where we just expanded the conditional at the end,
2094    possibly clean up the CFG and instruction sequence.  LAST is the
2095    last instruction before the just emitted jump sequence.  */
2096 
2097 static void
2098 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2099 {
2100   /* Special case: when jumpif decides that the condition is
2101      trivial it emits an unconditional jump (and the necessary
2102      barrier).  But we still have two edges, and the fallthru one is
2103      wrong.  purge_dead_edges would clean this up later.  Unfortunately
2104      we have to insert insns (and split edges) before
2105      find_many_sub_basic_blocks and hence before purge_dead_edges.
2106      But splitting edges might create new blocks which depend on the
2107      fact that if there are two edges there's no barrier.  So the
2108      barrier would get lost and verify_flow_info would ICE.  Instead
2109      of auditing all edge splitters to care for the barrier (which
2110      normally isn't there in a cleaned CFG), fix it here.  */
2111   if (BARRIER_P (get_last_insn ()))
2112     {
2113       rtx_insn *insn;
2114       remove_edge (e);
2115       /* Now, we have a single successor block; if we have insns to
2116 	 insert on the remaining edge we potentially will insert
2117 	 them at the end of this block (if the dest block isn't feasible)
2118 	 in order to avoid splitting the edge.  This insertion will take
2119 	 place in front of the last jump.  But we might have emitted
2120 	 multiple jumps (conditional and one unconditional) to the
2121 	 same destination.  Inserting in front of the last one then
2122 	 is a problem.  See PR 40021.  We fix this by deleting all
2123 	 jumps except the last unconditional one.  */
2124       insn = PREV_INSN (get_last_insn ());
2125       /* Make sure we have an unconditional jump.  Otherwise we're
2126 	 confused.  */
2127       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2128       for (insn = PREV_INSN (insn); insn != last;)
2129 	{
2130 	  insn = PREV_INSN (insn);
2131 	  if (JUMP_P (NEXT_INSN (insn)))
2132 	    {
2133 	      if (!any_condjump_p (NEXT_INSN (insn)))
2134 		{
2135 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2136 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2137 		}
2138 	      delete_insn (NEXT_INSN (insn));
2139 	    }
2140 	}
2141     }
2142 }
2143 
2144 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
2145    Returns a new basic block if we've terminated the current basic
2146    block and created a new one.  */
2147 
2148 static basic_block
2149 expand_gimple_cond (basic_block bb, gcond *stmt)
2150 {
2151   basic_block new_bb, dest;
2152   edge new_edge;
2153   edge true_edge;
2154   edge false_edge;
2155   rtx_insn *last2, *last;
2156   enum tree_code code;
2157   tree op0, op1;
2158 
2159   code = gimple_cond_code (stmt);
2160   op0 = gimple_cond_lhs (stmt);
2161   op1 = gimple_cond_rhs (stmt);
2162   /* We're sometimes presented with such code:
2163        D.123_1 = x < y;
2164        if (D.123_1 != 0)
2165          ...
2166      This would expand to two comparisons which then later might
2167      be cleaned up by combine.  But some pattern matchers like if-conversion
2168      work better when there's only one compare, so make up for this
2169      here as a special exception if TER would have made the same change.  */
2170   if (SA.values
2171       && TREE_CODE (op0) == SSA_NAME
2172       && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2173       && TREE_CODE (op1) == INTEGER_CST
2174       && ((gimple_cond_code (stmt) == NE_EXPR
2175 	   && integer_zerop (op1))
2176 	  || (gimple_cond_code (stmt) == EQ_EXPR
2177 	      && integer_onep (op1)))
2178       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2179     {
2180       gimple second = SSA_NAME_DEF_STMT (op0);
2181       if (gimple_code (second) == GIMPLE_ASSIGN)
2182 	{
2183 	  enum tree_code code2 = gimple_assign_rhs_code (second);
2184 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
2185 	    {
2186 	      code = code2;
2187 	      op0 = gimple_assign_rhs1 (second);
2188 	      op1 = gimple_assign_rhs2 (second);
2189 	    }
2190 	  /* If jumps are cheap and the target does not support conditional
2191 	     compare, turn some more codes into jumpy sequences.  */
2192 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2193 		   && targetm.gen_ccmp_first == NULL)
2194 	    {
2195 	      if ((code2 == BIT_AND_EXPR
2196 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2197 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2198 		  || code2 == TRUTH_AND_EXPR)
2199 		{
2200 		  code = TRUTH_ANDIF_EXPR;
2201 		  op0 = gimple_assign_rhs1 (second);
2202 		  op1 = gimple_assign_rhs2 (second);
2203 		}
2204 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2205 		{
2206 		  code = TRUTH_ORIF_EXPR;
2207 		  op0 = gimple_assign_rhs1 (second);
2208 		  op1 = gimple_assign_rhs2 (second);
2209 		}
2210 	    }
2211 	}
2212     }
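
  /* Illustrative example of the folding above (SSA names made up): with
     1-bit operands, "tem_2 = a_1 & b_3; if (tem_2 != 0)" is expanded as
     if the condition were "a_1 && b_3", i.e. as a chain of cheap
     conditional jumps instead of materializing the bitwise result.  */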
2213 
2214   last2 = last = get_last_insn ();
2215 
2216   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2217   set_curr_insn_location (gimple_location (stmt));
2218 
2219   /* These flags have no purpose in RTL land.  */
2220   true_edge->flags &= ~EDGE_TRUE_VALUE;
2221   false_edge->flags &= ~EDGE_FALSE_VALUE;
2222 
2223   /* We can either have a pure conditional jump with one fallthru edge or
2224      a two-way jump that needs to be decomposed into two basic blocks.  */
2225   if (false_edge->dest == bb->next_bb)
2226     {
2227       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2228 		true_edge->probability);
2229       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2230       if (true_edge->goto_locus != UNKNOWN_LOCATION)
2231 	set_curr_insn_location (true_edge->goto_locus);
2232       false_edge->flags |= EDGE_FALLTHRU;
2233       maybe_cleanup_end_of_block (false_edge, last);
2234       return NULL;
2235     }
2236   if (true_edge->dest == bb->next_bb)
2237     {
2238       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2239 		   false_edge->probability);
2240       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2241       if (false_edge->goto_locus != UNKNOWN_LOCATION)
2242 	set_curr_insn_location (false_edge->goto_locus);
2243       true_edge->flags |= EDGE_FALLTHRU;
2244       maybe_cleanup_end_of_block (true_edge, last);
2245       return NULL;
2246     }
2247 
2248   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2249 	    true_edge->probability);
2250   last = get_last_insn ();
2251   if (false_edge->goto_locus != UNKNOWN_LOCATION)
2252     set_curr_insn_location (false_edge->goto_locus);
2253   emit_jump (label_rtx_for_bb (false_edge->dest));
2254 
2255   BB_END (bb) = last;
2256   if (BARRIER_P (BB_END (bb)))
2257     BB_END (bb) = PREV_INSN (BB_END (bb));
2258   update_bb_for_insn (bb);
2259 
2260   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2261   dest = false_edge->dest;
2262   redirect_edge_succ (false_edge, new_bb);
2263   false_edge->flags |= EDGE_FALLTHRU;
2264   new_bb->count = false_edge->count;
2265   new_bb->frequency = EDGE_FREQUENCY (false_edge);
2266   add_bb_to_loop (new_bb, bb->loop_father);
2267   new_edge = make_edge (new_bb, dest, 0);
2268   new_edge->probability = REG_BR_PROB_BASE;
2269   new_edge->count = new_bb->count;
2270   if (BARRIER_P (BB_END (new_bb)))
2271     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2272   update_bb_for_insn (new_bb);
2273 
2274   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2275 
2276   if (true_edge->goto_locus != UNKNOWN_LOCATION)
2277     {
2278       set_curr_insn_location (true_edge->goto_locus);
2279       true_edge->goto_locus = curr_insn_location ();
2280     }
2281 
2282   return new_bb;
2283 }
2284 
2285 /* Mark all calls that can have a transaction restart.  */
2286 
2287 static void
2288 mark_transaction_restart_calls (gimple stmt)
2289 {
2290   struct tm_restart_node dummy;
2291   tm_restart_node **slot;
2292 
2293   if (!cfun->gimple_df->tm_restart)
2294     return;
2295 
2296   dummy.stmt = stmt;
2297   slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2298   if (slot)
2299     {
2300       struct tm_restart_node *n = *slot;
2301       tree list = n->label_or_list;
2302       rtx_insn *insn;
2303 
2304       for (insn = next_real_insn (get_last_insn ());
2305 	   !CALL_P (insn);
2306 	   insn = next_real_insn (insn))
2307 	continue;
2308 
2309       if (TREE_CODE (list) == LABEL_DECL)
2310 	add_reg_note (insn, REG_TM, label_rtx (list));
2311       else
2312 	for (; list ; list = TREE_CHAIN (list))
2313 	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2314     }
2315 }
2316 
2317 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2318    statement STMT.  */
2319 
2320 static void
2321 expand_call_stmt (gcall *stmt)
2322 {
2323   tree exp, decl, lhs;
2324   bool builtin_p;
2325   size_t i;
2326 
2327   if (gimple_call_internal_p (stmt))
2328     {
2329       expand_internal_call (stmt);
2330       return;
2331     }
2332 
2333   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2334 
2335   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2336   decl = gimple_call_fndecl (stmt);
2337   builtin_p = decl && DECL_BUILT_IN (decl);
2338 
2339   /* If this is not a builtin function, the function type through which the
2340      call is made may be different from the type of the function.  */
2341   if (!builtin_p)
2342     CALL_EXPR_FN (exp)
2343       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2344 		      CALL_EXPR_FN (exp));
2345 
2346   TREE_TYPE (exp) = gimple_call_return_type (stmt);
2347   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2348 
2349   for (i = 0; i < gimple_call_num_args (stmt); i++)
2350     {
2351       tree arg = gimple_call_arg (stmt, i);
2352       gimple def;
2353       /* TER propagates addresses into arguments of builtin functions so we have
2354 	 a chance to infer more correct alignment information.  See PR39954.  */
2355       if (builtin_p
2356 	  && TREE_CODE (arg) == SSA_NAME
2357 	  && (def = get_gimple_for_ssa_name (arg))
2358 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2359 	arg = gimple_assign_rhs1 (def);
2360       CALL_EXPR_ARG (exp, i) = arg;
2361     }
2362 
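
  /* Illustrative example of the substitution above (SSA names made up):
     if an argument p_1 of __builtin_memcpy is defined by "p_1 = &buf",
     passing the ADDR_EXPR itself lets the expander see buf's alignment
     instead of an opaque SSA name.  */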
2363   if (gimple_has_side_effects (stmt))
2364     TREE_SIDE_EFFECTS (exp) = 1;
2365 
2366   if (gimple_call_nothrow_p (stmt))
2367     TREE_NOTHROW (exp) = 1;
2368 
2369   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2370   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2371   if (decl
2372       && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2373       && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2374 	  || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2375     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2376   else
2377     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2378   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2379   SET_EXPR_LOCATION (exp, gimple_location (stmt));
2380   CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2381 
2382   /* Ensure RTL is created for debug args.  */
2383   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2384     {
2385       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2386       unsigned int ix;
2387       tree dtemp;
2388 
2389       if (debug_args)
2390 	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2391 	  {
2392 	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2393 	    expand_debug_expr (dtemp);
2394 	  }
2395     }
2396 
2397   lhs = gimple_call_lhs (stmt);
2398   if (lhs)
2399     expand_assignment (lhs, exp, false);
2400   else
2401     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2402 
2403   mark_transaction_restart_calls (stmt);
2404 }
2405 
2406 
2407 /* Generate RTL for an asm statement (explicit assembler code).
2408    STRING is a STRING_CST node containing the assembler code text,
2409    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
2410    insn is volatile; don't optimize it.  */
2411 
2412 static void
2413 expand_asm_loc (tree string, int vol, location_t locus)
2414 {
2415   rtx body;
2416 
2417   if (TREE_CODE (string) == ADDR_EXPR)
2418     string = TREE_OPERAND (string, 0);
2419 
2420   body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2421 				ggc_strdup (TREE_STRING_POINTER (string)),
2422 				locus);
2423 
2424   MEM_VOLATILE_P (body) = vol;
2425 
2426   emit_insn (body);
2427 }
2428 
2429 /* Return the number of times character C occurs in string S.  */
2430 static int
2431 n_occurrences (int c, const char *s)
2432 {
2433   int n = 0;
2434   while (*s)
2435     n += (*s++ == c);
2436   return n;
2437 }
2438 
2439 /* A subroutine of expand_asm_operands.  Check that all operands have
2440    the same number of alternatives.  Return true if so.  */
2441 
2442 static bool
2443 check_operand_nalternatives (tree outputs, tree inputs)
2444 {
2445   if (outputs || inputs)
2446     {
2447       tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2448       int nalternatives
2449 	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2450       tree next = inputs;
2451 
2452       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2453 	{
2454 	  error ("too many alternatives in %<asm%>");
2455 	  return false;
2456 	}
2457 
2458       tmp = outputs;
2459       while (tmp)
2460 	{
2461 	  const char *constraint
2462 	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2463 
2464 	  if (n_occurrences (',', constraint) != nalternatives)
2465 	    {
2466 	      error ("operand constraints for %<asm%> differ "
2467 		     "in number of alternatives");
2468 	      return false;
2469 	    }
2470 
2471 	  if (TREE_CHAIN (tmp))
2472 	    tmp = TREE_CHAIN (tmp);
2473 	  else
2474 	    tmp = next, next = 0;
2475 	}
2476     }
2477 
2478   return true;
2479 }
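
/* Hedged sketch (not built as part of GCC; example_n_alternatives is
   made up) of the counting the check above relies on: a constraint
   string describes one more alternative than it has commas, so "=r,m"
   and "g,r" both describe two alternatives and pass the check.  */
#if 0
static int
example_n_alternatives (const char *constraint)
{
  return n_occurrences (',', constraint) + 1;
}
#endif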
2480 
2481 /* Check for overlap between registers marked in CLOBBERED_REGS and
2482    anything inappropriate in T.  Emit an error and return true if an
2483    overlap is found, false if the operand is ok.  */
2484 
2485 static bool
2486 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2487 {
2488   /* Conflicts between asm-declared register variables and the clobber
2489      list are not allowed.  */
2490   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2491 
2492   if (overlap)
2493     {
2494       error ("asm-specifier for variable %qE conflicts with asm clobber list",
2495 	     DECL_NAME (overlap));
2496 
2497       /* Reset registerness to stop multiple errors emitted for a single
2498 	 variable.  */
2499       DECL_REGISTER (overlap) = 0;
2500       return true;
2501     }
2502 
2503   return false;
2504 }
2505 
2506 /* Generate RTL for an asm statement with arguments.
2507    STRING is the instruction template.
2508    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2509    Each output or input has an expression in the TREE_VALUE and
2510    a tree list in TREE_PURPOSE which in turn contains a constraint
2511    name in TREE_VALUE (or NULL_TREE) and a constraint string
2512    in TREE_PURPOSE.
2513    CLOBBERS is a list of STRING_CST nodes each naming a hard register
2514    that is clobbered by this insn.
2515 
2516    LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2517    should be the fallthru basic block of the asm goto.
2518 
2519    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2520    Some elements of OUTPUTS may be replaced with trees representing temporary
2521    values.  The caller should copy those temporary values to the originally
2522    specified lvalues.
2523 
2524    VOL nonzero means the insn is volatile; don't optimize it.  */
2525 
2526 static void
2527 expand_asm_operands (tree string, tree outputs, tree inputs,
2528 		     tree clobbers, tree labels, basic_block fallthru_bb,
2529 		     int vol, location_t locus)
2530 {
2531   rtvec argvec, constraintvec, labelvec;
2532   rtx body;
2533   int ninputs = list_length (inputs);
2534   int noutputs = list_length (outputs);
2535   int nlabels = list_length (labels);
2536   int ninout;
2537   int nclobbers;
2538   HARD_REG_SET clobbered_regs;
2539   int clobber_conflict_found = 0;
2540   tree tail;
2541   tree t;
2542   int i;
2543   /* Vector of RTX's of evaluated output operands.  */
2544   rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2545   int *inout_opnum = XALLOCAVEC (int, noutputs);
2546   rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2547   machine_mode *inout_mode = XALLOCAVEC (machine_mode, noutputs);
2548   const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2549   int old_generating_concat_p = generating_concat_p;
2550   rtx_code_label *fallthru_label = NULL;
2551 
2552   /* An ASM with no outputs needs to be treated as volatile, for now.  */
2553   if (noutputs == 0)
2554     vol = 1;
2555 
2556   if (! check_operand_nalternatives (outputs, inputs))
2557     return;
2558 
2559   string = resolve_asm_operand_names (string, outputs, inputs, labels);
2560 
2561   /* Collect constraints.  */
2562   i = 0;
2563   for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2564     constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2565   for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2566     constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2567 
2568   /* Sometimes we wish to automatically clobber registers across an asm.
2569      Case in point is when the i386 backend moved from cc0 to a hard reg --
2570      maintaining source-level compatibility means automatically clobbering
2571      the flags register.  */
2572   clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2573 
2574   /* Count the number of meaningful clobbered registers, ignoring what
2575      we would ignore later.  */
2576   nclobbers = 0;
2577   CLEAR_HARD_REG_SET (clobbered_regs);
2578   for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2579     {
2580       const char *regname;
2581       int nregs;
2582 
2583       if (TREE_VALUE (tail) == error_mark_node)
2584 	return;
2585       regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2586 
2587       i = decode_reg_name_and_count (regname, &nregs);
2588       if (i == -4)
2589 	++nclobbers;
2590       else if (i == -2)
2591 	error ("unknown register name %qs in %<asm%>", regname);
2592 
2593       /* Mark clobbered registers.  */
2594       if (i >= 0)
2595         {
2596 	  int reg;
2597 
2598 	  for (reg = i; reg < i + nregs; reg++)
2599 	    {
2600 	      ++nclobbers;
2601 
2602 	      /* Clobbering the PIC register is an error.  */
2603 	      if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2604 		{
2605 		  error ("PIC register clobbered by %qs in %<asm%>", regname);
2606 		  return;
2607 		}
2608 
2609 	      SET_HARD_REG_BIT (clobbered_regs, reg);
2610 	    }
2611 	}
2612     }
2613 
2614   /* First pass over inputs and outputs checks validity and sets
2615      mark_addressable if needed.  */
2616 
2617   ninout = 0;
2618   for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2619     {
2620       tree val = TREE_VALUE (tail);
2621       tree type = TREE_TYPE (val);
2622       const char *constraint;
2623       bool is_inout;
2624       bool allows_reg;
2625       bool allows_mem;
2626 
2627       /* If there's an erroneous arg, emit no insn.  */
2628       if (type == error_mark_node)
2629 	return;
2630 
2631       /* Try to parse the output constraint.  If that fails, there's
2632 	 no point in going further.  */
2633       constraint = constraints[i];
2634       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2635 				    &allows_mem, &allows_reg, &is_inout))
2636 	return;
2637 
2638       if (! allows_reg
2639 	  && (allows_mem
2640 	      || is_inout
2641 	      || (DECL_P (val)
2642 		  && REG_P (DECL_RTL (val))
2643 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2644 	mark_addressable (val);
2645 
2646       if (is_inout)
2647 	ninout++;
2648     }
2649 
2650   ninputs += ninout;
2651   if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2652     {
2653       error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2654       return;
2655     }
2656 
2657   for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2658     {
2659       bool allows_reg, allows_mem;
2660       const char *constraint;
2661 
2662       /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2663 	 would get VOIDmode and that could cause a crash in reload.  */
2664       if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2665 	return;
2666 
2667       constraint = constraints[i + noutputs];
2668       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2669 				    constraints, &allows_mem, &allows_reg))
2670 	return;
2671 
2672       if (! allows_reg && allows_mem)
2673 	mark_addressable (TREE_VALUE (tail));
2674     }
2675 
2676   /* Second pass evaluates arguments.  */
2677 
2678   /* Make sure stack is consistent for asm goto.  */
2679   if (nlabels > 0)
2680     do_pending_stack_adjust ();
2681 
2682   ninout = 0;
2683   for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2684     {
2685       tree val = TREE_VALUE (tail);
2686       tree type = TREE_TYPE (val);
2687       bool is_inout;
2688       bool allows_reg;
2689       bool allows_mem;
2690       rtx op;
2691       bool ok;
2692 
2693       ok = parse_output_constraint (&constraints[i], i, ninputs,
2694 				    noutputs, &allows_mem, &allows_reg,
2695 				    &is_inout);
2696       gcc_assert (ok);
2697 
2698       /* If an output operand is not a decl or indirect ref and our constraint
2699 	 allows a register, make a temporary to act as an intermediate.
2700 	 Make the asm insn write into that, then our caller will copy it to
2701 	 the real output operand.  Likewise for promoted variables.  */
2702 
2703       generating_concat_p = 0;
2704 
2705       real_output_rtx[i] = NULL_RTX;
2706       if ((TREE_CODE (val) == INDIRECT_REF
2707 	   && allows_mem)
2708 	  || (DECL_P (val)
2709 	      && (allows_mem || REG_P (DECL_RTL (val)))
2710 	      && ! (REG_P (DECL_RTL (val))
2711 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2712 	  || ! allows_reg
2713 	  || is_inout)
2714 	{
2715 	  op = expand_expr (val, NULL_RTX, VOIDmode,
2716 			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2717 	  if (MEM_P (op))
2718 	    op = validize_mem (op);
2719 
2720 	  if (! allows_reg && !MEM_P (op))
2721 	    error ("output number %d not directly addressable", i);
2722 	  if ((! allows_mem && MEM_P (op))
2723 	      || GET_CODE (op) == CONCAT)
2724 	    {
2725 	      real_output_rtx[i] = op;
2726 	      op = gen_reg_rtx (GET_MODE (op));
2727 	      if (is_inout)
2728 		emit_move_insn (op, real_output_rtx[i]);
2729 	    }
2730 	}
2731       else
2732 	{
2733 	  op = assign_temp (type, 0, 1);
2734 	  op = validize_mem (op);
2735 	  if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2736 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2737 	  TREE_VALUE (tail) = make_tree (type, op);
2738 	}
2739       output_rtx[i] = op;
2740 
2741       generating_concat_p = old_generating_concat_p;
2742 
2743       if (is_inout)
2744 	{
2745 	  inout_mode[ninout] = TYPE_MODE (type);
2746 	  inout_opnum[ninout++] = i;
2747 	}
2748 
2749       if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2750 	clobber_conflict_found = 1;
2751     }
2752 
2753   /* Make vectors for the expression-rtx, constraint strings,
2754      and named operands.  */
2755 
2756   argvec = rtvec_alloc (ninputs);
2757   constraintvec = rtvec_alloc (ninputs);
2758   labelvec = rtvec_alloc (nlabels);
2759 
2760   body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2761 				: GET_MODE (output_rtx[0])),
2762 			       ggc_strdup (TREE_STRING_POINTER (string)),
2763 			       empty_string, 0, argvec, constraintvec,
2764 			       labelvec, locus);
2765 
2766   MEM_VOLATILE_P (body) = vol;
2767 
2768   /* Eval the inputs and put them into ARGVEC.
2769      Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */
2770 
2771   for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2772     {
2773       bool allows_reg, allows_mem;
2774       const char *constraint;
2775       tree val, type;
2776       rtx op;
2777       bool ok;
2778 
2779       constraint = constraints[i + noutputs];
2780       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2781 				   constraints, &allows_mem, &allows_reg);
2782       gcc_assert (ok);
2783 
2784       generating_concat_p = 0;
2785 
2786       val = TREE_VALUE (tail);
2787       type = TREE_TYPE (val);
2788       /* EXPAND_INITIALIZER will not generate code for valid initializer
2789 	 constants, but will still generate code for other types of operand.
2790 	 This is the behavior we want for constant constraints.  */
2791       op = expand_expr (val, NULL_RTX, VOIDmode,
2792 			allows_reg ? EXPAND_NORMAL
2793 			: allows_mem ? EXPAND_MEMORY
2794 			: EXPAND_INITIALIZER);
2795 
2796       /* Never pass a CONCAT to an ASM.  */
2797       if (GET_CODE (op) == CONCAT)
2798 	op = force_reg (GET_MODE (op), op);
2799       else if (MEM_P (op))
2800 	op = validize_mem (op);
2801 
2802       if (asm_operand_ok (op, constraint, NULL) <= 0)
2803 	{
2804 	  if (allows_reg && TYPE_MODE (type) != BLKmode)
2805 	    op = force_reg (TYPE_MODE (type), op);
2806 	  else if (!allows_mem)
2807 	    warning (0, "asm operand %d probably doesn%'t match constraints",
2808 		     i + noutputs);
2809 	  else if (MEM_P (op))
2810 	    {
2811 	      /* We won't recognize either volatile memory or memory
2812 		 with a queued address as a valid memory_operand
2813 		 at this point.  Ignore it: clearly this *is* memory.  */
2814 	    }
2815 	  else
2816 	    gcc_unreachable ();
2817 	}
2818 
2819       generating_concat_p = old_generating_concat_p;
2820       ASM_OPERANDS_INPUT (body, i) = op;
2821 
2822       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2823 	= gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
2824 				 ggc_strdup (constraints[i + noutputs]),
2825 				 locus);
2826 
2827       if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2828 	clobber_conflict_found = 1;
2829     }
2830 
2831   /* Protect all the operands from the queue now that they have all been
2832      evaluated.  */
2833 
2834   generating_concat_p = 0;
2835 
2836   /* For in-out operands, copy output rtx to input rtx.  */
2837   for (i = 0; i < ninout; i++)
2838     {
2839       int j = inout_opnum[i];
2840       char buffer[16];
2841 
2842       ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2843 	= output_rtx[j];
2844 
2845       sprintf (buffer, "%d", j);
2846       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2847 	= gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
2848     }
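
  /* Illustrative example (source-level; template left empty): for an
     in-out operand such as asm ("" : "+r" (x)), the operand appears
     once as output 0 and again as an extra input whose constraint is
     the matching digit "0", which is the string the sprintf above
     builds.  */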
2849 
2850   /* Copy labels to the vector.  */
2851   for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2852     {
2853       rtx r;
2854       /* If asm goto has any labels in the fallthru basic block, use
2855 	 a label that we emit immediately after the asm goto.  Expansion
2856 	 may insert further instructions into the same basic block after
2857 	 asm goto and if we don't do this, insertion of instructions on
2858 	 the fallthru edge might misbehave.  See PR58670.  */
2859       if (fallthru_bb
2860 	  && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2861 	{
2862 	  if (fallthru_label == NULL_RTX)
2863 	    fallthru_label = gen_label_rtx ();
2864 	  r = fallthru_label;
2865 	}
2866       else
2867 	r = label_rtx (TREE_VALUE (tail));
2868       ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2869     }
2870 
2871   generating_concat_p = old_generating_concat_p;
2872 
2873   /* Now, for each output, construct an rtx
2874      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2875 			       ARGVEC CONSTRAINTS OPNAMES))
2876      If there is more than one, put them inside a PARALLEL.  */
2877 
2878   if (nlabels > 0 && nclobbers == 0)
2879     {
2880       gcc_assert (noutputs == 0);
2881       emit_jump_insn (body);
2882     }
2883   else if (noutputs == 0 && nclobbers == 0)
2884     {
2885       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
2886       emit_insn (body);
2887     }
2888   else if (noutputs == 1 && nclobbers == 0)
2889     {
2890       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2891       emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2892     }
2893   else
2894     {
2895       rtx obody = body;
2896       int num = noutputs;
2897 
2898       if (num == 0)
2899 	num = 1;
2900 
2901       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2902 
2903       /* For each output operand, store a SET.  */
2904       for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2905 	{
2906 	  XVECEXP (body, 0, i)
2907 	    = gen_rtx_SET (VOIDmode,
2908 			   output_rtx[i],
2909 			   gen_rtx_ASM_OPERANDS
2910 			   (GET_MODE (output_rtx[i]),
2911 			    ggc_strdup (TREE_STRING_POINTER (string)),
2912 			    ggc_strdup (constraints[i]),
2913 			    i, argvec, constraintvec, labelvec, locus));
2914 
2915 	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2916 	}
2917 
2918       /* If there are no outputs (but there are some clobbers)
2919 	 store the bare ASM_OPERANDS into the PARALLEL.  */
2920 
2921       if (i == 0)
2922 	XVECEXP (body, 0, i++) = obody;
2923 
2924       /* Store (clobber REG) for each clobbered register specified.  */
2925 
2926       for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2927 	{
2928 	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2929 	  int reg, nregs;
2930 	  int j = decode_reg_name_and_count (regname, &nregs);
2931 	  rtx clobbered_reg;
2932 
2933 	  if (j < 0)
2934 	    {
2935 	      if (j == -3)	/* `cc', which is not a register */
2936 		continue;
2937 
2938 	      if (j == -4)	/* `memory', don't cache memory across asm */
2939 		{
2940 		  XVECEXP (body, 0, i++)
2941 		    = gen_rtx_CLOBBER (VOIDmode,
2942 				       gen_rtx_MEM
2943 				       (BLKmode,
2944 					gen_rtx_SCRATCH (VOIDmode)));
2945 		  continue;
2946 		}
2947 
2948 	      /* Ignore unknown register, error already signaled.  */
2949 	      continue;
2950 	    }
2951 
2952 	  for (reg = j; reg < j + nregs; reg++)
2953 	    {
2954 	      /* Use QImode since that's guaranteed to clobber just
2955 	       * one reg.  */
2956 		 one reg.  */
2957 
2958 	      /* Do sanity check for overlap between clobbers and
2959 		 respectively input and outputs that hasn't been
2960 		 handled.  Such overlap should have been detected and
2961 		 reported above.  */
2962 	      if (!clobber_conflict_found)
2963 		{
2964 		  int opno;
2965 
2966 		  /* We test the old body (obody) contents to avoid
2967 		     tripping over the under-construction body.  */
2968 		  for (opno = 0; opno < noutputs; opno++)
2969 		    if (reg_overlap_mentioned_p (clobbered_reg,
2970 						 output_rtx[opno]))
2971 		      internal_error
2972 			("asm clobber conflict with output operand");
2973 
2974 		  for (opno = 0; opno < ninputs - ninout; opno++)
2975 		    if (reg_overlap_mentioned_p (clobbered_reg,
2976 						 ASM_OPERANDS_INPUT (obody,
2977 								     opno)))
2978 		      internal_error
2979 			("asm clobber conflict with input operand");
2980 		}
2981 
2982 	      XVECEXP (body, 0, i++)
2983 		= gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2984 	    }
2985 	}
2986 
2987       if (nlabels > 0)
2988 	emit_jump_insn (body);
2989       else
2990 	emit_insn (body);
2991     }
2992 
2993   if (fallthru_label)
2994     emit_label (fallthru_label);
2995 
2996   /* For any outputs that needed reloading into registers, spill them
2997      back to where they belong.  */
2998   for (i = 0; i < noutputs; ++i)
2999     if (real_output_rtx[i])
3000       emit_move_insn (real_output_rtx[i], output_rtx[i]);
3001 
3002   crtl->has_asm_statement = 1;
3003   free_temp_slots ();
3004 }
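
/* Hedged usage illustration (not built as part of GCC; the function is
   made up and the template is empty because only the resulting RTL
   shape matters): one output, one input and "memory"/"cc" clobbers are
   expanded by the code above into a PARALLEL whose first element is a
   SET of X's pseudo from an ASM_OPERANDS carrying the template and
   constraints, followed by (clobber (mem:BLK (scratch))) for "memory";
   "cc" is skipped on targets where it does not name a register.  */
#if 0
static int
example_asm_usage (int y)
{
  int x;
  asm volatile ("" : "=r" (x) : "r" (y) : "memory", "cc");
  return x;
}
#endif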
3005 
3006 
3007 static void
3008 expand_asm_stmt (gasm *stmt)
3009 {
3010   int noutputs;
3011   tree outputs, tail, t;
3012   tree *o;
3013   size_t i, n;
3014   const char *s;
3015   tree str, out, in, cl, labels;
3016   location_t locus = gimple_location (stmt);
3017   basic_block fallthru_bb = NULL;
3018 
3019   /* Meh... convert the gimple asm operands into real tree lists.
3020      Eventually we should make all routines work on the vectors instead
3021      of relying on TREE_CHAIN.  */
3022   out = NULL_TREE;
3023   n = gimple_asm_noutputs (stmt);
3024   if (n > 0)
3025     {
3026       t = out = gimple_asm_output_op (stmt, 0);
3027       for (i = 1; i < n; i++)
3028 	t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
3029     }
3030 
3031   in = NULL_TREE;
3032   n = gimple_asm_ninputs (stmt);
3033   if (n > 0)
3034     {
3035       t = in = gimple_asm_input_op (stmt, 0);
3036       for (i = 1; i < n; i++)
3037 	t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
3038     }
3039 
3040   cl = NULL_TREE;
3041   n = gimple_asm_nclobbers (stmt);
3042   if (n > 0)
3043     {
3044       t = cl = gimple_asm_clobber_op (stmt, 0);
3045       for (i = 1; i < n; i++)
3046 	t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
3047     }
3048 
3049   labels = NULL_TREE;
3050   n = gimple_asm_nlabels (stmt);
3051   if (n > 0)
3052     {
3053       edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3054       if (fallthru)
3055 	fallthru_bb = fallthru->dest;
3056       t = labels = gimple_asm_label_op (stmt, 0);
3057       for (i = 1; i < n; i++)
3058 	t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
3059     }
3060 
3061   s = gimple_asm_string (stmt);
3062   str = build_string (strlen (s), s);
3063 
3064   if (gimple_asm_input_p (stmt))
3065     {
3066       expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
3067       return;
3068     }
3069 
3070   outputs = out;
3071   noutputs = gimple_asm_noutputs (stmt);
3072   /* o[I] is the place that output number I should be written.  */
3073   o = (tree *) alloca (noutputs * sizeof (tree));
3074 
3075   /* Record the contents of OUTPUTS before it is modified.  */
3076   for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3077     o[i] = TREE_VALUE (tail);
3078 
3079   /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
3080      OUTPUTS some trees for where the values were actually stored.  */
3081   expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
3082 		       gimple_asm_volatile_p (stmt), locus);
3083 
3084   /* Copy all the intermediate outputs into the specified outputs.  */
3085   for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3086     {
3087       if (o[i] != TREE_VALUE (tail))
3088 	{
3089 	  expand_assignment (o[i], TREE_VALUE (tail), false);
3090 	  free_temp_slots ();
3091 
3092 	  /* Restore the original value so that it's correct the next
3093 	     time we expand this function.  */
3094 	  TREE_VALUE (tail) = o[i];
3095 	}
3096     }
3097 }
3098 
3099 /* Emit code to jump to the address
3100    specified by the pointer expression EXP.  */
3101 
3102 static void
3103 expand_computed_goto (tree exp)
3104 {
3105   rtx x = expand_normal (exp);
3106 
3107   do_pending_stack_adjust ();
3108   emit_indirect_jump (x);
3109 }
3110 
3111 /* Generate RTL code for a `goto' statement with target label LABEL.
3112    LABEL should be a LABEL_DECL tree node that was or will later be
3113    defined with `expand_label'.  */
3114 
3115 static void
3116 expand_goto (tree label)
3117 {
3118 #ifdef ENABLE_CHECKING
3119   /* Check for a nonlocal goto to a containing function.  Should have
3120      gotten translated to __builtin_nonlocal_goto.  */
3121   tree context = decl_function_context (label);
3122   gcc_assert (!context || context == current_function_decl);
3123 #endif
3124 
3125   emit_jump (label_rtx (label));
3126 }
3127 
3128 /* Output a return with no value.  */
3129 
3130 static void
3131 expand_null_return_1 (void)
3132 {
3133   clear_pending_stack_adjust ();
3134   do_pending_stack_adjust ();
3135   emit_jump (return_label);
3136 }
3137 
3138 /* Generate RTL to return from the current function, with no value.
3139    (That is, we do not do anything about returning any value.)  */
3140 
3141 void
3142 expand_null_return (void)
3143 {
3144   /* If this function was declared to return a value, but we
3145      didn't, clobber the return registers so that they are not
3146      propagated live to the rest of the function.  */
3147   clobber_return_register ();
3148 
3149   expand_null_return_1 ();
3150 }
3151 
3152 /* Generate RTL to return from the current function, with value VAL.  */
3153 
3154 static void
3155 expand_value_return (rtx val)
3156 {
3157   /* Copy the value to the return location unless it's already there.  */
3158 
3159   tree decl = DECL_RESULT (current_function_decl);
3160   rtx return_reg = DECL_RTL (decl);
3161   if (return_reg != val)
3162     {
3163       tree funtype = TREE_TYPE (current_function_decl);
3164       tree type = TREE_TYPE (decl);
3165       int unsignedp = TYPE_UNSIGNED (type);
3166       machine_mode old_mode = DECL_MODE (decl);
3167       machine_mode mode;
3168       if (DECL_BY_REFERENCE (decl))
3169         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3170       else
3171         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3172 
3173       if (mode != old_mode)
3174 	val = convert_modes (mode, old_mode, val, unsignedp);
3175 
3176       if (GET_CODE (return_reg) == PARALLEL)
3177 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3178       else
3179 	emit_move_insn (return_reg, val);
3180     }
3181 
3182   expand_null_return_1 ();
3183 }
3184 
3185 /* Generate RTL to evaluate the expression RETVAL and return it
3186    from the current function.  */
3187 
3188 static void
3189 expand_return (tree retval, tree bounds)
3190 {
3191   rtx result_rtl;
3192   rtx val = 0;
3193   tree retval_rhs;
3194   rtx bounds_rtl;
3195 
3196   /* If function wants no value, give it none.  */
3197   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3198     {
3199       expand_normal (retval);
3200       expand_null_return ();
3201       return;
3202     }
3203 
3204   if (retval == error_mark_node)
3205     {
3206       /* Treat this like a return of no value from a function that
3207 	 returns a value.  */
3208       expand_null_return ();
3209       return;
3210     }
3211   else if ((TREE_CODE (retval) == MODIFY_EXPR
3212 	    || TREE_CODE (retval) == INIT_EXPR)
3213 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3214     retval_rhs = TREE_OPERAND (retval, 1);
3215   else
3216     retval_rhs = retval;
3217 
3218   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3219 
3220   /* Put returned bounds to the right place.  */
3221   bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3222   if (bounds_rtl)
3223     {
3224       rtx addr = NULL;
3225       rtx bnd = NULL;
3226 
3227       if (bounds && bounds != error_mark_node)
3228 	{
3229 	  bnd = expand_normal (bounds);
3230 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3231 	}
3232       else if (REG_P (bounds_rtl))
3233 	{
3234 	  if (bounds)
3235 	    bnd = chkp_expand_zero_bounds ();
3236 	  else
3237 	    {
3238 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3239 	      addr = gen_rtx_MEM (Pmode, addr);
3240 	      bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3241 	    }
3242 
3243 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3244 	}
3245       else
3246 	{
3247 	  int n;
3248 
3249 	  gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3250 
3251 	  if (bounds)
3252 	    bnd = chkp_expand_zero_bounds ();
3253 	  else
3254 	    {
3255 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3256 	      addr = gen_rtx_MEM (Pmode, addr);
3257 	    }
3258 
3259 	  for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3260 	    {
3261 	      rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3262 	      if (!bounds)
3263 		{
3264 		  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3265 		  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3266 		  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3267 		}
3268 	      targetm.calls.store_returned_bounds (slot, bnd);
3269 	    }
3270 	}
3271     }
3272   else if (chkp_function_instrumented_p (current_function_decl)
3273 	   && !BOUNDED_P (retval_rhs)
3274 	   && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3275 	   && TREE_CODE (retval_rhs) != RESULT_DECL)
3276     {
3277       rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3278       addr = gen_rtx_MEM (Pmode, addr);
3279 
3280       gcc_assert (MEM_P (result_rtl));
3281 
3282       chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3283     }
3284 
3285   /* If we are returning the RESULT_DECL, then the value has already
3286      been stored into it, so we don't have to do anything special.  */
3287   if (TREE_CODE (retval_rhs) == RESULT_DECL)
3288     expand_value_return (result_rtl);
3289 
3290   /* If the result is an aggregate that is being returned in one (or more)
3291      registers, load the registers here.  */
3292 
3293   else if (retval_rhs != 0
3294 	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3295 	   && REG_P (result_rtl))
3296     {
3297       val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3298       if (val)
3299 	{
3300 	  /* Use the mode of the result value on the return register.  */
3301 	  PUT_MODE (result_rtl, GET_MODE (val));
3302 	  expand_value_return (val);
3303 	}
3304       else
3305 	expand_null_return ();
3306     }
3307   else if (retval_rhs != 0
3308 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3309 	   && (REG_P (result_rtl)
3310 	       || (GET_CODE (result_rtl) == PARALLEL)))
3311     {
3312       /* Compute the return value into a temporary (usually a pseudo reg).  */
3313       val
3314 	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3315       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3316       val = force_not_mem (val);
3317       expand_value_return (val);
3318     }
3319   else
3320     {
3321       /* No hard reg used; calculate value into hard return reg.  */
3322       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3323       expand_value_return (result_rtl);
3324     }
3325 }
3326 
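/* Illustrative sketch (assumption-laden, and the #if 0 block is never
   compiled): a source-level shape that is assumed, for illustration, to
   reach the BLKmode-aggregate-in-register branch of expand_return above.
   A three-byte struct has no integer mode of matching size and so
   normally gets BLKmode, yet several ABIs still return it in a register;
   DECL_RTL of the RESULT_DECL is then a REG and the value goes through
   copy_blkmode_to_reg before expand_value_return.  */
#if 0
struct small { char a, b, c; };

struct small
make_small (char x)
{
  struct small s;
  s.a = s.b = s.c = x;
  return s;		/* BLKmode value returned in a register
			   on such an ABI.  */
}
#endif
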
3327 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3328    STMT that doesn't require special handling for outgoing edges.  That
3329    is no tailcalls and no GIMPLE_COND.  */
3330 
3331 static void
3332 expand_gimple_stmt_1 (gimple stmt)
3333 {
3334   tree op0;
3335 
3336   set_curr_insn_location (gimple_location (stmt));
3337 
3338   switch (gimple_code (stmt))
3339     {
3340     case GIMPLE_GOTO:
3341       op0 = gimple_goto_dest (stmt);
3342       if (TREE_CODE (op0) == LABEL_DECL)
3343 	expand_goto (op0);
3344       else
3345 	expand_computed_goto (op0);
3346       break;
3347     case GIMPLE_LABEL:
3348       expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3349       break;
3350     case GIMPLE_NOP:
3351     case GIMPLE_PREDICT:
3352       break;
3353     case GIMPLE_SWITCH:
3354       expand_case (as_a <gswitch *> (stmt));
3355       break;
3356     case GIMPLE_ASM:
3357       expand_asm_stmt (as_a <gasm *> (stmt));
3358       break;
3359     case GIMPLE_CALL:
3360       expand_call_stmt (as_a <gcall *> (stmt));
3361       break;
3362 
3363     case GIMPLE_RETURN:
3364       {
3365 	tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3366 	op0 = gimple_return_retval (as_a <greturn *> (stmt));
3367 
3368 	if (op0 && op0 != error_mark_node)
3369 	  {
3370 	    tree result = DECL_RESULT (current_function_decl);
3371 
3372 	    /* Mark that we have a return statement with missing bounds.  */
3373 	    if (!bnd
3374 		&& chkp_function_instrumented_p (cfun->decl)
3375 		&& !DECL_P (op0))
3376 	      bnd = error_mark_node;
3377 
3378 	    /* If we are not returning the current function's RESULT_DECL,
3379 	       build an assignment to it.  */
3380 	    if (op0 != result)
3381 	      {
3382 		/* I believe that a function's RESULT_DECL is unique.  */
3383 		gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3384 
3385 		/* ??? We'd like to use simply expand_assignment here,
3386 		   but this fails if the value is of BLKmode but the return
3387 		   decl is a register.  expand_return has special handling
3388 		   for this combination, which eventually should move
3389 		   to common code.  See comments there.  Until then, let's
3390 		   build a modify expression :-/  */
3391 		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3392 			      result, op0);
3393 	      }
3394 	  }
3395 
3396 	if (!op0)
3397 	  expand_null_return ();
3398 	else
3399 	  expand_return (op0, bnd);
3400       }
3401       break;
3402 
3403     case GIMPLE_ASSIGN:
3404       {
3405 	gassign *assign_stmt = as_a <gassign *> (stmt);
3406 	tree lhs = gimple_assign_lhs (assign_stmt);
3407 
3408 	/* Tree expand used to fiddle with |= and &= of two bitfield
3409 	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
3410 	   of binary assigns must be a gimple reg.  */
3411 
3412 	if (TREE_CODE (lhs) != SSA_NAME
3413 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
3414 	       == GIMPLE_SINGLE_RHS)
3415 	  {
3416 	    tree rhs = gimple_assign_rhs1 (assign_stmt);
3417 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3418 			== GIMPLE_SINGLE_RHS);
3419 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3420 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3421 	    if (TREE_CLOBBER_P (rhs))
3422 	      /* This is a clobber to mark the going out of scope for
3423 		 this LHS.  */
3424 	      ;
3425 	    else
3426 	      expand_assignment (lhs, rhs,
3427 				 gimple_assign_nontemporal_move_p (
3428 				   assign_stmt));
3429 	  }
3430 	else
3431 	  {
3432 	    rtx target, temp;
3433 	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3434 	    struct separate_ops ops;
3435 	    bool promoted = false;
3436 
3437 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3438 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3439 	      promoted = true;
3440 
3441 	    ops.code = gimple_assign_rhs_code (assign_stmt);
3442 	    ops.type = TREE_TYPE (lhs);
3443 	    switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3444 	      {
3445 		case GIMPLE_TERNARY_RHS:
3446 		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
3447 		  /* Fallthru */
3448 		case GIMPLE_BINARY_RHS:
3449 		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
3450 		  /* Fallthru */
3451 		case GIMPLE_UNARY_RHS:
3452 		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
3453 		  break;
3454 		default:
3455 		  gcc_unreachable ();
3456 	      }
3457 	    ops.location = gimple_location (stmt);
3458 
3459 	    /* If we want to use a nontemporal store, force the value to
3460 	       register first.  If we store into a promoted register,
3461 	       don't directly expand to target.  */
3462 	    temp = nontemporal || promoted ? NULL_RTX : target;
3463 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3464 				       EXPAND_NORMAL);
3465 
3466 	    if (temp == target)
3467 	      ;
3468 	    else if (promoted)
3469 	      {
3470 		int unsignedp = SUBREG_PROMOTED_SIGN (target);
3471 		/* If TEMP is a VOIDmode constant, use convert_modes to make
3472 		   sure that we properly convert it.  */
3473 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3474 		  {
3475 		    temp = convert_modes (GET_MODE (target),
3476 					  TYPE_MODE (ops.type),
3477 					  temp, unsignedp);
3478 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3479 					  GET_MODE (target), temp, unsignedp);
3480 		  }
3481 
3482 		convert_move (SUBREG_REG (target), temp, unsignedp);
3483 	      }
3484 	    else if (nontemporal && emit_storent_insn (target, temp))
3485 	      ;
3486 	    else
3487 	      {
3488 		temp = force_operand (temp, target);
3489 		if (temp != target)
3490 		  emit_move_insn (target, temp);
3491 	      }
3492 	  }
3493       }
3494       break;
3495 
3496     default:
3497       gcc_unreachable ();
3498     }
3499 }
3500 
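/* Illustrative note (shapes below are assumptions; only the structure
   matters): a concrete form of what the GIMPLE_RETURN case above sees.
   For a function such as

     int f (int a, int b) { return a + b; }

   the gimplifier typically leaves

     _1 = a + b;
     return _1;

   so OP0 is _1 rather than the RESULT_DECL, and the code wraps it as
   MODIFY_EXPR <RESULT_DECL, _1> before calling expand_return.  */
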
3501 /* Expand one gimple statement STMT and return the last RTL instruction
3502    before any of the newly generated ones.
3503 
3504    In addition to generating the necessary RTL instructions this also
3505    sets REG_EH_REGION notes if necessary and sets the current source
3506    location for diagnostics.  */
3507 
3508 static rtx_insn *
3509 expand_gimple_stmt (gimple stmt)
3510 {
3511   location_t saved_location = input_location;
3512   rtx_insn *last = get_last_insn ();
3513   int lp_nr;
3514 
3515   gcc_assert (cfun);
3516 
3517   /* We need to save and restore the current source location so that errors
3518      discovered during expansion are emitted with the right location.  But
3519      it would be better if the diagnostic routines used the source location
3520      embedded in the tree nodes rather than globals.  */
3521   if (gimple_has_location (stmt))
3522     input_location = gimple_location (stmt);
3523 
3524   expand_gimple_stmt_1 (stmt);
3525 
3526   /* Free any temporaries used to evaluate this statement.  */
3527   free_temp_slots ();
3528 
3529   input_location = saved_location;
3530 
3531   /* Mark all insns that may trap.  */
3532   lp_nr = lookup_stmt_eh_lp (stmt);
3533   if (lp_nr)
3534     {
3535       rtx_insn *insn;
3536       for (insn = next_real_insn (last); insn;
3537 	   insn = next_real_insn (insn))
3538 	{
3539 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3540 	      /* If we want exceptions for non-call insns, any
3541 		 may_trap_p instruction may throw.  */
3542 	      && GET_CODE (PATTERN (insn)) != CLOBBER
3543 	      && GET_CODE (PATTERN (insn)) != USE
3544 	      && insn_could_throw_p (insn))
3545 	    make_reg_eh_region_note (insn, 0, lp_nr);
3546 	}
3547     }
3548 
3549   return last;
3550 }
3551 
3552 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
3553    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
3554    generated a tail call (something that might be denied by the ABI
3555    rules governing the call; see calls.c).
3556 
3557    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3558    can still reach the rest of BB.  The case here is __builtin_sqrt,
3559    where the NaN result goes through the external function (with a
3560    tailcall) and the normal result happens via a sqrt instruction.  */
3561 
3562 static basic_block
3563 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3564 {
3565   rtx_insn *last2, *last;
3566   edge e;
3567   edge_iterator ei;
3568   int probability;
3569   gcov_type count;
3570 
3571   last2 = last = expand_gimple_stmt (stmt);
3572 
3573   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3574     if (CALL_P (last) && SIBLING_CALL_P (last))
3575       goto found;
3576 
3577   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3578 
3579   *can_fallthru = true;
3580   return NULL;
3581 
3582  found:
3583   /* ??? Wouldn't it be better to just reset any pending stack adjust?
3584      Any instructions emitted here are about to be deleted.  */
3585   do_pending_stack_adjust ();
3586 
3587   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
3588   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
3589      EH or abnormal edges, we shouldn't have created a tail call in
3590      the first place.  So it seems to me we should just be removing
3591      all edges here, or redirecting the existing fallthru edge to
3592      the exit block.  */
3593 
3594   probability = 0;
3595   count = 0;
3596 
3597   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3598     {
3599       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3600 	{
3601 	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3602 	    {
3603 	      e->dest->count -= e->count;
3604 	      e->dest->frequency -= EDGE_FREQUENCY (e);
3605 	      if (e->dest->count < 0)
3606 		e->dest->count = 0;
3607 	      if (e->dest->frequency < 0)
3608 		e->dest->frequency = 0;
3609 	    }
3610 	  count += e->count;
3611 	  probability += e->probability;
3612 	  remove_edge (e);
3613 	}
3614       else
3615 	ei_next (&ei);
3616     }
3617 
3618   /* This is somewhat ugly: the call_expr expander often emits instructions
3619      after the sibcall (to perform the function return).  These confuse the
3620      find_many_sub_basic_blocks code, so we need to get rid of them.  */
3621   last = NEXT_INSN (last);
3622   gcc_assert (BARRIER_P (last));
3623 
3624   *can_fallthru = false;
3625   while (NEXT_INSN (last))
3626     {
3627       /* For instance, a sqrt builtin expander expands an if with a
3628 	 sibcall in the then arm and a label for the `else` arm.  */
3629       if (LABEL_P (NEXT_INSN (last)))
3630 	{
3631 	  *can_fallthru = true;
3632 	  break;
3633 	}
3634       delete_insn (NEXT_INSN (last));
3635     }
3636 
3637   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3638 		 | EDGE_SIBCALL);
3639   e->probability += probability;
3640   e->count += count;
3641   BB_END (bb) = last;
3642   update_bb_for_insn (bb);
3643 
3644   if (NEXT_INSN (last))
3645     {
3646       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3647 
3648       last = BB_END (bb);
3649       if (BARRIER_P (last))
3650 	BB_END (bb) = PREV_INSN (last);
3651     }
3652 
3653   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3654 
3655   return bb;
3656 }
3657 
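/* Illustrative sketch (assumption-laden, and the #if 0 block is never
   compiled): a source shape that is assumed to exercise the CAN_FALLTHRU
   case documented above, on a target with a hardware sqrt instruction
   and with math errno handling enabled.  The builtin expander emits the
   sqrt insn for the common case and a sibling call to the library sqrt
   for the domain-error case, so the block ends in a conditional tail
   call yet can still fall through.  */
#if 0
double
my_sqrt (double x)
{
  return __builtin_sqrt (x);
}
#endif
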
3658 /* Return the difference between the floor and the truncated result of
3659    a signed division by OP1 with remainder MOD.  */
3660 static rtx
3661 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3662 {
3663   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3664   return gen_rtx_IF_THEN_ELSE
3665     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3666      gen_rtx_IF_THEN_ELSE
3667      (mode, gen_rtx_LT (BImode,
3668 			gen_rtx_DIV (mode, op1, mod),
3669 			const0_rtx),
3670       constm1_rtx, const0_rtx),
3671      const0_rtx);
3672 }
3673 
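/* Illustrative sketch (the #if 0 block is never compiled): the floor
   adjustment above, computed directly in C.  DIV and MOD come from C's
   truncating division; the adjustment is -1 exactly when the remainder
   is nonzero and the operands have opposite signs, which is what the
   OP1 / MOD < 0 test encodes, MOD having the sign of OP0.  For example,
   -7 / 2 truncates to -3 while the floor is -4.  */
#if 0
static int
example_floor_div (int op0, int op1)
{
  int div = op0 / op1;		/* truncated quotient */
  int mod = op0 % op1;		/* truncated remainder */
  int adj = (mod != 0 && (op0 < 0) != (op1 < 0)) ? -1 : 0;
  return div + adj;		/* example_floor_div (-7, 2) == -4 */
}
#endif
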
3674 /* Return the difference between the ceil and the truncated result of
3675    a signed division by OP1 with remainder MOD.  */
3676 static rtx
3677 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3678 {
3679   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3680   return gen_rtx_IF_THEN_ELSE
3681     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3682      gen_rtx_IF_THEN_ELSE
3683      (mode, gen_rtx_GT (BImode,
3684 			gen_rtx_DIV (mode, op1, mod),
3685 			const0_rtx),
3686       const1_rtx, const0_rtx),
3687      const0_rtx);
3688 }
3689 
3690 /* Return the difference between the ceil and the truncated result of
3691    an unsigned division by OP1 with remainder MOD.  */
3692 static rtx
3693 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3694 {
3695   /* (mod != 0 ? 1 : 0) */
3696   return gen_rtx_IF_THEN_ELSE
3697     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3698      const1_rtx, const0_rtx);
3699 }
3700 
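/* Illustrative sketch (the #if 0 block is never compiled): the two
   ceiling adjustments above, computed directly in C.  For signed
   operands the quotient is bumped by +1 when the remainder is nonzero
   and the operands have the same sign (the OP1 / MOD > 0 test); for
   unsigned operands any nonzero remainder bumps the quotient.  For
   example, 7 / 2 truncates to 3 while the ceiling is 4.  */
#if 0
static int
example_ceil_sdiv (int op0, int op1)
{
  int div = op0 / op1;
  int mod = op0 % op1;
  return div + ((mod != 0 && (op0 < 0) == (op1 < 0)) ? 1 : 0);
}

static unsigned int
example_ceil_udiv (unsigned int op0, unsigned int op1)
{
  return op0 / op1 + (op0 % op1 != 0);	/* e.g. 7u / 2u == 4 */
}
#endif
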
3701 /* Return the difference between the rounded and the truncated result
3702    of a signed division by OP1 with remainder MOD.  Halfway cases are
3703    rounded away from zero, rather than to the nearest even number.  */
3704 static rtx
3705 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3706 {
3707   /* (abs (mod) >= abs (op1) - abs (mod)
3708       ? (op1 / mod > 0 ? 1 : -1)
3709       : 0) */
3710   return gen_rtx_IF_THEN_ELSE
3711     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3712 		       gen_rtx_MINUS (mode,
3713 				      gen_rtx_ABS (mode, op1),
3714 				      gen_rtx_ABS (mode, mod))),
3715      gen_rtx_IF_THEN_ELSE
3716      (mode, gen_rtx_GT (BImode,
3717 			gen_rtx_DIV (mode, op1, mod),
3718 			const0_rtx),
3719       const1_rtx, constm1_rtx),
3720      const0_rtx);
3721 }
3722 
3723 /* Return the difference between the rounded and the truncated result
3724    of an unsigned division by OP1 with remainder MOD.  Halfway cases
3725    are rounded away from zero, rather than to the nearest even
3726    number.  */
3727 static rtx
3728 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3729 {
3730   /* (mod >= op1 - mod ? 1 : 0) */
3731   return gen_rtx_IF_THEN_ELSE
3732     (mode, gen_rtx_GE (BImode, mod,
3733 		       gen_rtx_MINUS (mode, op1, mod)),
3734      const1_rtx, const0_rtx);
3735 }
3736 
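/* Illustrative sketch (the #if 0 block is never compiled): the rounding
   adjustments above, computed directly in C with halfway cases going
   away from zero.  The |MOD| >= |OP1| - |MOD| test asks whether the
   discarded fraction is at least one half.  For example, 7 / 2
   truncates to 3 and rounds to 4, while -7 / 2 truncates to -3 and
   rounds to -4.  */
#if 0
static int
example_round_sdiv (int op0, int op1)
{
  int div = op0 / op1;
  int mod = op0 % op1;
  int amod = mod < 0 ? -mod : mod;
  int aop1 = op1 < 0 ? -op1 : op1;
  if (amod >= aop1 - amod)	/* discarded fraction >= 1/2 */
    div += ((op0 < 0) == (op1 < 0)) ? 1 : -1;
  return div;
}

static unsigned int
example_round_udiv (unsigned int op0, unsigned int op1)
{
  unsigned int mod = op0 % op1;
  return op0 / op1 + (mod >= op1 - mod);	/* e.g. 7u / 2u == 4 */
}
#endif
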
3737 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3738    any rtl.  */
3739 
3740 static rtx
3741 convert_debug_memory_address (machine_mode mode, rtx x,
3742 			      addr_space_t as)
3743 {
3744   machine_mode xmode = GET_MODE (x);
3745 
3746 #ifndef POINTERS_EXTEND_UNSIGNED
3747   gcc_assert (mode == Pmode
3748 	      || mode == targetm.addr_space.address_mode (as));
3749   gcc_assert (xmode == mode || xmode == VOIDmode);
3750 #else
3751   rtx temp;
3752 
3753   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3754 
3755   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3756     return x;
3757 
3758   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3759     x = simplify_gen_subreg (mode, x, xmode,
3760 			     subreg_lowpart_offset
3761 			     (mode, xmode));
3762   else if (POINTERS_EXTEND_UNSIGNED > 0)
3763     x = gen_rtx_ZERO_EXTEND (mode, x);
3764   else if (!POINTERS_EXTEND_UNSIGNED)
3765     x = gen_rtx_SIGN_EXTEND (mode, x);
3766   else
3767     {
3768       switch (GET_CODE (x))
3769 	{
3770 	case SUBREG:
3771 	  if ((SUBREG_PROMOTED_VAR_P (x)
3772 	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3773 	       || (GET_CODE (SUBREG_REG (x)) == PLUS
3774 		   && REG_P (XEXP (SUBREG_REG (x), 0))
3775 		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3776 		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3777 	      && GET_MODE (SUBREG_REG (x)) == mode)
3778 	    return SUBREG_REG (x);
3779 	  break;
3780 	case LABEL_REF:
3781 	  temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3782 	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3783 	  return temp;
3784 	case SYMBOL_REF:
3785 	  temp = shallow_copy_rtx (x);
3786 	  PUT_MODE (temp, mode);
3787 	  return temp;
3788 	case CONST:
3789 	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3790 	  if (temp)
3791 	    temp = gen_rtx_CONST (mode, temp);
3792 	  return temp;
3793 	case PLUS:
3794 	case MINUS:
3795 	  if (CONST_INT_P (XEXP (x, 1)))
3796 	    {
3797 	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3798 	      if (temp)
3799 		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3800 	    }
3801 	  break;
3802 	default:
3803 	  break;
3804 	}
3805       /* Don't know how to express ptr_extend as operation in debug info.  */
3806       return NULL;
3807     }
3808 #endif /* POINTERS_EXTEND_UNSIGNED */
3809 
3810   return x;
3811 }
3812 
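/* Illustrative note (a hypothetical target is assumed): where ptr_mode
   is SImode, Pmode is DImode and POINTERS_EXTEND_UNSIGNED is 1, an
   SImode address X handed to the function above comes back as
   (zero_extend:DI X), while a DImode address being narrowed comes back
   as a lowpart subreg.  No insns are emitted in either direction, which
   is what makes the helper safe to use while building debug
   expressions.  */
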
3813 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
3814    by avoid_deep_ter_for_debug.  */
3815 
3816 static hash_map<tree, tree> *deep_ter_debug_map;
3817 
3818 /* Split too deep TER chains for debug stmts using debug temporaries.  */
3819 
3820 static void
3821 avoid_deep_ter_for_debug (gimple stmt, int depth)
3822 {
3823   use_operand_p use_p;
3824   ssa_op_iter iter;
3825   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
3826     {
3827       tree use = USE_FROM_PTR (use_p);
3828       if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
3829 	continue;
3830       gimple g = get_gimple_for_ssa_name (use);
3831       if (g == NULL)
3832 	continue;
3833       if (depth > 6 && !stmt_ends_bb_p (g))
3834 	{
3835 	  if (deep_ter_debug_map == NULL)
3836 	    deep_ter_debug_map = new hash_map<tree, tree>;
3837 
3838 	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
3839 	  if (vexpr != NULL)
3840 	    continue;
3841 	  vexpr = make_node (DEBUG_EXPR_DECL);
3842 	  gimple def_temp = gimple_build_debug_bind (vexpr, use, g);
3843 	  DECL_ARTIFICIAL (vexpr) = 1;
3844 	  TREE_TYPE (vexpr) = TREE_TYPE (use);
3845 	  DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
3846 	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
3847 	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
3848 	  avoid_deep_ter_for_debug (def_temp, 0);
3849 	}
3850       else
3851 	avoid_deep_ter_for_debug (g, depth + 1);
3852     }
3853 }
3854 
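/* Illustrative note (the names below are assumptions; only the shape
   matters): given a chain of single-use definitions such as

     _1 = a + b;
     _2 = _1 * c;
     _3 = _2 - d;
     ...

   TER substitutes each definition into its single use, so a debug
   expression for the last name would otherwise grow with the chain.
   Once the walk above sees a use more than six levels deep it emits,
   right after the offending definition, a bind along the lines of

     # DEBUG D#1 => _1

   and later expansions refer to D#1 instead of re-expanding the whole
   subtree.  */
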
3855 /* Return an RTX equivalent to the value of the parameter DECL.  */
3856 
3857 static rtx
3858 expand_debug_parm_decl (tree decl)
3859 {
3860   rtx incoming = DECL_INCOMING_RTL (decl);
3861 
3862   if (incoming
3863       && GET_MODE (incoming) != BLKmode
3864       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3865 	  || (MEM_P (incoming)
3866 	      && REG_P (XEXP (incoming, 0))
3867 	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
3868     {
3869       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3870 
3871 #ifdef HAVE_window_save
3872       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3873 	 If the target machine has an explicit window save instruction, the
3874 	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
3875       if (REG_P (incoming)
3876 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3877 	incoming
3878 	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3879 				OUTGOING_REGNO (REGNO (incoming)), 0);
3880       else if (MEM_P (incoming))
3881 	{
3882 	  rtx reg = XEXP (incoming, 0);
3883 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3884 	    {
3885 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3886 	      incoming = replace_equiv_address_nv (incoming, reg);
3887 	    }
3888 	  else
3889 	    incoming = copy_rtx (incoming);
3890 	}
3891 #endif
3892 
3893       ENTRY_VALUE_EXP (rtl) = incoming;
3894       return rtl;
3895     }
3896 
3897   if (incoming
3898       && GET_MODE (incoming) != BLKmode
3899       && !TREE_ADDRESSABLE (decl)
3900       && MEM_P (incoming)
3901       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3902 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
3903 	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3904 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3905     return copy_rtx (incoming);
3906 
3907   return NULL_RTX;
3908 }
3909 
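/* Illustrative note (the register number and mode are assumptions): for
   a parameter whose DECL_INCOMING_RTL is a hard register, the function
   above wraps it as an ENTRY_VALUE, e.g. (entry_value:SI (reg:SI 5)),
   meaning "the value this register held on entry" rather than its
   current contents; for a parameter the caller passed in a stack slot
   addressed off virtual_incoming_args_rtx it simply returns a copy of
   that MEM.  */
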
3910 /* Return an RTX equivalent to the value of the tree expression EXP.  */
3911 
3912 static rtx
3913 expand_debug_expr (tree exp)
3914 {
3915   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3916   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3917   machine_mode inner_mode = VOIDmode;
3918   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3919   addr_space_t as;
3920 
3921   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3922     {
3923     case tcc_expression:
3924       switch (TREE_CODE (exp))
3925 	{
3926 	case COND_EXPR:
3927 	case DOT_PROD_EXPR:
3928 	case SAD_EXPR:
3929 	case WIDEN_MULT_PLUS_EXPR:
3930 	case WIDEN_MULT_MINUS_EXPR:
3931 	case FMA_EXPR:
3932 	  goto ternary;
3933 
3934 	case TRUTH_ANDIF_EXPR:
3935 	case TRUTH_ORIF_EXPR:
3936 	case TRUTH_AND_EXPR:
3937 	case TRUTH_OR_EXPR:
3938 	case TRUTH_XOR_EXPR:
3939 	  goto binary;
3940 
3941 	case TRUTH_NOT_EXPR:
3942 	  goto unary;
3943 
3944 	default:
3945 	  break;
3946 	}
3947       break;
3948 
3949     ternary:
3950       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3951       if (!op2)
3952 	return NULL_RTX;
3953       /* Fall through.  */
3954 
3955     binary:
3956     case tcc_binary:
3957       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3958       if (!op1)
3959 	return NULL_RTX;
3960       switch (TREE_CODE (exp))
3961 	{
3962 	case LSHIFT_EXPR:
3963 	case RSHIFT_EXPR:
3964 	case LROTATE_EXPR:
3965 	case RROTATE_EXPR:
3966 	case WIDEN_LSHIFT_EXPR:
3967 	  /* Ensure second operand isn't wider than the first one.  */
3968 	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
3969 	  if (SCALAR_INT_MODE_P (inner_mode))
3970 	    {
3971 	      machine_mode opmode = mode;
3972 	      if (VECTOR_MODE_P (mode))
3973 		opmode = GET_MODE_INNER (mode);
3974 	      if (SCALAR_INT_MODE_P (opmode)
3975 		  && (GET_MODE_PRECISION (opmode)
3976 		      < GET_MODE_PRECISION (inner_mode)))
3977 		op1 = simplify_gen_subreg (opmode, op1, inner_mode,
3978 					   subreg_lowpart_offset (opmode,
3979 								  inner_mode));
3980 	    }
3981 	  break;
3982 	default:
3983 	  break;
3984 	}
3985       /* Fall through.  */
3986 
3987     unary:
3988     case tcc_unary:
3989       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3990       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3991       if (!op0)
3992 	return NULL_RTX;
3993       break;
3994 
3995     case tcc_comparison:
3996       unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
3997       goto binary;
3998 
3999     case tcc_type:
4000     case tcc_statement:
4001       gcc_unreachable ();
4002 
4003     case tcc_constant:
4004     case tcc_exceptional:
4005     case tcc_declaration:
4006     case tcc_reference:
4007     case tcc_vl_exp:
4008       break;
4009     }
4010 
4011   switch (TREE_CODE (exp))
4012     {
4013     case STRING_CST:
4014       if (!lookup_constant_def (exp))
4015 	{
4016 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
4017 	      != (size_t) TREE_STRING_LENGTH (exp))
4018 	    return NULL_RTX;
4019 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4020 	  op0 = gen_rtx_MEM (BLKmode, op0);
4021 	  set_mem_attributes (op0, exp, 0);
4022 	  return op0;
4023 	}
4024       /* Fall through...  */
4025 
4026     case INTEGER_CST:
4027     case REAL_CST:
4028     case FIXED_CST:
4029       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4030       return op0;
4031 
4032     case COMPLEX_CST:
4033       gcc_assert (COMPLEX_MODE_P (mode));
4034       op0 = expand_debug_expr (TREE_REALPART (exp));
4035       op1 = expand_debug_expr (TREE_IMAGPART (exp));
4036       return gen_rtx_CONCAT (mode, op0, op1);
4037 
4038     case DEBUG_EXPR_DECL:
4039       op0 = DECL_RTL_IF_SET (exp);
4040 
4041       if (op0)
4042 	return op0;
4043 
4044       op0 = gen_rtx_DEBUG_EXPR (mode);
4045       DEBUG_EXPR_TREE_DECL (op0) = exp;
4046       SET_DECL_RTL (exp, op0);
4047 
4048       return op0;
4049 
4050     case VAR_DECL:
4051     case PARM_DECL:
4052     case FUNCTION_DECL:
4053     case LABEL_DECL:
4054     case CONST_DECL:
4055     case RESULT_DECL:
4056       op0 = DECL_RTL_IF_SET (exp);
4057 
4058       /* This decl was probably optimized away.  */
4059       if (!op0)
4060 	{
4061 	  if (TREE_CODE (exp) != VAR_DECL
4062 	      || DECL_EXTERNAL (exp)
4063 	      || !TREE_STATIC (exp)
4064 	      || !DECL_NAME (exp)
4065 	      || DECL_HARD_REGISTER (exp)
4066 	      || DECL_IN_CONSTANT_POOL (exp)
4067 	      || mode == VOIDmode)
4068 	    return NULL;
4069 
4070 	  op0 = make_decl_rtl_for_debug (exp);
4071 	  if (!MEM_P (op0)
4072 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4073 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4074 	    return NULL;
4075 	}
4076       else
4077 	op0 = copy_rtx (op0);
4078 
4079       if (GET_MODE (op0) == BLKmode
4080 	  /* If op0 is not BLKmode, but mode is, adjust_mode
4081 	     below would ICE.  While it is likely a FE bug,
4082 	     try to be robust here.  See PR43166.  */
4083 	  || mode == BLKmode
4084 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4085 	{
4086 	  gcc_assert (MEM_P (op0));
4087 	  op0 = adjust_address_nv (op0, mode, 0);
4088 	  return op0;
4089 	}
4090 
4091       /* Fall through.  */
4092 
4093     adjust_mode:
4094     case PAREN_EXPR:
4095     CASE_CONVERT:
4096       {
4097 	inner_mode = GET_MODE (op0);
4098 
4099 	if (mode == inner_mode)
4100 	  return op0;
4101 
4102 	if (inner_mode == VOIDmode)
4103 	  {
4104 	    if (TREE_CODE (exp) == SSA_NAME)
4105 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
4106 	    else
4107 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4108 	    if (mode == inner_mode)
4109 	      return op0;
4110 	  }
4111 
4112 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4113 	  {
4114 	    if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4115 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4116 	    else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4117 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4118 	    else
4119 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4120 	  }
4121 	else if (FLOAT_MODE_P (mode))
4122 	  {
4123 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
4124 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4125 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4126 	    else
4127 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4128 	  }
4129 	else if (FLOAT_MODE_P (inner_mode))
4130 	  {
4131 	    if (unsignedp)
4132 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4133 	    else
4134 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4135 	  }
4136 	else if (CONSTANT_P (op0)
4137 		 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
4138 	  op0 = simplify_gen_subreg (mode, op0, inner_mode,
4139 				     subreg_lowpart_offset (mode,
4140 							    inner_mode));
4141 	else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
4142 		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4143 		 : unsignedp)
4144 	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4145 	else
4146 	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4147 
4148 	return op0;
4149       }
4150 
4151     case MEM_REF:
4152       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4153 	{
4154 	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4155 				     TREE_OPERAND (exp, 0),
4156 				     TREE_OPERAND (exp, 1));
4157 	  if (newexp)
4158 	    return expand_debug_expr (newexp);
4159 	}
4160       /* FALLTHROUGH */
4161     case INDIRECT_REF:
4162       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4163       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4164       if (!op0)
4165 	return NULL;
4166 
4167       if (TREE_CODE (exp) == MEM_REF)
4168 	{
4169 	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4170 	      || (GET_CODE (op0) == PLUS
4171 		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4172 	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
4173 	       Instead just use get_inner_reference.  */
4174 	    goto component_ref;
4175 
4176 	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4177 	  if (!op1 || !CONST_INT_P (op1))
4178 	    return NULL;
4179 
4180 	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4181 	}
4182 
4183       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4184 
4185       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4186 					  op0, as);
4187       if (op0 == NULL_RTX)
4188 	return NULL;
4189 
4190       op0 = gen_rtx_MEM (mode, op0);
4191       set_mem_attributes (op0, exp, 0);
4192       if (TREE_CODE (exp) == MEM_REF
4193 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4194 	set_mem_expr (op0, NULL_TREE);
4195       set_mem_addr_space (op0, as);
4196 
4197       return op0;
4198 
4199     case TARGET_MEM_REF:
4200       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4201 	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4202 	return NULL;
4203 
4204       op0 = expand_debug_expr
4205 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4206       if (!op0)
4207 	return NULL;
4208 
4209       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4210       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4211 					  op0, as);
4212       if (op0 == NULL_RTX)
4213 	return NULL;
4214 
4215       op0 = gen_rtx_MEM (mode, op0);
4216 
4217       set_mem_attributes (op0, exp, 0);
4218       set_mem_addr_space (op0, as);
4219 
4220       return op0;
4221 
4222     component_ref:
4223     case ARRAY_REF:
4224     case ARRAY_RANGE_REF:
4225     case COMPONENT_REF:
4226     case BIT_FIELD_REF:
4227     case REALPART_EXPR:
4228     case IMAGPART_EXPR:
4229     case VIEW_CONVERT_EXPR:
4230       {
4231 	machine_mode mode1;
4232 	HOST_WIDE_INT bitsize, bitpos;
4233 	tree offset;
4234 	int volatilep = 0;
4235 	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4236 					&mode1, &unsignedp, &volatilep, false);
4237 	rtx orig_op0;
4238 
4239 	if (bitsize == 0)
4240 	  return NULL;
4241 
4242 	orig_op0 = op0 = expand_debug_expr (tem);
4243 
4244 	if (!op0)
4245 	  return NULL;
4246 
4247 	if (offset)
4248 	  {
4249 	    machine_mode addrmode, offmode;
4250 
4251 	    if (!MEM_P (op0))
4252 	      return NULL;
4253 
4254 	    op0 = XEXP (op0, 0);
4255 	    addrmode = GET_MODE (op0);
4256 	    if (addrmode == VOIDmode)
4257 	      addrmode = Pmode;
4258 
4259 	    op1 = expand_debug_expr (offset);
4260 	    if (!op1)
4261 	      return NULL;
4262 
4263 	    offmode = GET_MODE (op1);
4264 	    if (offmode == VOIDmode)
4265 	      offmode = TYPE_MODE (TREE_TYPE (offset));
4266 
4267 	    if (addrmode != offmode)
4268 	      op1 = simplify_gen_subreg (addrmode, op1, offmode,
4269 					 subreg_lowpart_offset (addrmode,
4270 								offmode));
4271 
4272 	    /* Don't use offset_address here, we don't need a
4273 	       recognizable address, and we don't want to generate
4274 	       code.  */
4275 	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4276 							  op0, op1));
4277 	  }
4278 
4279 	if (MEM_P (op0))
4280 	  {
4281 	    if (mode1 == VOIDmode)
4282 	      /* Bitfield.  */
4283 	      mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4284 	    if (bitpos >= BITS_PER_UNIT)
4285 	      {
4286 		op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4287 		bitpos %= BITS_PER_UNIT;
4288 	      }
4289 	    else if (bitpos < 0)
4290 	      {
4291 		HOST_WIDE_INT units
4292 		  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4293 		op0 = adjust_address_nv (op0, mode1, units);
4294 		bitpos += units * BITS_PER_UNIT;
4295 	      }
4296 	    else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4297 	      op0 = adjust_address_nv (op0, mode, 0);
4298 	    else if (GET_MODE (op0) != mode1)
4299 	      op0 = adjust_address_nv (op0, mode1, 0);
4300 	    else
4301 	      op0 = copy_rtx (op0);
4302 	    if (op0 == orig_op0)
4303 	      op0 = shallow_copy_rtx (op0);
4304 	    set_mem_attributes (op0, exp, 0);
4305 	  }
4306 
4307 	if (bitpos == 0 && mode == GET_MODE (op0))
4308 	  return op0;
4309 
4310         if (bitpos < 0)
4311           return NULL;
4312 
4313 	if (GET_MODE (op0) == BLKmode)
4314 	  return NULL;
4315 
4316 	if ((bitpos % BITS_PER_UNIT) == 0
4317 	    && bitsize == GET_MODE_BITSIZE (mode1))
4318 	  {
4319 	    machine_mode opmode = GET_MODE (op0);
4320 
4321 	    if (opmode == VOIDmode)
4322 	      opmode = TYPE_MODE (TREE_TYPE (tem));
4323 
4324 	    /* This condition may hold if we're expanding the address
4325 	       right past the end of an array that turned out not to
4326 	       be addressable (i.e., the address was only computed in
4327 	       debug stmts).  The gen_subreg below would rightfully
4328 	       crash, and the address doesn't really exist, so just
4329 	       drop it.  */
4330 	    if (bitpos >= GET_MODE_BITSIZE (opmode))
4331 	      return NULL;
4332 
4333 	    if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4334 	      return simplify_gen_subreg (mode, op0, opmode,
4335 					  bitpos / BITS_PER_UNIT);
4336 	  }
4337 
4338 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4339 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
4340 				     ? SIGN_EXTRACT
4341 				     : ZERO_EXTRACT, mode,
4342 				     GET_MODE (op0) != VOIDmode
4343 				     ? GET_MODE (op0)
4344 				     : TYPE_MODE (TREE_TYPE (tem)),
4345 				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
4346       }
4347 
4348     case ABS_EXPR:
4349       return simplify_gen_unary (ABS, mode, op0, mode);
4350 
4351     case NEGATE_EXPR:
4352       return simplify_gen_unary (NEG, mode, op0, mode);
4353 
4354     case BIT_NOT_EXPR:
4355       return simplify_gen_unary (NOT, mode, op0, mode);
4356 
4357     case FLOAT_EXPR:
4358       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4359 									 0)))
4360 				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4361 				 inner_mode);
4362 
4363     case FIX_TRUNC_EXPR:
4364       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4365 				 inner_mode);
4366 
4367     case POINTER_PLUS_EXPR:
4368       /* For the rare target where pointers are not the same size as
4369 	 size_t, we need to check for mis-matched modes and correct
4370 	 the addend.  */
4371       if (op0 && op1
4372 	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4373 	  && GET_MODE (op0) != GET_MODE (op1))
4374 	{
4375 	  if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4376 	      /* If OP0 is a partial mode, then we must truncate, even if it has
4377 		 the same bitsize as OP1 as GCC's representation of partial modes
4378 		 is opaque.  */
4379 	      || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4380 		  && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4381 	    op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4382 				      GET_MODE (op1));
4383 	  else
4384 	    /* We always sign-extend, regardless of the signedness of
4385 	       the operand, because the operand is always unsigned
4386 	       here even if the original C expression is signed.  */
4387 	    op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4388 				      GET_MODE (op1));
4389 	}
4390       /* Fall through.  */
4391     case PLUS_EXPR:
4392       return simplify_gen_binary (PLUS, mode, op0, op1);
4393 
4394     case MINUS_EXPR:
4395       return simplify_gen_binary (MINUS, mode, op0, op1);
4396 
4397     case MULT_EXPR:
4398       return simplify_gen_binary (MULT, mode, op0, op1);
4399 
4400     case RDIV_EXPR:
4401     case TRUNC_DIV_EXPR:
4402     case EXACT_DIV_EXPR:
4403       if (unsignedp)
4404 	return simplify_gen_binary (UDIV, mode, op0, op1);
4405       else
4406 	return simplify_gen_binary (DIV, mode, op0, op1);
4407 
4408     case TRUNC_MOD_EXPR:
4409       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4410 
4411     case FLOOR_DIV_EXPR:
4412       if (unsignedp)
4413 	return simplify_gen_binary (UDIV, mode, op0, op1);
4414       else
4415 	{
4416 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4417 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4418 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4419 	  return simplify_gen_binary (PLUS, mode, div, adj);
4420 	}
4421 
4422     case FLOOR_MOD_EXPR:
4423       if (unsignedp)
4424 	return simplify_gen_binary (UMOD, mode, op0, op1);
4425       else
4426 	{
4427 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4428 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4429 	  adj = simplify_gen_unary (NEG, mode,
4430 				    simplify_gen_binary (MULT, mode, adj, op1),
4431 				    mode);
4432 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4433 	}
4434 
4435     case CEIL_DIV_EXPR:
4436       if (unsignedp)
4437 	{
4438 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4439 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4440 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4441 	  return simplify_gen_binary (PLUS, mode, div, adj);
4442 	}
4443       else
4444 	{
4445 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4446 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4447 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4448 	  return simplify_gen_binary (PLUS, mode, div, adj);
4449 	}
4450 
4451     case CEIL_MOD_EXPR:
4452       if (unsignedp)
4453 	{
4454 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4455 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4456 	  adj = simplify_gen_unary (NEG, mode,
4457 				    simplify_gen_binary (MULT, mode, adj, op1),
4458 				    mode);
4459 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4460 	}
4461       else
4462 	{
4463 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4464 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4465 	  adj = simplify_gen_unary (NEG, mode,
4466 				    simplify_gen_binary (MULT, mode, adj, op1),
4467 				    mode);
4468 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4469 	}
4470 
4471     case ROUND_DIV_EXPR:
4472       if (unsignedp)
4473 	{
4474 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4475 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4476 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4477 	  return simplify_gen_binary (PLUS, mode, div, adj);
4478 	}
4479       else
4480 	{
4481 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4482 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4483 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4484 	  return simplify_gen_binary (PLUS, mode, div, adj);
4485 	}
4486 
4487     case ROUND_MOD_EXPR:
4488       if (unsignedp)
4489 	{
4490 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4491 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4492 	  adj = simplify_gen_unary (NEG, mode,
4493 				    simplify_gen_binary (MULT, mode, adj, op1),
4494 				    mode);
4495 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4496 	}
4497       else
4498 	{
4499 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4500 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4501 	  adj = simplify_gen_unary (NEG, mode,
4502 				    simplify_gen_binary (MULT, mode, adj, op1),
4503 				    mode);
4504 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4505 	}
4506 
4507     case LSHIFT_EXPR:
4508       return simplify_gen_binary (ASHIFT, mode, op0, op1);
4509 
4510     case RSHIFT_EXPR:
4511       if (unsignedp)
4512 	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4513       else
4514 	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4515 
4516     case LROTATE_EXPR:
4517       return simplify_gen_binary (ROTATE, mode, op0, op1);
4518 
4519     case RROTATE_EXPR:
4520       return simplify_gen_binary (ROTATERT, mode, op0, op1);
4521 
4522     case MIN_EXPR:
4523       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4524 
4525     case MAX_EXPR:
4526       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4527 
4528     case BIT_AND_EXPR:
4529     case TRUTH_AND_EXPR:
4530       return simplify_gen_binary (AND, mode, op0, op1);
4531 
4532     case BIT_IOR_EXPR:
4533     case TRUTH_OR_EXPR:
4534       return simplify_gen_binary (IOR, mode, op0, op1);
4535 
4536     case BIT_XOR_EXPR:
4537     case TRUTH_XOR_EXPR:
4538       return simplify_gen_binary (XOR, mode, op0, op1);
4539 
4540     case TRUTH_ANDIF_EXPR:
4541       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4542 
4543     case TRUTH_ORIF_EXPR:
4544       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4545 
4546     case TRUTH_NOT_EXPR:
4547       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4548 
4549     case LT_EXPR:
4550       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4551 				      op0, op1);
4552 
4553     case LE_EXPR:
4554       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4555 				      op0, op1);
4556 
4557     case GT_EXPR:
4558       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4559 				      op0, op1);
4560 
4561     case GE_EXPR:
4562       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4563 				      op0, op1);
4564 
4565     case EQ_EXPR:
4566       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4567 
4568     case NE_EXPR:
4569       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4570 
4571     case UNORDERED_EXPR:
4572       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4573 
4574     case ORDERED_EXPR:
4575       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4576 
4577     case UNLT_EXPR:
4578       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4579 
4580     case UNLE_EXPR:
4581       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4582 
4583     case UNGT_EXPR:
4584       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4585 
4586     case UNGE_EXPR:
4587       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4588 
4589     case UNEQ_EXPR:
4590       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4591 
4592     case LTGT_EXPR:
4593       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4594 
4595     case COND_EXPR:
4596       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4597 
4598     case COMPLEX_EXPR:
4599       gcc_assert (COMPLEX_MODE_P (mode));
4600       if (GET_MODE (op0) == VOIDmode)
4601 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4602       if (GET_MODE (op1) == VOIDmode)
4603 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4604       return gen_rtx_CONCAT (mode, op0, op1);
4605 
4606     case CONJ_EXPR:
4607       if (GET_CODE (op0) == CONCAT)
4608 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4609 			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4610 						   XEXP (op0, 1),
4611 						   GET_MODE_INNER (mode)));
4612       else
4613 	{
4614 	  machine_mode imode = GET_MODE_INNER (mode);
4615 	  rtx re, im;
4616 
4617 	  if (MEM_P (op0))
4618 	    {
4619 	      re = adjust_address_nv (op0, imode, 0);
4620 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4621 	    }
4622 	  else
4623 	    {
4624 	      machine_mode ifmode = int_mode_for_mode (mode);
4625 	      machine_mode ihmode = int_mode_for_mode (imode);
4626 	      rtx halfsize;
4627 	      if (ifmode == BLKmode || ihmode == BLKmode)
4628 		return NULL;
4629 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4630 	      re = op0;
4631 	      if (mode != ifmode)
4632 		re = gen_rtx_SUBREG (ifmode, re, 0);
4633 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4634 	      if (imode != ihmode)
4635 		re = gen_rtx_SUBREG (imode, re, 0);
4636 	      im = copy_rtx (op0);
4637 	      if (mode != ifmode)
4638 		im = gen_rtx_SUBREG (ifmode, im, 0);
4639 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4640 	      if (imode != ihmode)
4641 		im = gen_rtx_SUBREG (imode, im, 0);
4642 	    }
4643 	  im = gen_rtx_NEG (imode, im);
4644 	  return gen_rtx_CONCAT (mode, re, im);
4645 	}
4646 
4647     case ADDR_EXPR:
4648       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4649       if (!op0 || !MEM_P (op0))
4650 	{
4651 	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4652 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4653 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4654 	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4655 		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4656 	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4657 
4658 	  if (handled_component_p (TREE_OPERAND (exp, 0)))
4659 	    {
4660 	      HOST_WIDE_INT bitoffset, bitsize, maxsize;
4661 	      tree decl
4662 		= get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4663 					   &bitoffset, &bitsize, &maxsize);
4664 	      if ((TREE_CODE (decl) == VAR_DECL
4665 		   || TREE_CODE (decl) == PARM_DECL
4666 		   || TREE_CODE (decl) == RESULT_DECL)
4667 		  && (!TREE_ADDRESSABLE (decl)
4668 		      || target_for_debug_bind (decl))
4669 		  && (bitoffset % BITS_PER_UNIT) == 0
4670 		  && bitsize > 0
4671 		  && bitsize == maxsize)
4672 		{
4673 		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4674 		  return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4675 		}
4676 	    }
4677 
4678 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4679 	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4680 		 == ADDR_EXPR)
4681 	    {
4682 	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4683 						     0));
4684 	      if (op0 != NULL
4685 		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4686 		      || (GET_CODE (op0) == PLUS
4687 			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4688 			  && CONST_INT_P (XEXP (op0, 1)))))
4689 		{
4690 		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4691 							 1));
4692 		  if (!op1 || !CONST_INT_P (op1))
4693 		    return NULL;
4694 
4695 		  return plus_constant (mode, op0, INTVAL (op1));
4696 		}
4697 	    }
4698 
4699 	  return NULL;
4700 	}
4701 
4702       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4703       op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4704 
4705       return op0;
4706 
4707     case VECTOR_CST:
4708       {
4709 	unsigned i;
4710 
4711 	op0 = gen_rtx_CONCATN
4712 	  (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4713 
4714 	for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4715 	  {
4716 	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4717 	    if (!op1)
4718 	      return NULL;
4719 	    XVECEXP (op0, 0, i) = op1;
4720 	  }
4721 
4722 	return op0;
4723       }
4724 
4725     case CONSTRUCTOR:
4726       if (TREE_CLOBBER_P (exp))
4727 	return NULL;
4728       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4729 	{
4730 	  unsigned i;
4731 	  tree val;
4732 
4733 	  op0 = gen_rtx_CONCATN
4734 	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4735 
4736 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4737 	    {
4738 	      op1 = expand_debug_expr (val);
4739 	      if (!op1)
4740 		return NULL;
4741 	      XVECEXP (op0, 0, i) = op1;
4742 	    }
4743 
4744 	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4745 	    {
4746 	      op1 = expand_debug_expr
4747 		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4748 
4749 	      if (!op1)
4750 		return NULL;
4751 
4752 	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4753 		XVECEXP (op0, 0, i) = op1;
4754 	    }
4755 
4756 	  return op0;
4757 	}
4758       else
4759 	goto flag_unsupported;
4760 
4761     case CALL_EXPR:
4762       /* ??? Maybe handle some builtins?  */
4763       return NULL;
4764 
4765     case SSA_NAME:
4766       {
4767 	gimple g = get_gimple_for_ssa_name (exp);
4768 	if (g)
4769 	  {
4770 	    tree t = NULL_TREE;
4771 	    if (deep_ter_debug_map)
4772 	      {
4773 		tree *slot = deep_ter_debug_map->get (exp);
4774 		if (slot)
4775 		  t = *slot;
4776 	      }
4777 	    if (t == NULL_TREE)
4778 	      t = gimple_assign_rhs_to_tree (g);
4779 	    op0 = expand_debug_expr (t);
4780 	    if (!op0)
4781 	      return NULL;
4782 	  }
4783 	else
4784 	  {
4785 	    int part = var_to_partition (SA.map, exp);
4786 
4787 	    if (part == NO_PARTITION)
4788 	      {
4789 		/* If this is a reference to the incoming value of a parameter
4790 		   that is never used in the code, or whose incoming value is
4791 		   never used in the code, use the PARM_DECL's DECL_RTL
4792 		   if set.  */
4793 		if (SSA_NAME_IS_DEFAULT_DEF (exp)
4794 		    && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4795 		  {
4796 		    op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4797 		    if (op0)
4798 		      goto adjust_mode;
4799 		    op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4800 		    if (op0)
4801 		      goto adjust_mode;
4802 		  }
4803 		return NULL;
4804 	      }
4805 
4806 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4807 
4808 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
4809 	  }
4810 	goto adjust_mode;
4811       }
4812 
4813     case ERROR_MARK:
4814       return NULL;
4815 
4816     /* Vector stuff.  For most of the codes we don't have rtl codes.  */
4817     case REALIGN_LOAD_EXPR:
4818     case REDUC_MAX_EXPR:
4819     case REDUC_MIN_EXPR:
4820     case REDUC_PLUS_EXPR:
4821     case VEC_COND_EXPR:
4822     case VEC_PACK_FIX_TRUNC_EXPR:
4823     case VEC_PACK_SAT_EXPR:
4824     case VEC_PACK_TRUNC_EXPR:
4825     case VEC_UNPACK_FLOAT_HI_EXPR:
4826     case VEC_UNPACK_FLOAT_LO_EXPR:
4827     case VEC_UNPACK_HI_EXPR:
4828     case VEC_UNPACK_LO_EXPR:
4829     case VEC_WIDEN_MULT_HI_EXPR:
4830     case VEC_WIDEN_MULT_LO_EXPR:
4831     case VEC_WIDEN_MULT_EVEN_EXPR:
4832     case VEC_WIDEN_MULT_ODD_EXPR:
4833     case VEC_WIDEN_LSHIFT_HI_EXPR:
4834     case VEC_WIDEN_LSHIFT_LO_EXPR:
4835     case VEC_PERM_EXPR:
4836       return NULL;
4837 
4838     /* Misc codes.  */
4839     case ADDR_SPACE_CONVERT_EXPR:
4840     case FIXED_CONVERT_EXPR:
4841     case OBJ_TYPE_REF:
4842     case WITH_SIZE_EXPR:
4843       return NULL;
4844 
4845     case DOT_PROD_EXPR:
4846       if (SCALAR_INT_MODE_P (GET_MODE (op0))
4847 	  && SCALAR_INT_MODE_P (mode))
4848 	{
4849 	  op0
4850 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4851 									  0)))
4852 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4853 				  inner_mode);
4854 	  op1
4855 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4856 									  1)))
4857 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4858 				  inner_mode);
4859 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
4860 	  return simplify_gen_binary (PLUS, mode, op0, op2);
4861 	}
4862       return NULL;
4863 
4864     case WIDEN_MULT_EXPR:
4865     case WIDEN_MULT_PLUS_EXPR:
4866     case WIDEN_MULT_MINUS_EXPR:
4867       if (SCALAR_INT_MODE_P (GET_MODE (op0))
4868 	  && SCALAR_INT_MODE_P (mode))
4869 	{
4870 	  inner_mode = GET_MODE (op0);
4871 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4872 	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4873 	  else
4874 	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4875 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4876 	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4877 	  else
4878 	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4879 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
4880 	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4881 	    return op0;
4882 	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4883 	    return simplify_gen_binary (PLUS, mode, op0, op2);
4884 	  else
4885 	    return simplify_gen_binary (MINUS, mode, op2, op0);
4886 	}
4887       return NULL;
4888 
4889     case MULT_HIGHPART_EXPR:
4890       /* ??? Similar to the above.  */
4891       return NULL;
4892 
4893     case WIDEN_SUM_EXPR:
4894     case WIDEN_LSHIFT_EXPR:
4895       if (SCALAR_INT_MODE_P (GET_MODE (op0))
4896 	  && SCALAR_INT_MODE_P (mode))
4897 	{
4898 	  op0
4899 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4900 									  0)))
4901 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4902 				  inner_mode);
4903 	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4904 				      ? ASHIFT : PLUS, mode, op0, op1);
4905 	}
4906       return NULL;
4907 
4908     case FMA_EXPR:
4909       return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4910 
4911     default:
4912     flag_unsupported:
4913 #ifdef ENABLE_CHECKING
4914       debug_tree (exp);
4915       gcc_unreachable ();
4916 #else
4917       return NULL;
4918 #endif
4919     }
4920 }
4921 
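/* Illustrative note (the pseudo number and mode are assumptions):
   unlike expand_expr, the function above emits no insns; it only builds
   an rtx, or returns NULL when no faithful location expression exists.
   For a debug bind whose value is the GIMPLE expression x_2 + 4, with
   x_2 living in pseudo 90, the result is simply

     (plus:SI (reg:SI 90 [ x ]) (const_int 4))

   ready to be installed by expand_debug_locations.  */
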
4922 /* Return an RTX equivalent to the source bind value of the tree expression
4923    EXP.  */
4924 
4925 static rtx
4926 expand_debug_source_expr (tree exp)
4927 {
4928   rtx op0 = NULL_RTX;
4929   machine_mode mode = VOIDmode, inner_mode;
4930 
4931   switch (TREE_CODE (exp))
4932     {
4933     case PARM_DECL:
4934       {
4935 	mode = DECL_MODE (exp);
4936 	op0 = expand_debug_parm_decl (exp);
4937 	if (op0)
4938 	   break;
4939 	/* See whether this is an argument that has been completely
4940 	   optimized out.  */
4941 	if (!DECL_RTL_SET_P (exp)
4942 	    && !DECL_INCOMING_RTL (exp)
4943 	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
4944 	  {
4945 	    tree aexp = DECL_ORIGIN (exp);
4946 	    if (DECL_CONTEXT (aexp)
4947 		== DECL_ABSTRACT_ORIGIN (current_function_decl))
4948 	      {
4949 		vec<tree, va_gc> **debug_args;
4950 		unsigned int ix;
4951 		tree ddecl;
4952 		debug_args = decl_debug_args_lookup (current_function_decl);
4953 		if (debug_args != NULL)
4954 		  {
4955 		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4956 			 ix += 2)
4957 		      if (ddecl == aexp)
4958 			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4959 		  }
4960 	      }
4961 	  }
4962 	break;
4963       }
4964     default:
4965       break;
4966     }
4967 
4968   if (op0 == NULL_RTX)
4969     return NULL_RTX;
4970 
4971   inner_mode = GET_MODE (op0);
4972   if (mode == inner_mode)
4973     return op0;
4974 
4975   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4976     {
4977       if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4978 	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4979       else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4980 	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4981       else
4982 	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4983     }
4984   else if (FLOAT_MODE_P (mode))
4985     gcc_unreachable ();
4986   else if (FLOAT_MODE_P (inner_mode))
4987     {
4988       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4989 	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4990       else
4991 	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4992     }
4993   else if (CONSTANT_P (op0)
4994 	   || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4995     op0 = simplify_gen_subreg (mode, op0, inner_mode,
4996 			       subreg_lowpart_offset (mode, inner_mode));
4997   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4998     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4999   else
5000     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5001 
5002   return op0;
5003 }
5004 
5005 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5006    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5007    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
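
/* A purely illustrative shape (not real compiler output): given a location

     (plus (mult (ashift (minus (mult (reg a) (reg b)) (reg c))
                         (const_int 1))
                 (reg d))
           (reg e))

   the recursion reaches depth 4 at the inner (mult (reg a) (reg b)); that
   subexpression is bound to a fresh debug temporary by a VAR_LOCATION
   debug insn emitted before INSN and is replaced in the original location
   by the corresponding DEBUG_EXPR.  */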
5008 
5009 static void
5010 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5011 {
5012   rtx exp = *exp_p;
5013 
5014   if (exp == NULL_RTX)
5015     return;
5016 
5017   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5018     return;
5019 
5020   if (depth == 4)
5021     {
5022       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
5023       rtx dval = make_debug_expr_from_rtl (exp);
5024 
5025       /* Emit a debug bind insn before INSN.  */
5026       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5027 				       DEBUG_EXPR_TREE_DECL (dval), exp,
5028 				       VAR_INIT_STATUS_INITIALIZED);
5029 
5030       emit_debug_insn_before (bind, insn);
5031       *exp_p = dval;
5032       return;
5033     }
5034 
5035   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5036   int i, j;
5037   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5038     switch (*format_ptr++)
5039       {
5040       case 'e':
5041 	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5042 	break;
5043 
5044       case 'E':
5045       case 'V':
5046 	for (j = 0; j < XVECLEN (exp, i); j++)
5047 	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5048 	break;
5049 
5050       default:
5051 	break;
5052       }
5053 }
5054 
5055 /* Expand the _LOCs in debug insns.  We run this after expanding all
5056    regular insns, so that any variables referenced in the function
5057    will have their DECL_RTLs set.  */
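
/* For intuition (example only): a GIMPLE debug bind that survived into RTL,
   such as

     # DEBUG x => a_1 + 1

   has its tree value expanded here into an RTL location, e.g. something
   like (plus:SI (reg a) (const_int 1)); when no usable expansion exists,
   the location is replaced by the "unknown location" marker instead.  */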
5058 
5059 static void
5060 expand_debug_locations (void)
5061 {
5062   rtx_insn *insn;
5063   rtx_insn *last = get_last_insn ();
5064   int save_strict_alias = flag_strict_aliasing;
5065 
5066   /* New alias sets while setting up memory attributes cause
5067      -fcompare-debug failures, even though they don't bring about any
5068      codegen changes.  */
5069   flag_strict_aliasing = 0;
5070 
5071   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5072     if (DEBUG_INSN_P (insn))
5073       {
5074 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5075 	rtx val;
5076 	rtx_insn *prev_insn, *insn2;
5077 	machine_mode mode;
5078 
5079 	if (value == NULL_TREE)
5080 	  val = NULL_RTX;
5081 	else
5082 	  {
5083 	    if (INSN_VAR_LOCATION_STATUS (insn)
5084 		== VAR_INIT_STATUS_UNINITIALIZED)
5085 	      val = expand_debug_source_expr (value);
5086 	    /* The avoid_deep_ter_for_debug function inserts
5087 	       debug bind stmts after SSA_NAME definition, with the
5088	       SSA_NAME as the whole bind location.  Temporarily disable
5089 	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5090 	       being defined in this DEBUG_INSN.  */
5091 	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5092 	      {
5093 		tree *slot = deep_ter_debug_map->get (value);
5094 		if (slot)
5095 		  {
5096 		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
5097 		      *slot = NULL_TREE;
5098 		    else
5099 		      slot = NULL;
5100 		  }
5101 		val = expand_debug_expr (value);
5102 		if (slot)
5103 		  *slot = INSN_VAR_LOCATION_DECL (insn);
5104 	      }
5105 	    else
5106 	      val = expand_debug_expr (value);
5107 	    gcc_assert (last == get_last_insn ());
5108 	  }
5109 
5110 	if (!val)
5111 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
5112 	else
5113 	  {
5114 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
5115 
5116 	    gcc_assert (mode == GET_MODE (val)
5117 			|| (GET_MODE (val) == VOIDmode
5118 			    && (CONST_SCALAR_INT_P (val)
5119 				|| GET_CODE (val) == CONST_FIXED
5120 				|| GET_CODE (val) == LABEL_REF)));
5121 	  }
5122 
5123 	INSN_VAR_LOCATION_LOC (insn) = val;
5124 	prev_insn = PREV_INSN (insn);
5125 	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5126 	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5127       }
5128 
5129   flag_strict_aliasing = save_strict_alias;
5130 }
5131 
5132 /* Swap the operands of commutative operations so that the more
5133    expensive operand is expanded first.  */
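
/* A small sketch of the heuristic (the costs are made-up numbers): for

     t_3 = a_1 + b_2;

   where the accumulated lattice cost of a_1's definition chain is 1 and
   that of b_2's is 7, the operands are swapped to t_3 = b_2 + a_1 so that
   the more expensive (possibly TERed) operand is expanded first.  */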
5134 
5135 static void
5136 reorder_operands (basic_block bb)
5137 {
5138   unsigned int *lattice;  /* Holds the cost of each statement.  */
5139   unsigned int i = 0, n = 0;
5140   gimple_stmt_iterator gsi;
5141   gimple_seq stmts;
5142   gimple stmt;
5143   bool swap;
5144   tree op0, op1;
5145   ssa_op_iter iter;
5146   use_operand_p use_p;
5147   gimple def0, def1;
5148 
5149   /* Compute cost of each statement using estimate_num_insns.  */
5150   stmts = bb_seq (bb);
5151   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5152     {
5153       stmt = gsi_stmt (gsi);
5154       if (!is_gimple_debug (stmt))
5155         gimple_set_uid (stmt, n++);
5156     }
5157   lattice = XNEWVEC (unsigned int, n);
5158   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5159     {
5160       unsigned cost;
5161       stmt = gsi_stmt (gsi);
5162       if (is_gimple_debug (stmt))
5163 	continue;
5164       cost = estimate_num_insns (stmt, &eni_size_weights);
5165       lattice[i] = cost;
5166       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5167 	{
5168 	  tree use = USE_FROM_PTR (use_p);
5169 	  gimple def_stmt;
5170 	  if (TREE_CODE (use) != SSA_NAME)
5171 	    continue;
5172 	  def_stmt = get_gimple_for_ssa_name (use);
5173 	  if (!def_stmt)
5174 	    continue;
5175 	  lattice[i] += lattice[gimple_uid (def_stmt)];
5176 	}
5177       i++;
5178       if (!is_gimple_assign (stmt)
5179 	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5180 	continue;
5181       op0 = gimple_op (stmt, 1);
5182       op1 = gimple_op (stmt, 2);
5183       if (TREE_CODE (op0) != SSA_NAME
5184 	  || TREE_CODE (op1) != SSA_NAME)
5185 	continue;
5186       /* Swap operands if the second one is more expensive.  */
5187       def0 = get_gimple_for_ssa_name (op0);
5188       def1 = get_gimple_for_ssa_name (op1);
5189       if (!def1)
5190 	continue;
5191       swap = false;
5192       if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5193 	swap = true;
5194       if (swap)
5195 	{
5196 	  if (dump_file && (dump_flags & TDF_DETAILS))
5197 	    {
5198 	      fprintf (dump_file, "Swap operands in stmt:\n");
5199 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5200 	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5201 		       def0 ? lattice[gimple_uid (def0)] : 0,
5202 		       lattice[gimple_uid (def1)]);
5203 	    }
5204 	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5205 			     gimple_assign_rhs2_ptr (stmt));
5206 	}
5207     }
5208   XDELETE (lattice);
5209 }
5210 
5211 /* Expand basic block BB from GIMPLE trees to RTL.  */
5212 
5213 static basic_block
5214 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5215 {
5216   gimple_stmt_iterator gsi;
5217   gimple_seq stmts;
5218   gimple stmt = NULL;
5219   rtx_note *note;
5220   rtx_insn *last;
5221   edge e;
5222   edge_iterator ei;
5223 
5224   if (dump_file)
5225     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5226 	     bb->index);
5227 
5228   /* Note that since we are now transitioning from GIMPLE to RTL, we
5229      cannot use the gsi_*_bb() routines because they expect the basic
5230      block to be in GIMPLE, instead of RTL.  Therefore, we need to
5231      access the BB sequence directly.  */
5232   if (optimize)
5233     reorder_operands (bb);
5234   stmts = bb_seq (bb);
5235   bb->il.gimple.seq = NULL;
5236   bb->il.gimple.phi_nodes = NULL;
5237   rtl_profile_for_bb (bb);
5238   init_rtl_bb_info (bb);
5239   bb->flags |= BB_RTL;
5240 
5241   /* Remove the RETURN_EXPR if we may fall through to the exit
5242      instead.  */
5243   gsi = gsi_last (stmts);
5244   if (!gsi_end_p (gsi)
5245       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5246     {
5247       greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5248 
5249       gcc_assert (single_succ_p (bb));
5250       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5251 
5252       if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5253 	  && !gimple_return_retval (ret_stmt))
5254 	{
5255 	  gsi_remove (&gsi, false);
5256 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5257 	}
5258     }
5259 
5260   gsi = gsi_start (stmts);
5261   if (!gsi_end_p (gsi))
5262     {
5263       stmt = gsi_stmt (gsi);
5264       if (gimple_code (stmt) != GIMPLE_LABEL)
5265 	stmt = NULL;
5266     }
5267 
5268   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5269 
5270   if (stmt || elt)
5271     {
5272       last = get_last_insn ();
5273 
5274       if (stmt)
5275 	{
5276 	  expand_gimple_stmt (stmt);
5277 	  gsi_next (&gsi);
5278 	}
5279 
5280       if (elt)
5281 	emit_label (*elt);
5282 
5283       /* Java emits line number notes at the top of labels.
5284 	 ??? Make this go away once line number notes are obsoleted.  */
5285       BB_HEAD (bb) = NEXT_INSN (last);
5286       if (NOTE_P (BB_HEAD (bb)))
5287 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5288       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5289 
5290       maybe_dump_rtl_for_gimple_stmt (stmt, last);
5291     }
5292   else
5293     BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5294 
5295   NOTE_BASIC_BLOCK (note) = bb;
5296 
5297   for (; !gsi_end_p (gsi); gsi_next (&gsi))
5298     {
5299       basic_block new_bb;
5300 
5301       stmt = gsi_stmt (gsi);
5302 
5303       /* If this statement is a non-debug one, and we generate debug
5304 	 insns, then this one might be the last real use of a TERed
5305 	 SSA_NAME, but where there are still some debug uses further
5306 	 down.  Expanding the current SSA name in such further debug
5307 	 uses by their RHS might lead to wrong debug info, as coalescing
5308 	 might make the operands of such RHS be placed into the same
5309 	 pseudo as something else.  Like so:
5310 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
5311 	   use(a_1);
5312 	   a_2 = ...
5313            #DEBUG ... => a_1
5314 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5315	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5316 	 the write to a_2 would actually have clobbered the place which
5317 	 formerly held a_0.
5318 
5319 	 So, instead of that, we recognize the situation, and generate
5320 	 debug temporaries at the last real use of TERed SSA names:
5321 	   a_1 = a_0 + 1;
5322            #DEBUG #D1 => a_1
5323 	   use(a_1);
5324 	   a_2 = ...
5325            #DEBUG ... => #D1
5326 	 */
5327       if (MAY_HAVE_DEBUG_INSNS
5328 	  && SA.values
5329 	  && !is_gimple_debug (stmt))
5330 	{
5331 	  ssa_op_iter iter;
5332 	  tree op;
5333 	  gimple def;
5334 
5335 	  location_t sloc = curr_insn_location ();
5336 
5337 	  /* Look for SSA names that have their last use here (TERed
5338 	     names always have only one real use).  */
5339 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5340 	    if ((def = get_gimple_for_ssa_name (op)))
5341 	      {
5342 		imm_use_iterator imm_iter;
5343 		use_operand_p use_p;
5344 		bool have_debug_uses = false;
5345 
5346 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5347 		  {
5348 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
5349 		      {
5350 			have_debug_uses = true;
5351 			break;
5352 		      }
5353 		  }
5354 
5355 		if (have_debug_uses)
5356 		  {
5357 		    /* OP is a TERed SSA name, with DEF its defining
5358 		       statement, and where OP is used in further debug
5359 		       instructions.  Generate a debug temporary, and
5360 		       replace all uses of OP in debug insns with that
5361 		       temporary.  */
5362 		    gimple debugstmt;
5363 		    tree value = gimple_assign_rhs_to_tree (def);
5364 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
5365 		    rtx val;
5366 		    machine_mode mode;
5367 
5368 		    set_curr_insn_location (gimple_location (def));
5369 
5370 		    DECL_ARTIFICIAL (vexpr) = 1;
5371 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
5372 		    if (DECL_P (value))
5373 		      mode = DECL_MODE (value);
5374 		    else
5375 		      mode = TYPE_MODE (TREE_TYPE (value));
5376 		    DECL_MODE (vexpr) = mode;
5377 
5378 		    val = gen_rtx_VAR_LOCATION
5379 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5380 
5381 		    emit_debug_insn (val);
5382 
5383 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5384 		      {
5385 			if (!gimple_debug_bind_p (debugstmt))
5386 			  continue;
5387 
5388 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5389 			  SET_USE (use_p, vexpr);
5390 
5391 			update_stmt (debugstmt);
5392 		      }
5393 		  }
5394 	      }
5395 	  set_curr_insn_location (sloc);
5396 	}
5397 
5398       currently_expanding_gimple_stmt = stmt;
5399 
5400       /* Expand this statement, then evaluate the resulting RTL and
5401 	 fixup the CFG accordingly.  */
5402       if (gimple_code (stmt) == GIMPLE_COND)
5403 	{
5404 	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5405 	  if (new_bb)
5406 	    return new_bb;
5407 	}
5408       else if (gimple_debug_bind_p (stmt))
5409 	{
5410 	  location_t sloc = curr_insn_location ();
5411 	  gimple_stmt_iterator nsi = gsi;
5412 
5413 	  for (;;)
5414 	    {
5415 	      tree var = gimple_debug_bind_get_var (stmt);
5416 	      tree value;
5417 	      rtx val;
5418 	      machine_mode mode;
5419 
5420 	      if (TREE_CODE (var) != DEBUG_EXPR_DECL
5421 		  && TREE_CODE (var) != LABEL_DECL
5422 		  && !target_for_debug_bind (var))
5423 		goto delink_debug_stmt;
5424 
5425 	      if (gimple_debug_bind_has_value_p (stmt))
5426 		value = gimple_debug_bind_get_value (stmt);
5427 	      else
5428 		value = NULL_TREE;
5429 
5430 	      last = get_last_insn ();
5431 
5432 	      set_curr_insn_location (gimple_location (stmt));
5433 
5434 	      if (DECL_P (var))
5435 		mode = DECL_MODE (var);
5436 	      else
5437 		mode = TYPE_MODE (TREE_TYPE (var));
5438 
5439 	      val = gen_rtx_VAR_LOCATION
5440 		(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5441 
5442 	      emit_debug_insn (val);
5443 
5444 	      if (dump_file && (dump_flags & TDF_DETAILS))
5445 		{
5446 		  /* We can't dump the insn with a TREE where an RTX
5447 		     is expected.  */
5448 		  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5449 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
5450 		  PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5451 		}
5452 
5453 	    delink_debug_stmt:
5454 	      /* In order not to generate too many debug temporaries,
5455 	         we delink all uses of debug statements we already expanded.
5456 		 Therefore debug statements between definition and real
5457 		 use of TERed SSA names will continue to use the SSA name,
5458 		 and not be replaced with debug temps.  */
5459 	      delink_stmt_imm_use (stmt);
5460 
5461 	      gsi = nsi;
5462 	      gsi_next (&nsi);
5463 	      if (gsi_end_p (nsi))
5464 		break;
5465 	      stmt = gsi_stmt (nsi);
5466 	      if (!gimple_debug_bind_p (stmt))
5467 		break;
5468 	    }
5469 
5470 	  set_curr_insn_location (sloc);
5471 	}
5472       else if (gimple_debug_source_bind_p (stmt))
5473 	{
5474 	  location_t sloc = curr_insn_location ();
5475 	  tree var = gimple_debug_source_bind_get_var (stmt);
5476 	  tree value = gimple_debug_source_bind_get_value (stmt);
5477 	  rtx val;
5478 	  machine_mode mode;
5479 
5480 	  last = get_last_insn ();
5481 
5482 	  set_curr_insn_location (gimple_location (stmt));
5483 
5484 	  mode = DECL_MODE (var);
5485 
5486 	  val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5487 				      VAR_INIT_STATUS_UNINITIALIZED);
5488 
5489 	  emit_debug_insn (val);
5490 
5491 	  if (dump_file && (dump_flags & TDF_DETAILS))
5492 	    {
5493 	      /* We can't dump the insn with a TREE where an RTX
5494 		 is expected.  */
5495 	      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5496 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5497 	      PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5498 	    }
5499 
5500 	  set_curr_insn_location (sloc);
5501 	}
5502       else
5503 	{
5504 	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
5505 	  if (call_stmt
5506 	      && gimple_call_tail_p (call_stmt)
5507 	      && disable_tail_calls)
5508 	    gimple_call_set_tail (call_stmt, false);
5509 
5510 	  if (call_stmt && gimple_call_tail_p (call_stmt))
5511 	    {
5512 	      bool can_fallthru;
5513 	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5514 	      if (new_bb)
5515 		{
5516 		  if (can_fallthru)
5517 		    bb = new_bb;
5518 		  else
5519 		    return new_bb;
5520 		}
5521 	    }
5522 	  else
5523 	    {
5524 	      def_operand_p def_p;
5525 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5526 
5527 	      if (def_p != NULL)
5528 		{
5529 		  /* Ignore this stmt if it is in the list of
5530 		     replaceable expressions.  */
5531 		  if (SA.values
5532 		      && bitmap_bit_p (SA.values,
5533 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5534 		    continue;
5535 		}
5536 	      last = expand_gimple_stmt (stmt);
5537 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5538 	    }
5539 	}
5540     }
5541 
5542   currently_expanding_gimple_stmt = NULL;
5543 
5544   /* Expand implicit goto and convert goto_locus.  */
5545   FOR_EACH_EDGE (e, ei, bb->succs)
5546     {
5547       if (e->goto_locus != UNKNOWN_LOCATION)
5548 	set_curr_insn_location (e->goto_locus);
5549       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5550 	{
5551 	  emit_jump (label_rtx_for_bb (e->dest));
5552 	  e->flags &= ~EDGE_FALLTHRU;
5553 	}
5554     }
5555 
5556   /* Expanded RTL can create a jump as the last instruction of the block.
5557      That jump might later be assumed to be a jump to the successor and
5558      break edge insertion.  Insert a dummy move to prevent this.  PR41440.  */
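  /* Sketch of the fix (the register is hypothetical): if the last emitted
     insn is a jump but the block really falls through, a no-op move such as

       (set (reg:SI tmp) (reg:SI tmp))

     is appended so that BB_END is not a JUMP_P and edge insertion on the
     fallthru edge stays well-formed.  */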
5559   if (single_succ_p (bb)
5560       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5561       && (last = get_last_insn ())
5562       && JUMP_P (last))
5563     {
5564       rtx dummy = gen_reg_rtx (SImode);
5565       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5566     }
5567 
5568   do_pending_stack_adjust ();
5569 
5570   /* Find the block tail.  The last insn in the block is the insn
5571      before a barrier and/or table jump insn.  */
5572   last = get_last_insn ();
5573   if (BARRIER_P (last))
5574     last = PREV_INSN (last);
5575   if (JUMP_TABLE_DATA_P (last))
5576     last = PREV_INSN (PREV_INSN (last));
5577   BB_END (bb) = last;
5578 
5579   update_bb_for_insn (bb);
5580 
5581   return bb;
5582 }
5583 
5584 
5585 /* Create a basic block for initialization code.  */
5586 
5587 static basic_block
5588 construct_init_block (void)
5589 {
5590   basic_block init_block, first_block;
5591   edge e = NULL;
5592   int flags;
5593 
5594   /* Multiple entry points not supported yet.  */
5595   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5596   init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5597   init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5598   ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5599   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5600 
5601   e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5602 
5603   /* When the entry edge points to the first basic block, we don't need
5604      a jump; otherwise we have to jump to the proper target.  */
5605   if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5606     {
5607       tree label = gimple_block_label (e->dest);
5608 
5609       emit_jump (label_rtx (label));
5610       flags = 0;
5611     }
5612   else
5613     flags = EDGE_FALLTHRU;
5614 
5615   init_block = create_basic_block (NEXT_INSN (get_insns ()),
5616 				   get_last_insn (),
5617 				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
5618   init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5619   init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5620   add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5621   if (e)
5622     {
5623       first_block = e->dest;
5624       redirect_edge_succ (e, init_block);
5625       e = make_edge (init_block, first_block, flags);
5626     }
5627   else
5628     e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5629   e->probability = REG_BR_PROB_BASE;
5630   e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5631 
5632   update_bb_for_insn (init_block);
5633   return init_block;
5634 }
5635 
5636 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5637    found in the block tree.  */
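
/* For example (an illustrative block tree, not taken from any testcase):

     DECL_INITIAL (fndecl)            BLOCK_NUMBER = 0
       inner lexical scope            BLOCK_NUMBER = 1
         further nested scope         BLOCK_NUMBER = 2

   Recording the depth lets later code (e.g. change_scope) find the common
   parent of two scopes cheaply.  */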
5638 
5639 static void
5640 set_block_levels (tree block, int level)
5641 {
5642   while (block)
5643     {
5644       BLOCK_NUMBER (block) = level;
5645       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5646       block = BLOCK_CHAIN (block);
5647     }
5648 }
5649 
5650 /* Create a block containing landing pads and similar stuff.  */
5651 
5652 static void
5653 construct_exit_block (void)
5654 {
5655   rtx_insn *head = get_last_insn ();
5656   rtx_insn *end;
5657   basic_block exit_block;
5658   edge e, e2;
5659   unsigned ix;
5660   edge_iterator ei;
5661   basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5662   rtx_insn *orig_end = BB_END (prev_bb);
5663 
5664   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5665 
5666   /* Make sure the locus is set to the end of the function, so that
5667      epilogue line numbers and warnings are set properly.  */
5668   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5669     input_location = cfun->function_end_locus;
5670 
5671   /* Generate rtl for function exit.  */
5672   expand_function_end ();
5673 
5674   end = get_last_insn ();
5675   if (head == end)
5676     return;
5677   /* While emitting the function end we may have moved the end of the last basic
5678      block.  */
5679   BB_END (prev_bb) = orig_end;
5680   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5681     head = NEXT_INSN (head);
5682   /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5683      bb frequency counting will be confused.  Any instructions before that
5684      label are emitted for the case where PREV_BB falls through into the
5685      exit block, so append those instructions to prev_bb in that case.  */
5686   if (NEXT_INSN (head) != return_label)
5687     {
5688       while (NEXT_INSN (head) != return_label)
5689 	{
5690 	  if (!NOTE_P (NEXT_INSN (head)))
5691 	    BB_END (prev_bb) = NEXT_INSN (head);
5692 	  head = NEXT_INSN (head);
5693 	}
5694     }
5695   exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5696   exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5697   exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5698   add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5699 
5700   ix = 0;
5701   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5702     {
5703       e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5704       if (!(e->flags & EDGE_ABNORMAL))
5705 	redirect_edge_succ (e, exit_block);
5706       else
5707 	ix++;
5708     }
5709 
5710   e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5711   e->probability = REG_BR_PROB_BASE;
5712   e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5713   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5714     if (e2 != e)
5715       {
5716 	e->count -= e2->count;
5717 	exit_block->count -= e2->count;
5718 	exit_block->frequency -= EDGE_FREQUENCY (e2);
5719       }
5720   if (e->count < 0)
5721     e->count = 0;
5722   if (exit_block->count < 0)
5723     exit_block->count = 0;
5724   if (exit_block->frequency < 0)
5725     exit_block->frequency = 0;
5726   update_bb_for_insn (exit_block);
5727 }
5728 
5729 /* Helper function for discover_nonconstant_array_refs.
5730    Look for ARRAY_REF nodes with non-constant indexes and mark them
5731    addressable.  */
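
/* Effect on a simple case (names invented for illustration):

     int a[4];
     ... = a[i];    // i is not a compile-time constant

   Since 'a' could otherwise end up living in a single register, the
   variable index makes the walker below set TREE_ADDRESSABLE (a) so the
   array gets a memory home that the element access can address.  */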
5732 
5733 static tree
5734 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5735 				   void *data ATTRIBUTE_UNUSED)
5736 {
5737   tree t = *tp;
5738 
5739   if (IS_TYPE_OR_DECL_P (t))
5740     *walk_subtrees = 0;
5741   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5742     {
5743       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5744 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5745 	      && (!TREE_OPERAND (t, 2)
5746 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5747 	     || (TREE_CODE (t) == COMPONENT_REF
5748		 && (!TREE_OPERAND (t, 2)
5749 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5750 	     || TREE_CODE (t) == BIT_FIELD_REF
5751 	     || TREE_CODE (t) == REALPART_EXPR
5752 	     || TREE_CODE (t) == IMAGPART_EXPR
5753 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
5754 	     || CONVERT_EXPR_P (t))
5755 	t = TREE_OPERAND (t, 0);
5756 
5757       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5758 	{
5759 	  t = get_base_address (t);
5760 	  if (t && DECL_P (t)
5761               && DECL_MODE (t) != BLKmode)
5762 	    TREE_ADDRESSABLE (t) = 1;
5763 	}
5764 
5765       *walk_subtrees = 0;
5766     }
5767 
5768   return NULL_TREE;
5769 }
5770 
5771 /* RTL expansion is not able to compile array references with variable
5772    offsets for arrays stored in a single register.  Discover such
5773    expressions and mark variables as addressable to avoid this
5774    scenario.  */
5775 
5776 static void
5777 discover_nonconstant_array_refs (void)
5778 {
5779   basic_block bb;
5780   gimple_stmt_iterator gsi;
5781 
5782   FOR_EACH_BB_FN (bb, cfun)
5783     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5784       {
5785 	gimple stmt = gsi_stmt (gsi);
5786 	if (!is_gimple_debug (stmt))
5787 	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5788       }
5789 }
5790 
5791 /* This function sets crtl->args.internal_arg_pointer to a virtual
5792    register if DRAP is needed.  The local register allocator will replace
5793    virtual_incoming_args_rtx with the virtual register.  */
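
/* Rough picture (simplified; not a statement about any particular target):
   when the incoming stack alignment is smaller than what the function
   needs and the frame size is also dynamic (e.g. alloca), the target may
   hand back a DRAP register below; incoming arguments are then addressed
   through that register rather than through the realigned stack or frame
   pointer, which is why crtl->args.internal_arg_pointer is redirected.  */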
5794 
5795 static void
5796 expand_stack_alignment (void)
5797 {
5798   rtx drap_rtx;
5799   unsigned int preferred_stack_boundary;
5800 
5801   if (! SUPPORTS_STACK_ALIGNMENT)
5802     return;
5803 
5804   if (cfun->calls_alloca
5805       || cfun->has_nonlocal_label
5806       || crtl->has_nonlocal_goto)
5807     crtl->need_drap = true;
5808 
5809   /* Call update_stack_boundary here again to update incoming stack
5810      boundary.  It may set incoming stack alignment to a different
5811      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
5812      use the minimum incoming stack alignment to check if it is OK
5813      to perform sibcall optimization since sibcall optimization will
5814      only align the outgoing stack to incoming stack boundary.  */
5815   if (targetm.calls.update_stack_boundary)
5816     targetm.calls.update_stack_boundary ();
5817 
5818   /* The incoming stack frame has to be aligned at least at
5819      parm_stack_boundary.  */
5820   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5821 
5822   /* Update crtl->stack_alignment_estimated and use it later to align
5823      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
5824      exceptions since callgraph doesn't collect incoming stack alignment
5825      in this case.  */
5826   if (cfun->can_throw_non_call_exceptions
5827       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5828     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5829   else
5830     preferred_stack_boundary = crtl->preferred_stack_boundary;
5831   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5832     crtl->stack_alignment_estimated = preferred_stack_boundary;
5833   if (preferred_stack_boundary > crtl->stack_alignment_needed)
5834     crtl->stack_alignment_needed = preferred_stack_boundary;
5835 
5836   gcc_assert (crtl->stack_alignment_needed
5837 	      <= crtl->stack_alignment_estimated);
5838 
5839   crtl->stack_realign_needed
5840     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5841   crtl->stack_realign_tried = crtl->stack_realign_needed;
5842 
5843   crtl->stack_realign_processed = true;
5844 
5845   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5846      alignment.  */
5847   gcc_assert (targetm.calls.get_drap_rtx != NULL);
5848   drap_rtx = targetm.calls.get_drap_rtx ();
5849 
5850   /* stack_realign_drap and drap_rtx must match.  */
5851   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5852 
5853   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
5854   if (NULL != drap_rtx)
5855     {
5856       crtl->args.internal_arg_pointer = drap_rtx;
5857 
5858       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5859          needed. */
5860       fixup_tail_calls ();
5861     }
5862 }
5863 
5864 
5865 static void
5866 expand_main_function (void)
5867 {
5868 #if (defined(INVOKE__main)				\
5869      || (!defined(HAS_INIT_SECTION)			\
5870 	 && !defined(INIT_SECTION_ASM_OP)		\
5871 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5872   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5873 #endif
5874 }
5875 
5876 
5877 /* Expand code to initialize the stack_protect_guard.  This is invoked at
5878    the beginning of a function to be protected.  */
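
/* In effect (the guard symbol name is the common one, not guaranteed on
   every target), the code emitted below amounts to

     <frame canary slot> = __stack_chk_guard;

   preferably via the stack_protect_set pattern so that the guard value
   does not linger in a scratch register after the copy.  */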
5879 
5880 #ifndef HAVE_stack_protect_set
5881 # define HAVE_stack_protect_set		0
5882 # define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
5883 #endif
5884 
5885 static void
5886 stack_protect_prologue (void)
5887 {
5888   tree guard_decl = targetm.stack_protect_guard ();
5889   rtx x, y;
5890 
5891   x = expand_normal (crtl->stack_protect_guard);
5892   y = expand_normal (guard_decl);
5893 
5894   /* Allow the target to copy from Y to X without leaking Y into a
5895      register.  */
5896   if (HAVE_stack_protect_set)
5897     {
5898       rtx insn = gen_stack_protect_set (x, y);
5899       if (insn)
5900 	{
5901 	  emit_insn (insn);
5902 	  return;
5903 	}
5904     }
5905 
5906   /* Otherwise do a straight move.  */
5907   emit_move_insn (x, y);
5908 }
5909 
5910 /* Translate the intermediate representation contained in the CFG
5911    from GIMPLE trees to RTL.
5912 
5913    We do conversion per basic block and preserve/update the tree CFG.
5914    This implies we have to do some magic as the CFG can simultaneously
5915    consist of basic blocks containing RTL and GIMPLE trees.  This can
5916    confuse the CFG hooks, so be careful to not manipulate CFG during
5917    the expansion.  */
5918 
5919 namespace {
5920 
5921 const pass_data pass_data_expand =
5922 {
5923   RTL_PASS, /* type */
5924   "expand", /* name */
5925   OPTGROUP_NONE, /* optinfo_flags */
5926   TV_EXPAND, /* tv_id */
5927   ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5928     | PROP_gimple_lcx
5929     | PROP_gimple_lvec ), /* properties_required */
5930   PROP_rtl, /* properties_provided */
5931   ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5932   0, /* todo_flags_start */
5933   0, /* todo_flags_finish */
5934 };
5935 
5936 class pass_expand : public rtl_opt_pass
5937 {
5938 public:
5939   pass_expand (gcc::context *ctxt)
5940     : rtl_opt_pass (pass_data_expand, ctxt)
5941   {}
5942 
5943   /* opt_pass methods: */
5944   virtual unsigned int execute (function *);
5945 
5946 }; // class pass_expand
5947 
5948 unsigned int
5949 pass_expand::execute (function *fun)
5950 {
5951   basic_block bb, init_block;
5952   sbitmap blocks;
5953   edge_iterator ei;
5954   edge e;
5955   rtx_insn *var_seq, *var_ret_seq;
5956   unsigned i;
5957 
5958   timevar_push (TV_OUT_OF_SSA);
5959   rewrite_out_of_ssa (&SA);
5960   timevar_pop (TV_OUT_OF_SSA);
5961   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5962 
5963   if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
5964     {
5965       gimple_stmt_iterator gsi;
5966       FOR_EACH_BB_FN (bb, cfun)
5967 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5968 	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
5969 	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
5970     }
5971 
5972   /* Make sure all values used by the optimization passes have sane
5973      defaults.  */
5974   reg_renumber = 0;
5975 
5976   /* Some backends want to know that we are expanding to RTL.  */
5977   currently_expanding_to_rtl = 1;
5978   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
5979   free_dominance_info (CDI_DOMINATORS);
5980 
5981   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
5982 
5983   if (chkp_function_instrumented_p (current_function_decl))
5984     chkp_reset_rtl_bounds ();
5985 
5986   insn_locations_init ();
5987   if (!DECL_IS_BUILTIN (current_function_decl))
5988     {
5989       /* Eventually, all FEs should explicitly set function_start_locus.  */
5990       if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5991 	set_curr_insn_location
5992 	  (DECL_SOURCE_LOCATION (current_function_decl));
5993       else
5994 	set_curr_insn_location (fun->function_start_locus);
5995     }
5996   else
5997     set_curr_insn_location (UNKNOWN_LOCATION);
5998   prologue_location = curr_insn_location ();
5999 
6000 #ifdef INSN_SCHEDULING
6001   init_sched_attrs ();
6002 #endif
6003 
6004   /* Make sure first insn is a note even if we don't want linenums.
6005      This makes sure the first insn will never be deleted.
6006      Also, final expects a note to appear there.  */
6007   emit_note (NOTE_INSN_DELETED);
6008 
6009   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
6010   discover_nonconstant_array_refs ();
6011 
6012   targetm.expand_to_rtl_hook ();
6013   crtl->stack_alignment_needed = STACK_BOUNDARY;
6014   crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
6015   crtl->stack_alignment_estimated = 0;
6016   crtl->preferred_stack_boundary = STACK_BOUNDARY;
6017   fun->cfg->max_jumptable_ents = 0;
6018 
6019   /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
6020      of the function section at expansion time to predict the distance of calls.  */
6021   resolve_unique_section (current_function_decl, 0, flag_function_sections);
6022 
6023   /* Expand the variables recorded during gimple lowering.  */
6024   timevar_push (TV_VAR_EXPAND);
6025   start_sequence ();
6026 
6027   var_ret_seq = expand_used_vars ();
6028 
6029   var_seq = get_insns ();
6030   end_sequence ();
6031   timevar_pop (TV_VAR_EXPAND);
6032 
6033   /* Honor stack protection warnings.  */
6034   if (warn_stack_protect)
6035     {
6036       if (fun->calls_alloca)
6037 	warning (OPT_Wstack_protector,
6038 		 "stack protector not protecting local variables: "
6039 		 "variable length buffer");
6040       if (has_short_buffer && !crtl->stack_protect_guard)
6041 	warning (OPT_Wstack_protector,
6042 		 "stack protector not protecting function: "
6043 		 "all local arrays are less than %d bytes long",
6044 		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6045     }
6046 
6047   /* Set up parameters and prepare for return, for the function.  */
6048   expand_function_start (current_function_decl);
6049 
6050   /* If we emitted any instructions for setting up the variables,
6051      emit them before the FUNCTION_START note.  */
6052   if (var_seq)
6053     {
6054       emit_insn_before (var_seq, parm_birth_insn);
6055 
6056       /* In expand_function_end we'll insert the alloca save/restore
6057	 before parm_birth_insn.  We've just inserted an alloca call.
6058 	 Adjust the pointer to match.  */
6059       parm_birth_insn = var_seq;
6060     }
6061 
6062   /* Now that we also have the parameter RTXs, copy them over to our
6063      partitions.  */
6064   for (i = 0; i < SA.map->num_partitions; i++)
6065     {
6066       tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
6067 
6068       if (TREE_CODE (var) != VAR_DECL
6069 	  && !SA.partition_to_pseudo[i])
6070 	SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
6071       gcc_assert (SA.partition_to_pseudo[i]);
6072 
6073       /* If this decl was marked as living in multiple places, reset
6074 	 this now to NULL.  */
6075       if (DECL_RTL_IF_SET (var) == pc_rtx)
6076 	SET_DECL_RTL (var, NULL);
6077 
6078       /* Some RTL parts really want to look at DECL_RTL(x) when x
6079 	 was a decl marked in REG_ATTR or MEM_ATTR.  We could use
6080	 SET_DECL_RTL here to make this available, but that would mean
6081	 selecting one of the potentially many RTLs for one DECL.  Instead
6082 	 of doing that we simply reset the MEM_EXPR of the RTL in question,
6083 	 then nobody can get at it and hence nobody can call DECL_RTL on it.  */
6084       if (!DECL_RTL_SET_P (var))
6085 	{
6086 	  if (MEM_P (SA.partition_to_pseudo[i]))
6087 	    set_mem_expr (SA.partition_to_pseudo[i], NULL);
6088 	}
6089     }
6090 
6091   /* If we have a class containing differently aligned pointers
6092      we need to merge those into the corresponding RTL pointer
6093      alignment.  */
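  /* Sketch (a hypothetical partition): if ptr_1, known 16-byte aligned,
     and ptr_2, known only 4-byte aligned, were coalesced into the same
     pseudo, the loop below calls mark_reg_pointer for each name and the
     recorded pointer alignment ends up as the smaller of the two, keeping
     the information conservative.  */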
6094   for (i = 1; i < num_ssa_names; i++)
6095     {
6096       tree name = ssa_name (i);
6097       int part;
6098       rtx r;
6099 
6100       if (!name
6101 	  /* We might have generated new SSA names in
6102 	     update_alias_info_with_stack_vars.  They will have a NULL
6103	     defining statement, and won't be part of the partitioning,
6104 	     so ignore those.  */
6105 	  || !SSA_NAME_DEF_STMT (name))
6106 	continue;
6107       part = var_to_partition (SA.map, name);
6108       if (part == NO_PARTITION)
6109 	continue;
6110 
6111       /* Adjust all partition members to get the underlying decl of
6112 	 the representative which we might have created in expand_one_var.  */
6113       if (SSA_NAME_VAR (name) == NULL_TREE)
6114 	{
6115 	  tree leader = partition_to_var (SA.map, part);
6116 	  gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
6117 	  replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
6118 	}
6119       if (!POINTER_TYPE_P (TREE_TYPE (name)))
6120 	continue;
6121 
6122       r = SA.partition_to_pseudo[part];
6123       if (REG_P (r))
6124 	mark_reg_pointer (r, get_pointer_alignment (name));
6125     }
6126 
6127   /* If this function is `main', emit a call to `__main'
6128      to run global initializers, etc.  */
6129   if (DECL_NAME (current_function_decl)
6130       && MAIN_NAME_P (DECL_NAME (current_function_decl))
6131       && DECL_FILE_SCOPE_P (current_function_decl))
6132     expand_main_function ();
6133 
6134   /* Initialize the stack_protect_guard field.  This must happen after the
6135      call to __main (if any) so that the external decl is initialized.  */
6136   if (crtl->stack_protect_guard)
6137     stack_protect_prologue ();
6138 
6139   expand_phi_nodes (&SA);
6140 
6141   /* Register rtl specific functions for cfg.  */
6142   rtl_register_cfg_hooks ();
6143 
6144   init_block = construct_init_block ();
6145 
6146   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
6147      remaining edges later.  */
6148   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6149     e->flags &= ~EDGE_EXECUTABLE;
6150 
6151   lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6152   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6153 		  next_bb)
6154     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6155 
6156   if (MAY_HAVE_DEBUG_INSNS)
6157     expand_debug_locations ();
6158 
6159   if (deep_ter_debug_map)
6160     {
6161       delete deep_ter_debug_map;
6162       deep_ter_debug_map = NULL;
6163     }
6164 
6165   /* Free stuff we no longer need after GIMPLE optimizations.  */
6166   free_dominance_info (CDI_DOMINATORS);
6167   free_dominance_info (CDI_POST_DOMINATORS);
6168   delete_tree_cfg_annotations ();
6169 
6170   timevar_push (TV_OUT_OF_SSA);
6171   finish_out_of_ssa (&SA);
6172   timevar_pop (TV_OUT_OF_SSA);
6173 
6174   timevar_push (TV_POST_EXPAND);
6175   /* We are no longer in SSA form.  */
6176   fun->gimple_df->in_ssa_p = false;
6177   loops_state_clear (LOOP_CLOSED_SSA);
6178 
6179   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6180      conservatively to true until they are all profile aware.  */
6181   delete lab_rtx_for_bb;
6182   free_histograms ();
6183 
6184   construct_exit_block ();
6185   insn_locations_finalize ();
6186 
6187   if (var_ret_seq)
6188     {
6189       rtx_insn *after = return_label;
6190       rtx_insn *next = NEXT_INSN (after);
6191       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6192 	after = next;
6193       emit_insn_after (var_ret_seq, after);
6194     }
6195 
6196   /* Zap the tree EH table.  */
6197   set_eh_throw_stmt_table (fun, NULL);
6198 
6199   /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6200      to split edges, which edge insertions might do.  */
6201   rebuild_jump_labels (get_insns ());
6202 
6203   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6204 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6205     {
6206       edge e;
6207       edge_iterator ei;
6208       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6209 	{
6210 	  if (e->insns.r)
6211 	    {
6212 	      rebuild_jump_labels_chain (e->insns.r);
6213 	      /* Put insns after parm birth, but before
6214		 NOTE_INSN_FUNCTION_BEG.  */
6215 	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6216 		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6217 		{
6218 		  rtx_insn *insns = e->insns.r;
6219 		  e->insns.r = NULL;
6220 		  if (NOTE_P (parm_birth_insn)
6221 		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6222 		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6223 		  else
6224 		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6225 		}
6226 	      else
6227 		commit_one_edge_insertion (e);
6228 	    }
6229 	  else
6230 	    ei_next (&ei);
6231 	}
6232     }
6233 
6234   /* We're done expanding trees to RTL.  */
6235   currently_expanding_to_rtl = 0;
6236 
6237   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6238 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6239     {
6240       edge e;
6241       edge_iterator ei;
6242       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6243 	{
6244 	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
6245 	  e->flags &= ~EDGE_EXECUTABLE;
6246 
6247 	  /* At the moment not all abnormal edges match the RTL
6248 	     representation.  It is safe to remove them here as
6249 	     find_many_sub_basic_blocks will rediscover them.
6250 	     In the future we should get this fixed properly.  */
6251 	  if ((e->flags & EDGE_ABNORMAL)
6252 	      && !(e->flags & EDGE_SIBCALL))
6253 	    remove_edge (e);
6254 	  else
6255 	    ei_next (&ei);
6256 	}
6257     }
6258 
6259   blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
6260   bitmap_ones (blocks);
6261   find_many_sub_basic_blocks (blocks);
6262   sbitmap_free (blocks);
6263   purge_all_dead_edges ();
6264 
6265   expand_stack_alignment ();
6266 
6267   /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6268      function.  */
6269   if (crtl->tail_call_emit)
6270     fixup_tail_calls ();
6271 
6272   /* After initial rtl generation, call back to finish generating
6273      exception support code.  We need to do this before cleaning up
6274      the CFG as the code does not expect dead landing pads.  */
6275   if (fun->eh->region_tree != NULL)
6276     finish_eh_generation ();
6277 
6278   /* Remove unreachable blocks, otherwise we cannot compute dominators
6279      which are needed for loop state verification.  As a side-effect
6280      this also compacts blocks.
6281      ???  We cannot remove trivially dead insns here as for example
6282      the DRAP reg on i?86 is not magically live at this point.
6283      gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
6284   cleanup_cfg (CLEANUP_NO_INSN_DEL);
6285 
6286 #ifdef ENABLE_CHECKING
6287   verify_flow_info ();
6288 #endif
6289 
6290   /* Initialize pseudos allocated for hard registers.  */
6291   emit_initial_value_sets ();
6292 
6293   /* And finally unshare all RTL.  */
6294   unshare_all_rtl ();
6295 
6296   /* There's no need to defer outputting this function any more; we
6297      know we want to output it.  */
6298   DECL_DEFER_OUTPUT (current_function_decl) = 0;
6299 
6300   /* Now that we're done expanding trees to RTL, we shouldn't have any
6301      more CONCATs anywhere.  */
6302   generating_concat_p = 0;
6303 
6304   if (dump_file)
6305     {
6306       fprintf (dump_file,
6307 	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6308       /* And the pass manager will dump RTL for us.  */
6309     }
6310 
6311   /* If we're emitting a nested function, make sure its parent gets
6312      emitted as well.  Doing otherwise confuses debug info.  */
6313     {
6314       tree parent;
6315       for (parent = DECL_CONTEXT (current_function_decl);
6316 	   parent != NULL_TREE;
6317 	   parent = get_containing_scope (parent))
6318 	if (TREE_CODE (parent) == FUNCTION_DECL)
6319 	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6320     }
6321 
6322   /* We are now committed to emitting code for this function.  Do any
6323      preparation, such as emitting abstract debug info for the inline
6324      before it gets mangled by optimization.  */
6325   if (cgraph_function_possibly_inlined_p (current_function_decl))
6326     (*debug_hooks->outlining_inline_function) (current_function_decl);
6327 
6328   TREE_ASM_WRITTEN (current_function_decl) = 1;
6329 
6330   /* After expanding, the return labels are no longer needed. */
6331   return_label = NULL;
6332   naked_return_label = NULL;
6333 
6334   /* After expanding, the tm_restart map is no longer needed.  */
6335   if (fun->gimple_df->tm_restart)
6336     fun->gimple_df->tm_restart = NULL;
6337 
6338   /* Tag the blocks with a depth number so that change_scope can find
6339      the common parent easily.  */
6340   set_block_levels (DECL_INITIAL (fun->decl), 0);
6341   default_rtl_profile ();
6342 
6343   timevar_pop (TV_POST_EXPAND);
6344 
6345   return 0;
6346 }
6347 
6348 } // anon namespace
6349 
6350 rtl_opt_pass *
6351 make_pass_expand (gcc::context *ctxt)
6352 {
6353   return new pass_expand (ctxt);
6354 }
6355