xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cfgexpand.c (revision 181254a7b1bdde6873432bffef2d2decc4b5c22f)
1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber.  */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "params.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
69 #include "cfgloop.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
71 #include "stringpool.h"
72 #include "attribs.h"
73 #include "asan.h"
74 #include "tree-ssa-address.h"
75 #include "output.h"
76 #include "builtins.h"
77 #include "tree-chkp.h"
78 #include "rtl-chkp.h"
79 
80 /* Some systems use __main in a way incompatible with its use in gcc; in these
81    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
82    give the same symbol without quotes for an alternative entry point.  You
83    must define both, or neither.  */
84 #ifndef NAME__MAIN
85 #define NAME__MAIN "__main"
86 #endif
87 
88 /* This variable holds information helping the rewriting of SSA trees
89    into RTL.  */
90 struct ssaexpand SA;
91 
92 /* This variable holds the currently expanded gimple statement for purposes
93    of communicating the profile info to the builtin expanders.  */
94 gimple *currently_expanding_gimple_stmt;
95 
96 static rtx expand_debug_expr (tree);
97 
98 static bool defer_stack_allocation (tree, bool);
99 
100 static void record_alignment_for_reg_var (unsigned int);
101 
102 /* Return an expression tree corresponding to the RHS of GIMPLE
103    statement STMT.  */
104 
105 tree
106 gimple_assign_rhs_to_tree (gimple *stmt)
107 {
108   tree t;
109   enum gimple_rhs_class grhs_class;
110 
111   grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
112 
113   if (grhs_class == GIMPLE_TERNARY_RHS)
114     t = build3 (gimple_assign_rhs_code (stmt),
115 		TREE_TYPE (gimple_assign_lhs (stmt)),
116 		gimple_assign_rhs1 (stmt),
117 		gimple_assign_rhs2 (stmt),
118 		gimple_assign_rhs3 (stmt));
119   else if (grhs_class == GIMPLE_BINARY_RHS)
120     t = build2 (gimple_assign_rhs_code (stmt),
121 		TREE_TYPE (gimple_assign_lhs (stmt)),
122 		gimple_assign_rhs1 (stmt),
123 		gimple_assign_rhs2 (stmt));
124   else if (grhs_class == GIMPLE_UNARY_RHS)
125     t = build1 (gimple_assign_rhs_code (stmt),
126 		TREE_TYPE (gimple_assign_lhs (stmt)),
127 		gimple_assign_rhs1 (stmt));
128   else if (grhs_class == GIMPLE_SINGLE_RHS)
129     {
130       t = gimple_assign_rhs1 (stmt);
131       /* Avoid modifying this tree in place below.  */
132       if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
133 	   && gimple_location (stmt) != EXPR_LOCATION (t))
134 	  || (gimple_block (stmt)
135 	      && currently_expanding_to_rtl
136 	      && EXPR_P (t)))
137 	t = copy_node (t);
138     }
139   else
140     gcc_unreachable ();
141 
142   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
143     SET_EXPR_LOCATION (t, gimple_location (stmt));
144 
145   return t;
146 }
147 
148 
149 #ifndef STACK_ALIGNMENT_NEEDED
150 #define STACK_ALIGNMENT_NEEDED 1
151 #endif
152 
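/* SSAVAR maps an SSA_NAME to its underlying variable (SSA_NAME_VAR, which
   may be NULL_TREE for anonymous SSA names) and leaves other trees as-is.  */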
153 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
154 
155 /* Choose either CUR or NEXT as the leader DECL for a partition.
156    Prefer ignored decls, to simplify debug dumps and reduce ambiguity
157    arising from the same user variable being in multiple partitions (this is
158    less likely for compiler-introduced temps).  */
159 
160 static tree
161 leader_merge (tree cur, tree next)
162 {
163   if (cur == NULL || cur == next)
164     return next;
165 
166   if (DECL_P (cur) && DECL_IGNORED_P (cur))
167     return cur;
168 
169   if (DECL_P (next) && DECL_IGNORED_P (next))
170     return next;
171 
172   return cur;
173 }
174 
175 /* Associate declaration T with storage space X.  If T is not an
176    SSA name this is exactly SET_DECL_RTL; otherwise associate the
177    partition of T with X.  */
178 static inline void
179 set_rtl (tree t, rtx x)
180 {
181   gcc_checking_assert (!x
182 		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
183 		       || (use_register_for_decl (t)
184 			   ? (REG_P (x)
185 			      || (GET_CODE (x) == CONCAT
186 				  && (REG_P (XEXP (x, 0))
187 				      || SUBREG_P (XEXP (x, 0)))
188 				  && (REG_P (XEXP (x, 1))
189 				      || SUBREG_P (XEXP (x, 1))))
190 			      /* We need to accept PARALLELs for RESULT_DECLs
191 				 because of vector types with BLKmode returned
192 				 in multiple registers, but they are supposed
193 				 to be uncoalesced.  */
194 			      || (GET_CODE (x) == PARALLEL
195 				  && SSAVAR (t)
196 				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
197 				  && (GET_MODE (x) == BLKmode
198 				      || !flag_tree_coalesce_vars)))
199 			   : (MEM_P (x) || x == pc_rtx
200 			      || (GET_CODE (x) == CONCAT
201 				  && MEM_P (XEXP (x, 0))
202 				  && MEM_P (XEXP (x, 1))))));
203   /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
204      RESULT_DECLs has the expected mode.  For memory, we accept
205      unpromoted modes, since that's what we're likely to get.  For
206      PARM_DECLs and RESULT_DECLs, we'll have been called by
207      set_parm_rtl, which will give us the default def, so we don't
208      have to compute it ourselves.  For RESULT_DECLs, we accept mode
209      mismatches too, as long as we have BLKmode or are not coalescing
210      across variables, so that we don't reject BLKmode PARALLELs or
211      unpromoted REGs.  */
212   gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
213 		       || (SSAVAR (t)
214 			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
215 			   && (promote_ssa_mode (t, NULL) == BLKmode
216 			       || !flag_tree_coalesce_vars))
217 		       || !use_register_for_decl (t)
218 		       || GET_MODE (x) == promote_ssa_mode (t, NULL));
219 
220   if (x)
221     {
222       bool skip = false;
223       tree cur = NULL_TREE;
224       rtx xm = x;
225 
226     retry:
227       if (MEM_P (xm))
228 	cur = MEM_EXPR (xm);
229       else if (REG_P (xm))
230 	cur = REG_EXPR (xm);
231       else if (SUBREG_P (xm))
232 	{
233 	  gcc_assert (subreg_lowpart_p (xm));
234 	  xm = SUBREG_REG (xm);
235 	  goto retry;
236 	}
237       else if (GET_CODE (xm) == CONCAT)
238 	{
239 	  xm = XEXP (xm, 0);
240 	  goto retry;
241 	}
242       else if (GET_CODE (xm) == PARALLEL)
243 	{
244 	  xm = XVECEXP (xm, 0, 0);
245 	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
246 	  xm = XEXP (xm, 0);
247 	  goto retry;
248 	}
249       else if (xm == pc_rtx)
250 	skip = true;
251       else
252 	gcc_unreachable ();
253 
254       tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
255 
256       if (cur != next)
257 	{
258 	  if (MEM_P (x))
259 	    set_mem_attributes (x,
260 				next && TREE_CODE (next) == SSA_NAME
261 				? TREE_TYPE (next)
262 				: next, true);
263 	  else
264 	    set_reg_attrs_for_decl_rtl (next, x);
265 	}
266     }
267 
268   if (TREE_CODE (t) == SSA_NAME)
269     {
270       int part = var_to_partition (SA.map, t);
271       if (part != NO_PARTITION)
272 	{
273 	  if (SA.partition_to_pseudo[part])
274 	    gcc_assert (SA.partition_to_pseudo[part] == x);
275 	  else if (x != pc_rtx)
276 	    SA.partition_to_pseudo[part] = x;
277 	}
278       /* For the benefit of debug information at -O0 (where
279          vartracking doesn't run) record the place also in the base
280          DECL.  For PARMs and RESULTs, do so only when setting the
281          default def.  */
282       if (x && x != pc_rtx && SSA_NAME_VAR (t)
283 	  && (VAR_P (SSA_NAME_VAR (t))
284 	      || SSA_NAME_IS_DEFAULT_DEF (t)))
285 	{
286 	  tree var = SSA_NAME_VAR (t);
287 	  /* If we don't yet have something recorded, just record it now.  */
288 	  if (!DECL_RTL_SET_P (var))
289 	    SET_DECL_RTL (var, x);
290 	  /* If we have it set already to "multiple places" don't
291 	     change this.  */
292 	  else if (DECL_RTL (var) == pc_rtx)
293 	    ;
294 	  /* If we have something recorded and it's not the same place
295 	     as we want to record now, we have multiple partitions for the
296 	     same base variable, with different places.  We can't just
297 	     randomly choose one, hence we have to say that we don't know.
298 	     This only happens with optimization, and there var-tracking
299 	     will figure out the right thing.  */
300 	  else if (DECL_RTL (var) != x)
301 	    SET_DECL_RTL (var, pc_rtx);
302 	}
303     }
304   else
305     SET_DECL_RTL (t, x);
306 }
307 
308 /* This structure holds data relevant to one variable that will be
309    placed in a stack slot.  */
310 struct stack_var
311 {
312   /* The variable.  */
313   tree decl;
314 
315   /* Initially, the size of the variable.  Later, the size of the partition,
316      if this variable becomes its partition's representative.  */
317   poly_uint64 size;
318 
319   /* The *byte* alignment required for this variable.  Or, as with the
320      size, the alignment for this partition.  */
321   unsigned int alignb;
322 
323   /* The partition representative.  */
324   size_t representative;
325 
326   /* The next stack variable in the partition, or EOC.  */
327   size_t next;
328 
329   /* The numbers of conflicting stack variables.  */
330   bitmap conflicts;
331 };
332 
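/* End-of-chain marker for the stack_var::next field.  */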
333 #define EOC  ((size_t)-1)
334 
335 /* We have an array of such objects while deciding allocation.  */
336 static struct stack_var *stack_vars;
337 static size_t stack_vars_alloc;
338 static size_t stack_vars_num;
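/* Mapping from a decl to its index in the stack_vars array above.  */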
339 static hash_map<tree, size_t> *decl_to_stack_part;
340 
341 /* Conflict bitmaps go on this obstack.  This allows us to destroy
342    all of them in one big sweep.  */
343 static bitmap_obstack stack_var_bitmap_obstack;
344 
345 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
346    is non-increasing within each alignment class (see stack_var_cmp).  */
347 static size_t *stack_vars_sorted;
348 
349 /* The phase of the stack frame.  This is the known misalignment of
350    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
351    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
352 static int frame_phase;
353 
354 /* Used during expand_used_vars to remember if we saw any decls for
355    which we'd like to enable stack smashing protection.  */
356 static bool has_protected_decls;
357 
358 /* Used during expand_used_vars.  Remember if we saw a character buffer
359    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
360 static bool has_short_buffer;
361 
362 /* Compute the byte alignment to use for DECL.  Ignore alignment
363    we can't satisfy with the expected alignment of the stack boundary.  */
364 
365 static unsigned int
366 align_local_variable (tree decl)
367 {
368   unsigned int align;
369 
370   if (TREE_CODE (decl) == SSA_NAME)
371     align = TYPE_ALIGN (TREE_TYPE (decl));
372   else
373     {
374       align = LOCAL_DECL_ALIGNMENT (decl);
375       SET_DECL_ALIGN (decl, align);
376     }
377   return align / BITS_PER_UNIT;
378 }
379 
380 /* Align the given offset BASE to ALIGN.  Round up if ALIGN_UP is true,
381    down otherwise.  Return the aligned BASE value.  */
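/* For example, align_base (13, 8, true) yields 16 and
   align_base (13, 8, false) yields 8; ALIGN is assumed to be a power of
   two, which is what makes the mask trick below valid.  */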
382 
383 static inline unsigned HOST_WIDE_INT
384 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
385 {
386   return align_up ? (base + align - 1) & -align : base & -align;
387 }
388 
389 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
390    Return the frame offset.  */
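/* As an illustration: on a FRAME_GROWS_DOWNWARD target with frame_phase == 0,
   frame_offset == -16, SIZE == 24 and ALIGN == 16, the slot is placed at
   aligned_lower_bound (-40, 16) == -48, which becomes both the returned
   offset and the new frame_offset.  */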
391 
392 static poly_int64
393 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
394 {
395   poly_int64 offset, new_frame_offset;
396 
397   if (FRAME_GROWS_DOWNWARD)
398     {
399       new_frame_offset
400 	= aligned_lower_bound (frame_offset - frame_phase - size,
401 			       align) + frame_phase;
402       offset = new_frame_offset;
403     }
404   else
405     {
406       new_frame_offset
407 	= aligned_upper_bound (frame_offset - frame_phase,
408 			       align) + frame_phase;
409       offset = new_frame_offset;
410       new_frame_offset += size;
411     }
412   frame_offset = new_frame_offset;
413 
414   if (frame_offset_overflow (frame_offset, cfun->decl))
415     frame_offset = offset = 0;
416 
417   return offset;
418 }
419 
420 /* Accumulate DECL into STACK_VARS.  */
421 
422 static void
423 add_stack_var (tree decl)
424 {
425   struct stack_var *v;
426 
427   if (stack_vars_num >= stack_vars_alloc)
428     {
429       if (stack_vars_alloc)
430 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
431       else
432 	stack_vars_alloc = 32;
433       stack_vars
434 	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
435     }
436   if (!decl_to_stack_part)
437     decl_to_stack_part = new hash_map<tree, size_t>;
438 
439   v = &stack_vars[stack_vars_num];
440   decl_to_stack_part->put (decl, stack_vars_num);
441 
442   v->decl = decl;
443   tree size = TREE_CODE (decl) == SSA_NAME
444     ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
445     : DECL_SIZE_UNIT (decl);
446   v->size = tree_to_poly_uint64 (size);
447   /* Ensure that all variables have size, so that &a != &b for any two
448      variables that are simultaneously live.  */
449   if (known_eq (v->size, 0U))
450     v->size = 1;
451   v->alignb = align_local_variable (decl);
452   /* An alignment of zero can mightily confuse us later.  */
453   gcc_assert (v->alignb != 0);
454 
455   /* All variables are initially in their own partition.  */
456   v->representative = stack_vars_num;
457   v->next = EOC;
458 
459   /* All variables initially conflict with no other.  */
460   v->conflicts = NULL;
461 
462   /* Ensure that this decl doesn't get put onto the list twice.  */
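  /* (pc_rtx serves as a sentinel DECL_RTL value meaning "queued in
     stack_vars but not yet assigned a slot"; expand_stack_vars and the
     scope-conflict walkers test for it.)  */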
463   set_rtl (decl, pc_rtx);
464 
465   stack_vars_num++;
466 }
467 
468 /* Make the decls associated with luids X and Y conflict.  */
469 
470 static void
471 add_stack_var_conflict (size_t x, size_t y)
472 {
473   struct stack_var *a = &stack_vars[x];
474   struct stack_var *b = &stack_vars[y];
475   if (!a->conflicts)
476     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
477   if (!b->conflicts)
478     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
479   bitmap_set_bit (a->conflicts, y);
480   bitmap_set_bit (b->conflicts, x);
481 }
482 
483 /* Check whether the decls associated with luids X and Y conflict.  */
484 
485 static bool
486 stack_var_conflict_p (size_t x, size_t y)
487 {
488   struct stack_var *a = &stack_vars[x];
489   struct stack_var *b = &stack_vars[y];
490   if (x == y)
491     return false;
492   /* Partitions containing an SSA name result from gimple registers
493      with things like unsupported modes.  They are top-level and
494      hence conflict with everything else.  */
495   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
496     return true;
497 
498   if (!a->conflicts || !b->conflicts)
499     return false;
500   return bitmap_bit_p (a->conflicts, y);
501 }
502 
503 /* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched
504    by add_stack_var, enter its partition number into bitmap DATA.  */
505 
506 static bool
507 visit_op (gimple *, tree op, tree, void *data)
508 {
509   bitmap active = (bitmap)data;
510   op = get_base_address (op);
511   if (op
512       && DECL_P (op)
513       && DECL_RTL_IF_SET (op) == pc_rtx)
514     {
515       size_t *v = decl_to_stack_part->get (op);
516       if (v)
517 	bitmap_set_bit (active, *v);
518     }
519   return false;
520 }
521 
522 /* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched
523    by add_stack_var, record conflicts between it and all currently active
524    other partitions from bitmap DATA.  */
525 
526 static bool
527 visit_conflict (gimple *, tree op, tree, void *data)
528 {
529   bitmap active = (bitmap)data;
530   op = get_base_address (op);
531   if (op
532       && DECL_P (op)
533       && DECL_RTL_IF_SET (op) == pc_rtx)
534     {
535       size_t *v = decl_to_stack_part->get (op);
536       if (v && bitmap_set_bit (active, *v))
537 	{
538 	  size_t num = *v;
539 	  bitmap_iterator bi;
540 	  unsigned i;
541 	  gcc_assert (num < stack_vars_num);
542 	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
543 	    add_stack_var_conflict (num, i);
544 	}
545     }
546   return false;
547 }
548 
549 /* Helper routine for add_scope_conflicts, calculating the active partitions
550    at the end of BB, leaving the result in WORK.  We're called to generate
551    conflicts when FOR_CONFLICT is true; otherwise we're just tracking
552    liveness.  */
553 
554 static void
555 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
556 {
557   edge e;
558   edge_iterator ei;
559   gimple_stmt_iterator gsi;
560   walk_stmt_load_store_addr_fn visit;
561 
562   bitmap_clear (work);
563   FOR_EACH_EDGE (e, ei, bb->preds)
564     bitmap_ior_into (work, (bitmap)e->src->aux);
565 
566   visit = visit_op;
567 
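  /* Walk PHI arguments first: an address of a stack variable appearing in a
     PHI argument keeps its partition live, but does not by itself create
     conflicts (VISIT is still visit_op at this point).  */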
568   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
569     {
570       gimple *stmt = gsi_stmt (gsi);
571       walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
572     }
573   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
574     {
575       gimple *stmt = gsi_stmt (gsi);
576 
577       if (gimple_clobber_p (stmt))
578 	{
579 	  tree lhs = gimple_assign_lhs (stmt);
580 	  size_t *v;
581 	  /* Nested function lowering might introduce LHSs
582 	     that are COMPONENT_REFs.  */
583 	  if (!VAR_P (lhs))
584 	    continue;
585 	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
586 	      && (v = decl_to_stack_part->get (lhs)))
587 	    bitmap_clear_bit (work, *v);
588 	}
589       else if (!is_gimple_debug (stmt))
590 	{
591 	  if (for_conflict
592 	      && visit == visit_op)
593 	    {
594 	      /* If this is the first real instruction in this BB we need
595 	         to add conflicts for everything live at this point now.
596 		 Unlike classical liveness for named objects we can't
597 		 rely on seeing a def/use of the names we're interested in.
598 		 There might merely be indirect loads/stores.  We'd not add any
599 		 conflicts for such partitions.  */
600 	      bitmap_iterator bi;
601 	      unsigned i;
602 	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
603 		{
604 		  struct stack_var *a = &stack_vars[i];
605 		  if (!a->conflicts)
606 		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
607 		  bitmap_ior_into (a->conflicts, work);
608 		}
609 	      visit = visit_conflict;
610 	    }
611 	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
612 	}
613     }
614 }
615 
616 /* Generate stack partition conflicts between all partitions that are
617    simultaneously live.  */
618 
619 static void
620 add_scope_conflicts (void)
621 {
622   basic_block bb;
623   bool changed;
624   bitmap work = BITMAP_ALLOC (NULL);
625   int *rpo;
626   int n_bbs;
627 
628   /* We approximate the live range of a stack variable by taking the first
629      mention of its name as starting point(s), and by the end-of-scope
630      death clobber added by gimplify as ending point(s) of the range.
631      This over-approximates in cases where we have, for instance, moved an
632      address-taken operation upward without also moving a dereference of it
633      upward.  But it is conservatively correct, as a variable can never hold
634      values before its name is mentioned at least once.
635 
636      We then do a mostly classical bitmap liveness algorithm.  */
637 
638   FOR_ALL_BB_FN (bb, cfun)
639     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
640 
641   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
642   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
643 
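  /* Iterate the liveness propagation to a fixed point; visiting blocks in
     reverse post-order lets most information flow forward in one pass.  */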
644   changed = true;
645   while (changed)
646     {
647       int i;
648       changed = false;
649       for (i = 0; i < n_bbs; i++)
650 	{
651 	  bitmap active;
652 	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
653 	  active = (bitmap)bb->aux;
654 	  add_scope_conflicts_1 (bb, work, false);
655 	  if (bitmap_ior_into (active, work))
656 	    changed = true;
657 	}
658     }
659 
660   FOR_EACH_BB_FN (bb, cfun)
661     add_scope_conflicts_1 (bb, work, true);
662 
663   free (rpo);
664   BITMAP_FREE (work);
665   FOR_ALL_BB_FN (bb, cfun)
666     BITMAP_FREE (bb->aux);
667 }
668 
669 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
670    sorting an array of indices by the properties of the object.  */
671 
672 static int
673 stack_var_cmp (const void *a, const void *b)
674 {
675   size_t ia = *(const size_t *)a;
676   size_t ib = *(const size_t *)b;
677   unsigned int aligna = stack_vars[ia].alignb;
678   unsigned int alignb = stack_vars[ib].alignb;
679   poly_int64 sizea = stack_vars[ia].size;
680   poly_int64 sizeb = stack_vars[ib].size;
681   tree decla = stack_vars[ia].decl;
682   tree declb = stack_vars[ib].decl;
683   bool largea, largeb;
684   unsigned int uida, uidb;
685 
686   /* Primary compare on "large" alignment.  Large comes first.  */
687   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
688   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
689   if (largea != largeb)
690     return (int)largeb - (int)largea;
691 
692   /* Secondary compare on size, decreasing.  */
693   int diff = compare_sizes_for_sort (sizeb, sizea);
694   if (diff != 0)
695     return diff;
696 
697   /* Tertiary compare on true alignment, decreasing.  */
698   if (aligna < alignb)
699     return -1;
700   if (aligna > alignb)
701     return 1;
702 
703   /* Final compare on ID for sort stability, increasing.
704      Two SSA names are compared by their version, SSA names come before
705      non-SSA names, and two normal decls are compared by their DECL_UID.  */
706   if (TREE_CODE (decla) == SSA_NAME)
707     {
708       if (TREE_CODE (declb) == SSA_NAME)
709 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
710       else
711 	return -1;
712     }
713   else if (TREE_CODE (declb) == SSA_NAME)
714     return 1;
715   else
716     uida = DECL_UID (decla), uidb = DECL_UID (declb);
717   if (uida < uidb)
718     return 1;
719   if (uida > uidb)
720     return -1;
721   return 0;
722 }
723 
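/* Map from a DECL_PT_UID to the bitmap of DECL_PT_UIDs of all members of
   the stack partition that decl was placed in (see
   update_alias_info_with_stack_vars).  */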
724 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
725 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
726 
727 /* If the points-to solution *PT points to variables that are in a partition
728    together with other variables, add all partition members to the pointed-to
729    variables bitmap.  */
730 
731 static void
732 add_partitioned_vars_to_ptset (struct pt_solution *pt,
733 			       part_hashmap *decls_to_partitions,
734 			       hash_set<bitmap> *visited, bitmap temp)
735 {
736   bitmap_iterator bi;
737   unsigned i;
738   bitmap *part;
739 
740   if (pt->anything
741       || pt->vars == NULL
742       /* The pointed-to vars bitmap is shared; it is enough to
743 	 visit it once.  */
744       || visited->add (pt->vars))
745     return;
746 
747   bitmap_clear (temp);
748 
749   /* By using a temporary bitmap to store all members of the partitions
750      we have to add we make sure to visit each of the partitions only
751      once.  */
752   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
753     if ((!temp
754 	 || !bitmap_bit_p (temp, i))
755 	&& (part = decls_to_partitions->get (i)))
756       bitmap_ior_into (temp, *part);
757   if (!bitmap_empty_p (temp))
758     bitmap_ior_into (pt->vars, temp);
759 }
760 
761 /* Update points-to sets based on partition info, so we can use them on RTL.
762    The bitmaps representing stack partitions will be saved until expand,
763    where partitioned decls used as bases in memory expressions will be
764    rewritten.  */
765 
766 static void
767 update_alias_info_with_stack_vars (void)
768 {
769   part_hashmap *decls_to_partitions = NULL;
770   size_t i, j;
771   tree var = NULL_TREE;
772 
773   for (i = 0; i < stack_vars_num; i++)
774     {
775       bitmap part = NULL;
776       tree name;
777       struct ptr_info_def *pi;
778 
779       /* Not interested in partitions with a single variable.  */
780       if (stack_vars[i].representative != i
781           || stack_vars[i].next == EOC)
782         continue;
783 
784       if (!decls_to_partitions)
785 	{
786 	  decls_to_partitions = new part_hashmap;
787 	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
788 	}
789 
790       /* Create an SSA_NAME that points to the partition for use
791          as base during alias-oracle queries on RTL for bases that
792 	 have been partitioned.  */
793       if (var == NULL_TREE)
794 	var = create_tmp_var (ptr_type_node);
795       name = make_ssa_name (var);
796 
797       /* Create bitmaps representing partitions.  They will be used for
798          points-to sets later, so use GGC alloc.  */
799       part = BITMAP_GGC_ALLOC ();
800       for (j = i; j != EOC; j = stack_vars[j].next)
801 	{
802 	  tree decl = stack_vars[j].decl;
803 	  unsigned int uid = DECL_PT_UID (decl);
804 	  bitmap_set_bit (part, uid);
805 	  decls_to_partitions->put (uid, part);
806 	  cfun->gimple_df->decls_to_pointers->put (decl, name);
807 	  if (TREE_ADDRESSABLE (decl))
808 	    TREE_ADDRESSABLE (name) = 1;
809 	}
810 
811       /* Make the SSA name point to all partition members.  */
812       pi = get_ptr_info (name);
813       pt_solution_set (&pi->pt, part, false);
814     }
815 
816   /* Make all points-to sets that contain one member of a partition
817      contain all members of the partition.  */
818   if (decls_to_partitions)
819     {
820       unsigned i;
821       tree name;
822       hash_set<bitmap> visited;
823       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
824 
825       FOR_EACH_SSA_NAME (i, name, cfun)
826 	{
827 	  struct ptr_info_def *pi;
828 
829 	  if (POINTER_TYPE_P (TREE_TYPE (name))
830 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
831 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
832 					   &visited, temp);
833 	}
834 
835       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
836 				     decls_to_partitions, &visited, temp);
837 
838       delete decls_to_partitions;
839       BITMAP_FREE (temp);
840     }
841 }
842 
843 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
844    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
845    Merge them into a single partition A.  */
846 
847 static void
848 union_stack_vars (size_t a, size_t b)
849 {
850   struct stack_var *vb = &stack_vars[b];
851   bitmap_iterator bi;
852   unsigned u;
853 
854   gcc_assert (stack_vars[b].next == EOC);
855    /* Add B to A's partition.  */
856   stack_vars[b].next = stack_vars[a].next;
857   stack_vars[b].representative = a;
858   stack_vars[a].next = b;
859 
860   /* Update the required alignment of partition A to account for B.  */
861   if (stack_vars[a].alignb < stack_vars[b].alignb)
862     stack_vars[a].alignb = stack_vars[b].alignb;
863 
864   /* Update the interference graph and merge the conflicts.  */
865   if (vb->conflicts)
866     {
867       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
868 	add_stack_var_conflict (a, stack_vars[u].representative);
869       BITMAP_FREE (vb->conflicts);
870     }
871 }
872 
873 /* A subroutine of expand_used_vars.  Binpack the variables into
874    partitions constrained by the interference graph.  The overall
875    algorithm used is as follows:
876 
877 	Sort the objects by size in descending order.
878 	For each object A {
879 	  S = size(A)
880 	  O = 0
881 	  loop {
882 	    Look for the largest non-conflicting object B with size <= S.
883 	    UNION (A, B)
884 	  }
885 	}
886 */
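/* Because stack_vars_sorted is ordered by the same keys that the checks
   below use (alignment class, then size), the inner loop can stop with
   "break" as soon as it sees the first candidate failing the alignment or
   ASan size test; every later candidate would fail it too.  */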
887 
888 static void
889 partition_stack_vars (void)
890 {
891   size_t si, sj, n = stack_vars_num;
892 
893   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
894   for (si = 0; si < n; ++si)
895     stack_vars_sorted[si] = si;
896 
897   if (n == 1)
898     return;
899 
900   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
901 
902   for (si = 0; si < n; ++si)
903     {
904       size_t i = stack_vars_sorted[si];
905       unsigned int ialign = stack_vars[i].alignb;
906       poly_int64 isize = stack_vars[i].size;
907 
908       /* Ignore objects that aren't partition representatives. If we
909          see a var that is not a partition representative, it must
910          have been merged earlier.  */
911       if (stack_vars[i].representative != i)
912         continue;
913 
914       for (sj = si + 1; sj < n; ++sj)
915 	{
916 	  size_t j = stack_vars_sorted[sj];
917 	  unsigned int jalign = stack_vars[j].alignb;
918 	  poly_int64 jsize = stack_vars[j].size;
919 
920 	  /* Ignore objects that aren't partition representatives.  */
921 	  if (stack_vars[j].representative != j)
922 	    continue;
923 
924 	  /* Do not mix objects of "small" (supported) alignment
925 	     and "large" (unsupported) alignment.  */
926 	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
927 	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
928 	    break;
929 
930 	  /* For Address Sanitizer do not mix objects with different
931 	     sizes, as the shorter vars wouldn't be adequately protected.
932 	     Don't do that for "large" (unsupported) alignment objects,
933 	     those aren't protected anyway.  */
934 	  if (asan_sanitize_stack_p ()
935 	      && maybe_ne (isize, jsize)
936 	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
937 	    break;
938 
939 	  /* Ignore conflicting objects.  */
940 	  if (stack_var_conflict_p (i, j))
941 	    continue;
942 
943 	  /* UNION the objects, adding J to I's partition.  */
944 	  union_stack_vars (i, j);
945 	}
946     }
947 
948   update_alias_info_with_stack_vars ();
949 }
950 
951 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
952 
953 static void
954 dump_stack_var_partition (void)
955 {
956   size_t si, i, j, n = stack_vars_num;
957 
958   for (si = 0; si < n; ++si)
959     {
960       i = stack_vars_sorted[si];
961 
962       /* Skip variables that aren't partition representatives, for now.  */
963       if (stack_vars[i].representative != i)
964 	continue;
965 
966       fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
967       print_dec (stack_vars[i].size, dump_file);
968       fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
969 
970       for (j = i; j != EOC; j = stack_vars[j].next)
971 	{
972 	  fputc ('\t', dump_file);
973 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
974 	}
975       fputc ('\n', dump_file);
976     }
977 }
978 
979 /* Assign rtl to DECL at BASE + OFFSET.  */
980 
981 static void
982 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
983 			 poly_int64 offset)
984 {
985   unsigned align;
986   rtx x;
987 
988   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
989   gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
990 
991   x = plus_constant (Pmode, base, offset);
992   x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
993 		   ? TYPE_MODE (TREE_TYPE (decl))
994 		   : DECL_MODE (SSAVAR (decl)), x);
995 
996   if (TREE_CODE (decl) != SSA_NAME)
997     {
998       /* Set the alignment we actually gave this decl if it isn't an SSA name.
999          If it is, we generate stack slots only accidentally, so it isn't as
1000 	 important; we'll simply use the alignment that is already set.  */
1001       if (base == virtual_stack_vars_rtx)
1002 	offset -= frame_phase;
1003       align = known_alignment (offset);
1004       align *= BITS_PER_UNIT;
1005       if (align == 0 || align > base_align)
1006 	align = base_align;
1007 
1008       /* One would think that we could assert that we're not decreasing
1009 	 alignment here, but (at least) the i386 port does exactly this
1010 	 via the MINIMUM_ALIGNMENT hook.  */
1011 
1012       SET_DECL_ALIGN (decl, align);
1013       DECL_USER_ALIGN (decl) = 0;
1014     }
1015 
1016   set_rtl (decl, x);
1017 }
1018 
1019 struct stack_vars_data
1020 {
1021   /* Vector of offset pairs, always end of some padding followed
1022      by start of the padding that needs Address Sanitizer protection.
1023      The vector is in reversed order; highest offset pairs come first.  */
1024   auto_vec<HOST_WIDE_INT> asan_vec;
1025 
1026   /* Vector of partition representative decls in between the paddings.  */
1027   auto_vec<tree> asan_decl_vec;
1028 
1029   /* Base pseudo register for Address Sanitizer protected automatic vars.  */
1030   rtx asan_base;
1031 
1032   /* Alignment needed for the Address Sanitizer protected automatic vars.  */
1033   unsigned int asan_alignb;
1034 };
1035 
1036 /* A subroutine of expand_used_vars.  Give each partition representative
1037    a unique location within the stack frame.  Update each partition member
1038    with that location.  */
1039 
1040 static void
1041 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1042 {
1043   size_t si, i, j, n = stack_vars_num;
1044   poly_uint64 large_size = 0, large_alloc = 0;
1045   rtx large_base = NULL;
1046   unsigned large_align = 0;
1047   bool large_allocation_done = false;
1048   tree decl;
1049 
1050   /* Determine if there are any variables requiring "large" alignment.
1051      Since these are dynamically allocated, we only process them if
1052      no predicate is involved.  */
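  /* stack_var_cmp sorts "large" alignment variables first, so looking at
     the first sorted entry is enough to tell whether any exist.  */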
1053   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1054   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1055     {
1056       /* Find the total size of these variables.  */
1057       for (si = 0; si < n; ++si)
1058 	{
1059 	  unsigned alignb;
1060 
1061 	  i = stack_vars_sorted[si];
1062 	  alignb = stack_vars[i].alignb;
1063 
1064 	  /* All "large" alignment decls come before all "small" alignment
1065 	     decls, but "large" alignment decls are not sorted based on
1066 	     their alignment.  Increase large_align to track the largest
1067 	     required alignment.  */
1068 	  if ((alignb * BITS_PER_UNIT) > large_align)
1069 	    large_align = alignb * BITS_PER_UNIT;
1070 
1071 	  /* Stop when we get to the first decl with "small" alignment.  */
1072 	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1073 	    break;
1074 
1075 	  /* Skip variables that aren't partition representatives.  */
1076 	  if (stack_vars[i].representative != i)
1077 	    continue;
1078 
1079 	  /* Skip variables that have already had rtl assigned.  See also
1080 	     add_stack_var where we perpetrate this pc_rtx hack.  */
1081 	  decl = stack_vars[i].decl;
1082 	  if (TREE_CODE (decl) == SSA_NAME
1083 	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1084 	      : DECL_RTL (decl) != pc_rtx)
1085 	    continue;
1086 
1087 	  large_size = aligned_upper_bound (large_size, alignb);
1088 	  large_size += stack_vars[i].size;
1089 	}
1090     }
1091 
1092   for (si = 0; si < n; ++si)
1093     {
1094       rtx base;
1095       unsigned base_align, alignb;
1096       poly_int64 offset;
1097 
1098       i = stack_vars_sorted[si];
1099 
1100       /* Skip variables that aren't partition representatives, for now.  */
1101       if (stack_vars[i].representative != i)
1102 	continue;
1103 
1104       /* Skip variables that have already had rtl assigned.  See also
1105 	 add_stack_var where we perpetrate this pc_rtx hack.  */
1106       decl = stack_vars[i].decl;
1107       if (TREE_CODE (decl) == SSA_NAME
1108 	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1109 	  : DECL_RTL (decl) != pc_rtx)
1110 	continue;
1111 
1112       /* Check the predicate to see whether this variable should be
1113 	 allocated in this pass.  */
1114       if (pred && !pred (i))
1115 	continue;
1116 
1117       alignb = stack_vars[i].alignb;
1118       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1119 	{
1120 	  base = virtual_stack_vars_rtx;
1121 	  /* ASAN description strings don't yet have a syntax for expressing
1122 	     polynomial offsets.  */
1123 	  HOST_WIDE_INT prev_offset;
1124 	  if (asan_sanitize_stack_p ()
1125 	      && pred
1126 	      && frame_offset.is_constant (&prev_offset)
1127 	      && stack_vars[i].size.is_constant ())
1128 	    {
1129 	      prev_offset = align_base (prev_offset,
1130 					MAX (alignb, ASAN_RED_ZONE_SIZE),
1131 					!FRAME_GROWS_DOWNWARD);
1132 	      tree repr_decl = NULL_TREE;
1133 	      offset
1134 		= alloc_stack_frame_space (stack_vars[i].size
1135 					   + ASAN_RED_ZONE_SIZE,
1136 					   MAX (alignb, ASAN_RED_ZONE_SIZE));
1137 
1138 	      data->asan_vec.safe_push (prev_offset);
1139 	      /* Allocating a constant amount of space from a constant
1140 		 starting offset must give a constant result.  */
1141 	      data->asan_vec.safe_push ((offset + stack_vars[i].size)
1142 					.to_constant ());
1143 	      /* Find the best representative of the partition.
1144 		 Prefer those with DECL_NAME, and better yet those
1145 		 satisfying the asan_protect_stack_decl predicate.  */
1146 	      for (j = i; j != EOC; j = stack_vars[j].next)
1147 		if (asan_protect_stack_decl (stack_vars[j].decl)
1148 		    && DECL_NAME (stack_vars[j].decl))
1149 		  {
1150 		    repr_decl = stack_vars[j].decl;
1151 		    break;
1152 		  }
1153 		else if (repr_decl == NULL_TREE
1154 			 && DECL_P (stack_vars[j].decl)
1155 			 && DECL_NAME (stack_vars[j].decl))
1156 		  repr_decl = stack_vars[j].decl;
1157 	      if (repr_decl == NULL_TREE)
1158 		repr_decl = stack_vars[i].decl;
1159 	      data->asan_decl_vec.safe_push (repr_decl);
1160 
1161 	      /* Make sure a representative is unpoisoned if another
1162 		 variable in the partition is handled by
1163 		 use-after-scope sanitization.  */
1164 	      if (asan_handled_variables != NULL
1165 		  && !asan_handled_variables->contains (repr_decl))
1166 		{
1167 		  for (j = i; j != EOC; j = stack_vars[j].next)
1168 		    if (asan_handled_variables->contains (stack_vars[j].decl))
1169 		      break;
1170 		  if (j != EOC)
1171 		    asan_handled_variables->add (repr_decl);
1172 		}
1173 
1174 	      data->asan_alignb = MAX (data->asan_alignb, alignb);
1175 	      if (data->asan_base == NULL)
1176 		data->asan_base = gen_reg_rtx (Pmode);
1177 	      base = data->asan_base;
1178 
1179 	      if (!STRICT_ALIGNMENT)
1180 		base_align = crtl->max_used_stack_slot_alignment;
1181 	      else
1182 		base_align = MAX (crtl->max_used_stack_slot_alignment,
1183 				  GET_MODE_ALIGNMENT (SImode)
1184 				  << ASAN_SHADOW_SHIFT);
1185 	    }
1186 	  else
1187 	    {
1188 	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1189 	      base_align = crtl->max_used_stack_slot_alignment;
1190 	    }
1191 	}
1192       else
1193 	{
1194 	  /* Large alignment is only processed in the last pass.  */
1195 	  if (pred)
1196 	    continue;
1197 
1198 	  /* If there were any variables requiring "large" alignment, allocate
1199 	     space.  */
1200 	  if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1201 	    {
1202 	      poly_int64 loffset;
1203 	      rtx large_allocsize;
1204 
1205 	      large_allocsize = gen_int_mode (large_size, Pmode);
1206 	      get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1207 	      loffset = alloc_stack_frame_space
1208 		(rtx_to_poly_int64 (large_allocsize),
1209 		 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1210 	      large_base = get_dynamic_stack_base (loffset, large_align);
1211 	      large_allocation_done = true;
1212 	    }
1213 	  gcc_assert (large_base != NULL);
1214 
1215 	  large_alloc = aligned_upper_bound (large_alloc, alignb);
1216 	  offset = large_alloc;
1217 	  large_alloc += stack_vars[i].size;
1218 
1219 	  base = large_base;
1220 	  base_align = large_align;
1221 	}
1222 
1223       /* Create rtl for each variable based on its location within the
1224 	 partition.  */
1225       for (j = i; j != EOC; j = stack_vars[j].next)
1226 	{
1227 	  expand_one_stack_var_at (stack_vars[j].decl,
1228 				   base, base_align,
1229 				   offset);
1230 	}
1231     }
1232 
1233   gcc_assert (known_eq (large_alloc, large_size));
1234 }
1235 
1236 /* Take into account all sizes of partitions and reset DECL_RTLs.  */
1237 static poly_uint64
1238 account_stack_vars (void)
1239 {
1240   size_t si, j, i, n = stack_vars_num;
1241   poly_uint64 size = 0;
1242 
1243   for (si = 0; si < n; ++si)
1244     {
1245       i = stack_vars_sorted[si];
1246 
1247       /* Skip variables that aren't partition representatives, for now.  */
1248       if (stack_vars[i].representative != i)
1249 	continue;
1250 
1251       size += stack_vars[i].size;
1252       for (j = i; j != EOC; j = stack_vars[j].next)
1253 	set_rtl (stack_vars[j].decl, NULL);
1254     }
1255   return size;
1256 }
1257 
1258 /* Record the RTL assignment X for the default def of PARM.  */
1259 
1260 extern void
1261 set_parm_rtl (tree parm, rtx x)
1262 {
1263   gcc_assert (TREE_CODE (parm) == PARM_DECL
1264 	      || TREE_CODE (parm) == RESULT_DECL);
1265 
1266   if (x && !MEM_P (x))
1267     {
1268       unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1269 					      TYPE_MODE (TREE_TYPE (parm)),
1270 					      TYPE_ALIGN (TREE_TYPE (parm)));
1271 
1272       /* If the variable alignment is very large we'll dynamically
1273 	 allocate it, which means that the in-frame portion is just a
1274 	 pointer.  ??? We've got a pseudo for sure here, do we
1275 	 actually dynamically allocate its spilling area if needed?
1276 	 ??? Isn't it a problem when Pmode alignment also exceeds
1277 	 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
1278       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1279 	align = GET_MODE_ALIGNMENT (Pmode);
1280 
1281       record_alignment_for_reg_var (align);
1282     }
1283 
1284   tree ssa = ssa_default_def (cfun, parm);
1285   if (!ssa)
1286     return set_rtl (parm, x);
1287 
1288   int part = var_to_partition (SA.map, ssa);
1289   gcc_assert (part != NO_PARTITION);
1290 
1291   bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1292   gcc_assert (changed);
1293 
1294   set_rtl (ssa, x);
1295   gcc_assert (DECL_RTL (parm) == x);
1296 }
1297 
1298 /* A subroutine of expand_one_var.  Called to immediately assign rtl
1299    to a variable to be allocated in the stack frame.  */
1300 
1301 static void
1302 expand_one_stack_var_1 (tree var)
1303 {
1304   poly_uint64 size;
1305   poly_int64 offset;
1306   unsigned byte_align;
1307 
1308   if (TREE_CODE (var) == SSA_NAME)
1309     {
1310       tree type = TREE_TYPE (var);
1311       size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1312       byte_align = TYPE_ALIGN_UNIT (type);
1313     }
1314   else
1315     {
1316       size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1317       byte_align = align_local_variable (var);
1318     }
1319 
1320   /* We handle highly aligned variables in expand_stack_vars.  */
1321   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1322 
1323   offset = alloc_stack_frame_space (size, byte_align);
1324 
1325   expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1326 			   crtl->max_used_stack_slot_alignment, offset);
1327 }
1328 
1329 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1330    already assigned some MEM.  */
1331 
1332 static void
1333 expand_one_stack_var (tree var)
1334 {
1335   if (TREE_CODE (var) == SSA_NAME)
1336     {
1337       int part = var_to_partition (SA.map, var);
1338       if (part != NO_PARTITION)
1339 	{
1340 	  rtx x = SA.partition_to_pseudo[part];
1341 	  gcc_assert (x);
1342 	  gcc_assert (MEM_P (x));
1343 	  return;
1344 	}
1345     }
1346 
1347   return expand_one_stack_var_1 (var);
1348 }
1349 
1350 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1351    that will reside in a hard register.  */
1352 
1353 static void
1354 expand_one_hard_reg_var (tree var)
1355 {
1356   rest_of_decl_compilation (var, 0, 0);
1357 }
1358 
1359 /* Record the alignment requirements of some variable assigned to a
1360    pseudo.  */
1361 
1362 static void
1363 record_alignment_for_reg_var (unsigned int align)
1364 {
1365   if (SUPPORTS_STACK_ALIGNMENT
1366       && crtl->stack_alignment_estimated < align)
1367     {
1368       /* stack_alignment_estimated shouldn't change after the stack
1369          realign decision is made.  */
1370       gcc_assert (!crtl->stack_realign_processed);
1371       crtl->stack_alignment_estimated = align;
1372     }
1373 
1374   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1375      So here we only make sure stack_alignment_needed >= align.  */
1376   if (crtl->stack_alignment_needed < align)
1377     crtl->stack_alignment_needed = align;
1378   if (crtl->max_used_stack_slot_alignment < align)
1379     crtl->max_used_stack_slot_alignment = align;
1380 }
1381 
1382 /* Create RTL for an SSA partition.  */
1383 
1384 static void
1385 expand_one_ssa_partition (tree var)
1386 {
1387   int part = var_to_partition (SA.map, var);
1388   gcc_assert (part != NO_PARTITION);
1389 
1390   if (SA.partition_to_pseudo[part])
1391     return;
1392 
1393   unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1394 					  TYPE_MODE (TREE_TYPE (var)),
1395 					  TYPE_ALIGN (TREE_TYPE (var)));
1396 
1397   /* If the variable alignment is very large we'll dynamically allocate
1398      it, which means that the in-frame portion is just a pointer.  */
1399   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1400     align = GET_MODE_ALIGNMENT (Pmode);
1401 
1402   record_alignment_for_reg_var (align);
1403 
1404   if (!use_register_for_decl (var))
1405     {
1406       if (defer_stack_allocation (var, true))
1407 	add_stack_var (var);
1408       else
1409 	expand_one_stack_var_1 (var);
1410       return;
1411     }
1412 
1413   machine_mode reg_mode = promote_ssa_mode (var, NULL);
1414   rtx x = gen_reg_rtx (reg_mode);
1415 
1416   set_rtl (var, x);
1417 
1418   /* For a promoted variable, X will not be used directly but wrapped in a
1419      SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1420      will assume that its upper bits can be inferred from its lower bits.
1421      Therefore, if X isn't initialized on every path from the entry, then
1422      we must do it manually in order to fulfill the above assumption.  */
1423   if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1424       && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1425     emit_move_insn (x, CONST0_RTX (reg_mode));
1426 }
1427 
1428 /* Record the association between the RTL generated for partition PART
1429    and the underlying variable of the SSA_NAME VAR.  */
1430 
1431 static void
1432 adjust_one_expanded_partition_var (tree var)
1433 {
1434   if (!var)
1435     return;
1436 
1437   tree decl = SSA_NAME_VAR (var);
1438 
1439   int part = var_to_partition (SA.map, var);
1440   if (part == NO_PARTITION)
1441     return;
1442 
1443   rtx x = SA.partition_to_pseudo[part];
1444 
1445   gcc_assert (x);
1446 
1447   set_rtl (var, x);
1448 
1449   if (!REG_P (x))
1450     return;
1451 
1452   /* Note if the object is a user variable.  */
1453   if (decl && !DECL_ARTIFICIAL (decl))
1454     mark_user_reg (x);
1455 
1456   if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1457     mark_reg_pointer (x, get_pointer_alignment (var));
1458 }
1459 
1460 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1461    that will reside in a pseudo register.  */
1462 
1463 static void
1464 expand_one_register_var (tree var)
1465 {
1466   if (TREE_CODE (var) == SSA_NAME)
1467     {
1468       int part = var_to_partition (SA.map, var);
1469       if (part != NO_PARTITION)
1470 	{
1471 	  rtx x = SA.partition_to_pseudo[part];
1472 	  gcc_assert (x);
1473 	  gcc_assert (REG_P (x));
1474 	  return;
1475 	}
1476       gcc_unreachable ();
1477     }
1478 
1479   tree decl = var;
1480   tree type = TREE_TYPE (decl);
1481   machine_mode reg_mode = promote_decl_mode (decl, NULL);
1482   rtx x = gen_reg_rtx (reg_mode);
1483 
1484   set_rtl (var, x);
1485 
1486   /* Note if the object is a user variable.  */
1487   if (!DECL_ARTIFICIAL (decl))
1488     mark_user_reg (x);
1489 
1490   if (POINTER_TYPE_P (type))
1491     mark_reg_pointer (x, get_pointer_alignment (var));
1492 }
1493 
1494 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1495    has some associated error, e.g. its type is error-mark.  We just need
1496    to pick something that won't crash the rest of the compiler.  */
1497 
1498 static void
1499 expand_one_error_var (tree var)
1500 {
1501   machine_mode mode = DECL_MODE (var);
1502   rtx x;
1503 
1504   if (mode == BLKmode)
1505     x = gen_rtx_MEM (BLKmode, const0_rtx);
1506   else if (mode == VOIDmode)
1507     x = const0_rtx;
1508   else
1509     x = gen_reg_rtx (mode);
1510 
1511   SET_DECL_RTL (var, x);
1512 }
1513 
1514 /* A subroutine of expand_one_var.  VAR is a variable that will be
1515    allocated to the local stack frame.  Return true if we wish to
1516    add VAR to STACK_VARS so that it will be coalesced with other
1517    variables.  Return false to allocate VAR immediately.
1518 
1519    This function is used to reduce the number of variables considered
1520    for coalescing, which reduces the size of the quadratic problem.  */
1521 
1522 static bool
1523 defer_stack_allocation (tree var, bool toplevel)
1524 {
1525   tree size_unit = TREE_CODE (var) == SSA_NAME
1526     ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1527     : DECL_SIZE_UNIT (var);
1528   poly_uint64 size;
1529 
1530   /* Whether the variable is small enough for immediate allocation not to be
1531      a problem with regard to the frame size.  */
1532   bool smallish
1533     = (poly_int_tree_p (size_unit, &size)
1534        && (estimated_poly_value (size)
1535 	   < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
1536 
1537   /* If stack protection is enabled, *all* stack variables must be deferred,
1538      so that we can re-order the strings to the top of the frame.
1539      Similarly for Address Sanitizer.  */
1540   if (flag_stack_protect || asan_sanitize_stack_p ())
1541     return true;
1542 
1543   unsigned int align = TREE_CODE (var) == SSA_NAME
1544     ? TYPE_ALIGN (TREE_TYPE (var))
1545     : DECL_ALIGN (var);
1546 
1547   /* We handle "large" alignment via dynamic allocation.  We want to handle
1548      this extra complication in only one place, so defer them.  */
1549   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1550     return true;
1551 
1552   bool ignored = TREE_CODE (var) == SSA_NAME
1553     ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1554     : DECL_IGNORED_P (var);
1555 
1556   /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1557      might be detached from their block and appear at toplevel when we reach
1558      here.  We want to coalesce them with variables from other blocks when
1559      the immediate contribution to the frame size would be noticeable.  */
1560   if (toplevel && optimize > 0 && ignored && !smallish)
1561     return true;
1562 
1563   /* Variables declared in the outermost scope automatically conflict
1564      with every other variable.  The only reason to want to defer them
1565      at all is that, after sorting, we can more efficiently pack
1566      small variables in the stack frame.  Continue to defer at -O2.  */
1567   if (toplevel && optimize < 2)
1568     return false;
1569 
1570   /* Without optimization, *most* variables are allocated from the
1571      stack, which makes the quadratic problem large exactly when we
1572      want compilation to proceed as quickly as possible.  On the
1573      other hand, we don't want the function's stack frame size to
1574      get completely out of hand.  So we avoid adding scalars and
1575      "small" aggregates to the list at all.  */
1576   if (optimize == 0 && smallish)
1577     return false;
1578 
1579   return true;
1580 }
1581 
1582 /* A subroutine of expand_used_vars.  Expand one variable according to
1583    its flavor.  Variables to be placed on the stack are not actually
1584    expanded yet, merely recorded.
1585    When REALLY_EXPAND is false, only add stack values to be allocated.
1586    Return the stack usage this variable is supposed to take.
1587 */
1588 
1589 static poly_uint64
1590 expand_one_var (tree var, bool toplevel, bool really_expand)
1591 {
1592   unsigned int align = BITS_PER_UNIT;
1593   tree origvar = var;
1594 
1595   var = SSAVAR (var);
1596 
1597   if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1598     {
1599       if (is_global_var (var))
1600 	return 0;
1601 
1602       /* Because we don't know if VAR will be in register or on stack,
1603 	 we conservatively assume it will be on stack even if VAR is
1604 	 eventually put into register after RA pass.  For non-automatic
1605 	 variables, which won't be on stack, we collect alignment of
1606 	 type and ignore user specified alignment.  Similarly for
1607 	 SSA_NAMEs for which use_register_for_decl returns true.  */
1608       if (TREE_STATIC (var)
1609 	  || DECL_EXTERNAL (var)
1610 	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1611 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1612 				   TYPE_MODE (TREE_TYPE (var)),
1613 				   TYPE_ALIGN (TREE_TYPE (var)));
1614       else if (DECL_HAS_VALUE_EXPR_P (var)
1615 	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1616 	/* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set
1617 	   or variables which were already assigned a stack slot by
1618 	   expand_one_stack_var_at -- in the latter case DECL_ALIGN has been
1619 	   adjusted to the alignment implied by the offset chosen for it.  */
1620 	align = crtl->stack_alignment_estimated;
1621       else
1622 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1623 
1624       /* If the variable alignment is very large we'll dynamically allocate
1625 	 it, which means that the in-frame portion is just a pointer.  */
1626       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1627 	align = GET_MODE_ALIGNMENT (Pmode);
1628     }
1629 
1630   record_alignment_for_reg_var (align);
1631 
1632   poly_uint64 size;
1633   if (TREE_CODE (origvar) == SSA_NAME)
1634     {
1635       gcc_assert (!VAR_P (var)
1636 		  || (!DECL_EXTERNAL (var)
1637 		      && !DECL_HAS_VALUE_EXPR_P (var)
1638 		      && !TREE_STATIC (var)
1639 		      && TREE_TYPE (var) != error_mark_node
1640 		      && !DECL_HARD_REGISTER (var)
1641 		      && really_expand));
1642     }
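  /* The empty arms below deliberately skip variables that need no frame
     space here: non-decls, externals, statics, DECL_VALUE_EXPR replacements
     and decls that already have RTL assigned.  */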
1643   if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1644     ;
1645   else if (DECL_EXTERNAL (var))
1646     ;
1647   else if (DECL_HAS_VALUE_EXPR_P (var))
1648     ;
1649   else if (TREE_STATIC (var))
1650     ;
1651   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1652     ;
1653   else if (TREE_TYPE (var) == error_mark_node)
1654     {
1655       if (really_expand)
1656         expand_one_error_var (var);
1657     }
1658   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1659     {
1660       if (really_expand)
1661 	{
1662 	  expand_one_hard_reg_var (var);
1663 	  if (!DECL_HARD_REGISTER (var))
1664 	    /* Invalid register specification.  */
1665 	    expand_one_error_var (var);
1666 	}
1667     }
1668   else if (use_register_for_decl (var))
1669     {
1670       if (really_expand)
1671         expand_one_register_var (origvar);
1672     }
1673   else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1674 	   || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1675     {
1676       /* Reject variables which cover more than half of the address-space.  */
1677       if (really_expand)
1678 	{
1679 	  error ("size of variable %q+D is too large", var);
1680 	  expand_one_error_var (var);
1681 	}
1682     }
1683   else if (defer_stack_allocation (var, toplevel))
1684     add_stack_var (origvar);
1685   else
1686     {
1687       if (really_expand)
1688         {
1689           if (lookup_attribute ("naked",
1690                                 DECL_ATTRIBUTES (current_function_decl)))
1691             error ("cannot allocate stack for variable %q+D, naked function.",
1692                    var);
1693 
1694           expand_one_stack_var (origvar);
1695         }
1696       return size;
1697     }
1698   return 0;
1699 }
1700 
1701 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1702    expanding variables.  Those variables that can be put into registers
1703    are allocated pseudos; those that can't are put on the stack.
1704 
1705    TOPLEVEL is true if this is the outermost BLOCK.  */
1706 
1707 static void
1708 expand_used_vars_for_block (tree block, bool toplevel)
1709 {
1710   tree t;
1711 
1712   /* Expand all variables at this level.  */
1713   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1714     if (TREE_USED (t)
1715         && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1716 	    || !DECL_NONSHAREABLE (t)))
1717       expand_one_var (t, toplevel, true);
1718 
1719   /* Expand all variables at containing levels.  */
1720   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1721     expand_used_vars_for_block (t, false);
1722 }
1723 
1724 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1725    and clear TREE_USED on all local variables.  */
1726 
1727 static void
1728 clear_tree_used (tree block)
1729 {
1730   tree t;
1731 
1732   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1733     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1734     if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1735 	|| !DECL_NONSHAREABLE (t))
1736       TREE_USED (t) = 0;
1737 
1738   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1739     clear_tree_used (t);
1740 }
1741 
1742 enum {
1743   SPCT_FLAG_DEFAULT = 1,
1744   SPCT_FLAG_ALL = 2,
1745   SPCT_FLAG_STRONG = 3,
1746   SPCT_FLAG_EXPLICIT = 4
1747 };
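
/* These values match the settings of flag_stack_protect for the
   -fstack-protector, -fstack-protector-all, -fstack-protector-strong and
   -fstack-protector-explicit command-line options, respectively.  */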
1748 
1749 /* Examine TYPE and determine a bit mask of the following features.  */
1750 
1751 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1752 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1753 #define SPCT_HAS_ARRAY			4
1754 #define SPCT_HAS_AGGREGATE		8
1755 
1756 static unsigned int
1757 stack_protect_classify_type (tree type)
1758 {
1759   unsigned int ret = 0;
1760   tree t;
1761 
1762   switch (TREE_CODE (type))
1763     {
1764     case ARRAY_TYPE:
1765       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1766       if (t == char_type_node
1767 	  || t == signed_char_type_node
1768 	  || t == unsigned_char_type_node)
1769 	{
1770 	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1771 	  unsigned HOST_WIDE_INT len;
1772 
1773 	  if (!TYPE_SIZE_UNIT (type)
1774 	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1775 	    len = max;
1776 	  else
1777 	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1778 
1779 	  if (len == 0)
1780 	    ret = SPCT_HAS_ARRAY;
1781 	  else if (len < max)
1782 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1783 	  else
1784 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1785 	}
1786       else
1787 	ret = SPCT_HAS_ARRAY;
1788       break;
1789 
1790     case UNION_TYPE:
1791     case QUAL_UNION_TYPE:
1792     case RECORD_TYPE:
1793       ret = SPCT_HAS_AGGREGATE;
1794       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1795 	if (TREE_CODE (t) == FIELD_DECL)
1796 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1797       break;
1798 
1799     default:
1800       break;
1801     }
1802 
1803   return ret;
1804 }
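
/* For example, with the default --param ssp-buffer-size=8, a variable or
   field of type "char buf[4]" classifies as
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, "char buf[64]" as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, and "int buf[64]" as just
   SPCT_HAS_ARRAY; a struct containing any of these also sets
   SPCT_HAS_AGGREGATE, OR'ed with its fields' classifications.  */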
1805 
1806 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1807    part of the local stack frame.  Remember if we ever return nonzero for
1808    any variable in this function.  The return value is the phase number in
1809    which the variable should be allocated.  */
1810 
1811 static int
1812 stack_protect_decl_phase (tree decl)
1813 {
1814   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1815   int ret = 0;
1816 
1817   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1818     has_short_buffer = true;
1819 
1820   if (flag_stack_protect == SPCT_FLAG_ALL
1821       || flag_stack_protect == SPCT_FLAG_STRONG
1822       || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1823 	  && lookup_attribute ("stack_protect",
1824 			       DECL_ATTRIBUTES (current_function_decl))))
1825     {
1826       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1827 	  && !(bits & SPCT_HAS_AGGREGATE))
1828 	ret = 1;
1829       else if (bits & SPCT_HAS_ARRAY)
1830 	ret = 2;
1831     }
1832   else
1833     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1834 
1835   if (ret)
1836     has_protected_decls = true;
1837 
1838   return ret;
1839 }
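
/* For instance, under -fstack-protector-strong a declaration such as
   "char buf[100]" lands in phase 1 (character arrays), "int v[10]" in
   phase 2 (other arrays), and a plain "int x" in phase 0 (not
   segregated).  With plain -fstack-protector, only declarations whose
   type contains a large character array get phase 1.  */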
1840 
1841 /* Two helper routines that check for phase 1 and phase 2.  These are used
1842    as callbacks for expand_stack_vars.  */
1843 
1844 static bool
1845 stack_protect_decl_phase_1 (size_t i)
1846 {
1847   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1848 }
1849 
1850 static bool
1851 stack_protect_decl_phase_2 (size_t i)
1852 {
1853   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1854 }
1855 
1856 /* A helper function that checks for the asan phase (with stack protector
1857    it is phase 3).  This is used as a callback for expand_stack_vars.
1858    Returns true if any of the vars in the partition need to be protected.  */
1859 
1860 static bool
1861 asan_decl_phase_3 (size_t i)
1862 {
1863   while (i != EOC)
1864     {
1865       if (asan_protect_stack_decl (stack_vars[i].decl))
1866 	return true;
1867       i = stack_vars[i].next;
1868     }
1869   return false;
1870 }
1871 
1872 /* Ensure that variables in different stack protection phases conflict
1873    so that they are not merged and share the same stack slot.  */
1874 
1875 static void
1876 add_stack_protection_conflicts (void)
1877 {
1878   size_t i, j, n = stack_vars_num;
1879   unsigned char *phase;
1880 
1881   phase = XNEWVEC (unsigned char, n);
1882   for (i = 0; i < n; ++i)
1883     phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1884 
1885   for (i = 0; i < n; ++i)
1886     {
1887       unsigned char ph_i = phase[i];
1888       for (j = i + 1; j < n; ++j)
1889 	if (ph_i != phase[j])
1890 	  add_stack_var_conflict (i, j);
1891     }
1892 
1893   XDELETEVEC (phase);
1894 }
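
/* E.g. if the recorded phases are {1, 1, 2}, the pairs (0, 2) and (1, 2)
   are marked as conflicting, while variables 0 and 1 (same phase) may
   still share a stack slot.  */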
1895 
1896 /* Create a decl for the guard at the top of the stack frame.  */
1897 
1898 static void
1899 create_stack_guard (void)
1900 {
1901   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1902 			   VAR_DECL, NULL, ptr_type_node);
1903   TREE_THIS_VOLATILE (guard) = 1;
1904   TREE_USED (guard) = 1;
1905   expand_one_stack_var (guard);
1906   crtl->stack_protect_guard = guard;
1907 }
1908 
1909 /* Prepare for expanding variables.  */
1910 static void
1911 init_vars_expansion (void)
1912 {
1913   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1914   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1915 
1916   /* A map from decl to stack partition.  */
1917   decl_to_stack_part = new hash_map<tree, size_t>;
1918 
1919   /* Initialize local stack smashing state.  */
1920   has_protected_decls = false;
1921   has_short_buffer = false;
1922 }
1923 
1924 /* Free up stack variable graph data.  */
1925 static void
1926 fini_vars_expansion (void)
1927 {
1928   bitmap_obstack_release (&stack_var_bitmap_obstack);
1929   if (stack_vars)
1930     XDELETEVEC (stack_vars);
1931   if (stack_vars_sorted)
1932     XDELETEVEC (stack_vars_sorted);
1933   stack_vars = NULL;
1934   stack_vars_sorted = NULL;
1935   stack_vars_alloc = stack_vars_num = 0;
1936   delete decl_to_stack_part;
1937   decl_to_stack_part = NULL;
1938 }
1939 
1940 /* Make a fair guess for the size of the stack frame of the function
1941    in NODE.  This doesn't have to be exact, the result is only used in
1942    the inline heuristics.  So we don't want to run the full stack var
1943    packing algorithm (which is quadratic in the number of stack vars).
1944    Instead, we calculate the total size of all stack vars.  This turns
1945    out to be a pretty fair estimate -- packing of stack vars doesn't
1946    happen very often.  */
1947 
1948 HOST_WIDE_INT
1949 estimated_stack_frame_size (struct cgraph_node *node)
1950 {
1951   poly_int64 size = 0;
1952   size_t i;
1953   tree var;
1954   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1955 
1956   push_cfun (fn);
1957 
1958   init_vars_expansion ();
1959 
1960   FOR_EACH_LOCAL_DECL (fn, i, var)
1961     if (auto_var_in_fn_p (var, fn->decl))
1962       size += expand_one_var (var, true, false);
1963 
1964   if (stack_vars_num > 0)
1965     {
1966       /* Fake sorting the stack vars for account_stack_vars ().  */
1967       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1968       for (i = 0; i < stack_vars_num; ++i)
1969 	stack_vars_sorted[i] = i;
1970       size += account_stack_vars ();
1971     }
1972 
1973   fini_vars_expansion ();
1974   pop_cfun ();
1975   return estimated_poly_value (size);
1976 }
1977 
1978 /* Helper routine to check if a record or union contains an array field.  */
1979 
1980 static int
1981 record_or_union_type_has_array_p (const_tree tree_type)
1982 {
1983   tree fields = TYPE_FIELDS (tree_type);
1984   tree f;
1985 
1986   for (f = fields; f; f = DECL_CHAIN (f))
1987     if (TREE_CODE (f) == FIELD_DECL)
1988       {
1989 	tree field_type = TREE_TYPE (f);
1990 	if (RECORD_OR_UNION_TYPE_P (field_type)
1991 	    && record_or_union_type_has_array_p (field_type))
1992 	  return 1;
1993 	if (TREE_CODE (field_type) == ARRAY_TYPE)
1994 	  return 1;
1995       }
1996   return 0;
1997 }
1998 
1999 /* Check if the current function has local referenced variables that
2000    have their addresses taken, contain an array, or are arrays.  */
2001 
2002 static bool
2003 stack_protect_decl_p ()
2004 {
2005   unsigned i;
2006   tree var;
2007 
2008   FOR_EACH_LOCAL_DECL (cfun, i, var)
2009     if (!is_global_var (var))
2010       {
2011 	tree var_type = TREE_TYPE (var);
2012 	if (VAR_P (var)
2013 	    && (TREE_CODE (var_type) == ARRAY_TYPE
2014 		|| TREE_ADDRESSABLE (var)
2015 		|| (RECORD_OR_UNION_TYPE_P (var_type)
2016 		    && record_or_union_type_has_array_p (var_type))))
2017 	  return true;
2018       }
2019   return false;
2020 }
2021 
2022 /* Check if the current function has calls that use a return slot.  */
2023 
2024 static bool
2025 stack_protect_return_slot_p ()
2026 {
2027   basic_block bb;
2028 
2029   FOR_ALL_BB_FN (bb, cfun)
2030     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2031 	 !gsi_end_p (gsi); gsi_next (&gsi))
2032       {
2033 	gimple *stmt = gsi_stmt (gsi);
2034 	/* This assumes that calls to internal-only functions never
2035 	   use a return slot.  */
2036 	if (is_gimple_call (stmt)
2037 	    && !gimple_call_internal_p (stmt)
2038 	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2039 				  gimple_call_fndecl (stmt)))
2040 	  return true;
2041       }
2042   return false;
2043 }
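
/* A call uses a return slot when its value is returned in memory, e.g.
   "struct big { char b[128]; }; struct big x = get_big ();" - on typical
   targets the callee writes its result into a caller-provided slot, which
   under -fstack-protector-strong is reason enough to emit a guard.  */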
2044 
2045 /* Expand all variables used in the function.  */
2046 
2047 static rtx_insn *
2048 expand_used_vars (void)
2049 {
2050   tree var, outer_block = DECL_INITIAL (current_function_decl);
2051   auto_vec<tree> maybe_local_decls;
2052   rtx_insn *var_end_seq = NULL;
2053   unsigned i;
2054   unsigned len;
2055   bool gen_stack_protect_signal = false;
2056 
2057   /* Compute the phase of the stack frame for this function.  */
2058   {
2059     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2060     int off = targetm.starting_frame_offset () % align;
2061     frame_phase = off ? align - off : 0;
2062   }
2063 
2064   /* Set TREE_USED on all variables in the local_decls.  */
2065   FOR_EACH_LOCAL_DECL (cfun, i, var)
2066     TREE_USED (var) = 1;
2067   /* Clear TREE_USED on all variables associated with a block scope.  */
2068   clear_tree_used (DECL_INITIAL (current_function_decl));
2069 
2070   init_vars_expansion ();
2071 
2072   if (targetm.use_pseudo_pic_reg ())
2073     pic_offset_table_rtx = gen_reg_rtx (Pmode);
2074 
2075   for (i = 0; i < SA.map->num_partitions; i++)
2076     {
2077       if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2078 	continue;
2079 
2080       tree var = partition_to_var (SA.map, i);
2081 
2082       gcc_assert (!virtual_operand_p (var));
2083 
2084       expand_one_ssa_partition (var);
2085     }
2086 
2087   if (flag_stack_protect == SPCT_FLAG_STRONG)
2088     gen_stack_protect_signal
2089       = stack_protect_decl_p () || stack_protect_return_slot_p ();
2090 
2091   /* At this point all variables on the local_decls with TREE_USED
2092      set are not associated with any block scope.  Lay them out.  */
2093 
2094   len = vec_safe_length (cfun->local_decls);
2095   FOR_EACH_LOCAL_DECL (cfun, i, var)
2096     {
2097       bool expand_now = false;
2098 
2099       /* Expanded above already.  */
2100       if (is_gimple_reg (var))
2101 	{
2102 	  TREE_USED (var) = 0;
2103 	  goto next;
2104 	}
2105       /* We didn't set a block for static or extern because it's hard
2106 	 to tell the difference between a global variable (re)declared
2107 	 in a local scope, and one that's really declared there to
2108 	 begin with.  And it doesn't really matter much, since we're
2109 	 not giving them stack space.  Expand them now.  */
2110       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2111 	expand_now = true;
2112 
2113       /* Expand variables not associated with any block now.  Those created by
2114 	 the optimizers could be live anywhere in the function.  Those that
2115 	 could possibly have been scoped originally and detached from their
2116 	 block will have their allocation deferred so we coalesce them with
2117 	 others when optimization is enabled.  */
2118       else if (TREE_USED (var))
2119 	expand_now = true;
2120 
2121       /* Finally, mark all variables on the list as used.  We'll use
2122 	 this in a moment when we expand those associated with scopes.  */
2123       TREE_USED (var) = 1;
2124 
2125       if (expand_now)
2126 	expand_one_var (var, true, true);
2127 
2128     next:
2129       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2130 	{
2131 	  rtx rtl = DECL_RTL_IF_SET (var);
2132 
2133 	  /* Keep artificial non-ignored vars in cfun->local_decls
2134 	     chain until instantiate_decls.  */
2135 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2136 	    add_local_decl (cfun, var);
2137 	  else if (rtl == NULL_RTX)
2138 	    /* If rtl isn't set yet, which can happen e.g. with
2139 	       -fstack-protector, retry before returning from this
2140 	       function.  */
2141 	    maybe_local_decls.safe_push (var);
2142 	}
2143     }
2144 
2145   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2146 
2147      +-----------------+-----------------+
2148      | ...processed... | ...duplicates...|
2149      +-----------------+-----------------+
2150                        ^
2151 		       +-- LEN points here.
2152 
2153      We just want the duplicates, as those are the artificial
2154      non-ignored vars that we want to keep until instantiate_decls.
2155      Move them down and truncate the array.  */
2156   if (!vec_safe_is_empty (cfun->local_decls))
2157     cfun->local_decls->block_remove (0, len);
2158 
2159   /* At this point, all variables within the block tree with TREE_USED
2160      set are actually used by the optimized function.  Lay them out.  */
2161   expand_used_vars_for_block (outer_block, true);
2162 
2163   if (stack_vars_num > 0)
2164     {
2165       add_scope_conflicts ();
2166 
2167       /* If stack protection is enabled, we don't share space between
2168 	 vulnerable data and non-vulnerable data.  */
2169       if (flag_stack_protect != 0
2170 	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2171 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2172 		  && lookup_attribute ("stack_protect",
2173 				       DECL_ATTRIBUTES (current_function_decl)))))
2174 	add_stack_protection_conflicts ();
2175 
2176       /* Now that we have collected all stack variables, and have computed a
2177 	 minimal interference graph, attempt to save some stack space.  */
2178       partition_stack_vars ();
2179       if (dump_file)
2180 	dump_stack_var_partition ();
2181     }
2182 
2183   switch (flag_stack_protect)
2184     {
2185     case SPCT_FLAG_ALL:
2186       create_stack_guard ();
2187       break;
2188 
2189     case SPCT_FLAG_STRONG:
2190       if (gen_stack_protect_signal
2191 	  || cfun->calls_alloca || has_protected_decls
2192 	  || lookup_attribute ("stack_protect",
2193 			       DECL_ATTRIBUTES (current_function_decl)))
2194 	create_stack_guard ();
2195       break;
2196 
2197     case SPCT_FLAG_DEFAULT:
2198       if (cfun->calls_alloca || has_protected_decls
2199 	  || lookup_attribute ("stack_protect",
2200 			       DECL_ATTRIBUTES (current_function_decl)))
2201 	create_stack_guard ();
2202       break;
2203 
2204     case SPCT_FLAG_EXPLICIT:
2205       if (lookup_attribute ("stack_protect",
2206 			    DECL_ATTRIBUTES (current_function_decl)))
2207 	create_stack_guard ();
2208       break;
2209     default:
2210       ;
2211     }
2212 
2213   /* Assign rtl to each variable based on these partitions.  */
2214   if (stack_vars_num > 0)
2215     {
2216       struct stack_vars_data data;
2217 
2218       data.asan_base = NULL_RTX;
2219       data.asan_alignb = 0;
2220 
2221       /* Reorder decls to be protected by iterating over the variables
2222 	 array multiple times, and allocating out of each phase in turn.  */
2223       /* ??? We could probably integrate this into the qsort we did
2224 	 earlier, such that we naturally see these variables first,
2225 	 and thus naturally allocate things in the right order.  */
2226       if (has_protected_decls)
2227 	{
2228 	  /* Phase 1 contains only character arrays.  */
2229 	  expand_stack_vars (stack_protect_decl_phase_1, &data);
2230 
2231 	  /* Phase 2 contains other kinds of arrays.  */
2232 	  if (flag_stack_protect == SPCT_FLAG_ALL
2233 	      || flag_stack_protect == SPCT_FLAG_STRONG
2234 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2235 		  && lookup_attribute ("stack_protect",
2236 				       DECL_ATTRIBUTES (current_function_decl))))
2237 	    expand_stack_vars (stack_protect_decl_phase_2, &data);
2238 	}
2239 
2240       if (asan_sanitize_stack_p ())
2241 	/* Phase 3, any partitions that need asan protection
2242 	   in addition to phase 1 and 2.  */
2243 	expand_stack_vars (asan_decl_phase_3, &data);
2244 
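      /* The block below pads the protected area with a red zone.  For
	 illustration, assuming the usual 32-byte ASAN_RED_ZONE_SIZE: with
	 sz = 40 and data.asan_alignb = 64 the rounding yields
	 redzonesz = ((40 + 32 + 63) & ~63) - 40 = 88, so sz + redzonesz
	 (128) is a multiple of the requested alignment; otherwise the red
	 zone stays at ASAN_RED_ZONE_SIZE.  */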
2245       /* ASAN description strings don't yet have a syntax for expressing
2246 	 polynomial offsets.  */
2247       HOST_WIDE_INT prev_offset;
2248       if (!data.asan_vec.is_empty ()
2249 	  && frame_offset.is_constant (&prev_offset))
2250 	{
2251 	  HOST_WIDE_INT offset, sz, redzonesz;
2252 	  redzonesz = ASAN_RED_ZONE_SIZE;
2253 	  sz = data.asan_vec[0] - prev_offset;
2254 	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2255 	      && data.asan_alignb <= 4096
2256 	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2257 	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2258 			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2259 	  /* Allocating a constant amount of space from a constant
2260 	     starting offset must give a constant result.  */
2261 	  offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2262 		    .to_constant ());
2263 	  data.asan_vec.safe_push (prev_offset);
2264 	  data.asan_vec.safe_push (offset);
2265 	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
2266 	  if (STRICT_ALIGNMENT)
2267 	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2268 				      << ASAN_SHADOW_SHIFT)
2269 				     / BITS_PER_UNIT, 1);
2270 
2271 	  var_end_seq
2272 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
2273 					  data.asan_base,
2274 					  data.asan_alignb,
2275 					  data.asan_vec.address (),
2276 					  data.asan_decl_vec.address (),
2277 					  data.asan_vec.length ());
2278 	}
2279 
2280       expand_stack_vars (NULL, &data);
2281     }
2282 
2283   if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2284     var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2285 					      virtual_stack_vars_rtx,
2286 					      var_end_seq);
2287 
2288   fini_vars_expansion ();
2289 
2290   /* If there were any artificial non-ignored vars without rtl
2291      found earlier, see if deferred stack allocation hasn't assigned
2292      rtl to them.  */
2293   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2294     {
2295       rtx rtl = DECL_RTL_IF_SET (var);
2296 
2297       /* Keep artificial non-ignored vars in cfun->local_decls
2298 	 chain until instantiate_decls.  */
2299       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2300 	add_local_decl (cfun, var);
2301     }
2302 
2303   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
2304   if (STACK_ALIGNMENT_NEEDED)
2305     {
2306       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2307       if (FRAME_GROWS_DOWNWARD)
2308 	frame_offset = aligned_lower_bound (frame_offset, align);
2309       else
2310 	frame_offset = aligned_upper_bound (frame_offset, align);
2311     }
2312 
2313   return var_end_seq;
2314 }
2315 
2316 
2317 /* If we need to produce a detailed dump, print the tree representation
2318    for STMT to the dump file.  SINCE is the last RTX after which the RTL
2319    generated for STMT should have been appended.  */
2320 
2321 static void
2322 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2323 {
2324   if (dump_file && (dump_flags & TDF_DETAILS))
2325     {
2326       fprintf (dump_file, "\n;; ");
2327       print_gimple_stmt (dump_file, stmt, 0,
2328 			 TDF_SLIM | (dump_flags & TDF_LINENO));
2329       fprintf (dump_file, "\n");
2330 
2331       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2332     }
2333 }
2334 
2335 /* Maps the blocks that do not contain tree labels to rtx labels.  */
2336 
2337 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2338 
2339 /* Returns the label_rtx expression for a label starting basic block BB.  */
2340 
2341 static rtx_code_label *
2342 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2343 {
2344   gimple_stmt_iterator gsi;
2345   tree lab;
2346 
2347   if (bb->flags & BB_RTL)
2348     return block_label (bb);
2349 
2350   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2351   if (elt)
2352     return *elt;
2353 
2354   /* Find the tree label if it is present.  */
2355 
2356   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2357     {
2358       glabel *lab_stmt;
2359 
2360       lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2361       if (!lab_stmt)
2362 	break;
2363 
2364       lab = gimple_label_label (lab_stmt);
2365       if (DECL_NONLOCAL (lab))
2366 	break;
2367 
2368       return jump_target_rtx (lab);
2369     }
2370 
2371   rtx_code_label *l = gen_label_rtx ();
2372   lab_rtx_for_bb->put (bb, l);
2373   return l;
2374 }
2375 
2376 
2377 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
2378    of a basic block where we just expanded the conditional at the end,
2379    possibly clean up the CFG and instruction sequence.  LAST is the
2380    last instruction before the just emitted jump sequence.  */
2381 
2382 static void
2383 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2384 {
2385   /* Special case: when jumpif decides that the condition is
2386      trivial it emits an unconditional jump (and the necessary
2387      barrier).  But we still have two edges, the fallthru one is
2388      wrong.  purge_dead_edges would clean this up later.  Unfortunately
2389      we have to insert insns (and split edges) before
2390      find_many_sub_basic_blocks and hence before purge_dead_edges.
2391      But splitting edges might create new blocks which depend on the
2392      fact that if there are two edges there's no barrier.  So the
2393      barrier would get lost and verify_flow_info would ICE.  Instead
2394      of auditing all edge splitters to care for the barrier (which
2395      normally isn't there in a cleaned CFG), fix it here.  */
2396   if (BARRIER_P (get_last_insn ()))
2397     {
2398       rtx_insn *insn;
2399       remove_edge (e);
2400       /* Now we have a single successor block.  If we have insns to
2401 	 insert on the remaining edge, we will potentially insert them
2402 	 at the end of this block (if the dest block isn't feasible)
2403 	 in order to avoid splitting the edge.  This insertion will take
2404 	 place in front of the last jump.  But we might have emitted
2405 	 multiple jumps (conditional and one unconditional) to the
2406 	 same destination.  Inserting in front of the last one then
2407 	 is a problem.  See PR 40021.  We fix this by deleting all
2408 	 jumps except the last unconditional one.  */
2409       insn = PREV_INSN (get_last_insn ());
2410       /* Make sure we have an unconditional jump.  Otherwise we're
2411 	 confused.  */
2412       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2413       for (insn = PREV_INSN (insn); insn != last;)
2414 	{
2415 	  insn = PREV_INSN (insn);
2416 	  if (JUMP_P (NEXT_INSN (insn)))
2417 	    {
2418 	      if (!any_condjump_p (NEXT_INSN (insn)))
2419 		{
2420 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2421 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2422 		}
2423 	      delete_insn (NEXT_INSN (insn));
2424 	    }
2425 	}
2426     }
2427 }
2428 
2429 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
2430    Returns a new basic block if we've terminated the current basic
2431    block and created a new one.  */
2432 
2433 static basic_block
2434 expand_gimple_cond (basic_block bb, gcond *stmt)
2435 {
2436   basic_block new_bb, dest;
2437   edge true_edge;
2438   edge false_edge;
2439   rtx_insn *last2, *last;
2440   enum tree_code code;
2441   tree op0, op1;
2442 
2443   code = gimple_cond_code (stmt);
2444   op0 = gimple_cond_lhs (stmt);
2445   op1 = gimple_cond_rhs (stmt);
2446   /* We're sometimes presented with such code:
2447        D.123_1 = x < y;
2448        if (D.123_1 != 0)
2449          ...
2450      This would expand to two comparisons which then later might
2451      be cleaned up by combine.  But some pattern matchers like if-conversion
2452      work better when there's only one compare, so make up for this
2453      here as special exception if TER would have made the same change.  */
2454   if (SA.values
2455       && TREE_CODE (op0) == SSA_NAME
2456       && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2457       && TREE_CODE (op1) == INTEGER_CST
2458       && ((gimple_cond_code (stmt) == NE_EXPR
2459 	   && integer_zerop (op1))
2460 	  || (gimple_cond_code (stmt) == EQ_EXPR
2461 	      && integer_onep (op1)))
2462       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2463     {
2464       gimple *second = SSA_NAME_DEF_STMT (op0);
2465       if (gimple_code (second) == GIMPLE_ASSIGN)
2466 	{
2467 	  enum tree_code code2 = gimple_assign_rhs_code (second);
2468 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
2469 	    {
2470 	      code = code2;
2471 	      op0 = gimple_assign_rhs1 (second);
2472 	      op1 = gimple_assign_rhs2 (second);
2473 	    }
2474 	  /* If jumps are cheap and the target does not support conditional
2475 	     compare, turn some more codes into jumpy sequences.  */
2476 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2477 		   && targetm.gen_ccmp_first == NULL)
2478 	    {
2479 	      if ((code2 == BIT_AND_EXPR
2480 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2481 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2482 		  || code2 == TRUTH_AND_EXPR)
2483 		{
2484 		  code = TRUTH_ANDIF_EXPR;
2485 		  op0 = gimple_assign_rhs1 (second);
2486 		  op1 = gimple_assign_rhs2 (second);
2487 		}
2488 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2489 		{
2490 		  code = TRUTH_ORIF_EXPR;
2491 		  op0 = gimple_assign_rhs1 (second);
2492 		  op1 = gimple_assign_rhs2 (second);
2493 		}
2494 	    }
2495 	}
2496     }
2497 
2498   last2 = last = get_last_insn ();
2499 
2500   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2501   set_curr_insn_location (gimple_location (stmt));
2502 
2503   /* These flags have no purpose in RTL land.  */
2504   true_edge->flags &= ~EDGE_TRUE_VALUE;
2505   false_edge->flags &= ~EDGE_FALSE_VALUE;
2506 
2507   /* We can either have a pure conditional jump with one fallthru edge or
2508      two-way jump that needs to be decomposed into two basic blocks.  */
2509   if (false_edge->dest == bb->next_bb)
2510     {
2511       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2512 		true_edge->probability);
2513       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2514       if (true_edge->goto_locus != UNKNOWN_LOCATION)
2515 	set_curr_insn_location (true_edge->goto_locus);
2516       false_edge->flags |= EDGE_FALLTHRU;
2517       maybe_cleanup_end_of_block (false_edge, last);
2518       return NULL;
2519     }
2520   if (true_edge->dest == bb->next_bb)
2521     {
2522       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2523 		   false_edge->probability);
2524       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2525       if (false_edge->goto_locus != UNKNOWN_LOCATION)
2526 	set_curr_insn_location (false_edge->goto_locus);
2527       true_edge->flags |= EDGE_FALLTHRU;
2528       maybe_cleanup_end_of_block (true_edge, last);
2529       return NULL;
2530     }
2531 
2532   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2533 	    true_edge->probability);
2534   last = get_last_insn ();
2535   if (false_edge->goto_locus != UNKNOWN_LOCATION)
2536     set_curr_insn_location (false_edge->goto_locus);
2537   emit_jump (label_rtx_for_bb (false_edge->dest));
2538 
2539   BB_END (bb) = last;
2540   if (BARRIER_P (BB_END (bb)))
2541     BB_END (bb) = PREV_INSN (BB_END (bb));
2542   update_bb_for_insn (bb);
2543 
2544   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2545   dest = false_edge->dest;
2546   redirect_edge_succ (false_edge, new_bb);
2547   false_edge->flags |= EDGE_FALLTHRU;
2548   new_bb->count = false_edge->count ();
2549   loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2550   add_bb_to_loop (new_bb, loop);
2551   if (loop->latch == bb
2552       && loop->header == dest)
2553     loop->latch = new_bb;
2554   make_single_succ_edge (new_bb, dest, 0);
2555   if (BARRIER_P (BB_END (new_bb)))
2556     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2557   update_bb_for_insn (new_bb);
2558 
2559   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2560 
2561   if (true_edge->goto_locus != UNKNOWN_LOCATION)
2562     {
2563       set_curr_insn_location (true_edge->goto_locus);
2564       true_edge->goto_locus = curr_insn_location ();
2565     }
2566 
2567   return new_bb;
2568 }
2569 
2570 /* Mark all calls that can have a transaction restart.  */
2571 
2572 static void
2573 mark_transaction_restart_calls (gimple *stmt)
2574 {
2575   struct tm_restart_node dummy;
2576   tm_restart_node **slot;
2577 
2578   if (!cfun->gimple_df->tm_restart)
2579     return;
2580 
2581   dummy.stmt = stmt;
2582   slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2583   if (slot)
2584     {
2585       struct tm_restart_node *n = *slot;
2586       tree list = n->label_or_list;
2587       rtx_insn *insn;
2588 
2589       for (insn = next_real_insn (get_last_insn ());
2590 	   !CALL_P (insn);
2591 	   insn = next_real_insn (insn))
2592 	continue;
2593 
2594       if (TREE_CODE (list) == LABEL_DECL)
2595 	add_reg_note (insn, REG_TM, label_rtx (list));
2596       else
2597 	for (; list ; list = TREE_CHAIN (list))
2598 	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2599     }
2600 }
2601 
2602 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2603    statement STMT.  */
2604 
2605 static void
2606 expand_call_stmt (gcall *stmt)
2607 {
2608   tree exp, decl, lhs;
2609   bool builtin_p;
2610   size_t i;
2611 
2612   if (gimple_call_internal_p (stmt))
2613     {
2614       expand_internal_call (stmt);
2615       return;
2616     }
2617 
2618   /* If this is a call to a built-in function and it has no effect other
2619      than setting the lhs, try to implement it using an internal function
2620      instead.  */
2621   decl = gimple_call_fndecl (stmt);
2622   if (gimple_call_lhs (stmt)
2623       && !gimple_has_side_effects (stmt)
2624       && (optimize || (decl && called_as_built_in (decl))))
2625     {
2626       internal_fn ifn = replacement_internal_fn (stmt);
2627       if (ifn != IFN_LAST)
2628 	{
2629 	  expand_internal_call (ifn, stmt);
2630 	  return;
2631 	}
2632     }
2633 
2634   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2635 
2636   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2637   builtin_p = decl && DECL_BUILT_IN (decl);
2638 
2639   /* If this is not a builtin function, the function type through which the
2640      call is made may be different from the type of the function.  */
2641   if (!builtin_p)
2642     CALL_EXPR_FN (exp)
2643       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2644 		      CALL_EXPR_FN (exp));
2645 
2646   TREE_TYPE (exp) = gimple_call_return_type (stmt);
2647   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2648 
2649   for (i = 0; i < gimple_call_num_args (stmt); i++)
2650     {
2651       tree arg = gimple_call_arg (stmt, i);
2652       gimple *def;
2653       /* TER addresses into arguments of builtin functions so we have a
2654 	 chance to infer more correct alignment information.  See PR39954.  */
2655       if (builtin_p
2656 	  && TREE_CODE (arg) == SSA_NAME
2657 	  && (def = get_gimple_for_ssa_name (arg))
2658 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2659 	arg = gimple_assign_rhs1 (def);
2660       CALL_EXPR_ARG (exp, i) = arg;
2661     }
2662 
2663   if (gimple_has_side_effects (stmt))
2664     TREE_SIDE_EFFECTS (exp) = 1;
2665 
2666   if (gimple_call_nothrow_p (stmt))
2667     TREE_NOTHROW (exp) = 1;
2668 
2669   if (gimple_no_warning_p (stmt))
2670     TREE_NO_WARNING (exp) = 1;
2671 
2672   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2673   CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2674   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2675   if (decl
2676       && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2677       && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2678     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2679   else
2680     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2681   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2682   CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2683   SET_EXPR_LOCATION (exp, gimple_location (stmt));
2684   CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2685 
2686   /* Ensure RTL is created for debug args.  */
2687   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2688     {
2689       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2690       unsigned int ix;
2691       tree dtemp;
2692 
2693       if (debug_args)
2694 	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2695 	  {
2696 	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2697 	    expand_debug_expr (dtemp);
2698 	  }
2699     }
2700 
2701   rtx_insn *before_call = get_last_insn ();
2702   lhs = gimple_call_lhs (stmt);
2703   if (lhs)
2704     expand_assignment (lhs, exp, false);
2705   else
2706     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2707 
2708   /* If the gimple call is an indirect call and has the 'nocf_check'
2709      attribute, find the generated CALL insn and mark it so that no
2710      control-flow verification is needed.  */
2711   if (gimple_call_nocf_check_p (stmt)
2712       && !gimple_call_fndecl (stmt))
2713     {
2714       rtx_insn *last = get_last_insn ();
2715       while (!CALL_P (last)
2716 	     && last != before_call)
2717 	last = PREV_INSN (last);
2718 
2719       if (last != before_call)
2720 	add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2721     }
2722 
2723   mark_transaction_restart_calls (stmt);
2724 }
2725 
2726 
2727 /* Generate RTL for an asm statement (explicit assembler code).
2728    STRING is a STRING_CST node containing the assembler code text,
2729    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
2730    insn is volatile; don't optimize it.  */
2731 
2732 static void
2733 expand_asm_loc (tree string, int vol, location_t locus)
2734 {
2735   rtx body;
2736 
2737   body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2738 				ggc_strdup (TREE_STRING_POINTER (string)),
2739 				locus);
2740 
2741   MEM_VOLATILE_P (body) = vol;
2742 
2743   /* Non-empty basic ASM implicitly clobbers memory.  */
2744   if (TREE_STRING_LENGTH (string) != 0)
2745     {
2746       rtx asm_op, clob;
2747       unsigned i, nclobbers;
2748       auto_vec<rtx> input_rvec, output_rvec;
2749       auto_vec<const char *> constraints;
2750       auto_vec<rtx> clobber_rvec;
2751       HARD_REG_SET clobbered_regs;
2752       CLEAR_HARD_REG_SET (clobbered_regs);
2753 
2754       clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2755       clobber_rvec.safe_push (clob);
2756 
2757       if (targetm.md_asm_adjust)
2758 	targetm.md_asm_adjust (output_rvec, input_rvec,
2759 			       constraints, clobber_rvec,
2760 			       clobbered_regs);
2761 
2762       asm_op = body;
2763       nclobbers = clobber_rvec.length ();
2764       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2765 
2766       XVECEXP (body, 0, 0) = asm_op;
2767       for (i = 0; i < nclobbers; i++)
2768 	XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2769     }
2770 
2771   emit_insn (body);
2772 }
2773 
2774 /* Return the number of times character C occurs in string S.  */
2775 static int
2776 n_occurrences (int c, const char *s)
2777 {
2778   int n = 0;
2779   while (*s)
2780     n += (*s++ == c);
2781   return n;
2782 }
2783 
2784 /* A subroutine of expand_asm_operands.  Check that all operands have
2785    the same number of alternatives.  Return true if so.  */
2786 
2787 static bool
2788 check_operand_nalternatives (const vec<const char *> &constraints)
2789 {
2790   unsigned len = constraints.length();
2791   if (len > 0)
2792     {
2793       int nalternatives = n_occurrences (',', constraints[0]);
2794 
2795       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2796 	{
2797 	  error ("too many alternatives in %<asm%>");
2798 	  return false;
2799 	}
2800 
2801       for (unsigned i = 1; i < len; ++i)
2802 	if (n_occurrences (',', constraints[i]) != nalternatives)
2803 	  {
2804 	    error ("operand constraints for %<asm%> differ "
2805 		   "in number of alternatives");
2806 	    return false;
2807 	  }
2808     }
2809   return true;
2810 }
2811 
2812 /* Check for overlap between registers marked in CLOBBERED_REGS and
2813    anything inappropriate in T.  Emit an error and return true if an
2814    overlap is found, false if T is ok.  */
2815 
2816 static bool
2817 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2818 {
2819   /* Conflicts between asm-declared register variables and the clobber
2820      list are not allowed.  */
2821   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2822 
2823   if (overlap)
2824     {
2825       error ("asm-specifier for variable %qE conflicts with asm clobber list",
2826 	     DECL_NAME (overlap));
2827 
2828       /* Reset registerness to stop multiple errors emitted for a single
2829 	 variable.  */
2830       DECL_REGISTER (overlap) = 0;
2831       return true;
2832     }
2833 
2834   return false;
2835 }
2836 
2837 /* Generate RTL for an asm statement with arguments.
2838    STRING is the instruction template.
2839    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2840    Each output or input has an expression in the TREE_VALUE and
2841    a tree list in TREE_PURPOSE which in turn contains a constraint
2842    name in TREE_VALUE (or NULL_TREE) and a constraint string
2843    in TREE_PURPOSE.
2844    CLOBBERS is a list of STRING_CST nodes each naming a hard register
2845    that is clobbered by this insn.
2846 
2847    LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2848    should be the fallthru basic block of the asm goto.
2849 
2850    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2851    Some elements of OUTPUTS may be replaced with trees representing temporary
2852    values.  The caller should copy those temporary values to the originally
2853    specified lvalues.
2854 
2855    VOL nonzero means the insn is volatile; don't optimize it.  */
2856 
2857 static void
2858 expand_asm_stmt (gasm *stmt)
2859 {
2860   class save_input_location
2861   {
2862     location_t old;
2863 
2864   public:
2865     explicit save_input_location(location_t where)
2866     {
2867       old = input_location;
2868       input_location = where;
2869     }
2870 
2871     ~save_input_location()
2872     {
2873       input_location = old;
2874     }
2875   };
2876 
2877   location_t locus = gimple_location (stmt);
2878 
2879   if (gimple_asm_input_p (stmt))
2880     {
2881       const char *s = gimple_asm_string (stmt);
2882       tree string = build_string (strlen (s), s);
2883       expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2884       return;
2885     }
2886 
2887   /* There are some legacy diagnostics in here, and this also avoids a
2888      sixth parameter to targetm.md_asm_adjust.  */
2889   save_input_location s_i_l(locus);
2890 
2891   unsigned noutputs = gimple_asm_noutputs (stmt);
2892   unsigned ninputs = gimple_asm_ninputs (stmt);
2893   unsigned nlabels = gimple_asm_nlabels (stmt);
2894   unsigned i;
2895 
2896   /* ??? Diagnose during gimplification?  */
2897   if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2898     {
2899       error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2900       return;
2901     }
2902 
2903   auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2904   auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2905   auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2906 
2907   /* Copy the gimple vectors into new vectors that we can manipulate.  */
2908 
2909   output_tvec.safe_grow (noutputs);
2910   input_tvec.safe_grow (ninputs);
2911   constraints.safe_grow (noutputs + ninputs);
2912 
2913   for (i = 0; i < noutputs; ++i)
2914     {
2915       tree t = gimple_asm_output_op (stmt, i);
2916       output_tvec[i] = TREE_VALUE (t);
2917       constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2918     }
2919   for (i = 0; i < ninputs; i++)
2920     {
2921       tree t = gimple_asm_input_op (stmt, i);
2922       input_tvec[i] = TREE_VALUE (t);
2923       constraints[i + noutputs]
2924 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2925     }
2926 
2927   /* ??? Diagnose during gimplification?  */
2928   if (! check_operand_nalternatives (constraints))
2929     return;
2930 
2931   /* Count the number of meaningful clobbered registers, ignoring what
2932      we would ignore later.  */
2933   auto_vec<rtx> clobber_rvec;
2934   HARD_REG_SET clobbered_regs;
2935   CLEAR_HARD_REG_SET (clobbered_regs);
2936 
2937   if (unsigned n = gimple_asm_nclobbers (stmt))
2938     {
2939       clobber_rvec.reserve (n);
2940       for (i = 0; i < n; i++)
2941 	{
2942 	  tree t = gimple_asm_clobber_op (stmt, i);
2943           const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2944 	  int nregs, j;
2945 
2946 	  j = decode_reg_name_and_count (regname, &nregs);
2947 	  if (j < 0)
2948 	    {
2949 	      if (j == -2)
2950 		{
2951 		  /* ??? Diagnose during gimplification?  */
2952 		  error ("unknown register name %qs in %<asm%>", regname);
2953 		}
2954 	      else if (j == -4)
2955 		{
2956 		  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2957 		  clobber_rvec.safe_push (x);
2958 		}
2959 	      else
2960 		{
2961 		  /* Otherwise we should have -1 == empty string
2962 		     or -3 == cc, which is not a register.  */
2963 		  gcc_assert (j == -1 || j == -3);
2964 		}
2965 	    }
2966 	  else
2967 	    for (int reg = j; reg < j + nregs; reg++)
2968 	      {
2969 		/* Clobbering the PIC register is an error.  */
2970 		if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2971 		  {
2972 		    /* ??? Diagnose during gimplification?  */
2973 		    error ("PIC register clobbered by %qs in %<asm%>",
2974 			   regname);
2975 		    return;
2976 		  }
2977 
2978 	        SET_HARD_REG_BIT (clobbered_regs, reg);
2979 	        rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2980 		clobber_rvec.safe_push (x);
2981 	      }
2982 	}
2983     }
2984   unsigned nclobbers = clobber_rvec.length();
2985 
2986   /* First pass over inputs and outputs checks validity and sets
2987      mark_addressable if needed.  */
2988   /* ??? Diagnose during gimplification?  */
2989 
2990   for (i = 0; i < noutputs; ++i)
2991     {
2992       tree val = output_tvec[i];
2993       tree type = TREE_TYPE (val);
2994       const char *constraint;
2995       bool is_inout;
2996       bool allows_reg;
2997       bool allows_mem;
2998 
2999       /* Try to parse the output constraint.  If that fails, there's
3000 	 no point in going further.  */
3001       constraint = constraints[i];
3002       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3003 				    &allows_mem, &allows_reg, &is_inout))
3004 	return;
3005 
3006       if (! allows_reg
3007 	  && (allows_mem
3008 	      || is_inout
3009 	      || (DECL_P (val)
3010 		  && REG_P (DECL_RTL (val))
3011 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3012 	mark_addressable (val);
3013     }
3014 
3015   for (i = 0; i < ninputs; ++i)
3016     {
3017       bool allows_reg, allows_mem;
3018       const char *constraint;
3019 
3020       constraint = constraints[i + noutputs];
3021       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3022 				    constraints.address (),
3023 				    &allows_mem, &allows_reg))
3024 	return;
3025 
3026       if (! allows_reg && allows_mem)
3027 	mark_addressable (input_tvec[i]);
3028     }
3029 
3030   /* Second pass evaluates arguments.  */
3031 
3032   /* Make sure stack is consistent for asm goto.  */
3033   if (nlabels > 0)
3034     do_pending_stack_adjust ();
3035   int old_generating_concat_p = generating_concat_p;
3036 
3037   /* Vector of RTX's of evaluated output operands.  */
3038   auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3039   auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3040   rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3041 
3042   output_rvec.safe_grow (noutputs);
3043 
3044   for (i = 0; i < noutputs; ++i)
3045     {
3046       tree val = output_tvec[i];
3047       tree type = TREE_TYPE (val);
3048       bool is_inout, allows_reg, allows_mem, ok;
3049       rtx op;
3050 
3051       ok = parse_output_constraint (&constraints[i], i, ninputs,
3052 				    noutputs, &allows_mem, &allows_reg,
3053 				    &is_inout);
3054       gcc_assert (ok);
3055 
3056       /* If an output operand is not a decl or indirect ref and our constraint
3057 	 allows a register, make a temporary to act as an intermediate.
3058 	 Make the asm insn write into that, then we will copy it to
3059 	 the real output operand.  Likewise for promoted variables.  */
3060 
3061       generating_concat_p = 0;
3062 
3063       if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3064 	  || (DECL_P (val)
3065 	      && (allows_mem || REG_P (DECL_RTL (val)))
3066 	      && ! (REG_P (DECL_RTL (val))
3067 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3068 	  || ! allows_reg
3069 	  || is_inout
3070 	  || TREE_ADDRESSABLE (type))
3071 	{
3072 	  op = expand_expr (val, NULL_RTX, VOIDmode,
3073 			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3074 	  if (MEM_P (op))
3075 	    op = validize_mem (op);
3076 
3077 	  if (! allows_reg && !MEM_P (op))
3078 	    error ("output number %d not directly addressable", i);
3079 	  if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3080 	      || GET_CODE (op) == CONCAT)
3081 	    {
3082 	      rtx old_op = op;
3083 	      op = gen_reg_rtx (GET_MODE (op));
3084 
3085 	      generating_concat_p = old_generating_concat_p;
3086 
3087 	      if (is_inout)
3088 		emit_move_insn (op, old_op);
3089 
3090 	      push_to_sequence2 (after_rtl_seq, after_rtl_end);
3091 	      emit_move_insn (old_op, op);
3092 	      after_rtl_seq = get_insns ();
3093 	      after_rtl_end = get_last_insn ();
3094 	      end_sequence ();
3095 	    }
3096 	}
3097       else
3098 	{
3099 	  op = assign_temp (type, 0, 1);
3100 	  op = validize_mem (op);
3101 	  if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3102 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3103 
3104 	  generating_concat_p = old_generating_concat_p;
3105 
3106 	  push_to_sequence2 (after_rtl_seq, after_rtl_end);
3107 	  expand_assignment (val, make_tree (type, op), false);
3108 	  after_rtl_seq = get_insns ();
3109 	  after_rtl_end = get_last_insn ();
3110 	  end_sequence ();
3111 	}
3112       output_rvec[i] = op;
3113 
3114       if (is_inout)
3115 	inout_opnum.safe_push (i);
3116     }
3117 
3118   auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3119   auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3120 
3121   input_rvec.safe_grow (ninputs);
3122   input_mode.safe_grow (ninputs);
3123 
3124   generating_concat_p = 0;
3125 
3126   for (i = 0; i < ninputs; ++i)
3127     {
3128       tree val = input_tvec[i];
3129       tree type = TREE_TYPE (val);
3130       bool allows_reg, allows_mem, ok;
3131       const char *constraint;
3132       rtx op;
3133 
3134       constraint = constraints[i + noutputs];
3135       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3136 				   constraints.address (),
3137 				   &allows_mem, &allows_reg);
3138       gcc_assert (ok);
3139 
3140       /* EXPAND_INITIALIZER will not generate code for valid initializer
3141 	 constants, but will still generate code for other types of operand.
3142 	 This is the behavior we want for constant constraints.  */
3143       op = expand_expr (val, NULL_RTX, VOIDmode,
3144 			allows_reg ? EXPAND_NORMAL
3145 			: allows_mem ? EXPAND_MEMORY
3146 			: EXPAND_INITIALIZER);
3147 
3148       /* Never pass a CONCAT to an ASM.  */
3149       if (GET_CODE (op) == CONCAT)
3150 	op = force_reg (GET_MODE (op), op);
3151       else if (MEM_P (op))
3152 	op = validize_mem (op);
3153 
3154       if (asm_operand_ok (op, constraint, NULL) <= 0)
3155 	{
3156 	  if (allows_reg && TYPE_MODE (type) != BLKmode)
3157 	    op = force_reg (TYPE_MODE (type), op);
3158 	  else if (!allows_mem)
3159 	    warning (0, "asm operand %d probably doesn%'t match constraints",
3160 		     i + noutputs);
3161 	  else if (MEM_P (op))
3162 	    {
3163 	      /* We won't recognize either volatile memory or memory with a
3164 		 queued address as a valid memory_operand at this point.
3165 		 Ignore it: clearly this *is* a memory reference.  */
3166 	    }
3167 	  else
3168 	    gcc_unreachable ();
3169 	}
3170       input_rvec[i] = op;
3171       input_mode[i] = TYPE_MODE (type);
3172     }
3173 
3174   /* For in-out operands, copy output rtx to input rtx.  */
3175   unsigned ninout = inout_opnum.length();
3176   for (i = 0; i < ninout; i++)
3177     {
3178       int j = inout_opnum[i];
3179       rtx o = output_rvec[j];
3180 
3181       input_rvec.safe_push (o);
3182       input_mode.safe_push (GET_MODE (o));
3183 
3184       char buffer[16];
3185       sprintf (buffer, "%d", j);
3186       constraints.safe_push (ggc_strdup (buffer));
3187     }
3188   ninputs += ninout;
3189 
3190   /* Sometimes we wish to automatically clobber registers across an asm.
3191      Case in point is when the i386 backend moved from cc0 to a hard reg --
3192      maintaining source-level compatibility means automatically clobbering
3193      the flags register.  */
3194   rtx_insn *after_md_seq = NULL;
3195   if (targetm.md_asm_adjust)
3196     after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3197 					  constraints, clobber_rvec,
3198 					  clobbered_regs);
3199 
3200   /* Do not allow the hook to change the output and input count,
3201      lest it mess up the operand numbering.  */
3202   gcc_assert (output_rvec.length() == noutputs);
3203   gcc_assert (input_rvec.length() == ninputs);
3204   gcc_assert (constraints.length() == noutputs + ninputs);
3205 
3206   /* But it certainly can adjust the clobbers.  */
3207   nclobbers = clobber_rvec.length();
3208 
3209   /* Third pass checks for easy conflicts.  */
3210   /* ??? Why are we doing this on trees instead of rtx?  */
3211 
3212   bool clobber_conflict_found = 0;
3213   for (i = 0; i < noutputs; ++i)
3214     if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3215 	clobber_conflict_found = 1;
3216   for (i = 0; i < ninputs - ninout; ++i)
3217     if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3218 	clobber_conflict_found = 1;
3219 
3220   /* Make vectors for the expression-rtx, constraint strings,
3221      and named operands.  */
3222 
3223   rtvec argvec = rtvec_alloc (ninputs);
3224   rtvec constraintvec = rtvec_alloc (ninputs);
3225   rtvec labelvec = rtvec_alloc (nlabels);
3226 
3227   rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3228 				    : GET_MODE (output_rvec[0])),
3229 				   ggc_strdup (gimple_asm_string (stmt)),
3230 				   "", 0, argvec, constraintvec,
3231 				   labelvec, locus);
3232   MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3233 
3234   for (i = 0; i < ninputs; ++i)
3235     {
3236       ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3237       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3238 	= gen_rtx_ASM_INPUT_loc (input_mode[i],
3239 				 constraints[i + noutputs],
3240 				 locus);
3241     }
3242 
3243   /* Copy labels to the vector.  */
3244   rtx_code_label *fallthru_label = NULL;
3245   if (nlabels > 0)
3246     {
3247       basic_block fallthru_bb = NULL;
3248       edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3249       if (fallthru)
3250 	fallthru_bb = fallthru->dest;
3251 
3252       for (i = 0; i < nlabels; ++i)
3253 	{
3254 	  tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3255 	  rtx_insn *r;
3256 	  /* If asm goto has any labels in the fallthru basic block, use
3257 	     a label that we emit immediately after the asm goto.  Expansion
3258 	     may insert further instructions into the same basic block after
3259 	     asm goto and if we don't do this, insertion of instructions on
3260 	     the fallthru edge might misbehave.  See PR58670.  */
3261 	  if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3262 	    {
3263 	      if (fallthru_label == NULL_RTX)
3264 	        fallthru_label = gen_label_rtx ();
3265 	      r = fallthru_label;
3266 	    }
3267 	  else
3268 	    r = label_rtx (label);
3269 	  ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3270 	}
3271     }
3272 
3273   /* Now, for each output, construct an rtx
3274      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3275 			       ARGVEC CONSTRAINTS OPNAMES))
3276      If there is more than one, put them inside a PARALLEL.  */
3277 
3278   if (nlabels > 0 && nclobbers == 0)
3279     {
3280       gcc_assert (noutputs == 0);
3281       emit_jump_insn (body);
3282     }
3283   else if (noutputs == 0 && nclobbers == 0)
3284     {
3285       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
3286       emit_insn (body);
3287     }
3288   else if (noutputs == 1 && nclobbers == 0)
3289     {
3290       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3291       emit_insn (gen_rtx_SET (output_rvec[0], body));
3292     }
3293   else
3294     {
3295       rtx obody = body;
3296       int num = noutputs;
3297 
3298       if (num == 0)
3299 	num = 1;
3300 
3301       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3302 
3303       /* For each output operand, store a SET.  */
3304       for (i = 0; i < noutputs; ++i)
3305 	{
3306 	  rtx src, o = output_rvec[i];
3307 	  if (i == 0)
3308 	    {
3309 	      ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3310 	      src = obody;
3311 	    }
3312 	  else
3313 	    {
3314 	      src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3315 					  ASM_OPERANDS_TEMPLATE (obody),
3316 					  constraints[i], i, argvec,
3317 					  constraintvec, labelvec, locus);
3318 	      MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3319 	    }
3320 	  XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3321 	}
3322 
3323       /* If there are no outputs (but there are some clobbers),
3324 	 store the bare ASM_OPERANDS into the PARALLEL.  */
3325       if (i == 0)
3326 	XVECEXP (body, 0, i++) = obody;
3327 
3328       /* Store (clobber REG) for each clobbered register specified.  */
3329       for (unsigned j = 0; j < nclobbers; ++j)
3330 	{
3331 	  rtx clobbered_reg = clobber_rvec[j];
3332 
3333 	  /* Sanity-check for any overlap between clobbers and the inputs
3334 	     and outputs that hasn't been handled.  Such overlap should
3335 	     have been detected and reported above.  */
3336 	  if (!clobber_conflict_found && REG_P (clobbered_reg))
3337 	    {
3338 	      /* We test the old body (obody) contents to avoid
3339 		 tripping over the under-construction body.  */
3340 	      for (unsigned k = 0; k < noutputs; ++k)
3341 		if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3342 		  internal_error ("asm clobber conflict with output operand");
3343 
3344 	      for (unsigned k = 0; k < ninputs - ninout; ++k)
3345 		if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3346 		  internal_error ("asm clobber conflict with input operand");
3347 	    }
3348 
3349 	  XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3350 	}
3351 
3352       if (nlabels > 0)
3353 	emit_jump_insn (body);
3354       else
3355 	emit_insn (body);
3356     }
3357 
3358   generating_concat_p = old_generating_concat_p;
3359 
3360   if (fallthru_label)
3361     emit_label (fallthru_label);
3362 
3363   if (after_md_seq)
3364     emit_insn (after_md_seq);
3365   if (after_rtl_seq)
3366     emit_insn (after_rtl_seq);
3367 
3368   free_temp_slots ();
3369   crtl->has_asm_statement = 1;
3370 }
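
/* Illustrative note (added commentary, not part of the original sources):
   for a typical extended asm such as

     asm volatile ("..." : "=r" (x) : "r" (y) : "cc");

   the code above builds, roughly, one insn of the form

     (parallel [(set (reg x) (asm_operands ...))
		(clobber (reg ...))])

   with MEM_VOLATILE_P set on the ASM_OPERANDS; when there are no outputs
   and no clobbers, the bare ASM_OPERANDS (or a jump insn for asm goto) is
   emitted instead.  */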
3371 
3372 /* Emit code to jump to the address
3373    specified by the pointer expression EXP.  */
3374 
3375 static void
3376 expand_computed_goto (tree exp)
3377 {
3378   rtx x = expand_normal (exp);
3379 
3380   do_pending_stack_adjust ();
3381   emit_indirect_jump (x);
3382 }
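
/* Illustrative note (added commentary): computed gotos come from the GNU C
   labels-as-values extension, e.g.

     void *p = &&out;
     goto *p;
    out:;

   A GIMPLE_GOTO whose destination is not a LABEL_DECL is routed here by
   expand_gimple_stmt_1 below.  */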
3383 
3384 /* Generate RTL code for a `goto' statement with target label LABEL.
3385    LABEL should be a LABEL_DECL tree node that was or will later be
3386    defined with `expand_label'.  */
3387 
3388 static void
3389 expand_goto (tree label)
3390 {
3391   if (flag_checking)
3392     {
3393       /* Check for a nonlocal goto to a containing function.  Should have
3394 	 gotten translated to __builtin_nonlocal_goto.  */
3395       tree context = decl_function_context (label);
3396       gcc_assert (!context || context == current_function_decl);
3397     }
3398 
3399   emit_jump (jump_target_rtx (label));
3400 }
3401 
3402 /* Output a return with no value.  */
3403 
3404 static void
3405 expand_null_return_1 (void)
3406 {
3407   clear_pending_stack_adjust ();
3408   do_pending_stack_adjust ();
3409   emit_jump (return_label);
3410 }
3411 
3412 /* Generate RTL to return from the current function, with no value.
3413    (That is, we do not do anything about returning any value.)  */
3414 
3415 void
3416 expand_null_return (void)
3417 {
3418   /* If this function was declared to return a value, but we
3419      didn't, clobber the return registers so that they are not
3420      propagated live to the rest of the function.  */
3421   clobber_return_register ();
3422 
3423   expand_null_return_1 ();
3424 }
3425 
3426 /* Generate RTL to return from the current function, with value VAL.  */
3427 
3428 static void
3429 expand_value_return (rtx val)
3430 {
3431   /* Copy the value to the return location unless it's already there.  */
3432 
3433   tree decl = DECL_RESULT (current_function_decl);
3434   rtx return_reg = DECL_RTL (decl);
3435   if (return_reg != val)
3436     {
3437       tree funtype = TREE_TYPE (current_function_decl);
3438       tree type = TREE_TYPE (decl);
3439       int unsignedp = TYPE_UNSIGNED (type);
3440       machine_mode old_mode = DECL_MODE (decl);
3441       machine_mode mode;
3442       if (DECL_BY_REFERENCE (decl))
3443         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3444       else
3445         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3446 
3447       if (mode != old_mode)
3448 	val = convert_modes (mode, old_mode, val, unsignedp);
3449 
3450       if (GET_CODE (return_reg) == PARALLEL)
3451 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3452       else
3453 	emit_move_insn (return_reg, val);
3454     }
3455 
3456   expand_null_return_1 ();
3457 }
3458 
3459 /* Generate RTL to evaluate the expression RETVAL and return it
3460    from the current function.  */
3461 
3462 static void
3463 expand_return (tree retval, tree bounds)
3464 {
3465   rtx result_rtl;
3466   rtx val = 0;
3467   tree retval_rhs;
3468   rtx bounds_rtl;
3469 
3470   /* If function wants no value, give it none.  */
3471   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3472     {
3473       expand_normal (retval);
3474       expand_null_return ();
3475       return;
3476     }
3477 
3478   if (retval == error_mark_node)
3479     {
3480       /* Treat this like a return of no value from a function that
3481 	 returns a value.  */
3482       expand_null_return ();
3483       return;
3484     }
3485   else if ((TREE_CODE (retval) == MODIFY_EXPR
3486 	    || TREE_CODE (retval) == INIT_EXPR)
3487 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3488     retval_rhs = TREE_OPERAND (retval, 1);
3489   else
3490     retval_rhs = retval;
3491 
3492   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3493 
3494   /* Put the returned bounds in the right place.  */
3495   bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3496   if (bounds_rtl)
3497     {
3498       rtx addr = NULL;
3499       rtx bnd = NULL;
3500 
3501       if (bounds && bounds != error_mark_node)
3502 	{
3503 	  bnd = expand_normal (bounds);
3504 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3505 	}
3506       else if (REG_P (bounds_rtl))
3507 	{
3508 	  if (bounds)
3509 	    bnd = chkp_expand_zero_bounds ();
3510 	  else
3511 	    {
3512 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3513 	      addr = gen_rtx_MEM (Pmode, addr);
3514 	      bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3515 	    }
3516 
3517 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3518 	}
3519       else
3520 	{
3521 	  int n;
3522 
3523 	  gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3524 
3525 	  if (bounds)
3526 	    bnd = chkp_expand_zero_bounds ();
3527 	  else
3528 	    {
3529 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3530 	      addr = gen_rtx_MEM (Pmode, addr);
3531 	    }
3532 
3533 	  for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3534 	    {
3535 	      rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3536 	      if (!bounds)
3537 		{
3538 		  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3539 		  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3540 		  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3541 		}
3542 	      targetm.calls.store_returned_bounds (slot, bnd);
3543 	    }
3544 	}
3545     }
3546   else if (chkp_function_instrumented_p (current_function_decl)
3547 	   && !BOUNDED_P (retval_rhs)
3548 	   && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3549 	   && TREE_CODE (retval_rhs) != RESULT_DECL)
3550     {
3551       rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3552       addr = gen_rtx_MEM (Pmode, addr);
3553 
3554       gcc_assert (MEM_P (result_rtl));
3555 
3556       chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3557     }
3558 
3559   /* If we are returning the RESULT_DECL, then the value has already
3560      been stored into it, so we don't have to do anything special.  */
3561   if (TREE_CODE (retval_rhs) == RESULT_DECL)
3562     expand_value_return (result_rtl);
3563 
3564   /* If the result is an aggregate that is being returned in one (or more)
3565      registers, load the registers here.  */
3566 
3567   else if (retval_rhs != 0
3568 	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3569 	   && REG_P (result_rtl))
3570     {
3571       val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3572       if (val)
3573 	{
3574 	  /* Use the mode of the result value on the return register.  */
3575 	  PUT_MODE (result_rtl, GET_MODE (val));
3576 	  expand_value_return (val);
3577 	}
3578       else
3579 	expand_null_return ();
3580     }
3581   else if (retval_rhs != 0
3582 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3583 	   && (REG_P (result_rtl)
3584 	       || (GET_CODE (result_rtl) == PARALLEL)))
3585     {
3586       /* Compute the return value into a temporary (usually a pseudo reg).  */
3587       val
3588 	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3589       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3590       val = force_not_mem (val);
3591       expand_value_return (val);
3592     }
3593   else
3594     {
3595       /* No hard reg used; calculate value into hard return reg.  */
3596       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3597       expand_value_return (result_rtl);
3598     }
3599 }
3600 
3601 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3602    STMT that doesn't require special handling for outgoing edges.  That
3603    is, no tail calls and no GIMPLE_COND.  */
3604 
3605 static void
3606 expand_gimple_stmt_1 (gimple *stmt)
3607 {
3608   tree op0;
3609 
3610   set_curr_insn_location (gimple_location (stmt));
3611 
3612   switch (gimple_code (stmt))
3613     {
3614     case GIMPLE_GOTO:
3615       op0 = gimple_goto_dest (stmt);
3616       if (TREE_CODE (op0) == LABEL_DECL)
3617 	expand_goto (op0);
3618       else
3619 	expand_computed_goto (op0);
3620       break;
3621     case GIMPLE_LABEL:
3622       expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3623       break;
3624     case GIMPLE_NOP:
3625     case GIMPLE_PREDICT:
3626       break;
3627     case GIMPLE_SWITCH:
3628       {
3629 	gswitch *swtch = as_a <gswitch *> (stmt);
3630 	if (gimple_switch_num_labels (swtch) == 1)
3631 	  expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3632 	else
3633 	  expand_case (swtch);
3634       }
3635       break;
3636     case GIMPLE_ASM:
3637       expand_asm_stmt (as_a <gasm *> (stmt));
3638       break;
3639     case GIMPLE_CALL:
3640       expand_call_stmt (as_a <gcall *> (stmt));
3641       break;
3642 
3643     case GIMPLE_RETURN:
3644       {
3645 	tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3646 	op0 = gimple_return_retval (as_a <greturn *> (stmt));
3647 
3648 	if (op0 && op0 != error_mark_node)
3649 	  {
3650 	    tree result = DECL_RESULT (current_function_decl);
3651 
3652 	    /* Mark that we have a return statement with missing bounds.  */
3653 	    if (!bnd
3654 		&& chkp_function_instrumented_p (cfun->decl)
3655 		&& !DECL_P (op0))
3656 	      bnd = error_mark_node;
3657 
3658 	    /* If we are not returning the current function's RESULT_DECL,
3659 	       build an assignment to it.  */
3660 	    if (op0 != result)
3661 	      {
3662 		/* I believe that a function's RESULT_DECL is unique.  */
3663 		gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3664 
3665 		/* ??? We'd like to simply use expand_assignment here,
3666 		   but this fails if the value is of BLKmode but the return
3667 		   decl is a register.  expand_return has special handling
3668 		   for this combination, which eventually should move
3669 		   to common code.  See comments there.  Until then, let's
3670 		   build a modify expression :-/  */
3671 		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3672 			      result, op0);
3673 	      }
3674 	  }
3675 
3676 	if (!op0)
3677 	  expand_null_return ();
3678 	else
3679 	  expand_return (op0, bnd);
3680       }
3681       break;
3682 
3683     case GIMPLE_ASSIGN:
3684       {
3685 	gassign *assign_stmt = as_a <gassign *> (stmt);
3686 	tree lhs = gimple_assign_lhs (assign_stmt);
3687 
3688 	/* Tree expand used to fiddle with |= and &= of two bitfield
3689 	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
3690 	   of binary assigns must be a gimple reg.  */
3691 
3692 	if (TREE_CODE (lhs) != SSA_NAME
3693 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
3694 	       == GIMPLE_SINGLE_RHS)
3695 	  {
3696 	    tree rhs = gimple_assign_rhs1 (assign_stmt);
3697 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3698 			== GIMPLE_SINGLE_RHS);
3699 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3700 		/* Do not put locations on possibly shared trees.  */
3701 		&& !is_gimple_min_invariant (rhs))
3702 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3703 	    if (TREE_CLOBBER_P (rhs))
3704 	      /* This is a clobber marking that this LHS is going out
3705 		 of scope.  */
3706 	      ;
3707 	    else
3708 	      expand_assignment (lhs, rhs,
3709 				 gimple_assign_nontemporal_move_p (
3710 				   assign_stmt));
3711 	  }
3712 	else
3713 	  {
3714 	    rtx target, temp;
3715 	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3716 	    struct separate_ops ops;
3717 	    bool promoted = false;
3718 
3719 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3720 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3721 	      promoted = true;
3722 
3723 	    ops.code = gimple_assign_rhs_code (assign_stmt);
3724 	    ops.type = TREE_TYPE (lhs);
3725 	    switch (get_gimple_rhs_class (ops.code))
3726 	      {
3727 		case GIMPLE_TERNARY_RHS:
3728 		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
3729 		  /* Fallthru */
3730 		case GIMPLE_BINARY_RHS:
3731 		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
3732 		  /* Fallthru */
3733 		case GIMPLE_UNARY_RHS:
3734 		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
3735 		  break;
3736 		default:
3737 		  gcc_unreachable ();
3738 	      }
3739 	    ops.location = gimple_location (stmt);
3740 
3741 	    /* If we want to use a nontemporal store, force the value into
3742 	       a register first.  If we store into a promoted register,
3743 	       don't directly expand to target.  */
3744 	    temp = nontemporal || promoted ? NULL_RTX : target;
3745 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3746 				       EXPAND_NORMAL);
3747 
3748 	    if (temp == target)
3749 	      ;
3750 	    else if (promoted)
3751 	      {
3752 		int unsignedp = SUBREG_PROMOTED_SIGN (target);
3753 		/* If TEMP is a VOIDmode constant, use convert_modes to make
3754 		   sure that we properly convert it.  */
3755 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3756 		  {
3757 		    temp = convert_modes (GET_MODE (target),
3758 					  TYPE_MODE (ops.type),
3759 					  temp, unsignedp);
3760 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3761 					  GET_MODE (target), temp, unsignedp);
3762 		  }
3763 
3764 		convert_move (SUBREG_REG (target), temp, unsignedp);
3765 	      }
3766 	    else if (nontemporal && emit_storent_insn (target, temp))
3767 	      ;
3768 	    else
3769 	      {
3770 		temp = force_operand (temp, target);
3771 		if (temp != target)
3772 		  emit_move_insn (target, temp);
3773 	      }
3774 	  }
3775       }
3776       break;
3777 
3778     default:
3779       gcc_unreachable ();
3780     }
3781 }
3782 
3783 /* Expand one gimple statement STMT and return the last RTL instruction
3784    before any of the newly generated ones.
3785 
3786    In addition to generating the necessary RTL instructions this also
3787    sets REG_EH_REGION notes if necessary and sets the current source
3788    location for diagnostics.  */
3789 
3790 static rtx_insn *
3791 expand_gimple_stmt (gimple *stmt)
3792 {
3793   location_t saved_location = input_location;
3794   rtx_insn *last = get_last_insn ();
3795   int lp_nr;
3796 
3797   gcc_assert (cfun);
3798 
3799   /* We need to save and restore the current source location so that errors
3800      discovered during expansion are emitted with the right location.  But
3801      it would be better if the diagnostic routines used the source location
3802      embedded in the tree nodes rather than globals.  */
3803   if (gimple_has_location (stmt))
3804     input_location = gimple_location (stmt);
3805 
3806   expand_gimple_stmt_1 (stmt);
3807 
3808   /* Free any temporaries used to evaluate this statement.  */
3809   free_temp_slots ();
3810 
3811   input_location = saved_location;
3812 
3813   /* Mark all insns that may trap.  */
3814   lp_nr = lookup_stmt_eh_lp (stmt);
3815   if (lp_nr)
3816     {
3817       rtx_insn *insn;
3818       for (insn = next_real_insn (last); insn;
3819 	   insn = next_real_insn (insn))
3820 	{
3821 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3822 	      /* If we want exceptions for non-call insns, any
3823 		 may_trap_p instruction may throw.  */
3824 	      && GET_CODE (PATTERN (insn)) != CLOBBER
3825 	      && GET_CODE (PATTERN (insn)) != USE
3826 	      && insn_could_throw_p (insn))
3827 	    make_reg_eh_region_note (insn, 0, lp_nr);
3828 	}
3829     }
3830 
3831   return last;
3832 }
3833 
3834 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
3835    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
3836    generated a tail call (something that might be denied by the ABI
3837    rules governing the call; see calls.c).
3838 
3839    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3840    can still reach the rest of BB.  The case here is __builtin_sqrt,
3841    where the NaN result goes through the external function (with a
3842    tailcall) and the normal result happens via a sqrt instruction.  */
3843 
3844 static basic_block
3845 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3846 {
3847   rtx_insn *last2, *last;
3848   edge e;
3849   edge_iterator ei;
3850   profile_probability probability;
3851 
3852   last2 = last = expand_gimple_stmt (stmt);
3853 
3854   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3855     if (CALL_P (last) && SIBLING_CALL_P (last))
3856       goto found;
3857 
3858   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3859 
3860   *can_fallthru = true;
3861   return NULL;
3862 
3863  found:
3864   /* ??? Wouldn't it be better to just reset any pending stack adjust?
3865      Any instructions emitted here are about to be deleted.  */
3866   do_pending_stack_adjust ();
3867 
3868   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
3869   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
3870      EH or abnormal edges, we shouldn't have created a tail call in
3871      the first place.  So it seems to me we should just be removing
3872      all edges here, or redirecting the existing fallthru edge to
3873      the exit block.  */
3874 
3875   probability = profile_probability::never ();
3876 
3877   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3878     {
3879       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3880 	{
3881 	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3882 	    e->dest->count -= e->count ();
3883 	  probability += e->probability;
3884 	  remove_edge (e);
3885 	}
3886       else
3887 	ei_next (&ei);
3888     }
3889 
3890   /* This is somewhat ugly: the call_expr expander often emits instructions
3891      after the sibcall (to perform the function return).  These confuse the
3892      find_many_sub_basic_blocks code, so we need to get rid of them.  */
3893   last = NEXT_INSN (last);
3894   gcc_assert (BARRIER_P (last));
3895 
3896   *can_fallthru = false;
3897   while (NEXT_INSN (last))
3898     {
3899       /* For instance, the sqrt builtin expander expands an if with a
3900 	 sibcall in the `then' arm and a label for the `else' arm.  */
3901       if (LABEL_P (NEXT_INSN (last)))
3902 	{
3903 	  *can_fallthru = true;
3904 	  break;
3905 	}
3906       delete_insn (NEXT_INSN (last));
3907     }
3908 
3909   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3910 		 | EDGE_SIBCALL);
3911   e->probability = probability;
3912   BB_END (bb) = last;
3913   update_bb_for_insn (bb);
3914 
3915   if (NEXT_INSN (last))
3916     {
3917       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3918 
3919       last = BB_END (bb);
3920       if (BARRIER_P (last))
3921 	BB_END (bb) = PREV_INSN (last);
3922     }
3923 
3924   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3925 
3926   return bb;
3927 }
3928 
3929 /* Return the difference between the floor and the truncated result of
3930    a signed division by OP1 with remainder MOD.  */
3931 static rtx
3932 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3933 {
3934   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3935   return gen_rtx_IF_THEN_ELSE
3936     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3937      gen_rtx_IF_THEN_ELSE
3938      (mode, gen_rtx_LT (BImode,
3939 			gen_rtx_DIV (mode, op1, mod),
3940 			const0_rtx),
3941       constm1_rtx, const0_rtx),
3942      const0_rtx);
3943 }
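
/* Worked example (added for illustration): for -7 / 2, truncating division
   gives -3 with MOD = -1.  MOD != 0 and OP1 / MOD = 2 / -1 < 0, so the
   adjustment above is -1 and the floor result is -3 + -1 = -4.  */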
3944 
3945 /* Return the difference between the ceil and the truncated result of
3946    a signed division by OP1 with remainder MOD.  */
3947 static rtx
3948 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3949 {
3950   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3951   return gen_rtx_IF_THEN_ELSE
3952     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3953      gen_rtx_IF_THEN_ELSE
3954      (mode, gen_rtx_GT (BImode,
3955 			gen_rtx_DIV (mode, op1, mod),
3956 			const0_rtx),
3957       const1_rtx, const0_rtx),
3958      const0_rtx);
3959 }
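
/* Worked example (added for illustration): for 7 / 2, truncating division
   gives 3 with MOD = 1.  MOD != 0 and OP1 / MOD = 2 / 1 > 0, so the
   adjustment above is +1 and the ceiling result is 3 + 1 = 4.  */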
3960 
3961 /* Return the difference between the ceil and the truncated result of
3962    an unsigned division by OP1 with remainder MOD.  */
3963 static rtx
3964 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3965 {
3966   /* (mod != 0 ? 1 : 0) */
3967   return gen_rtx_IF_THEN_ELSE
3968     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3969      const1_rtx, const0_rtx);
3970 }
3971 
3972 /* Return the difference between the rounded and the truncated result
3973    of a signed division by OP1 with remainder MOD.  Halfway cases are
3974    rounded away from zero, rather than to the nearest even number.  */
3975 static rtx
3976 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3977 {
3978   /* (abs (mod) >= abs (op1) - abs (mod)
3979       ? (op1 / mod > 0 ? 1 : -1)
3980       : 0) */
3981   return gen_rtx_IF_THEN_ELSE
3982     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3983 		       gen_rtx_MINUS (mode,
3984 				      gen_rtx_ABS (mode, op1),
3985 				      gen_rtx_ABS (mode, mod))),
3986      gen_rtx_IF_THEN_ELSE
3987      (mode, gen_rtx_GT (BImode,
3988 			gen_rtx_DIV (mode, op1, mod),
3989 			const0_rtx),
3990       const1_rtx, constm1_rtx),
3991      const0_rtx);
3992 }
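
/* Worked example (added for illustration): for 7 / 4, truncating division
   gives 1 with MOD = 3.  abs (MOD) = 3 >= abs (OP1) - abs (MOD) = 4 - 3 = 1
   and OP1 / MOD = 4 / 3 > 0, so the adjustment above is +1 and the rounded
   result is 1 + 1 = 2 (7 / 4 = 1.75 rounds to 2).  */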
3993 
3994 /* Return the difference between the rounded and the truncated result
3995    of an unsigned division by OP1 with remainder MOD.  Halfway cases
3996    are rounded away from zero, rather than to the nearest even
3997    number.  */
3998 static rtx
3999 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
4000 {
4001   /* (mod >= op1 - mod ? 1 : 0) */
4002   return gen_rtx_IF_THEN_ELSE
4003     (mode, gen_rtx_GE (BImode, mod,
4004 		       gen_rtx_MINUS (mode, op1, mod)),
4005      const1_rtx, const0_rtx);
4006 }
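
/* Worked example (added for illustration): for 7 / 4 unsigned, truncating
   division gives 1 with MOD = 3.  MOD = 3 >= OP1 - MOD = 1, so the
   adjustment above is 1 and the rounded result is 1 + 1 = 2.  */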
4007 
4008 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
4009    any rtl.  */
4010 
4011 static rtx
4012 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4013 			      addr_space_t as)
4014 {
4015 #ifndef POINTERS_EXTEND_UNSIGNED
4016   gcc_assert (mode == Pmode
4017 	      || mode == targetm.addr_space.address_mode (as));
4018   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4019 #else
4020   rtx temp;
4021 
4022   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4023 
4024   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4025     return x;
4026 
4027   /* X must have some form of address mode already.  */
4028   scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4029   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4030     x = lowpart_subreg (mode, x, xmode);
4031   else if (POINTERS_EXTEND_UNSIGNED > 0)
4032     x = gen_rtx_ZERO_EXTEND (mode, x);
4033   else if (!POINTERS_EXTEND_UNSIGNED)
4034     x = gen_rtx_SIGN_EXTEND (mode, x);
4035   else
4036     {
4037       switch (GET_CODE (x))
4038 	{
4039 	case SUBREG:
4040 	  if ((SUBREG_PROMOTED_VAR_P (x)
4041 	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4042 	       || (GET_CODE (SUBREG_REG (x)) == PLUS
4043 		   && REG_P (XEXP (SUBREG_REG (x), 0))
4044 		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4045 		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4046 	      && GET_MODE (SUBREG_REG (x)) == mode)
4047 	    return SUBREG_REG (x);
4048 	  break;
4049 	case LABEL_REF:
4050 	  temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4051 	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4052 	  return temp;
4053 	case SYMBOL_REF:
4054 	  temp = shallow_copy_rtx (x);
4055 	  PUT_MODE (temp, mode);
4056 	  return temp;
4057 	case CONST:
4058 	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4059 	  if (temp)
4060 	    temp = gen_rtx_CONST (mode, temp);
4061 	  return temp;
4062 	case PLUS:
4063 	case MINUS:
4064 	  if (CONST_INT_P (XEXP (x, 1)))
4065 	    {
4066 	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4067 	      if (temp)
4068 		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4069 	    }
4070 	  break;
4071 	default:
4072 	  break;
4073 	}
4074       /* Don't know how to express ptr_extend as an operation in debug info.  */
4075       return NULL;
4076     }
4077 #endif /* POINTERS_EXTEND_UNSIGNED */
4078 
4079   return x;
4080 }
4081 
4082 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4083    by avoid_deep_ter_for_debug.  */
4084 
4085 static hash_map<tree, tree> *deep_ter_debug_map;
4086 
4087 /* Split overly deep TER chains for debug stmts using debug temporaries.  */
4088 
4089 static void
4090 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4091 {
4092   use_operand_p use_p;
4093   ssa_op_iter iter;
4094   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4095     {
4096       tree use = USE_FROM_PTR (use_p);
4097       if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4098 	continue;
4099       gimple *g = get_gimple_for_ssa_name (use);
4100       if (g == NULL)
4101 	continue;
4102       if (depth > 6 && !stmt_ends_bb_p (g))
4103 	{
4104 	  if (deep_ter_debug_map == NULL)
4105 	    deep_ter_debug_map = new hash_map<tree, tree>;
4106 
4107 	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4108 	  if (vexpr != NULL)
4109 	    continue;
4110 	  vexpr = make_node (DEBUG_EXPR_DECL);
4111 	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4112 	  DECL_ARTIFICIAL (vexpr) = 1;
4113 	  TREE_TYPE (vexpr) = TREE_TYPE (use);
4114 	  SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4115 	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
4116 	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4117 	  avoid_deep_ter_for_debug (def_temp, 0);
4118 	}
4119       else
4120 	avoid_deep_ter_for_debug (g, depth + 1);
4121     }
4122 }
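
/* Illustrative sketch (the SSA names below are made up): if expanding a
   debug statement would substitute a TER chain more than six levels deep,
   the statement defining the over-deep use gets a debug temporary bound
   right after it, e.g.

     d_5 = f_1 << g_2;
     # DEBUG D#1 => d_5

   and USE -> D#1 is recorded in deep_ter_debug_map, so that
   expand_debug_expr later expands D#1 instead of re-expanding the whole
   chain rooted at d_5.  */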
4123 
4124 /* Return an RTX equivalent to the value of the parameter DECL.  */
4125 
4126 static rtx
4127 expand_debug_parm_decl (tree decl)
4128 {
4129   rtx incoming = DECL_INCOMING_RTL (decl);
4130 
4131   if (incoming
4132       && GET_MODE (incoming) != BLKmode
4133       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4134 	  || (MEM_P (incoming)
4135 	      && REG_P (XEXP (incoming, 0))
4136 	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
4137     {
4138       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4139 
4140 #ifdef HAVE_window_save
4141       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4142 	 If the target machine has an explicit window save instruction, the
4143 	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
4144       if (REG_P (incoming)
4145 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4146 	incoming
4147 	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4148 				OUTGOING_REGNO (REGNO (incoming)), 0);
4149       else if (MEM_P (incoming))
4150 	{
4151 	  rtx reg = XEXP (incoming, 0);
4152 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4153 	    {
4154 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4155 	      incoming = replace_equiv_address_nv (incoming, reg);
4156 	    }
4157 	  else
4158 	    incoming = copy_rtx (incoming);
4159 	}
4160 #endif
4161 
4162       ENTRY_VALUE_EXP (rtl) = incoming;
4163       return rtl;
4164     }
4165 
4166   if (incoming
4167       && GET_MODE (incoming) != BLKmode
4168       && !TREE_ADDRESSABLE (decl)
4169       && MEM_P (incoming)
4170       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4171 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
4172 	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4173 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4174     return copy_rtx (incoming);
4175 
4176   return NULL_RTX;
4177 }
4178 
4179 /* Return an RTX equivalent to the value of the tree expression EXP.  */
4180 
4181 static rtx
4182 expand_debug_expr (tree exp)
4183 {
4184   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4185   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4186   machine_mode inner_mode = VOIDmode;
4187   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4188   addr_space_t as;
4189   scalar_int_mode op0_mode, op1_mode, addr_mode;
4190 
4191   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4192     {
4193     case tcc_expression:
4194       switch (TREE_CODE (exp))
4195 	{
4196 	case COND_EXPR:
4197 	case DOT_PROD_EXPR:
4198 	case SAD_EXPR:
4199 	case WIDEN_MULT_PLUS_EXPR:
4200 	case WIDEN_MULT_MINUS_EXPR:
4201 	case FMA_EXPR:
4202 	  goto ternary;
4203 
4204 	case TRUTH_ANDIF_EXPR:
4205 	case TRUTH_ORIF_EXPR:
4206 	case TRUTH_AND_EXPR:
4207 	case TRUTH_OR_EXPR:
4208 	case TRUTH_XOR_EXPR:
4209 	  goto binary;
4210 
4211 	case TRUTH_NOT_EXPR:
4212 	  goto unary;
4213 
4214 	default:
4215 	  break;
4216 	}
4217       break;
4218 
4219     ternary:
4220       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4221       if (!op2)
4222 	return NULL_RTX;
4223       /* Fall through.  */
4224 
4225     binary:
4226     case tcc_binary:
4227       if (mode == BLKmode)
4228 	return NULL_RTX;
4229       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4230       if (!op1)
4231 	return NULL_RTX;
4232       switch (TREE_CODE (exp))
4233 	{
4234 	case LSHIFT_EXPR:
4235 	case RSHIFT_EXPR:
4236 	case LROTATE_EXPR:
4237 	case RROTATE_EXPR:
4238 	case WIDEN_LSHIFT_EXPR:
4239 	  /* Ensure second operand isn't wider than the first one.  */
4240 	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4241 	  if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4242 	      && (GET_MODE_UNIT_PRECISION (mode)
4243 		  < GET_MODE_PRECISION (op1_mode)))
4244 	    op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4245 	  break;
4246 	default:
4247 	  break;
4248 	}
4249       /* Fall through.  */
4250 
4251     unary:
4252     case tcc_unary:
4253       if (mode == BLKmode)
4254 	return NULL_RTX;
4255       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4256       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4257       if (!op0)
4258 	return NULL_RTX;
4259       break;
4260 
4261     case tcc_comparison:
4262       unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4263       goto binary;
4264 
4265     case tcc_type:
4266     case tcc_statement:
4267       gcc_unreachable ();
4268 
4269     case tcc_constant:
4270     case tcc_exceptional:
4271     case tcc_declaration:
4272     case tcc_reference:
4273     case tcc_vl_exp:
4274       break;
4275     }
4276 
4277   switch (TREE_CODE (exp))
4278     {
4279     case STRING_CST:
4280       if (!lookup_constant_def (exp))
4281 	{
4282 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
4283 	      != (size_t) TREE_STRING_LENGTH (exp))
4284 	    return NULL_RTX;
4285 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4286 	  op0 = gen_rtx_MEM (BLKmode, op0);
4287 	  set_mem_attributes (op0, exp, 0);
4288 	  return op0;
4289 	}
4290       /* Fall through.  */
4291 
4292     case INTEGER_CST:
4293     case REAL_CST:
4294     case FIXED_CST:
4295       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4296       return op0;
4297 
4298     case POLY_INT_CST:
4299       return immed_wide_int_const (poly_int_cst_value (exp), mode);
4300 
4301     case COMPLEX_CST:
4302       gcc_assert (COMPLEX_MODE_P (mode));
4303       op0 = expand_debug_expr (TREE_REALPART (exp));
4304       op1 = expand_debug_expr (TREE_IMAGPART (exp));
4305       return gen_rtx_CONCAT (mode, op0, op1);
4306 
4307     case DEBUG_EXPR_DECL:
4308       op0 = DECL_RTL_IF_SET (exp);
4309 
4310       if (op0)
4311 	return op0;
4312 
4313       op0 = gen_rtx_DEBUG_EXPR (mode);
4314       DEBUG_EXPR_TREE_DECL (op0) = exp;
4315       SET_DECL_RTL (exp, op0);
4316 
4317       return op0;
4318 
4319     case VAR_DECL:
4320     case PARM_DECL:
4321     case FUNCTION_DECL:
4322     case LABEL_DECL:
4323     case CONST_DECL:
4324     case RESULT_DECL:
4325       op0 = DECL_RTL_IF_SET (exp);
4326 
4327       /* This decl was probably optimized away.  */
4328       if (!op0
4329 	  /* At least label RTXen are sometimes replaced by
4330 	     NOTE_INSN_DELETED_LABEL.  Any notes here are not
4331 	     handled by copy_rtx.  */
4332 	  || NOTE_P (op0))
4333 	{
4334 	  if (!VAR_P (exp)
4335 	      || DECL_EXTERNAL (exp)
4336 	      || !TREE_STATIC (exp)
4337 	      || !DECL_NAME (exp)
4338 	      || DECL_HARD_REGISTER (exp)
4339 	      || DECL_IN_CONSTANT_POOL (exp)
4340 	      || mode == VOIDmode)
4341 	    return NULL;
4342 
4343 	  op0 = make_decl_rtl_for_debug (exp);
4344 	  if (!MEM_P (op0)
4345 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4346 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4347 	    return NULL;
4348 	}
4349       else
4350 	op0 = copy_rtx (op0);
4351 
4352       if (GET_MODE (op0) == BLKmode
4353 	  /* If op0 is not BLKmode, but mode is, adjust_mode
4354 	     below would ICE.  While it is likely a FE bug,
4355 	     try to be robust here.  See PR43166.  */
4356 	  || mode == BLKmode
4357 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4358 	{
4359 	  gcc_assert (MEM_P (op0));
4360 	  op0 = adjust_address_nv (op0, mode, 0);
4361 	  return op0;
4362 	}
4363 
4364       /* Fall through.  */
4365 
4366     adjust_mode:
4367     case PAREN_EXPR:
4368     CASE_CONVERT:
4369       {
4370 	inner_mode = GET_MODE (op0);
4371 
4372 	if (mode == inner_mode)
4373 	  return op0;
4374 
4375 	if (inner_mode == VOIDmode)
4376 	  {
4377 	    if (TREE_CODE (exp) == SSA_NAME)
4378 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
4379 	    else
4380 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4381 	    if (mode == inner_mode)
4382 	      return op0;
4383 	  }
4384 
4385 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4386 	  {
4387 	    if (GET_MODE_UNIT_BITSIZE (mode)
4388 		== GET_MODE_UNIT_BITSIZE (inner_mode))
4389 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4390 	    else if (GET_MODE_UNIT_BITSIZE (mode)
4391 		     < GET_MODE_UNIT_BITSIZE (inner_mode))
4392 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4393 	    else
4394 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4395 	  }
4396 	else if (FLOAT_MODE_P (mode))
4397 	  {
4398 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
4399 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4400 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4401 	    else
4402 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4403 	  }
4404 	else if (FLOAT_MODE_P (inner_mode))
4405 	  {
4406 	    if (unsignedp)
4407 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4408 	    else
4409 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4410 	  }
4411 	else if (GET_MODE_UNIT_PRECISION (mode)
4412 		 == GET_MODE_UNIT_PRECISION (inner_mode))
4413 	  op0 = lowpart_subreg (mode, op0, inner_mode);
4414 	else if (GET_MODE_UNIT_PRECISION (mode)
4415 		 < GET_MODE_UNIT_PRECISION (inner_mode))
4416 	  op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4417 	else if (UNARY_CLASS_P (exp)
4418 		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4419 		 : unsignedp)
4420 	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4421 	else
4422 	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4423 
4424 	return op0;
4425       }
4426 
4427     case MEM_REF:
4428       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4429 	{
4430 	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4431 				     TREE_OPERAND (exp, 0),
4432 				     TREE_OPERAND (exp, 1));
4433 	  if (newexp)
4434 	    return expand_debug_expr (newexp);
4435 	}
4436       /* FALLTHROUGH */
4437     case INDIRECT_REF:
4438       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4439       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4440       if (!op0)
4441 	return NULL;
4442 
4443       if (TREE_CODE (exp) == MEM_REF)
4444 	{
4445 	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4446 	      || (GET_CODE (op0) == PLUS
4447 		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4448 	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
4449 	       Instead just use get_inner_reference.  */
4450 	    goto component_ref;
4451 
4452 	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4453 	  if (!op1 || !CONST_INT_P (op1))
4454 	    return NULL;
4455 
4456 	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4457 	}
4458 
4459       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4460 
4461       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4462 					  op0, as);
4463       if (op0 == NULL_RTX)
4464 	return NULL;
4465 
4466       op0 = gen_rtx_MEM (mode, op0);
4467       set_mem_attributes (op0, exp, 0);
4468       if (TREE_CODE (exp) == MEM_REF
4469 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4470 	set_mem_expr (op0, NULL_TREE);
4471       set_mem_addr_space (op0, as);
4472 
4473       return op0;
4474 
4475     case TARGET_MEM_REF:
4476       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4477 	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4478 	return NULL;
4479 
4480       op0 = expand_debug_expr
4481 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4482       if (!op0)
4483 	return NULL;
4484 
4485       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4486       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4487 					  op0, as);
4488       if (op0 == NULL_RTX)
4489 	return NULL;
4490 
4491       op0 = gen_rtx_MEM (mode, op0);
4492 
4493       set_mem_attributes (op0, exp, 0);
4494       set_mem_addr_space (op0, as);
4495 
4496       return op0;
4497 
4498     component_ref:
4499     case ARRAY_REF:
4500     case ARRAY_RANGE_REF:
4501     case COMPONENT_REF:
4502     case BIT_FIELD_REF:
4503     case REALPART_EXPR:
4504     case IMAGPART_EXPR:
4505     case VIEW_CONVERT_EXPR:
4506       {
4507 	machine_mode mode1;
4508 	poly_int64 bitsize, bitpos;
4509 	tree offset;
4510 	int reversep, volatilep = 0;
4511 	tree tem
4512 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4513 				 &unsignedp, &reversep, &volatilep);
4514 	rtx orig_op0;
4515 
4516 	if (known_eq (bitsize, 0))
4517 	  return NULL;
4518 
4519 	orig_op0 = op0 = expand_debug_expr (tem);
4520 
4521 	if (!op0)
4522 	  return NULL;
4523 
4524 	if (offset)
4525 	  {
4526 	    machine_mode addrmode, offmode;
4527 
4528 	    if (!MEM_P (op0))
4529 	      return NULL;
4530 
4531 	    op0 = XEXP (op0, 0);
4532 	    addrmode = GET_MODE (op0);
4533 	    if (addrmode == VOIDmode)
4534 	      addrmode = Pmode;
4535 
4536 	    op1 = expand_debug_expr (offset);
4537 	    if (!op1)
4538 	      return NULL;
4539 
4540 	    offmode = GET_MODE (op1);
4541 	    if (offmode == VOIDmode)
4542 	      offmode = TYPE_MODE (TREE_TYPE (offset));
4543 
4544 	    if (addrmode != offmode)
4545 	      op1 = lowpart_subreg (addrmode, op1, offmode);
4546 
4547 	    /* Don't use offset_address here; we don't need a
4548 	       recognizable address, and we don't want to generate
4549 	       code.  */
4550 	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4551 							  op0, op1));
4552 	  }
4553 
4554 	if (MEM_P (op0))
4555 	  {
4556 	    if (mode1 == VOIDmode)
4557 	      {
4558 		if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4559 		  return NULL;
4560 		/* Bitfield.  */
4561 		mode1 = smallest_int_mode_for_size (bitsize);
4562 	      }
4563 	    poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4564 	    if (maybe_ne (bytepos, 0))
4565 	      {
4566 		op0 = adjust_address_nv (op0, mode1, bytepos);
4567 		bitpos = num_trailing_bits (bitpos);
4568 	      }
4569 	    else if (known_eq (bitpos, 0)
4570 		     && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4571 	      op0 = adjust_address_nv (op0, mode, 0);
4572 	    else if (GET_MODE (op0) != mode1)
4573 	      op0 = adjust_address_nv (op0, mode1, 0);
4574 	    else
4575 	      op0 = copy_rtx (op0);
4576 	    if (op0 == orig_op0)
4577 	      op0 = shallow_copy_rtx (op0);
4578 	    set_mem_attributes (op0, exp, 0);
4579 	  }
4580 
4581 	if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4582 	  return op0;
4583 
4584 	if (maybe_lt (bitpos, 0))
4585           return NULL;
4586 
4587 	if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4588 	  return NULL;
4589 
4590 	poly_int64 bytepos;
4591 	if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4592 	    && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4593 	  {
4594 	    machine_mode opmode = GET_MODE (op0);
4595 
4596 	    if (opmode == VOIDmode)
4597 	      opmode = TYPE_MODE (TREE_TYPE (tem));
4598 
4599 	    /* This condition may hold if we're expanding the address
4600 	       right past the end of an array that turned out not to
4601 	       be addressable (i.e., the address was only computed in
4602 	       debug stmts).  The gen_subreg below would rightfully
4603 	       crash, and the address doesn't really exist, so just
4604 	       drop it.  */
4605 	    if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4606 	      return NULL;
4607 
4608 	    if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4609 	      return simplify_gen_subreg (mode, op0, opmode, bytepos);
4610 	  }
4611 
4612 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4613 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
4614 				     ? SIGN_EXTRACT
4615 				     : ZERO_EXTRACT, mode,
4616 				     GET_MODE (op0) != VOIDmode
4617 				     ? GET_MODE (op0)
4618 				     : TYPE_MODE (TREE_TYPE (tem)),
4619 				     op0, gen_int_mode (bitsize, word_mode),
4620 				     gen_int_mode (bitpos, word_mode));
4621       }
4622 
4623     case ABS_EXPR:
4624       return simplify_gen_unary (ABS, mode, op0, mode);
4625 
4626     case NEGATE_EXPR:
4627       return simplify_gen_unary (NEG, mode, op0, mode);
4628 
4629     case BIT_NOT_EXPR:
4630       return simplify_gen_unary (NOT, mode, op0, mode);
4631 
4632     case FLOAT_EXPR:
4633       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4634 									 0)))
4635 				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4636 				 inner_mode);
4637 
4638     case FIX_TRUNC_EXPR:
4639       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4640 				 inner_mode);
4641 
4642     case POINTER_PLUS_EXPR:
4643       /* For the rare target where pointers are not the same size as
4644 	 size_t, we need to check for mis-matched modes and correct
4645 	 the addend.  */
4646       if (op0 && op1
4647 	  && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4648 	  && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4649 	  && op0_mode != op1_mode)
4650 	{
4651 	  if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4652 	      /* If OP0 is a partial mode, then we must truncate, even
4653 		 if it has the same bitsize as OP1, because GCC's
4654 		 representation of partial modes is opaque.  */
4655 	      || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4656 		  && (GET_MODE_BITSIZE (op0_mode)
4657 		      == GET_MODE_BITSIZE (op1_mode))))
4658 	    op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4659 	  else
4660 	    /* We always sign-extend, regardless of the signedness of
4661 	       the operand, because the operand is always unsigned
4662 	       here even if the original C expression is signed.  */
4663 	    op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4664 	}
4665       /* Fall through.  */
4666     case PLUS_EXPR:
4667       return simplify_gen_binary (PLUS, mode, op0, op1);
4668 
4669     case MINUS_EXPR:
4670     case POINTER_DIFF_EXPR:
4671       return simplify_gen_binary (MINUS, mode, op0, op1);
4672 
4673     case MULT_EXPR:
4674       return simplify_gen_binary (MULT, mode, op0, op1);
4675 
4676     case RDIV_EXPR:
4677     case TRUNC_DIV_EXPR:
4678     case EXACT_DIV_EXPR:
4679       if (unsignedp)
4680 	return simplify_gen_binary (UDIV, mode, op0, op1);
4681       else
4682 	return simplify_gen_binary (DIV, mode, op0, op1);
4683 
4684     case TRUNC_MOD_EXPR:
4685       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4686 
4687     case FLOOR_DIV_EXPR:
4688       if (unsignedp)
4689 	return simplify_gen_binary (UDIV, mode, op0, op1);
4690       else
4691 	{
4692 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4693 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4694 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4695 	  return simplify_gen_binary (PLUS, mode, div, adj);
4696 	}
4697 
4698     case FLOOR_MOD_EXPR:
4699       if (unsignedp)
4700 	return simplify_gen_binary (UMOD, mode, op0, op1);
4701       else
4702 	{
4703 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4704 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4705 	  adj = simplify_gen_unary (NEG, mode,
4706 				    simplify_gen_binary (MULT, mode, adj, op1),
4707 				    mode);
4708 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4709 	}
4710 
4711     case CEIL_DIV_EXPR:
4712       if (unsignedp)
4713 	{
4714 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4715 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4716 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4717 	  return simplify_gen_binary (PLUS, mode, div, adj);
4718 	}
4719       else
4720 	{
4721 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4722 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4723 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4724 	  return simplify_gen_binary (PLUS, mode, div, adj);
4725 	}
4726 
4727     case CEIL_MOD_EXPR:
4728       if (unsignedp)
4729 	{
4730 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4731 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4732 	  adj = simplify_gen_unary (NEG, mode,
4733 				    simplify_gen_binary (MULT, mode, adj, op1),
4734 				    mode);
4735 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4736 	}
4737       else
4738 	{
4739 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4740 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4741 	  adj = simplify_gen_unary (NEG, mode,
4742 				    simplify_gen_binary (MULT, mode, adj, op1),
4743 				    mode);
4744 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4745 	}
4746 
4747     case ROUND_DIV_EXPR:
4748       if (unsignedp)
4749 	{
4750 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4751 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4752 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4753 	  return simplify_gen_binary (PLUS, mode, div, adj);
4754 	}
4755       else
4756 	{
4757 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4758 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4759 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4760 	  return simplify_gen_binary (PLUS, mode, div, adj);
4761 	}
4762 
4763     case ROUND_MOD_EXPR:
4764       if (unsignedp)
4765 	{
4766 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4767 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4768 	  adj = simplify_gen_unary (NEG, mode,
4769 				    simplify_gen_binary (MULT, mode, adj, op1),
4770 				    mode);
4771 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4772 	}
4773       else
4774 	{
4775 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4776 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4777 	  adj = simplify_gen_unary (NEG, mode,
4778 				    simplify_gen_binary (MULT, mode, adj, op1),
4779 				    mode);
4780 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4781 	}
4782 
4783     case LSHIFT_EXPR:
4784       return simplify_gen_binary (ASHIFT, mode, op0, op1);
4785 
4786     case RSHIFT_EXPR:
4787       if (unsignedp)
4788 	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4789       else
4790 	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4791 
4792     case LROTATE_EXPR:
4793       return simplify_gen_binary (ROTATE, mode, op0, op1);
4794 
4795     case RROTATE_EXPR:
4796       return simplify_gen_binary (ROTATERT, mode, op0, op1);
4797 
4798     case MIN_EXPR:
4799       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4800 
4801     case MAX_EXPR:
4802       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4803 
4804     case BIT_AND_EXPR:
4805     case TRUTH_AND_EXPR:
4806       return simplify_gen_binary (AND, mode, op0, op1);
4807 
4808     case BIT_IOR_EXPR:
4809     case TRUTH_OR_EXPR:
4810       return simplify_gen_binary (IOR, mode, op0, op1);
4811 
4812     case BIT_XOR_EXPR:
4813     case TRUTH_XOR_EXPR:
4814       return simplify_gen_binary (XOR, mode, op0, op1);
4815 
4816     case TRUTH_ANDIF_EXPR:
4817       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4818 
4819     case TRUTH_ORIF_EXPR:
4820       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4821 
4822     case TRUTH_NOT_EXPR:
4823       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4824 
4825     case LT_EXPR:
4826       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4827 				      op0, op1);
4828 
4829     case LE_EXPR:
4830       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4831 				      op0, op1);
4832 
4833     case GT_EXPR:
4834       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4835 				      op0, op1);
4836 
4837     case GE_EXPR:
4838       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4839 				      op0, op1);
4840 
4841     case EQ_EXPR:
4842       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4843 
4844     case NE_EXPR:
4845       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4846 
4847     case UNORDERED_EXPR:
4848       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4849 
4850     case ORDERED_EXPR:
4851       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4852 
4853     case UNLT_EXPR:
4854       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4855 
4856     case UNLE_EXPR:
4857       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4858 
4859     case UNGT_EXPR:
4860       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4861 
4862     case UNGE_EXPR:
4863       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4864 
4865     case UNEQ_EXPR:
4866       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4867 
4868     case LTGT_EXPR:
4869       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4870 
4871     case COND_EXPR:
4872       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4873 
4874     case COMPLEX_EXPR:
4875       gcc_assert (COMPLEX_MODE_P (mode));
4876       if (GET_MODE (op0) == VOIDmode)
4877 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4878       if (GET_MODE (op1) == VOIDmode)
4879 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4880       return gen_rtx_CONCAT (mode, op0, op1);
4881 
4882     case CONJ_EXPR:
4883       if (GET_CODE (op0) == CONCAT)
4884 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4885 			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4886 						   XEXP (op0, 1),
4887 						   GET_MODE_INNER (mode)));
4888       else
4889 	{
4890 	  scalar_mode imode = GET_MODE_INNER (mode);
4891 	  rtx re, im;
4892 
4893 	  if (MEM_P (op0))
4894 	    {
4895 	      re = adjust_address_nv (op0, imode, 0);
4896 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4897 	    }
4898 	  else
4899 	    {
4900 	      scalar_int_mode ifmode;
4901 	      scalar_int_mode ihmode;
4902 	      rtx halfsize;
4903 	      if (!int_mode_for_mode (mode).exists (&ifmode)
4904 		  || !int_mode_for_mode (imode).exists (&ihmode))
4905 		return NULL;
4906 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4907 	      re = op0;
4908 	      if (mode != ifmode)
4909 		re = gen_rtx_SUBREG (ifmode, re, 0);
4910 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4911 	      if (imode != ihmode)
4912 		re = gen_rtx_SUBREG (imode, re, 0);
4913 	      im = copy_rtx (op0);
4914 	      if (mode != ifmode)
4915 		im = gen_rtx_SUBREG (ifmode, im, 0);
4916 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4917 	      if (imode != ihmode)
4918 		im = gen_rtx_SUBREG (imode, im, 0);
4919 	    }
4920 	  im = gen_rtx_NEG (imode, im);
4921 	  return gen_rtx_CONCAT (mode, re, im);
4922 	}
4923 
4924     case ADDR_EXPR:
4925       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4926       if (!op0 || !MEM_P (op0))
4927 	{
4928 	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4929 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4930 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4931 	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4932 		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4933 	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4934 
4935 	  if (handled_component_p (TREE_OPERAND (exp, 0)))
4936 	    {
4937 	      poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4938 	      bool reverse;
4939 	      tree decl
4940 		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4941 					   &bitsize, &maxsize, &reverse);
4942 	      if ((VAR_P (decl)
4943 		   || TREE_CODE (decl) == PARM_DECL
4944 		   || TREE_CODE (decl) == RESULT_DECL)
4945 		  && (!TREE_ADDRESSABLE (decl)
4946 		      || target_for_debug_bind (decl))
4947 		  && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4948 		  && known_gt (bitsize, 0)
4949 		  && known_eq (bitsize, maxsize))
4950 		{
4951 		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4952 		  return plus_constant (mode, base, byteoffset);
4953 		}
4954 	    }
4955 
4956 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4957 	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4958 		 == ADDR_EXPR)
4959 	    {
4960 	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4961 						     0));
4962 	      if (op0 != NULL
4963 		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4964 		      || (GET_CODE (op0) == PLUS
4965 			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4966 			  && CONST_INT_P (XEXP (op0, 1)))))
4967 		{
4968 		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4969 							 1));
4970 		  if (!op1 || !CONST_INT_P (op1))
4971 		    return NULL;
4972 
4973 		  return plus_constant (mode, op0, INTVAL (op1));
4974 		}
4975 	    }
4976 
4977 	  return NULL;
4978 	}
4979 
4980       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4981       addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
4982       op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
4983 
4984       return op0;
4985 
4986     case VECTOR_CST:
4987       {
4988 	unsigned HOST_WIDE_INT i, nelts;
4989 
4990 	if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
4991 	  return NULL;
4992 
4993 	op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
4994 
4995 	for (i = 0; i < nelts; ++i)
4996 	  {
4997 	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4998 	    if (!op1)
4999 	      return NULL;
5000 	    XVECEXP (op0, 0, i) = op1;
5001 	  }
5002 
5003 	return op0;
5004       }
5005 
5006     case CONSTRUCTOR:
5007       if (TREE_CLOBBER_P (exp))
5008 	return NULL;
5009       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5010 	{
5011 	  unsigned i;
5012 	  unsigned HOST_WIDE_INT nelts;
5013 	  tree val;
5014 
5015 	  if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5016 	    goto flag_unsupported;
5017 
5018 	  op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5019 
5020 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5021 	    {
5022 	      op1 = expand_debug_expr (val);
5023 	      if (!op1)
5024 		return NULL;
5025 	      XVECEXP (op0, 0, i) = op1;
5026 	    }
5027 
5028 	  if (i < nelts)
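	  /* A vector CONSTRUCTOR may leave trailing elements implicit;
	     pad the remaining CONCATN slots with zeros of the element
	     type so the debug expression covers the whole vector.  */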
5029 	    {
5030 	      op1 = expand_debug_expr
5031 		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5032 
5033 	      if (!op1)
5034 		return NULL;
5035 
5036 	      for (; i < nelts; i++)
5037 		XVECEXP (op0, 0, i) = op1;
5038 	    }
5039 
5040 	  return op0;
5041 	}
5042       else
5043 	goto flag_unsupported;
5044 
5045     case CALL_EXPR:
5046       /* ??? Maybe handle some builtins?  */
5047       return NULL;
5048 
5049     case SSA_NAME:
5050       {
5051 	gimple *g = get_gimple_for_ssa_name (exp);
5052 	if (g)
5053 	  {
5054 	    tree t = NULL_TREE;
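	    /* Prefer a depth-limited replacement recorded by
	       avoid_deep_ter_for_debug, if any, over the raw RHS of the
	       defining statement.  */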
5055 	    if (deep_ter_debug_map)
5056 	      {
5057 		tree *slot = deep_ter_debug_map->get (exp);
5058 		if (slot)
5059 		  t = *slot;
5060 	      }
5061 	    if (t == NULL_TREE)
5062 	      t = gimple_assign_rhs_to_tree (g);
5063 	    op0 = expand_debug_expr (t);
5064 	    if (!op0)
5065 	      return NULL;
5066 	  }
5067 	else
5068 	  {
5069 	    /* If this is a reference to the incoming value of a
5070 	       parameter that is never used in the code, or whose
5071 	       incoming value is never used in the code, use the
5072 	       PARM_DECL's DECL_RTL if set.  */
5073 	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
5074 		&& SSA_NAME_VAR (exp)
5075 		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5076 		&& has_zero_uses (exp))
5077 	      {
5078 		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5079 		if (op0)
5080 		  goto adjust_mode;
5081 		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5082 		if (op0)
5083 		  goto adjust_mode;
5084 	      }
5085 
5086 	    int part = var_to_partition (SA.map, exp);
5087 
5088 	    if (part == NO_PARTITION)
5089 	      return NULL;
5090 
5091 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5092 
5093 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
5094 	  }
5095 	goto adjust_mode;
5096       }
5097 
5098     case ERROR_MARK:
5099       return NULL;
5100 
5101     /* Vector stuff.  For most of these tree codes there is no corresponding rtl code.  */
5102     case REALIGN_LOAD_EXPR:
5103     case VEC_COND_EXPR:
5104     case VEC_PACK_FIX_TRUNC_EXPR:
5105     case VEC_PACK_SAT_EXPR:
5106     case VEC_PACK_TRUNC_EXPR:
5107     case VEC_UNPACK_FLOAT_HI_EXPR:
5108     case VEC_UNPACK_FLOAT_LO_EXPR:
5109     case VEC_UNPACK_HI_EXPR:
5110     case VEC_UNPACK_LO_EXPR:
5111     case VEC_WIDEN_MULT_HI_EXPR:
5112     case VEC_WIDEN_MULT_LO_EXPR:
5113     case VEC_WIDEN_MULT_EVEN_EXPR:
5114     case VEC_WIDEN_MULT_ODD_EXPR:
5115     case VEC_WIDEN_LSHIFT_HI_EXPR:
5116     case VEC_WIDEN_LSHIFT_LO_EXPR:
5117     case VEC_PERM_EXPR:
5118     case VEC_DUPLICATE_EXPR:
5119     case VEC_SERIES_EXPR:
5120       return NULL;
5121 
5122     /* Misc codes.  */
5123     case ADDR_SPACE_CONVERT_EXPR:
5124     case FIXED_CONVERT_EXPR:
5125     case OBJ_TYPE_REF:
5126     case WITH_SIZE_EXPR:
5127     case BIT_INSERT_EXPR:
5128       return NULL;
5129 
5130     case DOT_PROD_EXPR:
5131       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5132 	  && SCALAR_INT_MODE_P (mode))
5133 	{
5134 	  op0
5135 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5136 									  0)))
5137 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5138 				  inner_mode);
5139 	  op1
5140 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5141 									  1)))
5142 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5143 				  inner_mode);
5144 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5145 	  return simplify_gen_binary (PLUS, mode, op0, op2);
5146 	}
5147       return NULL;
5148 
5149     case WIDEN_MULT_EXPR:
5150     case WIDEN_MULT_PLUS_EXPR:
5151     case WIDEN_MULT_MINUS_EXPR:
5152       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5153 	  && SCALAR_INT_MODE_P (mode))
5154 	{
5155 	  inner_mode = GET_MODE (op0);
5156 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5157 	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5158 	  else
5159 	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5160 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5161 	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5162 	  else
5163 	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5164 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5165 	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5166 	    return op0;
5167 	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5168 	    return simplify_gen_binary (PLUS, mode, op0, op2);
5169 	  else
5170 	    return simplify_gen_binary (MINUS, mode, op2, op0);
5171 	}
5172       return NULL;
5173 
5174     case MULT_HIGHPART_EXPR:
5175       /* ??? Similar to the above.  */
5176       return NULL;
5177 
5178     case WIDEN_SUM_EXPR:
5179     case WIDEN_LSHIFT_EXPR:
5180       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5181 	  && SCALAR_INT_MODE_P (mode))
5182 	{
5183 	  op0
5184 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5185 									  0)))
5186 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5187 				  inner_mode);
5188 	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5189 				      ? ASHIFT : PLUS, mode, op0, op1);
5190 	}
5191       return NULL;
5192 
5193     case FMA_EXPR:
5194       return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
5195 
5196     default:
5197     flag_unsupported:
5198       if (flag_checking)
5199 	{
5200 	  debug_tree (exp);
5201 	  gcc_unreachable ();
5202 	}
5203       return NULL;
5204     }
5205 }
5206 
5207 /* Return an RTX equivalent to the source bind value of the tree expression
5208    EXP.  */
5209 
5210 static rtx
5211 expand_debug_source_expr (tree exp)
5212 {
5213   rtx op0 = NULL_RTX;
5214   machine_mode mode = VOIDmode, inner_mode;
5215 
5216   switch (TREE_CODE (exp))
5217     {
5218     case PARM_DECL:
5219       {
5220 	mode = DECL_MODE (exp);
5221 	op0 = expand_debug_parm_decl (exp);
5222 	if (op0)
5223 	   break;
5224 	/* See if this isn't an argument that has been completely
5225 	   optimized out.  */
5226 	if (!DECL_RTL_SET_P (exp)
5227 	    && !DECL_INCOMING_RTL (exp)
5228 	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
5229 	  {
5230 	    tree aexp = DECL_ORIGIN (exp);
5231 	    if (DECL_CONTEXT (aexp)
5232 		== DECL_ABSTRACT_ORIGIN (current_function_decl))
5233 	      {
5234 		vec<tree, va_gc> **debug_args;
5235 		unsigned int ix;
5236 		tree ddecl;
5237 		debug_args = decl_debug_args_lookup (current_function_decl);
5238 		if (debug_args != NULL)
5239 		  {
5240 		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5241 			 ix += 2)
5242 		      if (ddecl == aexp)
5243 			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5244 		  }
5245 	      }
5246 	  }
5247 	break;
5248       }
5249     default:
5250       break;
5251     }
5252 
5253   if (op0 == NULL_RTX)
5254     return NULL_RTX;
5255 
5256   inner_mode = GET_MODE (op0);
5257   if (mode == inner_mode)
5258     return op0;
5259 
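  /* Otherwise convert OP0 from INNER_MODE to MODE, choosing float
     truncation or extension, FIX conversions, or integer subregs and
     extensions based on the two modes and the signedness of EXP's type.  */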
5260   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5261     {
5262       if (GET_MODE_UNIT_BITSIZE (mode)
5263 	  == GET_MODE_UNIT_BITSIZE (inner_mode))
5264 	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5265       else if (GET_MODE_UNIT_BITSIZE (mode)
5266 	       < GET_MODE_UNIT_BITSIZE (inner_mode))
5267 	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5268       else
5269 	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5270     }
5271   else if (FLOAT_MODE_P (mode))
5272     gcc_unreachable ();
5273   else if (FLOAT_MODE_P (inner_mode))
5274     {
5275       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5276 	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5277       else
5278 	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5279     }
5280   else if (GET_MODE_UNIT_PRECISION (mode)
5281 	   == GET_MODE_UNIT_PRECISION (inner_mode))
5282     op0 = lowpart_subreg (mode, op0, inner_mode);
5283   else if (GET_MODE_UNIT_PRECISION (mode)
5284 	   < GET_MODE_UNIT_PRECISION (inner_mode))
5285     op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5286   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5287     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5288   else
5289     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5290 
5291   return op0;
5292 }
5293 
5294 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5295    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5296    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
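/* The too-deep sub-expression is bound to a fresh DEBUG_EXPR_DECL by a
   DEBUG_INSN emitted just before INSN, and the original location is
   rewritten to refer to that DEBUG_EXPR instead.  */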
5297 
5298 static void
5299 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5300 {
5301   rtx exp = *exp_p;
5302 
5303   if (exp == NULL_RTX)
5304     return;
5305 
5306   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5307     return;
5308 
5309   if (depth == 4)
5310     {
5311       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
5312       rtx dval = make_debug_expr_from_rtl (exp);
5313 
5314       /* Emit a debug bind insn before INSN.  */
5315       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5316 				       DEBUG_EXPR_TREE_DECL (dval), exp,
5317 				       VAR_INIT_STATUS_INITIALIZED);
5318 
5319       emit_debug_insn_before (bind, insn);
5320       *exp_p = dval;
5321       return;
5322     }
5323 
5324   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5325   int i, j;
5326   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5327     switch (*format_ptr++)
5328       {
5329       case 'e':
5330 	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5331 	break;
5332 
5333       case 'E':
5334       case 'V':
5335 	for (j = 0; j < XVECLEN (exp, i); j++)
5336 	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5337 	break;
5338 
5339       default:
5340 	break;
5341       }
5342 }
5343 
5344 /* Expand the _LOCs in debug insns.  We run this after expanding all
5345    regular insns, so that any variables referenced in the function
5346    will have their DECL_RTLs set.  */
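/* Locations that fail to expand are replaced by UNKNOWN_VAR_LOC rather
   than dropping the bind, and every expanded location is passed through
   avoid_complex_debug_insns to bound its rtl nesting depth.  */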
5347 
5348 static void
5349 expand_debug_locations (void)
5350 {
5351   rtx_insn *insn;
5352   rtx_insn *last = get_last_insn ();
5353   int save_strict_alias = flag_strict_aliasing;
5354 
5355   /* New alias sets created while setting up memory attributes cause
5356      -fcompare-debug failures, even though they don't bring about any
5357      codegen changes.  */
5358   flag_strict_aliasing = 0;
5359 
5360   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5361     if (DEBUG_BIND_INSN_P (insn))
5362       {
5363 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5364 	rtx val;
5365 	rtx_insn *prev_insn, *insn2;
5366 	machine_mode mode;
5367 
5368 	if (value == NULL_TREE)
5369 	  val = NULL_RTX;
5370 	else
5371 	  {
5372 	    if (INSN_VAR_LOCATION_STATUS (insn)
5373 		== VAR_INIT_STATUS_UNINITIALIZED)
5374 	      val = expand_debug_source_expr (value);
5375 	    /* The avoid_deep_ter_for_debug function inserts
5376 	       debug bind stmts after the SSA_NAME definition, with the
5377 	       SSA_NAME as the whole bind location.  Temporarily disable
5378 	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5379 	       being defined in this DEBUG_INSN.  */
5380 	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5381 	      {
5382 		tree *slot = deep_ter_debug_map->get (value);
5383 		if (slot)
5384 		  {
5385 		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
5386 		      *slot = NULL_TREE;
5387 		    else
5388 		      slot = NULL;
5389 		  }
5390 		val = expand_debug_expr (value);
5391 		if (slot)
5392 		  *slot = INSN_VAR_LOCATION_DECL (insn);
5393 	      }
5394 	    else
5395 	      val = expand_debug_expr (value);
5396 	    gcc_assert (last == get_last_insn ());
5397 	  }
5398 
5399 	if (!val)
5400 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
5401 	else
5402 	  {
5403 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
5404 
5405 	    gcc_assert (mode == GET_MODE (val)
5406 			|| (GET_MODE (val) == VOIDmode
5407 			    && (CONST_SCALAR_INT_P (val)
5408 				|| GET_CODE (val) == CONST_FIXED
5409 				|| GET_CODE (val) == LABEL_REF)));
5410 	  }
5411 
5412 	INSN_VAR_LOCATION_LOC (insn) = val;
5413 	prev_insn = PREV_INSN (insn);
5414 	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5415 	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5416       }
5417 
5418   flag_strict_aliasing = save_strict_alias;
5419 }
5420 
5421 /* Swap the operands of commutative operations so that the more
5422    expensive operand is expanded first.  */
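/* The cost of a statement is its own estimate_num_insns size cost plus the
   accumulated cost of the single-use (TERed) defining statements feeding
   it, so an operand backed by a large expression counts as expensive even
   though it appears here only as an SSA name.  */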
5423 
5424 static void
5425 reorder_operands (basic_block bb)
5426 {
5427   unsigned int *lattice;  /* Hold cost of each statement.  */
5428   unsigned int i = 0, n = 0;
5429   gimple_stmt_iterator gsi;
5430   gimple_seq stmts;
5431   gimple *stmt;
5432   bool swap;
5433   tree op0, op1;
5434   ssa_op_iter iter;
5435   use_operand_p use_p;
5436   gimple *def0, *def1;
5437 
5438   /* Compute cost of each statement using estimate_num_insns.  */
5439   stmts = bb_seq (bb);
5440   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5441     {
5442       stmt = gsi_stmt (gsi);
5443       if (!is_gimple_debug (stmt))
5444         gimple_set_uid (stmt, n++);
5445     }
5446   lattice = XNEWVEC (unsigned int, n);
5447   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5448     {
5449       unsigned cost;
5450       stmt = gsi_stmt (gsi);
5451       if (is_gimple_debug (stmt))
5452 	continue;
5453       cost = estimate_num_insns (stmt, &eni_size_weights);
5454       lattice[i] = cost;
5455       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5456 	{
5457 	  tree use = USE_FROM_PTR (use_p);
5458 	  gimple *def_stmt;
5459 	  if (TREE_CODE (use) != SSA_NAME)
5460 	    continue;
5461 	  def_stmt = get_gimple_for_ssa_name (use);
5462 	  if (!def_stmt)
5463 	    continue;
5464 	  lattice[i] += lattice[gimple_uid (def_stmt)];
5465 	}
5466       i++;
5467       if (!is_gimple_assign (stmt)
5468 	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5469 	continue;
5470       op0 = gimple_op (stmt, 1);
5471       op1 = gimple_op (stmt, 2);
5472       if (TREE_CODE (op0) != SSA_NAME
5473 	  || TREE_CODE (op1) != SSA_NAME)
5474 	continue;
5475       /* Swap operands if the second one is more expensive.  */
5476       def0 = get_gimple_for_ssa_name (op0);
5477       def1 = get_gimple_for_ssa_name (op1);
5478       if (!def1)
5479 	continue;
5480       swap = false;
5481       if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5482 	swap = true;
5483       if (swap)
5484 	{
5485 	  if (dump_file && (dump_flags & TDF_DETAILS))
5486 	    {
5487 	      fprintf (dump_file, "Swap operands in stmt:\n");
5488 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5489 	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5490 		       def0 ? lattice[gimple_uid (def0)] : 0,
5491 		       lattice[gimple_uid (def1)]);
5492 	    }
5493 	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5494 			     gimple_assign_rhs2_ptr (stmt));
5495 	}
5496     }
5497   XDELETE (lattice);
5498 }
5499 
5500 /* Expand basic block BB from GIMPLE trees to RTL.  */
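/* Returns BB, or a new basic block when expanding a GIMPLE_COND or a tail
   call had to split the block; the caller continues its walk from the
   returned block.  */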
5501 
5502 static basic_block
5503 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5504 {
5505   gimple_stmt_iterator gsi;
5506   gimple_seq stmts;
5507   gimple *stmt = NULL;
5508   rtx_note *note = NULL;
5509   rtx_insn *last;
5510   edge e;
5511   edge_iterator ei;
5512 
5513   if (dump_file)
5514     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5515 	     bb->index);
5516 
5517   /* Note that since we are now transitioning from GIMPLE to RTL, we
5518      cannot use the gsi_*_bb() routines because they expect the basic
5519      block to be in GIMPLE, instead of RTL.  Therefore, we need to
5520      access the BB sequence directly.  */
5521   if (optimize)
5522     reorder_operands (bb);
5523   stmts = bb_seq (bb);
5524   bb->il.gimple.seq = NULL;
5525   bb->il.gimple.phi_nodes = NULL;
5526   rtl_profile_for_bb (bb);
5527   init_rtl_bb_info (bb);
5528   bb->flags |= BB_RTL;
5529 
5530   /* Remove the RETURN_EXPR if we may fall through to the exit
5531      instead.  */
5532   gsi = gsi_last (stmts);
5533   if (!gsi_end_p (gsi)
5534       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5535     {
5536       greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5537 
5538       gcc_assert (single_succ_p (bb));
5539       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5540 
5541       if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5542 	  && !gimple_return_retval (ret_stmt))
5543 	{
5544 	  gsi_remove (&gsi, false);
5545 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5546 	}
5547     }
5548 
5549   gsi = gsi_start (stmts);
5550   if (!gsi_end_p (gsi))
5551     {
5552       stmt = gsi_stmt (gsi);
5553       if (gimple_code (stmt) != GIMPLE_LABEL)
5554 	stmt = NULL;
5555     }
5556 
5557   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5558 
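  /* ELT is non-null when an earlier expansion already asked for a label
     for this block via label_rtx_for_bb; emit that label here even if the
     block has no GIMPLE_LABEL of its own.  */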
5559   if (stmt || elt)
5560     {
5561       gcc_checking_assert (!note);
5562       last = get_last_insn ();
5563 
5564       if (stmt)
5565 	{
5566 	  expand_gimple_stmt (stmt);
5567 	  gsi_next (&gsi);
5568 	}
5569 
5570       if (elt)
5571 	emit_label (*elt);
5572 
5573       BB_HEAD (bb) = NEXT_INSN (last);
5574       if (NOTE_P (BB_HEAD (bb)))
5575 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5576       gcc_assert (LABEL_P (BB_HEAD (bb)));
5577       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5578 
5579       maybe_dump_rtl_for_gimple_stmt (stmt, last);
5580     }
5581   else
5582     BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5583 
5584   if (note)
5585     NOTE_BASIC_BLOCK (note) = bb;
5586 
5587   for (; !gsi_end_p (gsi); gsi_next (&gsi))
5588     {
5589       basic_block new_bb;
5590 
5591       stmt = gsi_stmt (gsi);
5592 
5593       /* If this statement is a non-debug one, and we generate debug
5594 	 insns, then this one might be the last real use of a TERed
5595 	 SSA_NAME, but where there are still some debug uses further
5596 	 down.  Expanding the current SSA name in such further debug
5597 	 uses by their RHS might lead to wrong debug info, as coalescing
5598 	 might make the operands of such RHS be placed into the same
5599 	 pseudo as something else.  Like so:
5600 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
5601 	   use(a_1);
5602 	   a_2 = ...
5603            #DEBUG ... => a_1
5604 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5605 	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5606 	 the write to a_2 would actually have clobbered the place which
5607 	 formerly held a_0.
5608 
5609 	 So, instead of that, we recognize the situation, and generate
5610 	 debug temporaries at the last real use of TERed SSA names:
5611 	   a_1 = a_0 + 1;
5612            #DEBUG #D1 => a_1
5613 	   use(a_1);
5614 	   a_2 = ...
5615            #DEBUG ... => #D1
5616 	 */
5617       if (MAY_HAVE_DEBUG_BIND_INSNS
5618 	  && SA.values
5619 	  && !is_gimple_debug (stmt))
5620 	{
5621 	  ssa_op_iter iter;
5622 	  tree op;
5623 	  gimple *def;
5624 
5625 	  location_t sloc = curr_insn_location ();
5626 
5627 	  /* Look for SSA names that have their last use here (TERed
5628 	     names always have only one real use).  */
5629 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5630 	    if ((def = get_gimple_for_ssa_name (op)))
5631 	      {
5632 		imm_use_iterator imm_iter;
5633 		use_operand_p use_p;
5634 		bool have_debug_uses = false;
5635 
5636 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5637 		  {
5638 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
5639 		      {
5640 			have_debug_uses = true;
5641 			break;
5642 		      }
5643 		  }
5644 
5645 		if (have_debug_uses)
5646 		  {
5647 		    /* OP is a TERed SSA name, with DEF its defining
5648 		       statement, and where OP is used in further debug
5649 		       instructions.  Generate a debug temporary, and
5650 		       replace all uses of OP in debug insns with that
5651 		       temporary.  */
5652 		    gimple *debugstmt;
5653 		    tree value = gimple_assign_rhs_to_tree (def);
5654 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
5655 		    rtx val;
5656 		    machine_mode mode;
5657 
5658 		    set_curr_insn_location (gimple_location (def));
5659 
5660 		    DECL_ARTIFICIAL (vexpr) = 1;
5661 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
5662 		    if (DECL_P (value))
5663 		      mode = DECL_MODE (value);
5664 		    else
5665 		      mode = TYPE_MODE (TREE_TYPE (value));
5666 		    SET_DECL_MODE (vexpr, mode);
5667 
5668 		    val = gen_rtx_VAR_LOCATION
5669 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5670 
5671 		    emit_debug_insn (val);
5672 
5673 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5674 		      {
5675 			if (!gimple_debug_bind_p (debugstmt))
5676 			  continue;
5677 
5678 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5679 			  SET_USE (use_p, vexpr);
5680 
5681 			update_stmt (debugstmt);
5682 		      }
5683 		  }
5684 	      }
5685 	  set_curr_insn_location (sloc);
5686 	}
5687 
5688       currently_expanding_gimple_stmt = stmt;
5689 
5690       /* Expand this statement, then evaluate the resulting RTL and
5691 	 fixup the CFG accordingly.  */
5692       if (gimple_code (stmt) == GIMPLE_COND)
5693 	{
5694 	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5695 	  if (new_bb)
5696 	    return new_bb;
5697 	}
5698       else if (is_gimple_debug (stmt))
5699 	{
5700 	  location_t sloc = curr_insn_location ();
5701 	  gimple_stmt_iterator nsi = gsi;
5702 
5703 	  for (;;)
5704 	    {
5705 	      tree var;
5706 	      tree value = NULL_TREE;
5707 	      rtx val = NULL_RTX;
5708 	      machine_mode mode;
5709 
5710 	      if (!gimple_debug_nonbind_marker_p (stmt))
5711 		{
5712 		  if (gimple_debug_bind_p (stmt))
5713 		    {
5714 		      var = gimple_debug_bind_get_var (stmt);
5715 
5716 		      if (TREE_CODE (var) != DEBUG_EXPR_DECL
5717 			  && TREE_CODE (var) != LABEL_DECL
5718 			  && !target_for_debug_bind (var))
5719 			goto delink_debug_stmt;
5720 
5721 		      if (DECL_P (var))
5722 			mode = DECL_MODE (var);
5723 		      else
5724 			mode = TYPE_MODE (TREE_TYPE (var));
5725 
5726 		      if (gimple_debug_bind_has_value_p (stmt))
5727 			value = gimple_debug_bind_get_value (stmt);
5728 
5729 		      val = gen_rtx_VAR_LOCATION
5730 			(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5731 		    }
5732 		  else if (gimple_debug_source_bind_p (stmt))
5733 		    {
5734 		      var = gimple_debug_source_bind_get_var (stmt);
5735 
5736 		      value = gimple_debug_source_bind_get_value (stmt);
5737 
5738 		      mode = DECL_MODE (var);
5739 
5740 		      val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5741 						  VAR_INIT_STATUS_UNINITIALIZED);
5742 		    }
5743 		  else
5744 		    gcc_unreachable ();
5745 		}
5746 	      /* If this function was first compiled with markers
5747 		 enabled, but they're now disabled (e.g. LTO), drop
5748 		 them on the floor.  */
5749 	      else if (gimple_debug_nonbind_marker_p (stmt)
5750 		       && !MAY_HAVE_DEBUG_MARKER_INSNS)
5751 		goto delink_debug_stmt;
5752 	      else if (gimple_debug_begin_stmt_p (stmt))
5753 		val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5754 	      else if (gimple_debug_inline_entry_p (stmt))
5755 		{
5756 		  tree block = gimple_block (stmt);
5757 
5758 		  if (block)
5759 		    val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5760 		  else
5761 		    goto delink_debug_stmt;
5762 		}
5763 	      else
5764 		gcc_unreachable ();
5765 
5766 	      last = get_last_insn ();
5767 
5768 	      set_curr_insn_location (gimple_location (stmt));
5769 
5770 	      emit_debug_insn (val);
5771 
5772 	      if (dump_file && (dump_flags & TDF_DETAILS))
5773 		{
5774 		  /* We can't dump the insn with a TREE where an RTX
5775 		     is expected.  */
5776 		  if (GET_CODE (val) == VAR_LOCATION)
5777 		    {
5778 		      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5779 		      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5780 		    }
5781 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
5782 		  if (GET_CODE (val) == VAR_LOCATION)
5783 		    PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5784 		}
5785 
5786 	    delink_debug_stmt:
5787 	      /* In order not to generate too many debug temporaries,
5788 	         we delink all uses of debug statements we already expanded.
5789 		 Therefore debug statements between definition and real
5790 		 use of TERed SSA names will continue to use the SSA name,
5791 		 and not be replaced with debug temps.  */
5792 	      delink_stmt_imm_use (stmt);
5793 
5794 	      gsi = nsi;
5795 	      gsi_next (&nsi);
5796 	      if (gsi_end_p (nsi))
5797 		break;
5798 	      stmt = gsi_stmt (nsi);
5799 	      if (!is_gimple_debug (stmt))
5800 		break;
5801 	    }
5802 
5803 	  set_curr_insn_location (sloc);
5804 	}
5805       else
5806 	{
5807 	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
5808 	  if (call_stmt
5809 	      && gimple_call_tail_p (call_stmt)
5810 	      && disable_tail_calls)
5811 	    gimple_call_set_tail (call_stmt, false);
5812 
5813 	  if (call_stmt && gimple_call_tail_p (call_stmt))
5814 	    {
5815 	      bool can_fallthru;
5816 	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5817 	      if (new_bb)
5818 		{
5819 		  if (can_fallthru)
5820 		    bb = new_bb;
5821 		  else
5822 		    return new_bb;
5823 		}
5824 	    }
5825 	  else
5826 	    {
5827 	      def_operand_p def_p;
5828 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5829 
5830 	      if (def_p != NULL)
5831 		{
5832 		  /* Ignore this stmt if it is in the list of
5833 		     replaceable expressions.  */
5834 		  if (SA.values
5835 		      && bitmap_bit_p (SA.values,
5836 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5837 		    continue;
5838 		}
5839 	      last = expand_gimple_stmt (stmt);
5840 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5841 	    }
5842 	}
5843     }
5844 
5845   currently_expanding_gimple_stmt = NULL;
5846 
5847   /* Expand implicit goto and convert goto_locus.  */
5848   FOR_EACH_EDGE (e, ei, bb->succs)
5849     {
5850       if (e->goto_locus != UNKNOWN_LOCATION)
5851 	set_curr_insn_location (e->goto_locus);
5852       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5853 	{
5854 	  emit_jump (label_rtx_for_bb (e->dest));
5855 	  e->flags &= ~EDGE_FALLTHRU;
5856 	}
5857     }
5858 
5859   /* Expanded RTL can create a jump in the last instruction of the block.
5860      This might later be assumed to be a jump to the successor and break
5861      edge insertion.  We need to insert a dummy move to prevent this.  PR41440. */
5862   if (single_succ_p (bb)
5863       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5864       && (last = get_last_insn ())
5865       && (JUMP_P (last)
5866 	  || (DEBUG_INSN_P (last)
5867 	      && JUMP_P (prev_nondebug_insn (last)))))
5868     {
5869       rtx dummy = gen_reg_rtx (SImode);
5870       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5871     }
5872 
5873   do_pending_stack_adjust ();
5874 
5875   /* Find the block tail.  The last insn in the block is the insn
5876      before a barrier and/or table jump insn.  */
5877   last = get_last_insn ();
5878   if (BARRIER_P (last))
5879     last = PREV_INSN (last);
5880   if (JUMP_TABLE_DATA_P (last))
5881     last = PREV_INSN (PREV_INSN (last));
5882   BB_END (bb) = last;
5883 
5884   update_bb_for_insn (bb);
5885 
5886   return bb;
5887 }
5888 
5889 
5890 /* Create a basic block for initialization code.  */
5891 
5892 static basic_block
5893 construct_init_block (void)
5894 {
5895   basic_block init_block, first_block;
5896   edge e = NULL;
5897   int flags;
5898 
5899   /* Multiple entry points not supported yet.  */
5900   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5901   init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5902   init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5903   ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5904   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5905 
5906   e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5907 
5908   /* When the entry edge points to the first basic block, we don't need a
5909      jump; otherwise we have to jump to the proper target.  */
5910   if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5911     {
5912       tree label = gimple_block_label (e->dest);
5913 
5914       emit_jump (jump_target_rtx (label));
5915       flags = 0;
5916     }
5917   else
5918     flags = EDGE_FALLTHRU;
5919 
5920   init_block = create_basic_block (NEXT_INSN (get_insns ()),
5921 				   get_last_insn (),
5922 				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
5923   init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5924   add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5925   if (e)
5926     {
5927       first_block = e->dest;
5928       redirect_edge_succ (e, init_block);
5929       e = make_single_succ_edge (init_block, first_block, flags);
5930     }
5931   else
5932     e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5933 			       EDGE_FALLTHRU);
5934 
5935   update_bb_for_insn (init_block);
5936   return init_block;
5937 }
5938 
5939 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5940    found in the block tree.  */
5941 
5942 static void
5943 set_block_levels (tree block, int level)
5944 {
5945   while (block)
5946     {
5947       BLOCK_NUMBER (block) = level;
5948       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5949       block = BLOCK_CHAIN (block);
5950     }
5951 }
5952 
5953 /* Create a block containing landing pads and similar stuff.  */
5954 
5955 static void
5956 construct_exit_block (void)
5957 {
5958   rtx_insn *head = get_last_insn ();
5959   rtx_insn *end;
5960   basic_block exit_block;
5961   edge e, e2;
5962   unsigned ix;
5963   edge_iterator ei;
5964   basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5965   rtx_insn *orig_end = BB_END (prev_bb);
5966 
5967   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5968 
5969   /* Make sure the locus is set to the end of the function, so that
5970      epilogue line numbers and warnings are set properly.  */
5971   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5972     input_location = cfun->function_end_locus;
5973 
5974   /* Generate rtl for function exit.  */
5975   expand_function_end ();
5976 
5977   end = get_last_insn ();
5978   if (head == end)
5979     return;
5980   /* While emitting the function end we could move end of the last basic
5981      block.  */
5982   BB_END (prev_bb) = orig_end;
5983   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5984     head = NEXT_INSN (head);
5985   /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5986      basic block count accounting will be confused.  Any instructions before
5987      that label are emitted for the case where PREV_BB falls through into the
5988      exit block, so append those instructions to prev_bb in that case.  */
5989   if (NEXT_INSN (head) != return_label)
5990     {
5991       while (NEXT_INSN (head) != return_label)
5992 	{
5993 	  if (!NOTE_P (NEXT_INSN (head)))
5994 	    BB_END (prev_bb) = NEXT_INSN (head);
5995 	  head = NEXT_INSN (head);
5996 	}
5997     }
5998   exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5999   exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
6000   add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6001 
6002   ix = 0;
6003   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
6004     {
6005       e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6006       if (!(e->flags & EDGE_ABNORMAL))
6007 	redirect_edge_succ (e, exit_block);
6008       else
6009 	ix++;
6010     }
6011 
6012   e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6013 			     EDGE_FALLTHRU);
6014   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6015     if (e2 != e)
6016       {
6017 	exit_block->count -= e2->count ();
6018       }
6019   update_bb_for_insn (exit_block);
6020 }
6021 
6022 /* Helper function for discover_nonconstant_array_refs.
6023    Look for ARRAY_REF nodes with non-constant indexes and mark them
6024    addressable.  */
6025 
6026 static tree
6027 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6028 				   void *data ATTRIBUTE_UNUSED)
6029 {
6030   tree t = *tp;
6031 
6032   if (IS_TYPE_OR_DECL_P (t))
6033     *walk_subtrees = 0;
6034   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6035     {
6036       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6037 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6038 	      && (!TREE_OPERAND (t, 2)
6039 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6040 	     || (TREE_CODE (t) == COMPONENT_REF
6041 		 && (!TREE_OPERAND (t,2)
6042 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6043 	     || TREE_CODE (t) == BIT_FIELD_REF
6044 	     || TREE_CODE (t) == REALPART_EXPR
6045 	     || TREE_CODE (t) == IMAGPART_EXPR
6046 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
6047 	     || CONVERT_EXPR_P (t))
6048 	t = TREE_OPERAND (t, 0);
6049 
6050       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6051 	{
6052 	  t = get_base_address (t);
6053 	  if (t && DECL_P (t)
6054               && DECL_MODE (t) != BLKmode)
6055 	    TREE_ADDRESSABLE (t) = 1;
6056 	}
6057 
6058       *walk_subtrees = 0;
6059     }
6060 
6061   return NULL_TREE;
6062 }
6063 
6064 /* RTL expansion is not able to compile array references with variable
6065    offsets for arrays stored in a single register.  Discover such
6066    expressions and mark the variables as addressable to avoid this
6067    scenario.  */
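/* For example, given

     int v[2];
     ... = v[i];

   with a non-constant I, V must be kept in memory: there is no rtl for
   indexing into a register at a variable offset.  */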
6068 
6069 static void
6070 discover_nonconstant_array_refs (void)
6071 {
6072   basic_block bb;
6073   gimple_stmt_iterator gsi;
6074 
6075   FOR_EACH_BB_FN (bb, cfun)
6076     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6077       {
6078 	gimple *stmt = gsi_stmt (gsi);
6079 	if (!is_gimple_debug (stmt))
6080 	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6081       }
6082 }
6083 
6084 /* This function sets crtl->args.internal_arg_pointer to a virtual
6085    register if DRAP is needed.  The local register allocator will replace
6086    virtual_incoming_args_rtx with the virtual register.  */
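/* DRAP is the dynamic realign argument pointer: a register used to address
   the incoming arguments when the stack has to be realigned on entry.  */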
6087 
6088 static void
6089 expand_stack_alignment (void)
6090 {
6091   rtx drap_rtx;
6092   unsigned int preferred_stack_boundary;
6093 
6094   if (! SUPPORTS_STACK_ALIGNMENT)
6095     return;
6096 
6097   if (cfun->calls_alloca
6098       || cfun->has_nonlocal_label
6099       || crtl->has_nonlocal_goto)
6100     crtl->need_drap = true;
6101 
6102   /* Call update_stack_boundary here again to update incoming stack
6103      boundary.  It may set incoming stack alignment to a different
6104      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
6105      use the minimum incoming stack alignment to check if it is OK
6106      to perform sibcall optimization since sibcall optimization will
6107      only align the outgoing stack to incoming stack boundary.  */
6108   if (targetm.calls.update_stack_boundary)
6109     targetm.calls.update_stack_boundary ();
6110 
6111   /* The incoming stack frame has to be aligned at least at
6112      parm_stack_boundary.  */
6113   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6114 
6115   /* Update crtl->stack_alignment_estimated and use it later to align
6116      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
6117      exceptions since callgraph doesn't collect incoming stack alignment
6118      in this case.  */
6119   if (cfun->can_throw_non_call_exceptions
6120       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6121     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6122   else
6123     preferred_stack_boundary = crtl->preferred_stack_boundary;
6124   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6125     crtl->stack_alignment_estimated = preferred_stack_boundary;
6126   if (preferred_stack_boundary > crtl->stack_alignment_needed)
6127     crtl->stack_alignment_needed = preferred_stack_boundary;
6128 
6129   gcc_assert (crtl->stack_alignment_needed
6130 	      <= crtl->stack_alignment_estimated);
6131 
6132   crtl->stack_realign_needed
6133     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6134   crtl->stack_realign_tried = crtl->stack_realign_needed;
6135 
6136   crtl->stack_realign_processed = true;
6137 
6138   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6139      alignment.  */
6140   gcc_assert (targetm.calls.get_drap_rtx != NULL);
6141   drap_rtx = targetm.calls.get_drap_rtx ();
6142 
6143   /* stack_realign_drap and drap_rtx must match.  */
6144   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6145 
6146   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
6147   if (drap_rtx != NULL)
6148     {
6149       crtl->args.internal_arg_pointer = drap_rtx;
6150 
6151       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6152          needed. */
6153       fixup_tail_calls ();
6154     }
6155 }
6156 
6157 
6158 static void
6159 expand_main_function (void)
6160 {
6161 #if (defined(INVOKE__main)				\
6162      || (!defined(HAS_INIT_SECTION)			\
6163 	 && !defined(INIT_SECTION_ASM_OP)		\
6164 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6165   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6166 #endif
6167 }
6168 
6169 
6170 /* Expand code to initialize the stack_protect_guard.  This is invoked at
6171    the beginning of a function to be protected.  */
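/* The guard value is copied from the target's guard location (or from
   const0_rtx when the target provides none) into this function's canary
   slot; the matching check is emitted separately by the epilogue code.  */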
6172 
6173 static void
6174 stack_protect_prologue (void)
6175 {
6176   tree guard_decl = targetm.stack_protect_guard ();
6177   rtx x, y;
6178 
6179   x = expand_normal (crtl->stack_protect_guard);
6180   if (guard_decl)
6181     y = expand_normal (guard_decl);
6182   else
6183     y = const0_rtx;
6184 
6185   /* Allow the target to copy from Y to X without leaking Y into a
6186      register.  */
6187   if (targetm.have_stack_protect_set ())
6188     if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6189       {
6190 	emit_insn (insn);
6191 	return;
6192       }
6193 
6194   /* Otherwise do a straight move.  */
6195   emit_move_insn (x, y);
6196 }
6197 
6198 /* Translate the intermediate representation contained in the CFG
6199    from GIMPLE trees to RTL.
6200 
6201    We do conversion per basic block and preserve/update the tree CFG.
6202    This implies we have to do some magic as the CFG can simultaneously
6203    consist of basic blocks containing RTL and GIMPLE trees.  This can
6204    confuse the CFG hooks, so be careful to not manipulate CFG during
6205    the expansion.  */
6206 
6207 namespace {
6208 
6209 const pass_data pass_data_expand =
6210 {
6211   RTL_PASS, /* type */
6212   "expand", /* name */
6213   OPTGROUP_NONE, /* optinfo_flags */
6214   TV_EXPAND, /* tv_id */
6215   ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6216     | PROP_gimple_lcx
6217     | PROP_gimple_lvec
6218     | PROP_gimple_lva), /* properties_required */
6219   PROP_rtl, /* properties_provided */
6220   ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6221   0, /* todo_flags_start */
6222   0, /* todo_flags_finish */
6223 };
6224 
6225 class pass_expand : public rtl_opt_pass
6226 {
6227 public:
6228   pass_expand (gcc::context *ctxt)
6229     : rtl_opt_pass (pass_data_expand, ctxt)
6230   {}
6231 
6232   /* opt_pass methods: */
6233   virtual unsigned int execute (function *);
6234 
6235 }; // class pass_expand
6236 
6237 unsigned int
6238 pass_expand::execute (function *fun)
6239 {
6240   basic_block bb, init_block;
6241   edge_iterator ei;
6242   edge e;
6243   rtx_insn *var_seq, *var_ret_seq;
6244   unsigned i;
6245 
6246   timevar_push (TV_OUT_OF_SSA);
6247   rewrite_out_of_ssa (&SA);
6248   timevar_pop (TV_OUT_OF_SSA);
6249   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6250 
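  /* With TER enabled, replaceable expressions can chain to arbitrary
     depth; cap the depth used inside debug binds up front so that
     expanding them later does not build huge debug expressions.  */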
6251   if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6252     {
6253       gimple_stmt_iterator gsi;
6254       FOR_EACH_BB_FN (bb, cfun)
6255 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6256 	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
6257 	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6258     }
6259 
6260   /* Make sure all values used by the optimization passes have sane
6261      defaults.  */
6262   reg_renumber = 0;
6263 
6264   /* Some backends want to know that we are expanding to RTL.  */
6265   currently_expanding_to_rtl = 1;
6266   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
6267   free_dominance_info (CDI_DOMINATORS);
6268 
6269   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6270 
6271   if (chkp_function_instrumented_p (current_function_decl))
6272     chkp_reset_rtl_bounds ();
6273 
6274   insn_locations_init ();
6275   if (!DECL_IS_BUILTIN (current_function_decl))
6276     {
6277       /* Eventually, all FEs should explicitly set function_start_locus.  */
6278       if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6279 	set_curr_insn_location
6280 	  (DECL_SOURCE_LOCATION (current_function_decl));
6281       else
6282 	set_curr_insn_location (fun->function_start_locus);
6283     }
6284   else
6285     set_curr_insn_location (UNKNOWN_LOCATION);
6286   prologue_location = curr_insn_location ();
6287 
6288 #ifdef INSN_SCHEDULING
6289   init_sched_attrs ();
6290 #endif
6291 
6292   /* Make sure first insn is a note even if we don't want linenums.
6293      This makes sure the first insn will never be deleted.
6294      Also, final expects a note to appear there.  */
6295   emit_note (NOTE_INSN_DELETED);
6296 
6297   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
6298   discover_nonconstant_array_refs ();
6299 
6300   targetm.expand_to_rtl_hook ();
6301   crtl->init_stack_alignment ();
6302   fun->cfg->max_jumptable_ents = 0;
6303 
6304   /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
6305      of the function section at expansion time to predict the distance of calls.  */
6306   resolve_unique_section (current_function_decl, 0, flag_function_sections);
6307 
6308   /* Expand the variables recorded during gimple lowering.  */
6309   timevar_push (TV_VAR_EXPAND);
6310   start_sequence ();
6311 
6312   var_ret_seq = expand_used_vars ();
6313 
6314   var_seq = get_insns ();
6315   end_sequence ();
6316   timevar_pop (TV_VAR_EXPAND);
6317 
6318   /* Honor stack protection warnings.  */
6319   if (warn_stack_protect)
6320     {
6321       if (fun->calls_alloca)
6322 	warning (OPT_Wstack_protector,
6323 		 "stack protector not protecting local variables: "
6324 		 "variable length buffer");
6325       if (has_short_buffer && !crtl->stack_protect_guard)
6326 	warning (OPT_Wstack_protector,
6327 		 "stack protector not protecting function: "
6328 		 "all local arrays are less than %d bytes long",
6329 		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6330     }
6331 
6332   /* Set up parameters and prepare for return, for the function.  */
6333   expand_function_start (current_function_decl);
6334 
6335   /* If we emitted any instructions for setting up the variables,
6336      emit them before the FUNCTION_START note.  */
6337   if (var_seq)
6338     {
6339       emit_insn_before (var_seq, parm_birth_insn);
6340 
6341       /* In expand_function_end we'll insert the alloca save/restore
6342 	 before parm_birth_insn.  We've just inserted an alloca call.
6343 	 Adjust the pointer to match.  */
6344       parm_birth_insn = var_seq;
6345     }
6346 
6347   /* Now propagate the RTL assignment of each partition to the
6348      underlying var of each SSA_NAME.  */
6349   tree name;
6350 
6351   FOR_EACH_SSA_NAME (i, name, cfun)
6352     {
6353       /* We might have generated new SSA names in
6354 	 update_alias_info_with_stack_vars.  They will have a NULL
6355 	 defining statement, and won't be part of the partitioning,
6356 	 so ignore those.  */
6357       if (!SSA_NAME_DEF_STMT (name))
6358 	continue;
6359 
6360       adjust_one_expanded_partition_var (name);
6361     }
6362 
6363   /* Clean up RTL of variables that straddle across multiple
6364      partitions, and check that the rtl of any PARM_DECLs that are not
6365      cleaned up is that of their default defs.  */
6366   FOR_EACH_SSA_NAME (i, name, cfun)
6367     {
6368       int part;
6369 
6370       /* We might have generated new SSA names in
6371 	 update_alias_info_with_stack_vars.  They will have a NULL
6372 	 defining statement, and won't be part of the partitioning,
6373 	 so ignore those.  */
6374       if (!SSA_NAME_DEF_STMT (name))
6375 	continue;
6376       part = var_to_partition (SA.map, name);
6377       if (part == NO_PARTITION)
6378 	continue;
6379 
6380       /* If this decl was marked as living in multiple places, reset
6381 	 this now to NULL.  */
6382       tree var = SSA_NAME_VAR (name);
6383       if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6384 	SET_DECL_RTL (var, NULL);
6385       /* Check that the pseudos chosen by assign_parms are those of
6386 	 the corresponding default defs.  */
6387       else if (SSA_NAME_IS_DEFAULT_DEF (name)
6388 	       && (TREE_CODE (var) == PARM_DECL
6389 		   || TREE_CODE (var) == RESULT_DECL))
6390 	{
6391 	  rtx in = DECL_RTL_IF_SET (var);
6392 	  gcc_assert (in);
6393 	  rtx out = SA.partition_to_pseudo[part];
6394 	  gcc_assert (in == out);
6395 
6396 	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6397 	     those expected by debug backends for each parm and for
6398 	     the result.  This is particularly important for stabs,
6399 	     whose register elimination from parm's DECL_RTL may cause
6400 	     -fcompare-debug differences as SET_DECL_RTL changes reg's
6401 	     attrs.  So, make sure the RTL already has the parm as the
6402 	     EXPR, so that it won't change.  */
6403 	  SET_DECL_RTL (var, NULL_RTX);
6404 	  if (MEM_P (in))
6405 	    set_mem_attributes (in, var, true);
6406 	  SET_DECL_RTL (var, in);
6407 	}
6408     }
6409 
6410   /* If this function is `main', emit a call to `__main'
6411      to run global initializers, etc.  */
6412   if (DECL_NAME (current_function_decl)
6413       && MAIN_NAME_P (DECL_NAME (current_function_decl))
6414       && DECL_FILE_SCOPE_P (current_function_decl))
6415     expand_main_function ();
6416 
6417   /* Initialize the stack_protect_guard field.  This must happen after the
6418      call to __main (if any) so that the external decl is initialized.  */
6419   if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6420     stack_protect_prologue ();
6421 
6422   expand_phi_nodes (&SA);
6423 
6424   /* Release any stale SSA redirection data.  */
6425   redirect_edge_var_map_empty ();
6426 
6427   /* Register rtl specific functions for cfg.  */
6428   rtl_register_cfg_hooks ();
6429 
6430   init_block = construct_init_block ();
6431 
6432   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
6433      remaining edges later.  */
6434   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6435     e->flags &= ~EDGE_EXECUTABLE;
6436 
6437   /* If the function has too many markers, drop them while expanding.  */
6438   if (cfun->debug_marker_count
6439       >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6440     cfun->debug_nonbind_markers = false;
6441 
6442   lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6443   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6444 		  next_bb)
6445     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6446 
6447   if (MAY_HAVE_DEBUG_BIND_INSNS)
6448     expand_debug_locations ();
6449 
6450   if (deep_ter_debug_map)
6451     {
6452       delete deep_ter_debug_map;
6453       deep_ter_debug_map = NULL;
6454     }
6455 
6456   /* Free stuff we no longer need after GIMPLE optimizations.  */
6457   free_dominance_info (CDI_DOMINATORS);
6458   free_dominance_info (CDI_POST_DOMINATORS);
6459   delete_tree_cfg_annotations (fun);
6460 
6461   timevar_push (TV_OUT_OF_SSA);
6462   finish_out_of_ssa (&SA);
6463   timevar_pop (TV_OUT_OF_SSA);
6464 
6465   timevar_push (TV_POST_EXPAND);
6466   /* We are no longer in SSA form.  */
6467   fun->gimple_df->in_ssa_p = false;
6468   loops_state_clear (LOOP_CLOSED_SSA);
6469 
6470   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6471      conservatively to true until they are all profile aware.  */
6472   delete lab_rtx_for_bb;
6473   free_histograms (fun);
6474 
6475   construct_exit_block ();
6476   insn_locations_finalize ();
6477 
6478   if (var_ret_seq)
6479     {
6480       rtx_insn *after = return_label;
6481       rtx_insn *next = NEXT_INSN (after);
6482       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6483 	after = next;
6484       emit_insn_after (var_ret_seq, after);
6485     }
6486 
6487   /* Zap the tree EH table.  */
6488   set_eh_throw_stmt_table (fun, NULL);
6489 
6490   /* We need JUMP_LABEL be set in order to redirect jumps, and hence
6491      split edges which edge insertions might do.  */
6492   rebuild_jump_labels (get_insns ());
6493 
6494   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6495 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6496     {
6497       edge e;
6498       edge_iterator ei;
6499       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6500 	{
6501 	  if (e->insns.r)
6502 	    {
6503 	      rebuild_jump_labels_chain (e->insns.r);
6504 	      /* Put insns after parm birth, but before
6505 		 NOTE_INSNS_FUNCTION_BEG.  */
6506 	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6507 		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6508 		{
6509 		  rtx_insn *insns = e->insns.r;
6510 		  e->insns.r = NULL;
6511 		  if (NOTE_P (parm_birth_insn)
6512 		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6513 		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6514 		  else
6515 		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6516 		}
6517 	      else
6518 		commit_one_edge_insertion (e);
6519 	    }
6520 	  else
6521 	    ei_next (&ei);
6522 	}
6523     }
6524 
6525   /* We're done expanding trees to RTL.  */
6526   currently_expanding_to_rtl = 0;
6527 
6528   flush_mark_addressable_queue ();
6529 
6530   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6531 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6532     {
6533       edge e;
6534       edge_iterator ei;
6535       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6536 	{
6537 	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
6538 	  e->flags &= ~EDGE_EXECUTABLE;
6539 
6540 	  /* At the moment not all abnormal edges match the RTL
6541 	     representation.  It is safe to remove them here as
6542 	     find_many_sub_basic_blocks will rediscover them.
6543 	     In the future we should get this fixed properly.  */
6544 	  if ((e->flags & EDGE_ABNORMAL)
6545 	      && !(e->flags & EDGE_SIBCALL))
6546 	    remove_edge (e);
6547 	  else
6548 	    ei_next (&ei);
6549 	}
6550     }
6551 
6552   auto_sbitmap blocks (last_basic_block_for_fn (fun));
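  /* Expansion and edge insertion may have emitted jumps and labels in the
     middle of what used to be single GIMPLE blocks; re-split every block
     so that the RTL CFG matches the emitted insn stream.  */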
6553   bitmap_ones (blocks);
6554   find_many_sub_basic_blocks (blocks);
6555   purge_all_dead_edges ();
6556 
6557   /* After initial rtl generation, call back to finish generating
6558      exception support code.  We need to do this before cleaning up
6559      the CFG as the code does not expect dead landing pads.  */
6560   if (fun->eh->region_tree != NULL)
6561     finish_eh_generation ();
6562 
6563   /* Call expand_stack_alignment after finishing all
6564      updates to crtl->preferred_stack_boundary.  */
6565   expand_stack_alignment ();
6566 
6567   /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6568      function.  */
6569   if (crtl->tail_call_emit)
6570     fixup_tail_calls ();
6571 
6572   /* BB subdivision may have created basic blocks that are only reachable
6573      from unlikely bbs but not marked as such in the profile.  */
6574   if (optimize)
6575     propagate_unlikely_bbs_forward ();
6576 
6577   /* Remove unreachable blocks, otherwise we cannot compute dominators
6578      which are needed for loop state verification.  As a side-effect
6579      this also compacts blocks.
6580      ???  We cannot remove trivially dead insns here as for example
6581      the DRAP reg on i?86 is not magically live at this point.
6582      gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
6583   cleanup_cfg (CLEANUP_NO_INSN_DEL);
6584 
6585   checking_verify_flow_info ();
6586 
6587   /* Initialize pseudos allocated for hard registers.  */
6588   emit_initial_value_sets ();
6589 
6590   /* And finally unshare all RTL.  */
6591   unshare_all_rtl ();
6592 
6593   /* There's no need to defer outputting this function any more; we
6594      know we want to output it.  */
6595   DECL_DEFER_OUTPUT (current_function_decl) = 0;
6596 
6597   /* Now that we're done expanding trees to RTL, we shouldn't have any
6598      more CONCATs anywhere.  */
6599   generating_concat_p = 0;
6600 
6601   if (dump_file)
6602     {
6603       fprintf (dump_file,
6604 	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6605       /* And the pass manager will dump RTL for us.  */
6606     }
6607 
6608   /* If we're emitting a nested function, make sure its parent gets
6609      emitted as well.  Doing otherwise confuses debug info.  */
6610     {
6611       tree parent;
6612       for (parent = DECL_CONTEXT (current_function_decl);
6613 	   parent != NULL_TREE;
6614 	   parent = get_containing_scope (parent))
6615 	if (TREE_CODE (parent) == FUNCTION_DECL)
6616 	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6617     }
6618 
6619   TREE_ASM_WRITTEN (current_function_decl) = 1;
6620 
6621   /* After expanding, the return labels are no longer needed. */
6622   return_label = NULL;
6623   naked_return_label = NULL;
6624 
6625   /* After expanding, the tm_restart map is no longer needed.  */
6626   if (fun->gimple_df->tm_restart)
6627     fun->gimple_df->tm_restart = NULL;
6628 
6629   /* Tag the blocks with a depth number so that change_scope can find
6630      the common parent easily.  */
6631   set_block_levels (DECL_INITIAL (fun->decl), 0);
6632   default_rtl_profile ();
6633 
6634   /* For -dx discard loops now, otherwise IL verify in clean_state will
6635      ICE.  */
6636   if (rtl_dump_and_exit)
6637     {
6638       cfun->curr_properties &= ~PROP_loops;
6639       loop_optimizer_finalize ();
6640     }
6641 
6642   timevar_pop (TV_POST_EXPAND);
6643 
6644   return 0;
6645 }
6646 
6647 } // anon namespace
6648 
6649 rtl_opt_pass *
6650 make_pass_expand (gcc::context *ctxt)
6651 {
6652   return new pass_expand (ctxt);
6653 }
6654