xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cfgexpand.c (revision e6c7e151de239c49d2e38720a061ed9d1fa99309)
1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004-2017 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber.  */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "params.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
69 #include "cfgloop.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
71 #include "asan.h"
72 #include "tree-ssa-address.h"
73 #include "output.h"
74 #include "builtins.h"
75 #include "tree-chkp.h"
76 #include "rtl-chkp.h"
77 
78 /* Some systems use __main in a way incompatible with its use in gcc; in these
79    cases, use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80    give the same symbol without quotes for an alternative entry point.  You
81    must define both, or neither.  */
82 #ifndef NAME__MAIN
83 #define NAME__MAIN "__main"
84 #endif
85 
86 /* This variable holds information helping the rewriting of SSA trees
87    into RTL.  */
88 struct ssaexpand SA;
89 
90 /* This variable holds the currently expanded gimple statement for purposes
91    of communicating the profile info to the builtin expanders.  */
92 gimple *currently_expanding_gimple_stmt;
93 
94 static rtx expand_debug_expr (tree);
95 
96 static bool defer_stack_allocation (tree, bool);
97 
98 static void record_alignment_for_reg_var (unsigned int);
99 
100 /* Return an expression tree corresponding to the RHS of GIMPLE
101    statement STMT.  */
102 
103 tree
104 gimple_assign_rhs_to_tree (gimple *stmt)
105 {
106   tree t;
107   enum gimple_rhs_class grhs_class;
108 
109   grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
110 
111   if (grhs_class == GIMPLE_TERNARY_RHS)
112     t = build3 (gimple_assign_rhs_code (stmt),
113 		TREE_TYPE (gimple_assign_lhs (stmt)),
114 		gimple_assign_rhs1 (stmt),
115 		gimple_assign_rhs2 (stmt),
116 		gimple_assign_rhs3 (stmt));
117   else if (grhs_class == GIMPLE_BINARY_RHS)
118     t = build2 (gimple_assign_rhs_code (stmt),
119 		TREE_TYPE (gimple_assign_lhs (stmt)),
120 		gimple_assign_rhs1 (stmt),
121 		gimple_assign_rhs2 (stmt));
122   else if (grhs_class == GIMPLE_UNARY_RHS)
123     t = build1 (gimple_assign_rhs_code (stmt),
124 		TREE_TYPE (gimple_assign_lhs (stmt)),
125 		gimple_assign_rhs1 (stmt));
126   else if (grhs_class == GIMPLE_SINGLE_RHS)
127     {
128       t = gimple_assign_rhs1 (stmt);
129       /* Avoid modifying this tree in place below.  */
130       if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
131 	   && gimple_location (stmt) != EXPR_LOCATION (t))
132 	  || (gimple_block (stmt)
133 	      && currently_expanding_to_rtl
134 	      && EXPR_P (t)))
135 	t = copy_node (t);
136     }
137   else
138     gcc_unreachable ();
139 
140   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
141     SET_EXPR_LOCATION (t, gimple_location (stmt));
142 
143   return t;
144 }
145 
146 
147 #ifndef STACK_ALIGNMENT_NEEDED
148 #define STACK_ALIGNMENT_NEEDED 1
149 #endif
150 
151 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
152 
153 /* Choose either CUR or NEXT as the leader DECL for a partition.
154    Prefer ignored decls, to simplify debug dumps and reduce the ambiguity
155    that arises from the same user variable being in multiple partitions
156    (this is less likely for compiler-introduced temps).  */
157 
158 static tree
159 leader_merge (tree cur, tree next)
160 {
161   if (cur == NULL || cur == next)
162     return next;
163 
164   if (DECL_P (cur) && DECL_IGNORED_P (cur))
165     return cur;
166 
167   if (DECL_P (next) && DECL_IGNORED_P (next))
168     return next;
169 
170   return cur;
171 }
172 
173 /* Associate declaration T with storage space X.  If T is not an
174    SSA name this is exactly SET_DECL_RTL; otherwise make the
175    partition of T associated with X.  */
176 static inline void
177 set_rtl (tree t, rtx x)
178 {
179   gcc_checking_assert (!x
180 		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
181 		       || (use_register_for_decl (t)
182 			   ? (REG_P (x)
183 			      || (GET_CODE (x) == CONCAT
184 				  && (REG_P (XEXP (x, 0))
185 				      || SUBREG_P (XEXP (x, 0)))
186 				  && (REG_P (XEXP (x, 1))
187 				      || SUBREG_P (XEXP (x, 1))))
188 			      /* We need to accept PARALLELs for RESULT_DECLs
189 				 because of vector types with BLKmode returned
190 				 in multiple registers, but they are supposed
191 				 to be uncoalesced.  */
192 			      || (GET_CODE (x) == PARALLEL
193 				  && SSAVAR (t)
194 				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
195 				  && (GET_MODE (x) == BLKmode
196 				      || !flag_tree_coalesce_vars)))
197 			   : (MEM_P (x) || x == pc_rtx
198 			      || (GET_CODE (x) == CONCAT
199 				  && MEM_P (XEXP (x, 0))
200 				  && MEM_P (XEXP (x, 1))))));
201   /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
202      RESULT_DECLs has the expected mode.  For memory, we accept
203      unpromoted modes, since that's what we're likely to get.  For
204      PARM_DECLs and RESULT_DECLs, we'll have been called by
205      set_parm_rtl, which will give us the default def, so we don't
206      have to compute it ourselves.  For RESULT_DECLs, we accept mode
207      mismatches too, as long as we have BLKmode or are not coalescing
208      across variables, so that we don't reject BLKmode PARALLELs or
209      unpromoted REGs.  */
210   gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
211 		       || (SSAVAR (t)
212 			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
213 			   && (promote_ssa_mode (t, NULL) == BLKmode
214 			       || !flag_tree_coalesce_vars))
215 		       || !use_register_for_decl (t)
216 		       || GET_MODE (x) == promote_ssa_mode (t, NULL));
217 
218   if (x)
219     {
220       bool skip = false;
221       tree cur = NULL_TREE;
222       rtx xm = x;
223 
224     retry:
225       if (MEM_P (xm))
226 	cur = MEM_EXPR (xm);
227       else if (REG_P (xm))
228 	cur = REG_EXPR (xm);
229       else if (SUBREG_P (xm))
230 	{
231 	  gcc_assert (subreg_lowpart_p (xm));
232 	  xm = SUBREG_REG (xm);
233 	  goto retry;
234 	}
235       else if (GET_CODE (xm) == CONCAT)
236 	{
237 	  xm = XEXP (xm, 0);
238 	  goto retry;
239 	}
240       else if (GET_CODE (xm) == PARALLEL)
241 	{
242 	  xm = XVECEXP (xm, 0, 0);
243 	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
244 	  xm = XEXP (xm, 0);
245 	  goto retry;
246 	}
247       else if (xm == pc_rtx)
248 	skip = true;
249       else
250 	gcc_unreachable ();
251 
252       tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
253 
254       if (cur != next)
255 	{
256 	  if (MEM_P (x))
257 	    set_mem_attributes (x,
258 				next && TREE_CODE (next) == SSA_NAME
259 				? TREE_TYPE (next)
260 				: next, true);
261 	  else
262 	    set_reg_attrs_for_decl_rtl (next, x);
263 	}
264     }
265 
266   if (TREE_CODE (t) == SSA_NAME)
267     {
268       int part = var_to_partition (SA.map, t);
269       if (part != NO_PARTITION)
270 	{
271 	  if (SA.partition_to_pseudo[part])
272 	    gcc_assert (SA.partition_to_pseudo[part] == x);
273 	  else if (x != pc_rtx)
274 	    SA.partition_to_pseudo[part] = x;
275 	}
276       /* For the benefit of debug information at -O0 (where
277          vartracking doesn't run) record the place also in the base
278          DECL.  For PARMs and RESULTs, do so only when setting the
279          default def.  */
280       if (x && x != pc_rtx && SSA_NAME_VAR (t)
281 	  && (VAR_P (SSA_NAME_VAR (t))
282 	      || SSA_NAME_IS_DEFAULT_DEF (t)))
283 	{
284 	  tree var = SSA_NAME_VAR (t);
285 	  /* If we don't yet have something recorded, just record it now.  */
286 	  if (!DECL_RTL_SET_P (var))
287 	    SET_DECL_RTL (var, x);
288 	  /* If we have it set already to "multiple places" don't
289 	     change this.  */
290 	  else if (DECL_RTL (var) == pc_rtx)
291 	    ;
292 	  /* If we have something recorded and it's not the same place
293 	     as we want to record now, we have multiple partitions for the
294 	     same base variable, with different places.  We can't just
295 	     randomly choose one, hence we have to say that we don't know.
296 	     This only happens with optimization, and there var-tracking
297 	     will figure out the right thing.  */
298 	  else if (DECL_RTL (var) != x)
299 	    SET_DECL_RTL (var, pc_rtx);
300 	}
301     }
302   else
303     SET_DECL_RTL (t, x);
304 }
305 
306 /* This structure holds data relevant to one variable that will be
307    placed in a stack slot.  */
308 struct stack_var
309 {
310   /* The variable.  */
311   tree decl;
312 
313   /* Initially, the size of the variable.  Later, the size of the partition,
314      if this variable becomes its partition's representative.  */
315   HOST_WIDE_INT size;
316 
317   /* The *byte* alignment required for this variable.  Or, as with the
318      size, the alignment for this partition.  */
319   unsigned int alignb;
320 
321   /* The partition representative.  */
322   size_t representative;
323 
324   /* The next stack variable in the partition, or EOC.  */
325   size_t next;
326 
327   /* The indices of the conflicting stack variables.  */
328   bitmap conflicts;
329 };
330 
331 #define EOC  ((size_t)-1)
332 
333 /* We have an array of such objects while deciding allocation.  */
334 static struct stack_var *stack_vars;
335 static size_t stack_vars_alloc;
336 static size_t stack_vars_num;
337 static hash_map<tree, size_t> *decl_to_stack_part;
338 
339 /* Conflict bitmaps go on this obstack.  This allows us to destroy
340    all of them in one big sweep.  */
341 static bitmap_obstack stack_var_bitmap_obstack;
342 
343 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
344    is non-increasing.  */
345 static size_t *stack_vars_sorted;
346 
347 /* The phase of the stack frame.  This is the known misalignment of
348    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
349    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
350 static int frame_phase;
351 
352 /* Used during expand_used_vars to remember if we saw any decls for
353    which we'd like to enable stack smashing protection.  */
354 static bool has_protected_decls;
355 
356 /* Used during expand_used_vars.  Remember if we saw a character buffer
357    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
358 static bool has_short_buffer;
359 
360 /* Compute the byte alignment to use for DECL.  Ignore any alignment
361    we can't satisfy given the expected alignment of the stack boundary.  */
362 
363 static unsigned int
364 align_local_variable (tree decl)
365 {
366   unsigned int align;
367 
368   if (TREE_CODE (decl) == SSA_NAME)
369     align = TYPE_ALIGN (TREE_TYPE (decl));
370   else
371     {
372       align = LOCAL_DECL_ALIGNMENT (decl);
373       SET_DECL_ALIGN (decl, align);
374     }
375   return align / BITS_PER_UNIT;
376 }
377 
378 /* Align the given offset BASE to ALIGN.  Round up if ALIGN_UP is true,
379    down otherwise.  Return the aligned BASE value.  */
380 
381 static inline unsigned HOST_WIDE_INT
382 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
383 {
384   return align_up ? (base + align - 1) & -align : base & -align;
385 }
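
/* A minimal, standalone sketch of the rounding used by align_base above,
   kept out of the build with #if 0.  It assumes ALIGN is a power of two,
   so that -ALIGN is a mask with the low bits cleared; the names below are
   illustrative only and not part of this file's interface.  */
#if 0
#include <assert.h>

static unsigned long long
sketch_align_base (unsigned long long base, unsigned long long align,
		   int align_up)
{
  /* Same expression as align_base, with unsigned long long standing in
     for HOST_WIDE_INT.  */
  return align_up ? (base + align - 1) & -align : base & -align;
}

static void
sketch_align_base_examples (void)
{
  assert (sketch_align_base (37, 16, 1) == 48);	/* rounded up */
  assert (sketch_align_base (37, 16, 0) == 32);	/* rounded down */
  assert (sketch_align_base (48, 16, 1) == 48);	/* already aligned */
}
#endif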
386 
387 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
388    Return the frame offset.  */
389 
390 static HOST_WIDE_INT
391 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
392 {
393   HOST_WIDE_INT offset, new_frame_offset;
394 
395   if (FRAME_GROWS_DOWNWARD)
396     {
397       new_frame_offset
398 	= align_base (frame_offset - frame_phase - size,
399 		      align, false) + frame_phase;
400       offset = new_frame_offset;
401     }
402   else
403     {
404       new_frame_offset
405 	= align_base (frame_offset - frame_phase, align, true) + frame_phase;
406       offset = new_frame_offset;
407       new_frame_offset += size;
408     }
409   frame_offset = new_frame_offset;
410 
411   if (frame_offset_overflow (frame_offset, cfun->decl))
412     frame_offset = offset = 0;
413 
414   return offset;
415 }
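
/* As a worked example of the allocation above (an editorial illustration,
   not part of the pass): on a FRAME_GROWS_DOWNWARD target with frame_phase
   0, frame_offset -16 and a request for 20 bytes at 8-byte alignment,
   align_base (-16 - 20, 8, false) yields -40, so the variable is placed at
   offset -40 and frame_offset becomes -40.  On an upward-growing frame the
   offset is aligned first and the size is added afterwards.  */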
416 
417 /* Accumulate DECL into STACK_VARS.  */
418 
419 static void
420 add_stack_var (tree decl)
421 {
422   struct stack_var *v;
423 
424   if (stack_vars_num >= stack_vars_alloc)
425     {
426       if (stack_vars_alloc)
427 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
428       else
429 	stack_vars_alloc = 32;
430       stack_vars
431 	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
432     }
433   if (!decl_to_stack_part)
434     decl_to_stack_part = new hash_map<tree, size_t>;
435 
436   v = &stack_vars[stack_vars_num];
437   decl_to_stack_part->put (decl, stack_vars_num);
438 
439   v->decl = decl;
440   tree size = TREE_CODE (decl) == SSA_NAME
441     ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
442     : DECL_SIZE_UNIT (decl);
443   v->size = tree_to_uhwi (size);
444   /* Ensure that all variables have size, so that &a != &b for any two
445      variables that are simultaneously live.  */
446   if (v->size == 0)
447     v->size = 1;
448   v->alignb = align_local_variable (decl);
449   /* An alignment of zero can mightily confuse us later.  */
450   gcc_assert (v->alignb != 0);
451 
452   /* All variables are initially in their own partition.  */
453   v->representative = stack_vars_num;
454   v->next = EOC;
455 
456   /* All variables initially conflict with no other.  */
457   v->conflicts = NULL;
458 
459   /* Ensure that this decl doesn't get put onto the list twice.  */
460   set_rtl (decl, pc_rtx);
461 
462   stack_vars_num++;
463 }
464 
465 /* Make the decls associated with indices X and Y conflict.  */
466 
467 static void
468 add_stack_var_conflict (size_t x, size_t y)
469 {
470   struct stack_var *a = &stack_vars[x];
471   struct stack_var *b = &stack_vars[y];
472   if (!a->conflicts)
473     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
474   if (!b->conflicts)
475     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
476   bitmap_set_bit (a->conflicts, y);
477   bitmap_set_bit (b->conflicts, x);
478 }
479 
480 /* Check whether the decls associated with indices X and Y conflict.  */
481 
482 static bool
483 stack_var_conflict_p (size_t x, size_t y)
484 {
485   struct stack_var *a = &stack_vars[x];
486   struct stack_var *b = &stack_vars[y];
487   if (x == y)
488     return false;
489   /* Partitions containing an SSA name result from gimple registers
490      with things like unsupported modes.  They are top-level and
491      hence conflict with everything else.  */
492   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
493     return true;
494 
495   if (!a->conflicts || !b->conflicts)
496     return false;
497   return bitmap_bit_p (a->conflicts, y);
498 }
499 
500 /* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched by
501    add_stack_var, enter its partition number into bitmap DATA.  */
502 
503 static bool
504 visit_op (gimple *, tree op, tree, void *data)
505 {
506   bitmap active = (bitmap)data;
507   op = get_base_address (op);
508   if (op
509       && DECL_P (op)
510       && DECL_RTL_IF_SET (op) == pc_rtx)
511     {
512       size_t *v = decl_to_stack_part->get (op);
513       if (v)
514 	bitmap_set_bit (active, *v);
515     }
516   return false;
517 }
518 
519 /* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched by
520    add_stack_var, record conflicts between it and all currently active other
521    partitions from bitmap DATA.  */
522 
523 static bool
524 visit_conflict (gimple *, tree op, tree, void *data)
525 {
526   bitmap active = (bitmap)data;
527   op = get_base_address (op);
528   if (op
529       && DECL_P (op)
530       && DECL_RTL_IF_SET (op) == pc_rtx)
531     {
532       size_t *v = decl_to_stack_part->get (op);
533       if (v && bitmap_set_bit (active, *v))
534 	{
535 	  size_t num = *v;
536 	  bitmap_iterator bi;
537 	  unsigned i;
538 	  gcc_assert (num < stack_vars_num);
539 	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
540 	    add_stack_var_conflict (num, i);
541 	}
542     }
543   return false;
544 }
545 
546 /* Helper routine for add_scope_conflicts, calculating the active partitions
547    at the end of BB, leaving the result in WORK.  We're called to generate
548    conflicts when FOR_CONFLICT is true, otherwise we're just tracking
549    liveness.  */
550 
551 static void
552 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
553 {
554   edge e;
555   edge_iterator ei;
556   gimple_stmt_iterator gsi;
557   walk_stmt_load_store_addr_fn visit;
558 
559   bitmap_clear (work);
560   FOR_EACH_EDGE (e, ei, bb->preds)
561     bitmap_ior_into (work, (bitmap)e->src->aux);
562 
563   visit = visit_op;
564 
565   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
566     {
567       gimple *stmt = gsi_stmt (gsi);
568       walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
569     }
570   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
571     {
572       gimple *stmt = gsi_stmt (gsi);
573 
574       if (gimple_clobber_p (stmt))
575 	{
576 	  tree lhs = gimple_assign_lhs (stmt);
577 	  size_t *v;
578 	  /* Nested function lowering might introduce LHSs
579 	     that are COMPONENT_REFs.  */
580 	  if (!VAR_P (lhs))
581 	    continue;
582 	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
583 	      && (v = decl_to_stack_part->get (lhs)))
584 	    bitmap_clear_bit (work, *v);
585 	}
586       else if (!is_gimple_debug (stmt))
587 	{
588 	  if (for_conflict
589 	      && visit == visit_op)
590 	    {
591 	      /* If this is the first real instruction in this BB we need
592 	         to add conflicts for everything live at this point now.
593 		 Unlike classical liveness for named objects we can't
594 		 rely on seeing a def/use of the names we're interested in.
595 		 There might merely be indirect loads/stores, for which we would
596 		 otherwise not add any conflicts.  */
597 	      bitmap_iterator bi;
598 	      unsigned i;
599 	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
600 		{
601 		  struct stack_var *a = &stack_vars[i];
602 		  if (!a->conflicts)
603 		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
604 		  bitmap_ior_into (a->conflicts, work);
605 		}
606 	      visit = visit_conflict;
607 	    }
608 	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
609 	}
610     }
611 }
612 
613 /* Generate stack partition conflicts between all partitions that are
614    simultaneously live.  */
615 
616 static void
617 add_scope_conflicts (void)
618 {
619   basic_block bb;
620   bool changed;
621   bitmap work = BITMAP_ALLOC (NULL);
622   int *rpo;
623   int n_bbs;
624 
625   /* We approximate the live range of a stack variable by taking the first
626      mention of its name as starting point(s), and by the end-of-scope
627      death clobber added by gimplify as ending point(s) of the range.
628      This overapproximates in cases where we, for instance, moved an
629      address-taken operation upward without also moving a dereference of it.
630      But it's conservatively correct, as a variable can never hold values
631      before its name is mentioned at least once.
632 
633      We then do a mostly classical bitmap liveness algorithm.  */
634 
635   FOR_ALL_BB_FN (bb, cfun)
636     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
637 
638   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
639   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
640 
641   changed = true;
642   while (changed)
643     {
644       int i;
645       changed = false;
646       for (i = 0; i < n_bbs; i++)
647 	{
648 	  bitmap active;
649 	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
650 	  active = (bitmap)bb->aux;
651 	  add_scope_conflicts_1 (bb, work, false);
652 	  if (bitmap_ior_into (active, work))
653 	    changed = true;
654 	}
655     }
656 
657   FOR_EACH_BB_FN (bb, cfun)
658     add_scope_conflicts_1 (bb, work, true);
659 
660   free (rpo);
661   BITMAP_FREE (work);
662   FOR_ALL_BB_FN (bb, cfun)
663     BITMAP_FREE (bb->aux);
664 }
665 
666 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
667    sorting an array of indices by the properties of the object.  */
668 
669 static int
670 stack_var_cmp (const void *a, const void *b)
671 {
672   size_t ia = *(const size_t *)a;
673   size_t ib = *(const size_t *)b;
674   unsigned int aligna = stack_vars[ia].alignb;
675   unsigned int alignb = stack_vars[ib].alignb;
676   HOST_WIDE_INT sizea = stack_vars[ia].size;
677   HOST_WIDE_INT sizeb = stack_vars[ib].size;
678   tree decla = stack_vars[ia].decl;
679   tree declb = stack_vars[ib].decl;
680   bool largea, largeb;
681   unsigned int uida, uidb;
682 
683   /* Primary compare on "large" alignment.  Large comes first.  */
684   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
685   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
686   if (largea != largeb)
687     return (int)largeb - (int)largea;
688 
689   /* Secondary compare on size, decreasing.  */
690   if (sizea > sizeb)
691     return -1;
692   if (sizea < sizeb)
693     return 1;
694 
695   /* Tertiary compare on true alignment, increasing.  */
696   if (aligna < alignb)
697     return -1;
698   if (aligna > alignb)
699     return 1;
700 
701   /* Final compare on ID for sort stability, decreasing.
702      Two SSA names are compared by their version, SSA names come before
703      non-SSA names, and two normal decls are compared by their DECL_UID.  */
704   if (TREE_CODE (decla) == SSA_NAME)
705     {
706       if (TREE_CODE (declb) == SSA_NAME)
707 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
708       else
709 	return -1;
710     }
711   else if (TREE_CODE (declb) == SSA_NAME)
712     return 1;
713   else
714     uida = DECL_UID (decla), uidb = DECL_UID (declb);
715   if (uida < uidb)
716     return 1;
717   if (uida > uidb)
718     return -1;
719   return 0;
720 }
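
/* For illustration (hypothetical decls, not from any particular target):
   assuming none of them needs "large" alignment, three decls A (size 8),
   B (size 24) and C (size 16) end up in stack_vars_sorted in the order
   B, C, A -- the size comparison places bigger objects first, and the
   alignment and ID comparisons only break ties.  */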
721 
722 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
723 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
724 
725 /* If the points-to solution *PT points to variables that are in a partition
726    together with other variables, add all partition members to the pointed-to
727    variables bitmap.  */
728 
729 static void
730 add_partitioned_vars_to_ptset (struct pt_solution *pt,
731 			       part_hashmap *decls_to_partitions,
732 			       hash_set<bitmap> *visited, bitmap temp)
733 {
734   bitmap_iterator bi;
735   unsigned i;
736   bitmap *part;
737 
738   if (pt->anything
739       || pt->vars == NULL
740       /* The pointed-to vars bitmap is shared, it is enough to
741 	 visit it once.  */
742       || visited->add (pt->vars))
743     return;
744 
745   bitmap_clear (temp);
746 
747   /* By using a temporary bitmap to store all members of the partitions
748      we have to add we make sure to visit each of the partitions only
749      once.  */
750   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
751     if ((!temp
752 	 || !bitmap_bit_p (temp, i))
753 	&& (part = decls_to_partitions->get (i)))
754       bitmap_ior_into (temp, *part);
755   if (!bitmap_empty_p (temp))
756     bitmap_ior_into (pt->vars, temp);
757 }
758 
759 /* Update points-to sets based on partition info, so we can use them on RTL.
760    The bitmaps representing stack partitions will be saved until expand,
761    where partitioned decls used as bases in memory expressions will be
762    rewritten.  */
763 
764 static void
765 update_alias_info_with_stack_vars (void)
766 {
767   part_hashmap *decls_to_partitions = NULL;
768   size_t i, j;
769   tree var = NULL_TREE;
770 
771   for (i = 0; i < stack_vars_num; i++)
772     {
773       bitmap part = NULL;
774       tree name;
775       struct ptr_info_def *pi;
776 
777       /* Not interested in partitions with a single variable.  */
778       if (stack_vars[i].representative != i
779           || stack_vars[i].next == EOC)
780         continue;
781 
782       if (!decls_to_partitions)
783 	{
784 	  decls_to_partitions = new part_hashmap;
785 	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
786 	}
787 
788       /* Create an SSA_NAME that points to the partition for use
789          as base during alias-oracle queries on RTL for bases that
790 	 have been partitioned.  */
791       if (var == NULL_TREE)
792 	var = create_tmp_var (ptr_type_node);
793       name = make_ssa_name (var);
794 
795       /* Create bitmaps representing partitions.  They will be used for
796          points-to sets later, so use GGC alloc.  */
797       part = BITMAP_GGC_ALLOC ();
798       for (j = i; j != EOC; j = stack_vars[j].next)
799 	{
800 	  tree decl = stack_vars[j].decl;
801 	  unsigned int uid = DECL_PT_UID (decl);
802 	  bitmap_set_bit (part, uid);
803 	  decls_to_partitions->put (uid, part);
804 	  cfun->gimple_df->decls_to_pointers->put (decl, name);
805 	  if (TREE_ADDRESSABLE (decl))
806 	    TREE_ADDRESSABLE (name) = 1;
807 	}
808 
809       /* Make the SSA name point to all partition members.  */
810       pi = get_ptr_info (name);
811       pt_solution_set (&pi->pt, part, false);
812     }
813 
814   /* Make all points-to sets that contain one member of a partition
815      contain all members of the partition.  */
816   if (decls_to_partitions)
817     {
818       unsigned i;
819       tree name;
820       hash_set<bitmap> visited;
821       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
822 
823       FOR_EACH_SSA_NAME (i, name, cfun)
824 	{
825 	  struct ptr_info_def *pi;
826 
827 	  if (POINTER_TYPE_P (TREE_TYPE (name))
828 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
829 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
830 					   &visited, temp);
831 	}
832 
833       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
834 				     decls_to_partitions, &visited, temp);
835 
836       delete decls_to_partitions;
837       BITMAP_FREE (temp);
838     }
839 }
840 
841 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
842    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
843    Merge them into a single partition A.  */
844 
845 static void
846 union_stack_vars (size_t a, size_t b)
847 {
848   struct stack_var *vb = &stack_vars[b];
849   bitmap_iterator bi;
850   unsigned u;
851 
852   gcc_assert (stack_vars[b].next == EOC);
853    /* Add B to A's partition.  */
854   stack_vars[b].next = stack_vars[a].next;
855   stack_vars[b].representative = a;
856   stack_vars[a].next = b;
857 
858   /* Update the required alignment of partition A to account for B.  */
859   if (stack_vars[a].alignb < stack_vars[b].alignb)
860     stack_vars[a].alignb = stack_vars[b].alignb;
861 
862   /* Update the interference graph and merge the conflicts.  */
863   if (vb->conflicts)
864     {
865       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
866 	add_stack_var_conflict (a, stack_vars[u].representative);
867       BITMAP_FREE (vb->conflicts);
868     }
869 }
870 
871 /* A subroutine of expand_used_vars.  Binpack the variables into
872    partitions constrained by the interference graph.  The overall
873    algorithm used is as follows:
874 
875 	Sort the objects by size in descending order.
876 	For each object A {
877 	  S = size(A)
878 	  O = 0
879 	  loop {
880 	    Look for the largest non-conflicting object B with size <= S.
881 	    UNION (A, B)
882 	  }
883 	}
884 */
885 
886 static void
887 partition_stack_vars (void)
888 {
889   size_t si, sj, n = stack_vars_num;
890 
891   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
892   for (si = 0; si < n; ++si)
893     stack_vars_sorted[si] = si;
894 
895   if (n == 1)
896     return;
897 
898   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
899 
900   for (si = 0; si < n; ++si)
901     {
902       size_t i = stack_vars_sorted[si];
903       unsigned int ialign = stack_vars[i].alignb;
904       HOST_WIDE_INT isize = stack_vars[i].size;
905 
906       /* Ignore objects that aren't partition representatives. If we
907          see a var that is not a partition representative, it must
908          have been merged earlier.  */
909       if (stack_vars[i].representative != i)
910         continue;
911 
912       for (sj = si + 1; sj < n; ++sj)
913 	{
914 	  size_t j = stack_vars_sorted[sj];
915 	  unsigned int jalign = stack_vars[j].alignb;
916 	  HOST_WIDE_INT jsize = stack_vars[j].size;
917 
918 	  /* Ignore objects that aren't partition representatives.  */
919 	  if (stack_vars[j].representative != j)
920 	    continue;
921 
922 	  /* Do not mix objects of "small" (supported) alignment
923 	     and "large" (unsupported) alignment.  */
924 	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
925 	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
926 	    break;
927 
928 	  /* For Address Sanitizer do not mix objects with different
929 	     sizes, as the shorter vars wouldn't be adequately protected.
930 	     Don't do that for "large" (unsupported) alignment objects,
931 	     those aren't protected anyway.  */
932 	  if ((asan_sanitize_stack_p ())
933 	      && isize != jsize
934 	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
935 	    break;
936 
937 	  /* Ignore conflicting objects.  */
938 	  if (stack_var_conflict_p (i, j))
939 	    continue;
940 
941 	  /* UNION the objects; J joins I's partition.  */
942 	  union_stack_vars (i, j);
943 	}
944     }
945 
946   update_alias_info_with_stack_vars ();
947 }
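
/* The following is a small self-contained model of the greedy merge loop
   above, kept out of the build with #if 0.  All names are hypothetical;
   a plain 0/1 matrix stands in for the conflict bitmaps, and merging a
   variable into a partition also ORs its conflicts into the
   representative's row, mirroring what union_stack_vars does.  */
#if 0
#include <stddef.h>

#define SKETCH_N 4

/* Interference between four variables already sorted by decreasing size:
   0 conflicts with 1, and 2 conflicts with 3.  */
static int sketch_conflict[SKETCH_N][SKETCH_N] = {
  { 0, 1, 0, 0 },
  { 1, 0, 0, 0 },
  { 0, 0, 0, 1 },
  { 0, 0, 1, 0 }
};

static void
sketch_partition (size_t rep[SKETCH_N])
{
  for (size_t i = 0; i < SKETCH_N; ++i)
    rep[i] = i;				/* every variable starts alone */

  for (size_t i = 0; i < SKETCH_N; ++i)
    {
      if (rep[i] != i)			/* already merged into an earlier one */
	continue;
      for (size_t j = i + 1; j < SKETCH_N; ++j)
	if (rep[j] == j && !sketch_conflict[i][j])
	  {
	    rep[j] = i;			/* J joins I's partition ...  */
	    for (size_t k = 0; k < SKETCH_N; ++k)
	      sketch_conflict[i][k] |= sketch_conflict[j][k];
	  }				/* ... and I inherits J's conflicts.  */
    }
  /* Result: rep == { 0, 1, 0, 1 }, i.e. partitions {0, 2} and {1, 3}.  */
}
#endif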
948 
949 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
950 
951 static void
952 dump_stack_var_partition (void)
953 {
954   size_t si, i, j, n = stack_vars_num;
955 
956   for (si = 0; si < n; ++si)
957     {
958       i = stack_vars_sorted[si];
959 
960       /* Skip variables that aren't partition representatives, for now.  */
961       if (stack_vars[i].representative != i)
962 	continue;
963 
964       fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
965 	       " align %u\n", (unsigned long) i, stack_vars[i].size,
966 	       stack_vars[i].alignb);
967 
968       for (j = i; j != EOC; j = stack_vars[j].next)
969 	{
970 	  fputc ('\t', dump_file);
971 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
972 	}
973       fputc ('\n', dump_file);
974     }
975 }
976 
977 /* Assign rtl to DECL at BASE + OFFSET.  */
978 
979 static void
980 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
981 			 HOST_WIDE_INT offset)
982 {
983   unsigned align;
984   rtx x;
985 
986   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
987   gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
988 
989   x = plus_constant (Pmode, base, offset);
990   x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
991 		   ? TYPE_MODE (TREE_TYPE (decl))
992 		   : DECL_MODE (SSAVAR (decl)), x);
993 
994   if (TREE_CODE (decl) != SSA_NAME)
995     {
996       /* Set the alignment we actually gave this decl if it isn't an SSA name.
997          If it is, we generate stack slots only accidentally, so it isn't as
998 	 important; we'll simply use the alignment that is already set.  */
999       if (base == virtual_stack_vars_rtx)
1000 	offset -= frame_phase;
1001       align = least_bit_hwi (offset);
1002       align *= BITS_PER_UNIT;
1003       if (align == 0 || align > base_align)
1004 	align = base_align;
1005 
1006       /* One would think that we could assert that we're not decreasing
1007 	 alignment here, but (at least) the i386 port does exactly this
1008 	 via the MINIMUM_ALIGNMENT hook.  */
1009 
1010       SET_DECL_ALIGN (decl, align);
1011       DECL_USER_ALIGN (decl) = 0;
1012     }
1013 
1014   set_rtl (decl, x);
1015 }
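
/* As a worked example of the alignment computed above (an editorial
   illustration): a decl placed at frame offset 40 from
   virtual_stack_vars_rtx with frame_phase 0 has least_bit_hwi (40) == 8,
   i.e. 64-bit alignment, which is then capped by BASE_ALIGN; an offset of
   0 would leave the alignment at BASE_ALIGN itself.  */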
1016 
1017 struct stack_vars_data
1018 {
1019   /* Vector of offset pairs, always the end of some padding followed
1020      by the start of the padding that needs Address Sanitizer protection.
1021      The vector is in reverse order; highest offset pairs come first.  */
1022   auto_vec<HOST_WIDE_INT> asan_vec;
1023 
1024   /* Vector of partition representative decls in between the paddings.  */
1025   auto_vec<tree> asan_decl_vec;
1026 
1027   /* Base pseudo register for Address Sanitizer protected automatic vars.  */
1028   rtx asan_base;
1029 
1030   /* Alignment needed for the Address Sanitizer protected automatic vars.  */
1031   unsigned int asan_alignb;
1032 };
1033 
1034 /* A subroutine of expand_used_vars.  Give each partition representative
1035    a unique location within the stack frame.  Update each partition member
1036    with that location.  */
1037 
1038 static void
1039 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1040 {
1041   size_t si, i, j, n = stack_vars_num;
1042   HOST_WIDE_INT large_size = 0, large_alloc = 0;
1043   rtx large_base = NULL;
1044   unsigned large_align = 0;
1045   bool large_allocation_done = false;
1046   tree decl;
1047 
1048   /* Determine if there are any variables requiring "large" alignment.
1049      Since these are dynamically allocated, we only process them when
1050      no predicate is involved.  */
1051   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1052   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1053     {
1054       /* Find the total size of these variables.  */
1055       for (si = 0; si < n; ++si)
1056 	{
1057 	  unsigned alignb;
1058 
1059 	  i = stack_vars_sorted[si];
1060 	  alignb = stack_vars[i].alignb;
1061 
1062 	  /* All "large" alignment decls come before all "small" alignment
1063 	     decls, but "large" alignment decls are not sorted based on
1064 	     their alignment.  Increase large_align to track the largest
1065 	     required alignment.  */
1066 	  if ((alignb * BITS_PER_UNIT) > large_align)
1067 	    large_align = alignb * BITS_PER_UNIT;
1068 
1069 	  /* Stop when we get to the first decl with "small" alignment.  */
1070 	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1071 	    break;
1072 
1073 	  /* Skip variables that aren't partition representatives.  */
1074 	  if (stack_vars[i].representative != i)
1075 	    continue;
1076 
1077 	  /* Skip variables that have already had rtl assigned.  See also
1078 	     add_stack_var where we perpetrate this pc_rtx hack.  */
1079 	  decl = stack_vars[i].decl;
1080 	  if (TREE_CODE (decl) == SSA_NAME
1081 	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1082 	      : DECL_RTL (decl) != pc_rtx)
1083 	    continue;
1084 
1085 	  large_size += alignb - 1;
1086 	  large_size &= -(HOST_WIDE_INT)alignb;
1087 	  large_size += stack_vars[i].size;
1088 	}
1089     }
1090 
1091   for (si = 0; si < n; ++si)
1092     {
1093       rtx base;
1094       unsigned base_align, alignb;
1095       HOST_WIDE_INT offset;
1096 
1097       i = stack_vars_sorted[si];
1098 
1099       /* Skip variables that aren't partition representatives, for now.  */
1100       if (stack_vars[i].representative != i)
1101 	continue;
1102 
1103       /* Skip variables that have already had rtl assigned.  See also
1104 	 add_stack_var where we perpetrate this pc_rtx hack.  */
1105       decl = stack_vars[i].decl;
1106       if (TREE_CODE (decl) == SSA_NAME
1107 	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1108 	  : DECL_RTL (decl) != pc_rtx)
1109 	continue;
1110 
1111       /* Check the predicate to see whether this variable should be
1112 	 allocated in this pass.  */
1113       if (pred && !pred (i))
1114 	continue;
1115 
1116       alignb = stack_vars[i].alignb;
1117       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1118 	{
1119 	  base = virtual_stack_vars_rtx;
1120 	  if ((asan_sanitize_stack_p ())
1121 	      && pred)
1122 	    {
1123 	      HOST_WIDE_INT prev_offset
1124 		= align_base (frame_offset,
1125 			      MAX (alignb, ASAN_RED_ZONE_SIZE),
1126 			      !FRAME_GROWS_DOWNWARD);
1127 	      tree repr_decl = NULL_TREE;
1128 	      offset
1129 		= alloc_stack_frame_space (stack_vars[i].size
1130 					   + ASAN_RED_ZONE_SIZE,
1131 					   MAX (alignb, ASAN_RED_ZONE_SIZE));
1132 
1133 	      data->asan_vec.safe_push (prev_offset);
1134 	      data->asan_vec.safe_push (offset + stack_vars[i].size);
1135 	      /* Find the best representative of the partition.
1136 		 Prefer decls with a DECL_NAME, better still those that also
1137 		 satisfy the asan_protect_stack_decl predicate.  */
1138 	      for (j = i; j != EOC; j = stack_vars[j].next)
1139 		if (asan_protect_stack_decl (stack_vars[j].decl)
1140 		    && DECL_NAME (stack_vars[j].decl))
1141 		  {
1142 		    repr_decl = stack_vars[j].decl;
1143 		    break;
1144 		  }
1145 		else if (repr_decl == NULL_TREE
1146 			 && DECL_P (stack_vars[j].decl)
1147 			 && DECL_NAME (stack_vars[j].decl))
1148 		  repr_decl = stack_vars[j].decl;
1149 	      if (repr_decl == NULL_TREE)
1150 		repr_decl = stack_vars[i].decl;
1151 	      data->asan_decl_vec.safe_push (repr_decl);
1152 	      data->asan_alignb = MAX (data->asan_alignb, alignb);
1153 	      if (data->asan_base == NULL)
1154 		data->asan_base = gen_reg_rtx (Pmode);
1155 	      base = data->asan_base;
1156 
1157 	      if (!STRICT_ALIGNMENT)
1158 		base_align = crtl->max_used_stack_slot_alignment;
1159 	      else
1160 		base_align = MAX (crtl->max_used_stack_slot_alignment,
1161 				  GET_MODE_ALIGNMENT (SImode)
1162 				  << ASAN_SHADOW_SHIFT);
1163 	    }
1164 	  else
1165 	    {
1166 	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1167 	      base_align = crtl->max_used_stack_slot_alignment;
1168 	    }
1169 	}
1170       else
1171 	{
1172 	  /* Large alignment is only processed in the last pass.  */
1173 	  if (pred)
1174 	    continue;
1175 
1176 	  /* If there were any variables requiring "large" alignment, allocate
1177 	     space.  */
1178 	  if (large_size > 0 && ! large_allocation_done)
1179 	    {
1180 	      HOST_WIDE_INT loffset;
1181 	      rtx large_allocsize;
1182 
1183 	      large_allocsize = GEN_INT (large_size);
1184 	      get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1185 	      loffset = alloc_stack_frame_space
1186 		(INTVAL (large_allocsize),
1187 		 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1188 	      large_base = get_dynamic_stack_base (loffset, large_align);
1189 	      large_allocation_done = true;
1190 	    }
1191 	  gcc_assert (large_base != NULL);
1192 
1193 	  large_alloc += alignb - 1;
1194 	  large_alloc &= -(HOST_WIDE_INT)alignb;
1195 	  offset = large_alloc;
1196 	  large_alloc += stack_vars[i].size;
1197 
1198 	  base = large_base;
1199 	  base_align = large_align;
1200 	}
1201 
1202       /* Create rtl for each variable based on its location within the
1203 	 partition.  */
1204       for (j = i; j != EOC; j = stack_vars[j].next)
1205 	{
1206 	  expand_one_stack_var_at (stack_vars[j].decl,
1207 				   base, base_align,
1208 				   offset);
1209 	}
1210     }
1211 
1212   gcc_assert (large_alloc == large_size);
1213 }
1214 
1215 /* Take into account all sizes of partitions and reset DECL_RTLs.  */
1216 static HOST_WIDE_INT
1217 account_stack_vars (void)
1218 {
1219   size_t si, j, i, n = stack_vars_num;
1220   HOST_WIDE_INT size = 0;
1221 
1222   for (si = 0; si < n; ++si)
1223     {
1224       i = stack_vars_sorted[si];
1225 
1226       /* Skip variables that aren't partition representatives, for now.  */
1227       if (stack_vars[i].representative != i)
1228 	continue;
1229 
1230       size += stack_vars[i].size;
1231       for (j = i; j != EOC; j = stack_vars[j].next)
1232 	set_rtl (stack_vars[j].decl, NULL);
1233     }
1234   return size;
1235 }
1236 
1237 /* Record the RTL assignment X for the default def of PARM.  */
1238 
1239 extern void
1240 set_parm_rtl (tree parm, rtx x)
1241 {
1242   gcc_assert (TREE_CODE (parm) == PARM_DECL
1243 	      || TREE_CODE (parm) == RESULT_DECL);
1244 
1245   if (x && !MEM_P (x))
1246     {
1247       unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1248 					      TYPE_MODE (TREE_TYPE (parm)),
1249 					      TYPE_ALIGN (TREE_TYPE (parm)));
1250 
1251       /* If the variable alignment is very large we'll dynamically
1252 	 allocate it, which means that the in-frame portion is just a
1253 	 pointer.  ??? We've got a pseudo for sure here, do we
1254 	 actually dynamically allocate its spilling area if needed?
1255 	 ??? Isn't it a problem when Pmode alignment also exceeds
1256 	 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
1257       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1258 	align = GET_MODE_ALIGNMENT (Pmode);
1259 
1260       record_alignment_for_reg_var (align);
1261     }
1262 
1263   tree ssa = ssa_default_def (cfun, parm);
1264   if (!ssa)
1265     return set_rtl (parm, x);
1266 
1267   int part = var_to_partition (SA.map, ssa);
1268   gcc_assert (part != NO_PARTITION);
1269 
1270   bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1271   gcc_assert (changed);
1272 
1273   set_rtl (ssa, x);
1274   gcc_assert (DECL_RTL (parm) == x);
1275 }
1276 
1277 /* A subroutine of expand_one_var.  Called to immediately assign rtl
1278    to a variable to be allocated in the stack frame.  */
1279 
1280 static void
1281 expand_one_stack_var_1 (tree var)
1282 {
1283   HOST_WIDE_INT size, offset;
1284   unsigned byte_align;
1285 
1286   if (TREE_CODE (var) == SSA_NAME)
1287     {
1288       tree type = TREE_TYPE (var);
1289       size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1290       byte_align = TYPE_ALIGN_UNIT (type);
1291     }
1292   else
1293     {
1294       size = tree_to_uhwi (DECL_SIZE_UNIT (var));
1295       byte_align = align_local_variable (var);
1296     }
1297 
1298   /* We handle highly aligned variables in expand_stack_vars.  */
1299   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1300 
1301   offset = alloc_stack_frame_space (size, byte_align);
1302 
1303   expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1304 			   crtl->max_used_stack_slot_alignment, offset);
1305 }
1306 
1307 /* Wrapper for expand_one_stack_var_1 that checks that SSA_NAMEs have
1308    already been assigned some MEM.  */
1309 
1310 static void
1311 expand_one_stack_var (tree var)
1312 {
1313   if (TREE_CODE (var) == SSA_NAME)
1314     {
1315       int part = var_to_partition (SA.map, var);
1316       if (part != NO_PARTITION)
1317 	{
1318 	  rtx x = SA.partition_to_pseudo[part];
1319 	  gcc_assert (x);
1320 	  gcc_assert (MEM_P (x));
1321 	  return;
1322 	}
1323     }
1324 
1325   return expand_one_stack_var_1 (var);
1326 }
1327 
1328 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1329    that will reside in a hard register.  */
1330 
1331 static void
1332 expand_one_hard_reg_var (tree var)
1333 {
1334   rest_of_decl_compilation (var, 0, 0);
1335 }
1336 
1337 /* Record the alignment requirements of some variable assigned to a
1338    pseudo.  */
1339 
1340 static void
1341 record_alignment_for_reg_var (unsigned int align)
1342 {
1343   if (SUPPORTS_STACK_ALIGNMENT
1344       && crtl->stack_alignment_estimated < align)
1345     {
1346       /* stack_alignment_estimated shouldn't change after the stack
1347          realign decision has been made.  */
1348       gcc_assert (!crtl->stack_realign_processed);
1349       crtl->stack_alignment_estimated = align;
1350     }
1351 
1352   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1353      So here we only make sure stack_alignment_needed >= align.  */
1354   if (crtl->stack_alignment_needed < align)
1355     crtl->stack_alignment_needed = align;
1356   if (crtl->max_used_stack_slot_alignment < align)
1357     crtl->max_used_stack_slot_alignment = align;
1358 }
1359 
1360 /* Create RTL for an SSA partition.  */
1361 
1362 static void
1363 expand_one_ssa_partition (tree var)
1364 {
1365   int part = var_to_partition (SA.map, var);
1366   gcc_assert (part != NO_PARTITION);
1367 
1368   if (SA.partition_to_pseudo[part])
1369     return;
1370 
1371   unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1372 					  TYPE_MODE (TREE_TYPE (var)),
1373 					  TYPE_ALIGN (TREE_TYPE (var)));
1374 
1375   /* If the variable alignment is very large we'll dynamically allocate
1376      it, which means that the in-frame portion is just a pointer.  */
1377   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1378     align = GET_MODE_ALIGNMENT (Pmode);
1379 
1380   record_alignment_for_reg_var (align);
1381 
1382   if (!use_register_for_decl (var))
1383     {
1384       if (defer_stack_allocation (var, true))
1385 	add_stack_var (var);
1386       else
1387 	expand_one_stack_var_1 (var);
1388       return;
1389     }
1390 
1391   machine_mode reg_mode = promote_ssa_mode (var, NULL);
1392 
1393   rtx x = gen_reg_rtx (reg_mode);
1394 
1395   set_rtl (var, x);
1396 }
1397 
1398 /* Record the association between the RTL generated for partition PART
1399    and the underlying variable of the SSA_NAME VAR.  */
1400 
1401 static void
1402 adjust_one_expanded_partition_var (tree var)
1403 {
1404   if (!var)
1405     return;
1406 
1407   tree decl = SSA_NAME_VAR (var);
1408 
1409   int part = var_to_partition (SA.map, var);
1410   if (part == NO_PARTITION)
1411     return;
1412 
1413   rtx x = SA.partition_to_pseudo[part];
1414 
1415   gcc_assert (x);
1416 
1417   set_rtl (var, x);
1418 
1419   if (!REG_P (x))
1420     return;
1421 
1422   /* Note if the object is a user variable.  */
1423   if (decl && !DECL_ARTIFICIAL (decl))
1424     mark_user_reg (x);
1425 
1426   if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1427     mark_reg_pointer (x, get_pointer_alignment (var));
1428 }
1429 
1430 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1431    that will reside in a pseudo register.  */
1432 
1433 static void
1434 expand_one_register_var (tree var)
1435 {
1436   if (TREE_CODE (var) == SSA_NAME)
1437     {
1438       int part = var_to_partition (SA.map, var);
1439       if (part != NO_PARTITION)
1440 	{
1441 	  rtx x = SA.partition_to_pseudo[part];
1442 	  gcc_assert (x);
1443 	  gcc_assert (REG_P (x));
1444 	  return;
1445 	}
1446       gcc_unreachable ();
1447     }
1448 
1449   tree decl = var;
1450   tree type = TREE_TYPE (decl);
1451   machine_mode reg_mode = promote_decl_mode (decl, NULL);
1452   rtx x = gen_reg_rtx (reg_mode);
1453 
1454   set_rtl (var, x);
1455 
1456   /* Note if the object is a user variable.  */
1457   if (!DECL_ARTIFICIAL (decl))
1458     mark_user_reg (x);
1459 
1460   if (POINTER_TYPE_P (type))
1461     mark_reg_pointer (x, get_pointer_alignment (var));
1462 }
1463 
1464 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1465    has some associated error, e.g. its type is error-mark.  We just need
1466    to pick something that won't crash the rest of the compiler.  */
1467 
1468 static void
1469 expand_one_error_var (tree var)
1470 {
1471   machine_mode mode = DECL_MODE (var);
1472   rtx x;
1473 
1474   if (mode == BLKmode)
1475     x = gen_rtx_MEM (BLKmode, const0_rtx);
1476   else if (mode == VOIDmode)
1477     x = const0_rtx;
1478   else
1479     x = gen_reg_rtx (mode);
1480 
1481   SET_DECL_RTL (var, x);
1482 }
1483 
1484 /* A subroutine of expand_one_var.  VAR is a variable that will be
1485    allocated to the local stack frame.  Return true if we wish to
1486    add VAR to STACK_VARS so that it will be coalesced with other
1487    variables.  Return false to allocate VAR immediately.
1488 
1489    This function is used to reduce the number of variables considered
1490    for coalescing, which reduces the size of the quadratic problem.  */
1491 
1492 static bool
1493 defer_stack_allocation (tree var, bool toplevel)
1494 {
1495   tree size_unit = TREE_CODE (var) == SSA_NAME
1496     ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1497     : DECL_SIZE_UNIT (var);
1498 
1499   /* Whether the variable is small enough for immediate allocation not to be
1500      a problem with regard to the frame size.  */
1501   bool smallish
1502     = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
1503        < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1504 
1505   /* If stack protection is enabled, *all* stack variables must be deferred,
1506      so that we can re-order the strings to the top of the frame.
1507      Similarly for Address Sanitizer.  */
1508   if (flag_stack_protect || asan_sanitize_stack_p ())
1509     return true;
1510 
1511   unsigned int align = TREE_CODE (var) == SSA_NAME
1512     ? TYPE_ALIGN (TREE_TYPE (var))
1513     : DECL_ALIGN (var);
1514 
1515   /* We handle "large" alignment via dynamic allocation.  We want to handle
1516      this extra complication in only one place, so defer them.  */
1517   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1518     return true;
1519 
1520   bool ignored = TREE_CODE (var) == SSA_NAME
1521     ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1522     : DECL_IGNORED_P (var);
1523 
1524   /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1525      might be detached from their block and appear at toplevel when we reach
1526      here.  We want to coalesce them with variables from other blocks when
1527      the immediate contribution to the frame size would be noticeable.  */
1528   if (toplevel && optimize > 0 && ignored && !smallish)
1529     return true;
1530 
1531   /* Variables declared in the outermost scope automatically conflict
1532      with every other variable.  The only reason to want to defer them
1533      at all is that, after sorting, we can more efficiently pack
1534      small variables in the stack frame.  Continue to defer at -O2.  */
1535   if (toplevel && optimize < 2)
1536     return false;
1537 
1538   /* Without optimization, *most* variables are allocated from the
1539      stack, which makes the quadratic problem large exactly when we
1540      want compilation to proceed as quickly as possible.  On the
1541      other hand, we don't want the function's stack frame size to
1542      get completely out of hand.  So we avoid adding scalars and
1543      "small" aggregates to the list at all.  */
1544   if (optimize == 0 && smallish)
1545     return false;
1546 
1547   return true;
1548 }
1549 
1550 /* A subroutine of expand_used_vars.  Expand one variable according to
1551    its flavor.  Variables to be placed on the stack are not actually
1552    expanded yet, merely recorded.
1553    When REALLY_EXPAND is false, only add stack values to be allocated.
1554    Return the stack usage this variable is supposed to take.
1555 */
1556 
1557 static HOST_WIDE_INT
1558 expand_one_var (tree var, bool toplevel, bool really_expand)
1559 {
1560   unsigned int align = BITS_PER_UNIT;
1561   tree origvar = var;
1562 
1563   var = SSAVAR (var);
1564 
1565   if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1566     {
1567       if (is_global_var (var))
1568 	return 0;
1569 
1570       /* Because we don't know if VAR will be in a register or on the stack,
1571 	 we conservatively assume it will be on the stack even if VAR is
1572 	 eventually put into a register after the RA pass.  For non-automatic
1573 	 variables, which won't be on the stack, we collect the alignment of
1574 	 the type and ignore any user-specified alignment.  Similarly for
1575 	 SSA_NAMEs for which use_register_for_decl returns true.  */
1576       if (TREE_STATIC (var)
1577 	  || DECL_EXTERNAL (var)
1578 	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1579 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1580 				   TYPE_MODE (TREE_TYPE (var)),
1581 				   TYPE_ALIGN (TREE_TYPE (var)));
1582       else if (DECL_HAS_VALUE_EXPR_P (var)
1583 	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1584 	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1585 	   or variables which were assigned a stack slot already by
1586 	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1587 	   changed to match the offset chosen for it.  */
1588 	align = crtl->stack_alignment_estimated;
1589       else
1590 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1591 
1592       /* If the variable alignment is very large we'll dynamically allocate
1593 	 it, which means that the in-frame portion is just a pointer.  */
1594       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1595 	align = GET_MODE_ALIGNMENT (Pmode);
1596     }
1597 
1598   record_alignment_for_reg_var (align);
1599 
1600   if (TREE_CODE (origvar) == SSA_NAME)
1601     {
1602       gcc_assert (!VAR_P (var)
1603 		  || (!DECL_EXTERNAL (var)
1604 		      && !DECL_HAS_VALUE_EXPR_P (var)
1605 		      && !TREE_STATIC (var)
1606 		      && TREE_TYPE (var) != error_mark_node
1607 		      && !DECL_HARD_REGISTER (var)
1608 		      && really_expand));
1609     }
1610   if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1611     ;
1612   else if (DECL_EXTERNAL (var))
1613     ;
1614   else if (DECL_HAS_VALUE_EXPR_P (var))
1615     ;
1616   else if (TREE_STATIC (var))
1617     ;
1618   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1619     ;
1620   else if (TREE_TYPE (var) == error_mark_node)
1621     {
1622       if (really_expand)
1623         expand_one_error_var (var);
1624     }
1625   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1626     {
1627       if (really_expand)
1628 	{
1629 	  expand_one_hard_reg_var (var);
1630 	  if (!DECL_HARD_REGISTER (var))
1631 	    /* Invalid register specification.  */
1632 	    expand_one_error_var (var);
1633 	}
1634     }
1635   else if (use_register_for_decl (var))
1636     {
1637       if (really_expand)
1638         expand_one_register_var (origvar);
1639     }
1640   else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1641     {
1642       /* Reject variables which cover more than half of the address-space.  */
1643       if (really_expand)
1644 	{
1645 	  error ("size of variable %q+D is too large", var);
1646 	  expand_one_error_var (var);
1647 	}
1648     }
1649   else if (defer_stack_allocation (var, toplevel))
1650     add_stack_var (origvar);
1651   else
1652     {
1653       if (really_expand)
1654         {
1655           if (lookup_attribute ("naked",
1656                                 DECL_ATTRIBUTES (current_function_decl)))
1657             error ("cannot allocate stack for variable %q+D, naked function.",
1658                    var);
1659 
1660           expand_one_stack_var (origvar);
1661         }
1662 
1663 
1664       return tree_to_uhwi (DECL_SIZE_UNIT (var));
1665     }
1666   return 0;
1667 }
1668 
1669 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1670    expanding variables.  Those variables that can be put into registers
1671    are allocated pseudos; those that can't are put on the stack.
1672 
1673    TOPLEVEL is true if this is the outermost BLOCK.  */
1674 
1675 static void
1676 expand_used_vars_for_block (tree block, bool toplevel)
1677 {
1678   tree t;
1679 
1680   /* Expand all variables at this level.  */
1681   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1682     if (TREE_USED (t)
1683         && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1684 	    || !DECL_NONSHAREABLE (t)))
1685       expand_one_var (t, toplevel, true);
1686 
1687   /* Expand all variables at contained (nested) levels.  */
1688   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1689     expand_used_vars_for_block (t, false);
1690 }
1691 
1692 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1693    and clear TREE_USED on all local variables.  */
1694 
1695 static void
1696 clear_tree_used (tree block)
1697 {
1698   tree t;
1699 
1700   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1701     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1702     if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1703 	|| !DECL_NONSHAREABLE (t))
1704       TREE_USED (t) = 0;
1705 
1706   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1707     clear_tree_used (t);
1708 }
1709 
1710 enum {
1711   SPCT_FLAG_DEFAULT = 1,
1712   SPCT_FLAG_ALL = 2,
1713   SPCT_FLAG_STRONG = 3,
1714   SPCT_FLAG_EXPLICIT = 4
1715 };
1716 
1717 /* Examine TYPE and determine a bit mask of the following features.  */
1718 
1719 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1720 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1721 #define SPCT_HAS_ARRAY			4
1722 #define SPCT_HAS_AGGREGATE		8
1723 
1724 static unsigned int
1725 stack_protect_classify_type (tree type)
1726 {
1727   unsigned int ret = 0;
1728   tree t;
1729 
1730   switch (TREE_CODE (type))
1731     {
1732     case ARRAY_TYPE:
1733       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1734       if (t == char_type_node
1735 	  || t == signed_char_type_node
1736 	  || t == unsigned_char_type_node)
1737 	{
1738 	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1739 	  unsigned HOST_WIDE_INT len;
1740 
1741 	  if (!TYPE_SIZE_UNIT (type)
1742 	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1743 	    len = max;
1744 	  else
1745 	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1746 
1747 	  if (len == 0)
1748 	    ret = SPCT_HAS_ARRAY;
1749 	  else if (len < max)
1750 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1751 	  else
1752 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1753 	}
1754       else
1755 	ret = SPCT_HAS_ARRAY;
1756       break;
1757 
1758     case UNION_TYPE:
1759     case QUAL_UNION_TYPE:
1760     case RECORD_TYPE:
1761       ret = SPCT_HAS_AGGREGATE;
1762       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1763 	if (TREE_CODE (t) == FIELD_DECL)
1764 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1765       break;
1766 
1767     default:
1768       break;
1769     }
1770 
1771   return ret;
1772 }
1773 
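/* As a worked example, assuming the default --param ssp-buffer-size=8,
   the classification above yields:

       char buf[4];               -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
       char buf[64];              -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
       int a[10];                 -> SPCT_HAS_ARRAY
       struct { char b[64]; } s;  -> SPCT_HAS_AGGREGATE
                                     | SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY

   since the aggregate cases OR in the classification of every field.  */
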
1774 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1775    part of the local stack frame.  Remember if we ever return nonzero for
1776    any variable in this function.  The return value is the phase number in
1777    which the variable should be allocated.  */
1778 
1779 static int
1780 stack_protect_decl_phase (tree decl)
1781 {
1782   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1783   int ret = 0;
1784 
1785   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1786     has_short_buffer = true;
1787 
1788   if (flag_stack_protect == SPCT_FLAG_ALL
1789       || flag_stack_protect == SPCT_FLAG_STRONG
1790       || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1791 	  && lookup_attribute ("stack_protect",
1792 			       DECL_ATTRIBUTES (current_function_decl))))
1793     {
1794       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1795 	  && !(bits & SPCT_HAS_AGGREGATE))
1796 	ret = 1;
1797       else if (bits & SPCT_HAS_ARRAY)
1798 	ret = 2;
1799     }
1800   else
1801     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1802 
1803   if (ret)
1804     has_protected_decls = true;
1805 
1806   return ret;
1807 }
1808 
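/* For instance, with -fstack-protector-strong (SPCT_FLAG_STRONG):

       char buf[4];               -> phase 1 (char array, no enclosing aggregate)
       char buf[64];              -> phase 1
       int a[10];                 -> phase 2 (array that is not a char array)
       struct { char b[64]; } s;  -> phase 2 (char array inside an aggregate)
       int x;                     -> phase 0 (not segregated)

   whereas with plain -fstack-protector (SPCT_FLAG_DEFAULT) only types
   containing a large char array reach phase 1 and everything else stays
   in phase 0.  */
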
1809 /* Two helper routines that check for phase 1 and phase 2.  These are used
1810    as callbacks for expand_stack_vars.  */
1811 
1812 static bool
1813 stack_protect_decl_phase_1 (size_t i)
1814 {
1815   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1816 }
1817 
1818 static bool
1819 stack_protect_decl_phase_2 (size_t i)
1820 {
1821   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1822 }
1823 
1824 /* A helper function that checks for the asan phase (with stack protector
1825    it is phase 3).  This is used as a callback for expand_stack_vars.
1826    Returns true if any of the vars in the partition need to be protected.  */
1827 
1828 static bool
1829 asan_decl_phase_3 (size_t i)
1830 {
1831   while (i != EOC)
1832     {
1833       if (asan_protect_stack_decl (stack_vars[i].decl))
1834 	return true;
1835       i = stack_vars[i].next;
1836     }
1837   return false;
1838 }
1839 
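/* Note that the walk above uses the same index-linked representation as
   the partitioning code: stack_vars[i].next chains the members of a
   partition and EOC terminates the chain, so the whole partition is
   treated as needing asan protection as soon as one member does.  */
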
1840 /* Ensure that variables in different stack protection phases conflict
1841    so that they are not merged and share the same stack slot.  */
1842 
1843 static void
1844 add_stack_protection_conflicts (void)
1845 {
1846   size_t i, j, n = stack_vars_num;
1847   unsigned char *phase;
1848 
1849   phase = XNEWVEC (unsigned char, n);
1850   for (i = 0; i < n; ++i)
1851     phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1852 
1853   for (i = 0; i < n; ++i)
1854     {
1855       unsigned char ph_i = phase[i];
1856       for (j = i + 1; j < n; ++j)
1857 	if (ph_i != phase[j])
1858 	  add_stack_var_conflict (i, j);
1859     }
1860 
1861   XDELETEVEC (phase);
1862 }
1863 
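/* For example, if four tracked variables classify into phases
   { 1, 1, 2, 3 }, the loops above add the conflicts (0,2), (0,3),
   (1,2), (1,3) and (2,3), so partitioning may still merge variables
   0 and 1 but never a variable with one from a different phase.  */
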
1864 /* Create a decl for the guard at the top of the stack frame.  */
1865 
1866 static void
1867 create_stack_guard (void)
1868 {
1869   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1870 			   VAR_DECL, NULL, ptr_type_node);
1871   TREE_THIS_VOLATILE (guard) = 1;
1872   TREE_USED (guard) = 1;
1873   expand_one_stack_var (guard);
1874   crtl->stack_protect_guard = guard;
1875 }
1876 
1877 /* Prepare for expanding variables.  */
1878 static void
1879 init_vars_expansion (void)
1880 {
1881   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1882   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1883 
1884   /* A map from decl to stack partition.  */
1885   decl_to_stack_part = new hash_map<tree, size_t>;
1886 
1887   /* Initialize local stack smashing state.  */
1888   has_protected_decls = false;
1889   has_short_buffer = false;
1890 }
1891 
1892 /* Free up stack variable graph data.  */
1893 static void
1894 fini_vars_expansion (void)
1895 {
1896   bitmap_obstack_release (&stack_var_bitmap_obstack);
1897   if (stack_vars)
1898     XDELETEVEC (stack_vars);
1899   if (stack_vars_sorted)
1900     XDELETEVEC (stack_vars_sorted);
1901   stack_vars = NULL;
1902   stack_vars_sorted = NULL;
1903   stack_vars_alloc = stack_vars_num = 0;
1904   delete decl_to_stack_part;
1905   decl_to_stack_part = NULL;
1906 }
1907 
1908 /* Make a fair guess for the size of the stack frame of the function
1909    in NODE.  This doesn't have to be exact, the result is only used in
1910    the inline heuristics.  So we don't want to run the full stack var
1911    packing algorithm (which is quadratic in the number of stack vars).
1912    Instead, we calculate the total size of all stack vars.  This turns
1913    out to be a pretty fair estimate -- packing of stack vars doesn't
1914    happen very often.  */
1915 
1916 HOST_WIDE_INT
1917 estimated_stack_frame_size (struct cgraph_node *node)
1918 {
1919   HOST_WIDE_INT size = 0;
1920   size_t i;
1921   tree var;
1922   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1923 
1924   push_cfun (fn);
1925 
1926   init_vars_expansion ();
1927 
1928   FOR_EACH_LOCAL_DECL (fn, i, var)
1929     if (auto_var_in_fn_p (var, fn->decl))
1930       size += expand_one_var (var, true, false);
1931 
1932   if (stack_vars_num > 0)
1933     {
1934       /* Fake sorting the stack vars for account_stack_vars ().  */
1935       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1936       for (i = 0; i < stack_vars_num; ++i)
1937 	stack_vars_sorted[i] = i;
1938       size += account_stack_vars ();
1939     }
1940 
1941   fini_vars_expansion ();
1942   pop_cfun ();
1943   return size;
1944 }
1945 
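/* As a rough illustration: a function whose only locals are three
   non-register arrays of 16, 24 and 40 bytes is estimated at about
   16 + 24 + 40 = 80 bytes of frame, even though the real allocation
   done later may share slots between variables whose lifetimes do
   not overlap.  */
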
1946 /* Helper routine to check if a record or union contains an array field.  */
1947 
1948 static int
1949 record_or_union_type_has_array_p (const_tree tree_type)
1950 {
1951   tree fields = TYPE_FIELDS (tree_type);
1952   tree f;
1953 
1954   for (f = fields; f; f = DECL_CHAIN (f))
1955     if (TREE_CODE (f) == FIELD_DECL)
1956       {
1957 	tree field_type = TREE_TYPE (f);
1958 	if (RECORD_OR_UNION_TYPE_P (field_type)
1959 	    && record_or_union_type_has_array_p (field_type))
1960 	  return 1;
1961 	if (TREE_CODE (field_type) == ARRAY_TYPE)
1962 	  return 1;
1963       }
1964   return 0;
1965 }
1966 
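/* For example, the recursion above returns 1 for

       struct outer { int x; struct { char name[16]; } inner; };

   because the nested record is visited and its char[16] field is found,
   while a record containing only scalars and pointers returns 0.  */
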
1967 /* Check if the current function has local referenced variables that
1968    have their addresses taken, contain an array, or are arrays.  */
1969 
1970 static bool
1971 stack_protect_decl_p ()
1972 {
1973   unsigned i;
1974   tree var;
1975 
1976   FOR_EACH_LOCAL_DECL (cfun, i, var)
1977     if (!is_global_var (var))
1978       {
1979 	tree var_type = TREE_TYPE (var);
1980 	if (VAR_P (var)
1981 	    && (TREE_CODE (var_type) == ARRAY_TYPE
1982 		|| TREE_ADDRESSABLE (var)
1983 		|| (RECORD_OR_UNION_TYPE_P (var_type)
1984 		    && record_or_union_type_has_array_p (var_type))))
1985 	  return true;
1986       }
1987   return false;
1988 }
1989 
1990 /* Check if the current function has calls that use a return slot.  */
1991 
1992 static bool
1993 stack_protect_return_slot_p ()
1994 {
1995   basic_block bb;
1996 
1997   FOR_ALL_BB_FN (bb, cfun)
1998     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1999 	 !gsi_end_p (gsi); gsi_next (&gsi))
2000       {
2001 	gimple *stmt = gsi_stmt (gsi);
2002 	/* This assumes that calls to internal-only functions never
2003 	   use a return slot.  */
2004 	if (is_gimple_call (stmt)
2005 	    && !gimple_call_internal_p (stmt)
2006 	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2007 				  gimple_call_fndecl (stmt)))
2008 	  return true;
2009       }
2010   return false;
2011 }
2012 
2013 /* Expand all variables used in the function.  */
2014 
2015 static rtx_insn *
2016 expand_used_vars (void)
2017 {
2018   tree var, outer_block = DECL_INITIAL (current_function_decl);
2019   auto_vec<tree> maybe_local_decls;
2020   rtx_insn *var_end_seq = NULL;
2021   unsigned i;
2022   unsigned len;
2023   bool gen_stack_protect_signal = false;
2024 
2025   /* Compute the phase of the stack frame for this function.  */
2026   {
2027     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2028     int off = STARTING_FRAME_OFFSET % align;
2029     frame_phase = off ? align - off : 0;
2030   }
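  /* For instance, on a hypothetical target with a 128-bit preferred
     stack boundary (align == 16) and STARTING_FRAME_OFFSET == 8, off is
     8 and frame_phase becomes 16 - 8 = 8; when the starting offset is
     already a multiple of the boundary, frame_phase is 0.  */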
2031 
2032   /* Set TREE_USED on all variables in the local_decls.  */
2033   FOR_EACH_LOCAL_DECL (cfun, i, var)
2034     TREE_USED (var) = 1;
2035   /* Clear TREE_USED on all variables associated with a block scope.  */
2036   clear_tree_used (DECL_INITIAL (current_function_decl));
2037 
2038   init_vars_expansion ();
2039 
2040   if (targetm.use_pseudo_pic_reg ())
2041     pic_offset_table_rtx = gen_reg_rtx (Pmode);
2042 
2043   for (i = 0; i < SA.map->num_partitions; i++)
2044     {
2045       if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2046 	continue;
2047 
2048       tree var = partition_to_var (SA.map, i);
2049 
2050       gcc_assert (!virtual_operand_p (var));
2051 
2052       expand_one_ssa_partition (var);
2053     }
2054 
2055   if (flag_stack_protect == SPCT_FLAG_STRONG)
2056       gen_stack_protect_signal
2057 	= stack_protect_decl_p () || stack_protect_return_slot_p ();
2058 
2059   /* At this point all variables on the local_decls with TREE_USED
2060      set are not associated with any block scope.  Lay them out.  */
2061 
2062   len = vec_safe_length (cfun->local_decls);
2063   FOR_EACH_LOCAL_DECL (cfun, i, var)
2064     {
2065       bool expand_now = false;
2066 
2067       /* Expanded above already.  */
2068       if (is_gimple_reg (var))
2069 	{
2070 	  TREE_USED (var) = 0;
2071 	  goto next;
2072 	}
2073       /* We didn't set a block for static or extern because it's hard
2074 	 to tell the difference between a global variable (re)declared
2075 	 in a local scope, and one that's really declared there to
2076 	 begin with.  And it doesn't really matter much, since we're
2077 	 not giving them stack space.  Expand them now.  */
2078       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2079 	expand_now = true;
2080 
2081       /* Expand variables not associated with any block now.  Those created by
2082 	 the optimizers could be live anywhere in the function.  Those that
2083 	 could possibly have been scoped originally and detached from their
2084 	 block will have their allocation deferred so we coalesce them with
2085 	 others when optimization is enabled.  */
2086       else if (TREE_USED (var))
2087 	expand_now = true;
2088 
2089       /* Finally, mark all variables on the list as used.  We'll use
2090 	 this in a moment when we expand those associated with scopes.  */
2091       TREE_USED (var) = 1;
2092 
2093       if (expand_now)
2094 	expand_one_var (var, true, true);
2095 
2096     next:
2097       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2098 	{
2099 	  rtx rtl = DECL_RTL_IF_SET (var);
2100 
2101 	  /* Keep artificial non-ignored vars in cfun->local_decls
2102 	     chain until instantiate_decls.  */
2103 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2104 	    add_local_decl (cfun, var);
2105 	  else if (rtl == NULL_RTX)
2106 	    /* If rtl isn't set yet, which can happen e.g. with
2107 	       -fstack-protector, retry before returning from this
2108 	       function.  */
2109 	    maybe_local_decls.safe_push (var);
2110 	}
2111     }
2112 
2113   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2114 
2115      +-----------------+-----------------+
2116      | ...processed... | ...duplicates...|
2117      +-----------------+-----------------+
2118                        ^
2119 		       +-- LEN points here.
2120 
2121      We just want the duplicates, as those are the artificial
2122      non-ignored vars that we want to keep until instantiate_decls.
2123      Move them down and truncate the array.  */
2124   if (!vec_safe_is_empty (cfun->local_decls))
2125     cfun->local_decls->block_remove (0, len);
2126 
2127   /* At this point, all variables within the block tree with TREE_USED
2128      set are actually used by the optimized function.  Lay them out.  */
2129   expand_used_vars_for_block (outer_block, true);
2130 
2131   if (stack_vars_num > 0)
2132     {
2133       add_scope_conflicts ();
2134 
2135       /* If stack protection is enabled, we don't share space between
2136 	 vulnerable data and non-vulnerable data.  */
2137       if (flag_stack_protect != 0
2138 	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2139 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2140 		  && lookup_attribute ("stack_protect",
2141 				       DECL_ATTRIBUTES (current_function_decl)))))
2142 	add_stack_protection_conflicts ();
2143 
2144       /* Now that we have collected all stack variables, and have computed a
2145 	 minimal interference graph, attempt to save some stack space.  */
2146       partition_stack_vars ();
2147       if (dump_file)
2148 	dump_stack_var_partition ();
2149     }
2150 
2151   switch (flag_stack_protect)
2152     {
2153     case SPCT_FLAG_ALL:
2154       create_stack_guard ();
2155       break;
2156 
2157     case SPCT_FLAG_STRONG:
2158       if (gen_stack_protect_signal
2159 	  || cfun->calls_alloca || has_protected_decls
2160 	  || lookup_attribute ("stack_protect",
2161 			       DECL_ATTRIBUTES (current_function_decl)))
2162 	create_stack_guard ();
2163       break;
2164 
2165     case SPCT_FLAG_DEFAULT:
2166       if (cfun->calls_alloca || has_protected_decls
2167 	  || lookup_attribute ("stack_protect",
2168 			       DECL_ATTRIBUTES (current_function_decl)))
2169 	create_stack_guard ();
2170       break;
2171 
2172     case SPCT_FLAG_EXPLICIT:
2173       if (lookup_attribute ("stack_protect",
2174 			    DECL_ATTRIBUTES (current_function_decl)))
2175 	create_stack_guard ();
2176       break;
2177     default:
2178       ;
2179     }
2180 
2181   /* Assign rtl to each variable based on these partitions.  */
2182   if (stack_vars_num > 0)
2183     {
2184       struct stack_vars_data data;
2185 
2186       data.asan_base = NULL_RTX;
2187       data.asan_alignb = 0;
2188 
2189       /* Reorder decls to be protected by iterating over the variables
2190 	 array multiple times, and allocating out of each phase in turn.  */
2191       /* ??? We could probably integrate this into the qsort we did
2192 	 earlier, such that we naturally see these variables first,
2193 	 and thus naturally allocate things in the right order.  */
2194       if (has_protected_decls)
2195 	{
2196 	  /* Phase 1 contains only character arrays.  */
2197 	  expand_stack_vars (stack_protect_decl_phase_1, &data);
2198 
2199 	  /* Phase 2 contains other kinds of arrays.  */
2200 	  if (flag_stack_protect == SPCT_FLAG_ALL
2201 	      || flag_stack_protect == SPCT_FLAG_STRONG
2202 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2203 		  && lookup_attribute ("stack_protect",
2204 				       DECL_ATTRIBUTES (current_function_decl))))
2205 	    expand_stack_vars (stack_protect_decl_phase_2, &data);
2206 	}
2207 
2208       if (asan_sanitize_stack_p ())
2209 	/* Phase 3, any partitions that need asan protection
2210 	   in addition to phase 1 and 2.  */
2211 	expand_stack_vars (asan_decl_phase_3, &data);
2212 
2213       if (!data.asan_vec.is_empty ())
2214 	{
2215 	  HOST_WIDE_INT prev_offset = frame_offset;
2216 	  HOST_WIDE_INT offset, sz, redzonesz;
2217 	  redzonesz = ASAN_RED_ZONE_SIZE;
2218 	  sz = data.asan_vec[0] - prev_offset;
2219 	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2220 	      && data.asan_alignb <= 4096
2221 	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2222 	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2223 			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2224 	  offset
2225 	    = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
2226 	  data.asan_vec.safe_push (prev_offset);
2227 	  data.asan_vec.safe_push (offset);
2228 	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
2229 	  if (STRICT_ALIGNMENT)
2230 	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2231 				      << ASAN_SHADOW_SHIFT)
2232 				     / BITS_PER_UNIT, 1);
2233 
2234 	  var_end_seq
2235 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
2236 					  data.asan_base,
2237 					  data.asan_alignb,
2238 					  data.asan_vec.address (),
2239 					  data.asan_decl_vec.address (),
2240 					  data.asan_vec.length ());
2241 	}
2242 
2243       expand_stack_vars (NULL, &data);
2244     }
2245 
2246   fini_vars_expansion ();
2247 
2248   /* If there were any artificial non-ignored vars without rtl
2249      found earlier, see if deferred stack allocation hasn't assigned
2250      rtl to them.  */
2251   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2252     {
2253       rtx rtl = DECL_RTL_IF_SET (var);
2254 
2255       /* Keep artificial non-ignored vars in cfun->local_decls
2256 	 chain until instantiate_decls.  */
2257       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2258 	add_local_decl (cfun, var);
2259     }
2260 
2261   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
2262   if (STACK_ALIGNMENT_NEEDED)
2263     {
2264       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2265       if (!FRAME_GROWS_DOWNWARD)
2266 	frame_offset += align - 1;
2267       frame_offset &= -align;
2268     }
2269 
2270   return var_end_seq;
2271 }
2272 
2273 
2274 /* If we need to produce a detailed dump, print the tree representation
2275    for STMT to the dump file.  SINCE is the last RTX after which the RTL
2276    generated for STMT should have been appended.  */
2277 
2278 static void
2279 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2280 {
2281   if (dump_file && (dump_flags & TDF_DETAILS))
2282     {
2283       fprintf (dump_file, "\n;; ");
2284       print_gimple_stmt (dump_file, stmt, 0,
2285 			 TDF_SLIM | (dump_flags & TDF_LINENO));
2286       fprintf (dump_file, "\n");
2287 
2288       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2289     }
2290 }
2291 
2292 /* Maps the blocks that do not contain tree labels to rtx labels.  */
2293 
2294 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2295 
2296 /* Returns the label_rtx expression for a label starting basic block BB.  */
2297 
2298 static rtx_code_label *
2299 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2300 {
2301   gimple_stmt_iterator gsi;
2302   tree lab;
2303 
2304   if (bb->flags & BB_RTL)
2305     return block_label (bb);
2306 
2307   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2308   if (elt)
2309     return *elt;
2310 
2311   /* Find the tree label if it is present.  */
2312 
2313   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2314     {
2315       glabel *lab_stmt;
2316 
2317       lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2318       if (!lab_stmt)
2319 	break;
2320 
2321       lab = gimple_label_label (lab_stmt);
2322       if (DECL_NONLOCAL (lab))
2323 	break;
2324 
2325       return jump_target_rtx (lab);
2326     }
2327 
2328   rtx_code_label *l = gen_label_rtx ();
2329   lab_rtx_for_bb->put (bb, l);
2330   return l;
2331 }
2332 
2333 
2334 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
2335    of a basic block where we just expanded the conditional at the end,
2336    possibly clean up the CFG and instruction sequence.  LAST is the
2337    last instruction before the just emitted jump sequence.  */
2338 
2339 static void
2340 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2341 {
2342   /* Special case: when jumpif decides that the condition is
2343      trivial it emits an unconditional jump (and the necessary
2344      barrier).  But we still have two edges, the fallthru one is
2345      wrong.  purge_dead_edges would clean this up later.  Unfortunately
2346      we have to insert insns (and split edges) before
2347      find_many_sub_basic_blocks and hence before purge_dead_edges.
2348      But splitting edges might create new blocks which depend on the
2349      fact that if there are two edges there's no barrier.  So the
2350      barrier would get lost and verify_flow_info would ICE.  Instead
2351      of auditing all edge splitters to care for the barrier (which
2352      normally isn't there in a cleaned CFG), fix it here.  */
2353   if (BARRIER_P (get_last_insn ()))
2354     {
2355       rtx_insn *insn;
2356       remove_edge (e);
2357       /* Now we have a single successor block.  If we have insns to
2358 	 insert on the remaining edge, we will potentially insert them
2359 	 at the end of this block (if the dest block isn't feasible)
2360 	 in order to avoid splitting the edge.  This insertion will take
2361 	 place in front of the last jump.  But we might have emitted
2362 	 multiple jumps (conditional and one unconditional) to the
2363 	 same destination.  Inserting in front of the last one then
2364 	 is a problem.  See PR 40021.  We fix this by deleting all
2365 	 jumps except the last unconditional one.  */
2366       insn = PREV_INSN (get_last_insn ());
2367       /* Make sure we have an unconditional jump.  Otherwise we're
2368 	 confused.  */
2369       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2370       for (insn = PREV_INSN (insn); insn != last;)
2371 	{
2372 	  insn = PREV_INSN (insn);
2373 	  if (JUMP_P (NEXT_INSN (insn)))
2374 	    {
2375 	      if (!any_condjump_p (NEXT_INSN (insn)))
2376 		{
2377 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2378 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2379 		}
2380 	      delete_insn (NEXT_INSN (insn));
2381 	    }
2382 	}
2383     }
2384 }
2385 
2386 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
2387    Returns a new basic block if we've terminated the current basic
2388    block and created a new one.  */
2389 
2390 static basic_block
2391 expand_gimple_cond (basic_block bb, gcond *stmt)
2392 {
2393   basic_block new_bb, dest;
2394   edge new_edge;
2395   edge true_edge;
2396   edge false_edge;
2397   rtx_insn *last2, *last;
2398   enum tree_code code;
2399   tree op0, op1;
2400 
2401   code = gimple_cond_code (stmt);
2402   op0 = gimple_cond_lhs (stmt);
2403   op1 = gimple_cond_rhs (stmt);
2404   /* We're sometimes presented with such code:
2405        D.123_1 = x < y;
2406        if (D.123_1 != 0)
2407          ...
2408      This would expand to two comparisons which then later might
2409      be cleaned up by combine.  But some pattern matchers like if-conversion
2410      work better when there's only one compare, so make up for this
2411      here as special exception if TER would have made the same change.  */
2412   if (SA.values
2413       && TREE_CODE (op0) == SSA_NAME
2414       && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2415       && TREE_CODE (op1) == INTEGER_CST
2416       && ((gimple_cond_code (stmt) == NE_EXPR
2417 	   && integer_zerop (op1))
2418 	  || (gimple_cond_code (stmt) == EQ_EXPR
2419 	      && integer_onep (op1)))
2420       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2421     {
2422       gimple *second = SSA_NAME_DEF_STMT (op0);
2423       if (gimple_code (second) == GIMPLE_ASSIGN)
2424 	{
2425 	  enum tree_code code2 = gimple_assign_rhs_code (second);
2426 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
2427 	    {
2428 	      code = code2;
2429 	      op0 = gimple_assign_rhs1 (second);
2430 	      op1 = gimple_assign_rhs2 (second);
2431 	    }
2432 	  /* If jumps are cheap and the target does not support conditional
2433 	     compare, turn some more codes into jumpy sequences.  */
2434 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2435 		   && targetm.gen_ccmp_first == NULL)
2436 	    {
2437 	      if ((code2 == BIT_AND_EXPR
2438 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2439 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2440 		  || code2 == TRUTH_AND_EXPR)
2441 		{
2442 		  code = TRUTH_ANDIF_EXPR;
2443 		  op0 = gimple_assign_rhs1 (second);
2444 		  op1 = gimple_assign_rhs2 (second);
2445 		}
2446 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2447 		{
2448 		  code = TRUTH_ORIF_EXPR;
2449 		  op0 = gimple_assign_rhs1 (second);
2450 		  op1 = gimple_assign_rhs2 (second);
2451 		}
2452 	    }
2453 	}
2454     }
2455 
2456   last2 = last = get_last_insn ();
2457 
2458   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2459   set_curr_insn_location (gimple_location (stmt));
2460 
2461   /* These flags have no purpose in RTL land.  */
2462   true_edge->flags &= ~EDGE_TRUE_VALUE;
2463   false_edge->flags &= ~EDGE_FALSE_VALUE;
2464 
2465   /* We can either have a pure conditional jump with one fallthru edge or
2466      a two-way jump that needs to be decomposed into two basic blocks.  */
2467   if (false_edge->dest == bb->next_bb)
2468     {
2469       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2470 		true_edge->probability);
2471       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2472       if (true_edge->goto_locus != UNKNOWN_LOCATION)
2473 	set_curr_insn_location (true_edge->goto_locus);
2474       false_edge->flags |= EDGE_FALLTHRU;
2475       maybe_cleanup_end_of_block (false_edge, last);
2476       return NULL;
2477     }
2478   if (true_edge->dest == bb->next_bb)
2479     {
2480       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2481 		   false_edge->probability);
2482       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2483       if (false_edge->goto_locus != UNKNOWN_LOCATION)
2484 	set_curr_insn_location (false_edge->goto_locus);
2485       true_edge->flags |= EDGE_FALLTHRU;
2486       maybe_cleanup_end_of_block (true_edge, last);
2487       return NULL;
2488     }
2489 
2490   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2491 	    true_edge->probability);
2492   last = get_last_insn ();
2493   if (false_edge->goto_locus != UNKNOWN_LOCATION)
2494     set_curr_insn_location (false_edge->goto_locus);
2495   emit_jump (label_rtx_for_bb (false_edge->dest));
2496 
2497   BB_END (bb) = last;
2498   if (BARRIER_P (BB_END (bb)))
2499     BB_END (bb) = PREV_INSN (BB_END (bb));
2500   update_bb_for_insn (bb);
2501 
2502   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2503   dest = false_edge->dest;
2504   redirect_edge_succ (false_edge, new_bb);
2505   false_edge->flags |= EDGE_FALLTHRU;
2506   new_bb->count = false_edge->count;
2507   new_bb->frequency = EDGE_FREQUENCY (false_edge);
2508   add_bb_to_loop (new_bb, bb->loop_father);
2509   new_edge = make_edge (new_bb, dest, 0);
2510   new_edge->probability = REG_BR_PROB_BASE;
2511   new_edge->count = new_bb->count;
2512   if (BARRIER_P (BB_END (new_bb)))
2513     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2514   update_bb_for_insn (new_bb);
2515 
2516   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2517 
2518   if (true_edge->goto_locus != UNKNOWN_LOCATION)
2519     {
2520       set_curr_insn_location (true_edge->goto_locus);
2521       true_edge->goto_locus = curr_insn_location ();
2522     }
2523 
2524   return new_bb;
2525 }
2526 
2527 /* Mark all calls that can have a transaction restart.  */
2528 
2529 static void
2530 mark_transaction_restart_calls (gimple *stmt)
2531 {
2532   struct tm_restart_node dummy;
2533   tm_restart_node **slot;
2534 
2535   if (!cfun->gimple_df->tm_restart)
2536     return;
2537 
2538   dummy.stmt = stmt;
2539   slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2540   if (slot)
2541     {
2542       struct tm_restart_node *n = *slot;
2543       tree list = n->label_or_list;
2544       rtx_insn *insn;
2545 
2546       for (insn = next_real_insn (get_last_insn ());
2547 	   !CALL_P (insn);
2548 	   insn = next_real_insn (insn))
2549 	continue;
2550 
2551       if (TREE_CODE (list) == LABEL_DECL)
2552 	add_reg_note (insn, REG_TM, label_rtx (list));
2553       else
2554 	for (; list ; list = TREE_CHAIN (list))
2555 	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2556     }
2557 }
2558 
2559 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2560    statement STMT.  */
2561 
2562 static void
2563 expand_call_stmt (gcall *stmt)
2564 {
2565   tree exp, decl, lhs;
2566   bool builtin_p;
2567   size_t i;
2568 
2569   if (gimple_call_internal_p (stmt))
2570     {
2571       expand_internal_call (stmt);
2572       return;
2573     }
2574 
2575   /* If this is a call to a built-in function and it has no effect other
2576      than setting the lhs, try to implement it using an internal function
2577      instead.  */
2578   decl = gimple_call_fndecl (stmt);
2579   if (gimple_call_lhs (stmt)
2580       && !gimple_has_side_effects (stmt)
2581       && (optimize || (decl && called_as_built_in (decl))))
2582     {
2583       internal_fn ifn = replacement_internal_fn (stmt);
2584       if (ifn != IFN_LAST)
2585 	{
2586 	  expand_internal_call (ifn, stmt);
2587 	  return;
2588 	}
2589     }
2590 
2591   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2592 
2593   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2594   builtin_p = decl && DECL_BUILT_IN (decl);
2595 
2596   /* If this is not a builtin function, the function type through which the
2597      call is made may be different from the type of the function.  */
2598   if (!builtin_p)
2599     CALL_EXPR_FN (exp)
2600       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2601 		      CALL_EXPR_FN (exp));
2602 
2603   TREE_TYPE (exp) = gimple_call_return_type (stmt);
2604   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2605 
2606   for (i = 0; i < gimple_call_num_args (stmt); i++)
2607     {
2608       tree arg = gimple_call_arg (stmt, i);
2609       gimple *def;
2610       /* TER propagates addresses into arguments of builtin functions so we
2611 	 have a chance to infer more correct alignment information.  See PR39954.  */
2612       if (builtin_p
2613 	  && TREE_CODE (arg) == SSA_NAME
2614 	  && (def = get_gimple_for_ssa_name (arg))
2615 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2616 	arg = gimple_assign_rhs1 (def);
2617       CALL_EXPR_ARG (exp, i) = arg;
2618     }
2619 
2620   if (gimple_has_side_effects (stmt))
2621     TREE_SIDE_EFFECTS (exp) = 1;
2622 
2623   if (gimple_call_nothrow_p (stmt))
2624     TREE_NOTHROW (exp) = 1;
2625 
2626   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2627   CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2628   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2629   if (decl
2630       && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2631       && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2632 	  || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2633     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2634   else
2635     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2636   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2637   CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2638   SET_EXPR_LOCATION (exp, gimple_location (stmt));
2639   CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2640 
2641   /* Ensure RTL is created for debug args.  */
2642   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2643     {
2644       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2645       unsigned int ix;
2646       tree dtemp;
2647 
2648       if (debug_args)
2649 	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2650 	  {
2651 	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2652 	    expand_debug_expr (dtemp);
2653 	  }
2654     }
2655 
2656   lhs = gimple_call_lhs (stmt);
2657   if (lhs)
2658     expand_assignment (lhs, exp, false);
2659   else
2660     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2661 
2662   mark_transaction_restart_calls (stmt);
2663 }
2664 
2665 
2666 /* Generate RTL for an asm statement (explicit assembler code).
2667    STRING is a STRING_CST node containing the assembler code text,
2668    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
2669    insn is volatile; don't optimize it.  */
2670 
2671 static void
2672 expand_asm_loc (tree string, int vol, location_t locus)
2673 {
2674   rtx body;
2675 
2676   body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2677 				ggc_strdup (TREE_STRING_POINTER (string)),
2678 				locus);
2679 
2680   MEM_VOLATILE_P (body) = vol;
2681 
2682   /* Non-empty basic ASM implicitly clobbers memory.  */
2683   if (TREE_STRING_LENGTH (string) != 0)
2684     {
2685       rtx asm_op, clob;
2686       unsigned i, nclobbers;
2687       auto_vec<rtx> input_rvec, output_rvec;
2688       auto_vec<const char *> constraints;
2689       auto_vec<rtx> clobber_rvec;
2690       HARD_REG_SET clobbered_regs;
2691       CLEAR_HARD_REG_SET (clobbered_regs);
2692 
2693       clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2694       clobber_rvec.safe_push (clob);
2695 
2696       if (targetm.md_asm_adjust)
2697 	targetm.md_asm_adjust (output_rvec, input_rvec,
2698 			       constraints, clobber_rvec,
2699 			       clobbered_regs);
2700 
2701       asm_op = body;
2702       nclobbers = clobber_rvec.length ();
2703       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2704 
2705       XVECEXP (body, 0, 0) = asm_op;
2706       for (i = 0; i < nclobbers; i++)
2707 	XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2708     }
2709 
2710   emit_insn (body);
2711 }
2712 
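/* As an illustration, a non-empty basic asm such as

       asm volatile ("nop");

   ends up as a PARALLEL containing the ASM_INPUT plus a
   (clobber (mem:BLK (scratch))) and any additional clobbers supplied by
   targetm.md_asm_adjust, so the optimizers treat it as touching memory.  */
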
2713 /* Return the number of times character C occurs in string S.  */
2714 static int
2715 n_occurrences (int c, const char *s)
2716 {
2717   int n = 0;
2718   while (*s)
2719     n += (*s++ == c);
2720   return n;
2721 }
2722 
2723 /* A subroutine of expand_asm_operands.  Check that all operands have
2724    the same number of alternatives.  Return true if so.  */
2725 
2726 static bool
2727 check_operand_nalternatives (const vec<const char *> &constraints)
2728 {
2729   unsigned len = constraints.length();
2730   if (len > 0)
2731     {
2732       int nalternatives = n_occurrences (',', constraints[0]);
2733 
2734       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2735 	{
2736 	  error ("too many alternatives in %<asm%>");
2737 	  return false;
2738 	}
2739 
2740       for (unsigned i = 1; i < len; ++i)
2741 	if (n_occurrences (',', constraints[i]) != nalternatives)
2742 	  {
2743 	    error ("operand constraints for %<asm%> differ "
2744 		   "in number of alternatives");
2745 	    return false;
2746 	  }
2747     }
2748   return true;
2749 }
2750 
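/* For example, the constraint strings { "=r,m", "r,o" } each contain one
   ',' (two alternatives apiece) and pass the check, whereas
   { "=r,m", "r" } triggers the "differ in number of alternatives" error.  */
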
2751 /* Check for overlap between registers marked in CLOBBERED_REGS and
2752    anything inappropriate in T.  Emit an error and return true if an
2753    overlap is found, false otherwise.  */
2754 
2755 static bool
2756 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2757 {
2758   /* Conflicts between asm-declared register variables and the clobber
2759      list are not allowed.  */
2760   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2761 
2762   if (overlap)
2763     {
2764       error ("asm-specifier for variable %qE conflicts with asm clobber list",
2765 	     DECL_NAME (overlap));
2766 
2767       /* Reset registerness to stop multiple errors emitted for a single
2768 	 variable.  */
2769       DECL_REGISTER (overlap) = 0;
2770       return true;
2771     }
2772 
2773   return false;
2774 }
2775 
2776 /* Generate RTL for an asm statement with arguments.
2777    STRING is the instruction template.
2778    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2779    Each output or input has an expression in the TREE_VALUE and
2780    a tree list in TREE_PURPOSE which in turn contains a constraint
2781    name in TREE_VALUE (or NULL_TREE) and a constraint string
2782    in TREE_PURPOSE.
2783    CLOBBERS is a list of STRING_CST nodes each naming a hard register
2784    that is clobbered by this insn.
2785 
2786    LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2787    should be the fallthru basic block of the asm goto.
2788 
2789    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2790    Some elements of OUTPUTS may be replaced with trees representing temporary
2791    values.  The caller should copy those temporary values to the originally
2792    specified lvalues.
2793 
2794    VOL nonzero means the insn is volatile; don't optimize it.  */
2795 
2796 static void
2797 expand_asm_stmt (gasm *stmt)
2798 {
2799   class save_input_location
2800   {
2801     location_t old;
2802 
2803   public:
2804     explicit save_input_location(location_t where)
2805     {
2806       old = input_location;
2807       input_location = where;
2808     }
2809 
2810     ~save_input_location()
2811     {
2812       input_location = old;
2813     }
2814   };
2815 
2816   location_t locus = gimple_location (stmt);
2817 
2818   if (gimple_asm_input_p (stmt))
2819     {
2820       const char *s = gimple_asm_string (stmt);
2821       tree string = build_string (strlen (s), s);
2822       expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2823       return;
2824     }
2825 
2826   /* There are some legacy diagnostics in here, and this also avoids a
2827      sixth parameter to targetm.md_asm_adjust.  */
2828   save_input_location s_i_l(locus);
2829 
2830   unsigned noutputs = gimple_asm_noutputs (stmt);
2831   unsigned ninputs = gimple_asm_ninputs (stmt);
2832   unsigned nlabels = gimple_asm_nlabels (stmt);
2833   unsigned i;
2834 
2835   /* ??? Diagnose during gimplification?  */
2836   if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2837     {
2838       error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2839       return;
2840     }
2841 
2842   auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2843   auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2844   auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2845 
2846   /* Copy the gimple vectors into new vectors that we can manipulate.  */
2847 
2848   output_tvec.safe_grow (noutputs);
2849   input_tvec.safe_grow (ninputs);
2850   constraints.safe_grow (noutputs + ninputs);
2851 
2852   for (i = 0; i < noutputs; ++i)
2853     {
2854       tree t = gimple_asm_output_op (stmt, i);
2855       output_tvec[i] = TREE_VALUE (t);
2856       constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2857     }
2858   for (i = 0; i < ninputs; i++)
2859     {
2860       tree t = gimple_asm_input_op (stmt, i);
2861       input_tvec[i] = TREE_VALUE (t);
2862       constraints[i + noutputs]
2863 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2864     }
2865 
2866   /* ??? Diagnose during gimplification?  */
2867   if (! check_operand_nalternatives (constraints))
2868     return;
2869 
2870   /* Count the number of meaningful clobbered registers, ignoring what
2871      we would ignore later.  */
2872   auto_vec<rtx> clobber_rvec;
2873   HARD_REG_SET clobbered_regs;
2874   CLEAR_HARD_REG_SET (clobbered_regs);
2875 
2876   if (unsigned n = gimple_asm_nclobbers (stmt))
2877     {
2878       clobber_rvec.reserve (n);
2879       for (i = 0; i < n; i++)
2880 	{
2881 	  tree t = gimple_asm_clobber_op (stmt, i);
2882           const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2883 	  int nregs, j;
2884 
2885 	  j = decode_reg_name_and_count (regname, &nregs);
2886 	  if (j < 0)
2887 	    {
2888 	      if (j == -2)
2889 		{
2890 		  /* ??? Diagnose during gimplification?  */
2891 		  error ("unknown register name %qs in %<asm%>", regname);
2892 		}
2893 	      else if (j == -4)
2894 		{
2895 		  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2896 		  clobber_rvec.safe_push (x);
2897 		}
2898 	      else
2899 		{
2900 		  /* Otherwise we should have -1 == empty string
2901 		     or -3 == cc, which is not a register.  */
2902 		  gcc_assert (j == -1 || j == -3);
2903 		}
2904 	    }
2905 	  else
2906 	    for (int reg = j; reg < j + nregs; reg++)
2907 	      {
2908 		/* Clobbering the PIC register is an error.  */
2909 		if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2910 		  {
2911 		    /* ??? Diagnose during gimplification?  */
2912 		    error ("PIC register clobbered by %qs in %<asm%>",
2913 			   regname);
2914 		    return;
2915 		  }
2916 
2917 	        SET_HARD_REG_BIT (clobbered_regs, reg);
2918 	        rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2919 		clobber_rvec.safe_push (x);
2920 	      }
2921 	}
2922     }
2923   unsigned nclobbers = clobber_rvec.length();
2924 
2925   /* First pass over inputs and outputs checks validity and sets
2926      mark_addressable if needed.  */
2927   /* ??? Diagnose during gimplification?  */
2928 
2929   for (i = 0; i < noutputs; ++i)
2930     {
2931       tree val = output_tvec[i];
2932       tree type = TREE_TYPE (val);
2933       const char *constraint;
2934       bool is_inout;
2935       bool allows_reg;
2936       bool allows_mem;
2937 
2938       /* Try to parse the output constraint.  If that fails, there's
2939 	 no point in going further.  */
2940       constraint = constraints[i];
2941       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2942 				    &allows_mem, &allows_reg, &is_inout))
2943 	return;
2944 
2945       if (! allows_reg
2946 	  && (allows_mem
2947 	      || is_inout
2948 	      || (DECL_P (val)
2949 		  && REG_P (DECL_RTL (val))
2950 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2951 	mark_addressable (val);
2952     }
2953 
2954   for (i = 0; i < ninputs; ++i)
2955     {
2956       bool allows_reg, allows_mem;
2957       const char *constraint;
2958 
2959       constraint = constraints[i + noutputs];
2960       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2961 				    constraints.address (),
2962 				    &allows_mem, &allows_reg))
2963 	return;
2964 
2965       if (! allows_reg && allows_mem)
2966 	mark_addressable (input_tvec[i]);
2967     }
2968 
2969   /* Second pass evaluates arguments.  */
2970 
2971   /* Make sure stack is consistent for asm goto.  */
2972   if (nlabels > 0)
2973     do_pending_stack_adjust ();
2974   int old_generating_concat_p = generating_concat_p;
2975 
2976   /* Vector of RTX's of evaluated output operands.  */
2977   auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
2978   auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
2979   rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
2980 
2981   output_rvec.safe_grow (noutputs);
2982 
2983   for (i = 0; i < noutputs; ++i)
2984     {
2985       tree val = output_tvec[i];
2986       tree type = TREE_TYPE (val);
2987       bool is_inout, allows_reg, allows_mem, ok;
2988       rtx op;
2989 
2990       ok = parse_output_constraint (&constraints[i], i, ninputs,
2991 				    noutputs, &allows_mem, &allows_reg,
2992 				    &is_inout);
2993       gcc_assert (ok);
2994 
2995       /* If an output operand is not a decl or indirect ref and our constraint
2996 	 allows a register, make a temporary to act as an intermediate.
2997 	 Make the asm insn write into that, then we will copy it to
2998 	 the real output operand.  Likewise for promoted variables.  */
2999 
3000       generating_concat_p = 0;
3001 
3002       if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3003 	  || (DECL_P (val)
3004 	      && (allows_mem || REG_P (DECL_RTL (val)))
3005 	      && ! (REG_P (DECL_RTL (val))
3006 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3007 	  || ! allows_reg
3008 	  || is_inout
3009 	  || TREE_ADDRESSABLE (type))
3010 	{
3011 	  op = expand_expr (val, NULL_RTX, VOIDmode,
3012 			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3013 	  if (MEM_P (op))
3014 	    op = validize_mem (op);
3015 
3016 	  if (! allows_reg && !MEM_P (op))
3017 	    error ("output number %d not directly addressable", i);
3018 	  if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3019 	      || GET_CODE (op) == CONCAT)
3020 	    {
3021 	      rtx old_op = op;
3022 	      op = gen_reg_rtx (GET_MODE (op));
3023 
3024 	      generating_concat_p = old_generating_concat_p;
3025 
3026 	      if (is_inout)
3027 		emit_move_insn (op, old_op);
3028 
3029 	      push_to_sequence2 (after_rtl_seq, after_rtl_end);
3030 	      emit_move_insn (old_op, op);
3031 	      after_rtl_seq = get_insns ();
3032 	      after_rtl_end = get_last_insn ();
3033 	      end_sequence ();
3034 	    }
3035 	}
3036       else
3037 	{
3038 	  op = assign_temp (type, 0, 1);
3039 	  op = validize_mem (op);
3040 	  if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3041 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3042 
3043 	  generating_concat_p = old_generating_concat_p;
3044 
3045 	  push_to_sequence2 (after_rtl_seq, after_rtl_end);
3046 	  expand_assignment (val, make_tree (type, op), false);
3047 	  after_rtl_seq = get_insns ();
3048 	  after_rtl_end = get_last_insn ();
3049 	  end_sequence ();
3050 	}
3051       output_rvec[i] = op;
3052 
3053       if (is_inout)
3054 	inout_opnum.safe_push (i);
3055     }
3056 
3057   auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3058   auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3059 
3060   input_rvec.safe_grow (ninputs);
3061   input_mode.safe_grow (ninputs);
3062 
3063   generating_concat_p = 0;
3064 
3065   for (i = 0; i < ninputs; ++i)
3066     {
3067       tree val = input_tvec[i];
3068       tree type = TREE_TYPE (val);
3069       bool allows_reg, allows_mem, ok;
3070       const char *constraint;
3071       rtx op;
3072 
3073       constraint = constraints[i + noutputs];
3074       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3075 				   constraints.address (),
3076 				   &allows_mem, &allows_reg);
3077       gcc_assert (ok);
3078 
3079       /* EXPAND_INITIALIZER will not generate code for valid initializer
3080 	 constants, but will still generate code for other types of operand.
3081 	 This is the behavior we want for constant constraints.  */
3082       op = expand_expr (val, NULL_RTX, VOIDmode,
3083 			allows_reg ? EXPAND_NORMAL
3084 			: allows_mem ? EXPAND_MEMORY
3085 			: EXPAND_INITIALIZER);
3086 
3087       /* Never pass a CONCAT to an ASM.  */
3088       if (GET_CODE (op) == CONCAT)
3089 	op = force_reg (GET_MODE (op), op);
3090       else if (MEM_P (op))
3091 	op = validize_mem (op);
3092 
3093       if (asm_operand_ok (op, constraint, NULL) <= 0)
3094 	{
3095 	  if (allows_reg && TYPE_MODE (type) != BLKmode)
3096 	    op = force_reg (TYPE_MODE (type), op);
3097 	  else if (!allows_mem)
3098 	    warning (0, "asm operand %d probably doesn%'t match constraints",
3099 		     i + noutputs);
3100 	  else if (MEM_P (op))
3101 	    {
3102 	      /* We won't recognize either volatile memory or memory
3103 		 with a queued address as a valid memory_operand
3104 		 at this point.  Ignore it: clearly this *is* a memory.  */
3105 	    }
3106 	  else
3107 	    gcc_unreachable ();
3108 	}
3109       input_rvec[i] = op;
3110       input_mode[i] = TYPE_MODE (type);
3111     }
3112 
3113   /* For in-out operands, copy output rtx to input rtx.  */
3114   unsigned ninout = inout_opnum.length();
3115   for (i = 0; i < ninout; i++)
3116     {
3117       int j = inout_opnum[i];
3118       rtx o = output_rvec[j];
3119 
3120       input_rvec.safe_push (o);
3121       input_mode.safe_push (GET_MODE (o));
3122 
3123       char buffer[16];
3124       sprintf (buffer, "%d", j);
3125       constraints.safe_push (ggc_strdup (buffer));
3126     }
3127   ninputs += ninout;
3128 
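  /* E.g. for asm ("..." : "+r" (x)), output 0 is marked in-out, so the
     loop above appends its rtx as an extra input whose constraint is the
     string "0", i.e. a matching constraint referring back to output 0.  */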
3129   /* Sometimes we wish to automatically clobber registers across an asm.
3130      Case in point is when the i386 backend moved from cc0 to a hard reg --
3131      maintaining source-level compatibility means automatically clobbering
3132      the flags register.  */
3133   rtx_insn *after_md_seq = NULL;
3134   if (targetm.md_asm_adjust)
3135     after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3136 					  constraints, clobber_rvec,
3137 					  clobbered_regs);
3138 
3139   /* Do not allow the hook to change the output and input count,
3140      lest it mess up the operand numbering.  */
3141   gcc_assert (output_rvec.length() == noutputs);
3142   gcc_assert (input_rvec.length() == ninputs);
3143   gcc_assert (constraints.length() == noutputs + ninputs);
3144 
3145   /* But it certainly can adjust the clobbers.  */
3146   nclobbers = clobber_rvec.length();
3147 
3148   /* Third pass checks for easy conflicts.  */
3149   /* ??? Why are we doing this on trees instead of rtx.  */
3150 
3151   bool clobber_conflict_found = 0;
3152   for (i = 0; i < noutputs; ++i)
3153     if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3154 	clobber_conflict_found = 1;
3155   for (i = 0; i < ninputs - ninout; ++i)
3156     if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3157 	clobber_conflict_found = 1;
3158 
3159   /* Make vectors for the expression-rtx, constraint strings,
3160      and named operands.  */
3161 
3162   rtvec argvec = rtvec_alloc (ninputs);
3163   rtvec constraintvec = rtvec_alloc (ninputs);
3164   rtvec labelvec = rtvec_alloc (nlabels);
3165 
3166   rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3167 				    : GET_MODE (output_rvec[0])),
3168 				   ggc_strdup (gimple_asm_string (stmt)),
3169 				   empty_string, 0, argvec, constraintvec,
3170 				   labelvec, locus);
3171   MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3172 
3173   for (i = 0; i < ninputs; ++i)
3174     {
3175       ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3176       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3177 	= gen_rtx_ASM_INPUT_loc (input_mode[i],
3178 				 constraints[i + noutputs],
3179 				 locus);
3180     }
3181 
3182   /* Copy labels to the vector.  */
3183   rtx_code_label *fallthru_label = NULL;
3184   if (nlabels > 0)
3185     {
3186       basic_block fallthru_bb = NULL;
3187       edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3188       if (fallthru)
3189 	fallthru_bb = fallthru->dest;
3190 
3191       for (i = 0; i < nlabels; ++i)
3192 	{
3193 	  tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3194 	  rtx_insn *r;
3195 	  /* If asm goto has any labels in the fallthru basic block, use
3196 	     a label that we emit immediately after the asm goto.  Expansion
3197 	     may insert further instructions into the same basic block after
3198 	     asm goto and if we don't do this, insertion of instructions on
3199 	     the fallthru edge might misbehave.  See PR58670.  */
3200 	  if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3201 	    {
3202 	      if (fallthru_label == NULL_RTX)
3203 	        fallthru_label = gen_label_rtx ();
3204 	      r = fallthru_label;
3205 	    }
3206 	  else
3207 	    r = label_rtx (label);
3208 	  ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3209 	}
3210     }
3211 
3212   /* Now, for each output, construct an rtx
3213      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3214 			       ARGVEC CONSTRAINTS OPNAMES))
3215      If there is more than one, put them inside a PARALLEL.  */
3216 
3217   if (nlabels > 0 && nclobbers == 0)
3218     {
3219       gcc_assert (noutputs == 0);
3220       emit_jump_insn (body);
3221     }
3222   else if (noutputs == 0 && nclobbers == 0)
3223     {
3224       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
3225       emit_insn (body);
3226     }
3227   else if (noutputs == 1 && nclobbers == 0)
3228     {
3229       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3230       emit_insn (gen_rtx_SET (output_rvec[0], body));
3231     }
3232   else
3233     {
3234       rtx obody = body;
3235       int num = noutputs;
3236 
3237       if (num == 0)
3238 	num = 1;
3239 
3240       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3241 
3242       /* For each output operand, store a SET.  */
3243       for (i = 0; i < noutputs; ++i)
3244 	{
3245 	  rtx src, o = output_rvec[i];
3246 	  if (i == 0)
3247 	    {
3248 	      ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3249 	      src = obody;
3250 	    }
3251 	  else
3252 	    {
3253 	      src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3254 					  ASM_OPERANDS_TEMPLATE (obody),
3255 					  constraints[i], i, argvec,
3256 					  constraintvec, labelvec, locus);
3257 	      MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3258 	    }
3259 	  XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3260 	}
3261 
3262       /* If there are no outputs (but there are some clobbers)
3263 	 store the bare ASM_OPERANDS into the PARALLEL.  */
3264       if (i == 0)
3265 	XVECEXP (body, 0, i++) = obody;
3266 
3267       /* Store (clobber REG) for each clobbered register specified.  */
3268       for (unsigned j = 0; j < nclobbers; ++j)
3269 	{
3270 	  rtx clobbered_reg = clobber_rvec[j];
3271 
3272 	  /* Do a sanity check for overlap between clobbers and inputs or
3273 	     outputs that hasn't been handled.  Such overlap
3274 	     should have been detected and reported above.  */
3275 	  if (!clobber_conflict_found && REG_P (clobbered_reg))
3276 	    {
3277 	      /* We test the old body (obody) contents to avoid
3278 		 tripping over the under-construction body.  */
3279 	      for (unsigned k = 0; k < noutputs; ++k)
3280 		if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3281 		  internal_error ("asm clobber conflict with output operand");
3282 
3283 	      for (unsigned k = 0; k < ninputs - ninout; ++k)
3284 		if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3285 		  internal_error ("asm clobber conflict with input operand");
3286 	    }
3287 
3288 	  XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3289 	}
3290 
3291       if (nlabels > 0)
3292 	emit_jump_insn (body);
3293       else
3294 	emit_insn (body);
3295     }
3296 
3297   generating_concat_p = old_generating_concat_p;
3298 
3299   if (fallthru_label)
3300     emit_label (fallthru_label);
3301 
3302   if (after_md_seq)
3303     emit_insn (after_md_seq);
3304   if (after_rtl_seq)
3305     emit_insn (after_rtl_seq);
3306 
3307   free_temp_slots ();
3308   crtl->has_asm_statement = 1;
3309 }
3310 
3311 /* Emit code to jump to the address
3312    specified by the pointer expression EXP.  */
3313 
3314 static void
3315 expand_computed_goto (tree exp)
3316 {
3317   rtx x = expand_normal (exp);
3318 
3319   do_pending_stack_adjust ();
3320   emit_indirect_jump (x);
3321 }
3322 
3323 /* Generate RTL code for a `goto' statement with target label LABEL.
3324    LABEL should be a LABEL_DECL tree node that was or will later be
3325    defined with `expand_label'.  */
3326 
3327 static void
3328 expand_goto (tree label)
3329 {
3330   if (flag_checking)
3331     {
3332       /* Check for a nonlocal goto to a containing function.  Should have
3333 	 gotten translated to __builtin_nonlocal_goto.  */
3334       tree context = decl_function_context (label);
3335       gcc_assert (!context || context == current_function_decl);
3336     }
3337 
3338   emit_jump (jump_target_rtx (label));
3339 }
3340 
3341 /* Output a return with no value.  */
3342 
3343 static void
3344 expand_null_return_1 (void)
3345 {
3346   clear_pending_stack_adjust ();
3347   do_pending_stack_adjust ();
3348   emit_jump (return_label);
3349 }
3350 
3351 /* Generate RTL to return from the current function, with no value.
3352    (That is, we do not do anything about returning any value.)  */
3353 
3354 void
3355 expand_null_return (void)
3356 {
3357   /* If this function was declared to return a value, but we
3358      didn't, clobber the return registers so that they are not
3359      propagated live to the rest of the function.  */
3360   clobber_return_register ();
3361 
3362   expand_null_return_1 ();
3363 }
3364 
3365 /* Generate RTL to return from the current function, with value VAL.  */
3366 
3367 static void
3368 expand_value_return (rtx val)
3369 {
3370   /* Copy the value to the return location unless it's already there.  */
3371 
3372   tree decl = DECL_RESULT (current_function_decl);
3373   rtx return_reg = DECL_RTL (decl);
3374   if (return_reg != val)
3375     {
3376       tree funtype = TREE_TYPE (current_function_decl);
3377       tree type = TREE_TYPE (decl);
3378       int unsignedp = TYPE_UNSIGNED (type);
3379       machine_mode old_mode = DECL_MODE (decl);
3380       machine_mode mode;
3381       if (DECL_BY_REFERENCE (decl))
3382         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3383       else
3384         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3385 
3386       if (mode != old_mode)
3387 	val = convert_modes (mode, old_mode, val, unsignedp);
3388 
3389       if (GET_CODE (return_reg) == PARALLEL)
3390 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3391       else
3392 	emit_move_insn (return_reg, val);
3393     }
3394 
3395   expand_null_return_1 ();
3396 }
3397 
3398 /* Generate RTL to evaluate the expression RETVAL and return it
3399    from the current function.  */
3400 
3401 static void
3402 expand_return (tree retval, tree bounds)
3403 {
3404   rtx result_rtl;
3405   rtx val = 0;
3406   tree retval_rhs;
3407   rtx bounds_rtl;
3408 
3409   /* If function wants no value, give it none.  */
3410   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3411     {
3412       expand_normal (retval);
3413       expand_null_return ();
3414       return;
3415     }
3416 
3417   if (retval == error_mark_node)
3418     {
3419       /* Treat this like a return of no value from a function that
3420 	 returns a value.  */
3421       expand_null_return ();
3422       return;
3423     }
3424   else if ((TREE_CODE (retval) == MODIFY_EXPR
3425 	    || TREE_CODE (retval) == INIT_EXPR)
3426 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3427     retval_rhs = TREE_OPERAND (retval, 1);
3428   else
3429     retval_rhs = retval;
3430 
3431   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3432 
3433   /* Put returned bounds to the right place.  */
3434   bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3435   if (bounds_rtl)
3436     {
3437       rtx addr = NULL;
3438       rtx bnd = NULL;
3439 
3440       if (bounds && bounds != error_mark_node)
3441 	{
3442 	  bnd = expand_normal (bounds);
3443 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3444 	}
3445       else if (REG_P (bounds_rtl))
3446 	{
3447 	  if (bounds)
3448 	    bnd = chkp_expand_zero_bounds ();
3449 	  else
3450 	    {
3451 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3452 	      addr = gen_rtx_MEM (Pmode, addr);
3453 	      bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3454 	    }
3455 
3456 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3457 	}
3458       else
3459 	{
3460 	  int n;
3461 
3462 	  gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3463 
3464 	  if (bounds)
3465 	    bnd = chkp_expand_zero_bounds ();
3466 	  else
3467 	    {
3468 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3469 	      addr = gen_rtx_MEM (Pmode, addr);
3470 	    }
3471 
3472 	  for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3473 	    {
3474 	      rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3475 	      if (!bounds)
3476 		{
3477 		  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3478 		  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3479 		  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3480 		}
3481 	      targetm.calls.store_returned_bounds (slot, bnd);
3482 	    }
3483 	}
3484     }
3485   else if (chkp_function_instrumented_p (current_function_decl)
3486 	   && !BOUNDED_P (retval_rhs)
3487 	   && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3488 	   && TREE_CODE (retval_rhs) != RESULT_DECL)
3489     {
3490       rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3491       addr = gen_rtx_MEM (Pmode, addr);
3492 
3493       gcc_assert (MEM_P (result_rtl));
3494 
3495       chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3496     }
3497 
3498   /* If we are returning the RESULT_DECL, then the value has already
3499      been stored into it, so we don't have to do anything special.  */
3500   if (TREE_CODE (retval_rhs) == RESULT_DECL)
3501     expand_value_return (result_rtl);
3502 
3503   /* If the result is an aggregate that is being returned in one (or more)
3504      registers, load the registers here.  */
3505 
3506   else if (retval_rhs != 0
3507 	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3508 	   && REG_P (result_rtl))
3509     {
3510       val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3511       if (val)
3512 	{
3513 	  /* Use the mode of the result value on the return register.  */
3514 	  PUT_MODE (result_rtl, GET_MODE (val));
3515 	  expand_value_return (val);
3516 	}
3517       else
3518 	expand_null_return ();
3519     }
3520   else if (retval_rhs != 0
3521 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3522 	   && (REG_P (result_rtl)
3523 	       || (GET_CODE (result_rtl) == PARALLEL)))
3524     {
3525       /* Compute the return value into a temporary (usually a pseudo reg).  */
3526       val
3527 	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3528       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3529       val = force_not_mem (val);
3530       expand_value_return (val);
3531     }
3532   else
3533     {
3534       /* No hard reg used; calculate value into hard return reg.  */
3535       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3536       expand_value_return (result_rtl);
3537     }
3538 }
3539 
3540 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3541    STMT that doesn't require special handling for outgoing edges.  That
3542    is, no tail calls and no GIMPLE_COND.  */
3543 
3544 static void
3545 expand_gimple_stmt_1 (gimple *stmt)
3546 {
3547   tree op0;
3548 
3549   set_curr_insn_location (gimple_location (stmt));
3550 
3551   switch (gimple_code (stmt))
3552     {
3553     case GIMPLE_GOTO:
3554       op0 = gimple_goto_dest (stmt);
3555       if (TREE_CODE (op0) == LABEL_DECL)
3556 	expand_goto (op0);
3557       else
3558 	expand_computed_goto (op0);
3559       break;
3560     case GIMPLE_LABEL:
3561       expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3562       break;
3563     case GIMPLE_NOP:
3564     case GIMPLE_PREDICT:
3565       break;
3566     case GIMPLE_SWITCH:
3567       expand_case (as_a <gswitch *> (stmt));
3568       break;
3569     case GIMPLE_ASM:
3570       expand_asm_stmt (as_a <gasm *> (stmt));
3571       break;
3572     case GIMPLE_CALL:
3573       expand_call_stmt (as_a <gcall *> (stmt));
3574       break;
3575 
3576     case GIMPLE_RETURN:
3577       {
3578 	tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3579 	op0 = gimple_return_retval (as_a <greturn *> (stmt));
3580 
3581 	if (op0 && op0 != error_mark_node)
3582 	  {
3583 	    tree result = DECL_RESULT (current_function_decl);
3584 
3585 	    /* Mark that we have a return statement with missing bounds.  */
3586 	    if (!bnd
3587 		&& chkp_function_instrumented_p (cfun->decl)
3588 		&& !DECL_P (op0))
3589 	      bnd = error_mark_node;
3590 
3591 	    /* If we are not returning the current function's RESULT_DECL,
3592 	       build an assignment to it.  */
3593 	    if (op0 != result)
3594 	      {
3595 		/* I believe that a function's RESULT_DECL is unique.  */
3596 		gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3597 
3598 		/* ??? We'd like to use simply expand_assignment here,
3599 		   but this fails if the value is of BLKmode but the return
3600 		   decl is a register.  expand_return has special handling
3601 		   for this combination, which eventually should move
3602 		   to common code.  See comments there.  Until then, let's
3603 		   build a modify expression :-/  */
3604 		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3605 			      result, op0);
3606 	      }
3607 	  }
3608 
3609 	if (!op0)
3610 	  expand_null_return ();
3611 	else
3612 	  expand_return (op0, bnd);
3613       }
3614       break;
3615 
3616     case GIMPLE_ASSIGN:
3617       {
3618 	gassign *assign_stmt = as_a <gassign *> (stmt);
3619 	tree lhs = gimple_assign_lhs (assign_stmt);
3620 
3621 	/* Tree expand used to fiddle with |= and &= of two bitfield
3622 	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
3623 	   of binary assigns must be a gimple reg.  */
3624 
3625 	if (TREE_CODE (lhs) != SSA_NAME
3626 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
3627 	       == GIMPLE_SINGLE_RHS)
3628 	  {
3629 	    tree rhs = gimple_assign_rhs1 (assign_stmt);
3630 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3631 			== GIMPLE_SINGLE_RHS);
3632 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3633 		/* Do not put locations on possibly shared trees.  */
3634 		&& !is_gimple_min_invariant (rhs))
3635 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3636 	    if (TREE_CLOBBER_P (rhs))
3637 	      /* This is a clobber to mark the going out of scope for
3638 		 this LHS.  */
3639 	      ;
3640 	    else
3641 	      expand_assignment (lhs, rhs,
3642 				 gimple_assign_nontemporal_move_p (
3643 				   assign_stmt));
3644 	  }
3645 	else
3646 	  {
3647 	    rtx target, temp;
3648 	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3649 	    struct separate_ops ops;
3650 	    bool promoted = false;
3651 
3652 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3653 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3654 	      promoted = true;
3655 
3656 	    ops.code = gimple_assign_rhs_code (assign_stmt);
3657 	    ops.type = TREE_TYPE (lhs);
3658 	    switch (get_gimple_rhs_class (ops.code))
3659 	      {
3660 		case GIMPLE_TERNARY_RHS:
3661 		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
3662 		  /* Fallthru */
3663 		case GIMPLE_BINARY_RHS:
3664 		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
3665 		  /* Fallthru */
3666 		case GIMPLE_UNARY_RHS:
3667 		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
3668 		  break;
3669 		default:
3670 		  gcc_unreachable ();
3671 	      }
3672 	    ops.location = gimple_location (stmt);
3673 
3674 	    /* If we want to use a nontemporal store, force the value into
3675 	       a register first.  If we store into a promoted register,
3676 	       don't expand directly to the target.  */
3677 	    temp = nontemporal || promoted ? NULL_RTX : target;
3678 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3679 				       EXPAND_NORMAL);
3680 
3681 	    if (temp == target)
3682 	      ;
3683 	    else if (promoted)
3684 	      {
3685 		int unsignedp = SUBREG_PROMOTED_SIGN (target);
3686 		/* If TEMP is a VOIDmode constant, use convert_modes to make
3687 		   sure that we properly convert it.  */
3688 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3689 		  {
3690 		    temp = convert_modes (GET_MODE (target),
3691 					  TYPE_MODE (ops.type),
3692 					  temp, unsignedp);
3693 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3694 					  GET_MODE (target), temp, unsignedp);
3695 		  }
3696 
3697 		convert_move (SUBREG_REG (target), temp, unsignedp);
3698 	      }
3699 	    else if (nontemporal && emit_storent_insn (target, temp))
3700 	      ;
3701 	    else
3702 	      {
3703 		temp = force_operand (temp, target);
3704 		if (temp != target)
3705 		  emit_move_insn (target, temp);
3706 	      }
3707 	  }
3708       }
3709       break;
3710 
3711     default:
3712       gcc_unreachable ();
3713     }
3714 }
3715 
3716 /* Expand one gimple statement STMT and return the last RTL instruction
3717    before any of the newly generated ones.
3718 
3719    In addition to generating the necessary RTL instructions this also
3720    sets REG_EH_REGION notes if necessary and sets the current source
3721    location for diagnostics.  */
3722 
3723 static rtx_insn *
3724 expand_gimple_stmt (gimple *stmt)
3725 {
3726   location_t saved_location = input_location;
3727   rtx_insn *last = get_last_insn ();
3728   int lp_nr;
3729 
3730   gcc_assert (cfun);
3731 
3732   /* We need to save and restore the current source location so that errors
3733      discovered during expansion are emitted with the right location.  But
3734      it would be better if the diagnostic routines used the source location
3735      embedded in the tree nodes rather than globals.  */
3736   if (gimple_has_location (stmt))
3737     input_location = gimple_location (stmt);
3738 
3739   expand_gimple_stmt_1 (stmt);
3740 
3741   /* Free any temporaries used to evaluate this statement.  */
3742   free_temp_slots ();
3743 
3744   input_location = saved_location;
3745 
3746   /* Mark all insns that may trap.  */
3747   lp_nr = lookup_stmt_eh_lp (stmt);
3748   if (lp_nr)
3749     {
3750       rtx_insn *insn;
3751       for (insn = next_real_insn (last); insn;
3752 	   insn = next_real_insn (insn))
3753 	{
3754 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3755 	      /* If we want exceptions for non-call insns, any
3756 		 may_trap_p instruction may throw.  */
3757 	      && GET_CODE (PATTERN (insn)) != CLOBBER
3758 	      && GET_CODE (PATTERN (insn)) != USE
3759 	      && insn_could_throw_p (insn))
3760 	    make_reg_eh_region_note (insn, 0, lp_nr);
3761 	}
3762     }
3763 
3764   return last;
3765 }
3766 
3767 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
3768    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
3769    generated a tail call (something that might be denied by the ABI
3770    rules governing the call; see calls.c).
3771 
3772    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3773    can still reach the rest of BB.  The case here is __builtin_sqrt,
3774    where the NaN result goes through the external function (with a
3775    tailcall) and the normal result happens via a sqrt instruction.  */
3776 
3777 static basic_block
3778 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3779 {
3780   rtx_insn *last2, *last;
3781   edge e;
3782   edge_iterator ei;
3783   int probability;
3784   gcov_type count;
3785 
3786   last2 = last = expand_gimple_stmt (stmt);
3787 
3788   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3789     if (CALL_P (last) && SIBLING_CALL_P (last))
3790       goto found;
3791 
3792   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3793 
3794   *can_fallthru = true;
3795   return NULL;
3796 
3797  found:
3798   /* ??? Wouldn't it be better to just reset any pending stack adjust?
3799      Any instructions emitted here are about to be deleted.  */
3800   do_pending_stack_adjust ();
3801 
3802   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
3803   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
3804      EH or abnormal edges, we shouldn't have created a tail call in
3805      the first place.  So it seems to me we should just be removing
3806      all edges here, or redirecting the existing fallthru edge to
3807      the exit block.  */
3808 
3809   probability = 0;
3810   count = 0;
3811 
3812   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3813     {
3814       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3815 	{
3816 	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3817 	    {
3818 	      e->dest->count -= e->count;
3819 	      e->dest->frequency -= EDGE_FREQUENCY (e);
3820 	      if (e->dest->count < 0)
3821 		e->dest->count = 0;
3822 	      if (e->dest->frequency < 0)
3823 		e->dest->frequency = 0;
3824 	    }
3825 	  count += e->count;
3826 	  probability += e->probability;
3827 	  remove_edge (e);
3828 	}
3829       else
3830 	ei_next (&ei);
3831     }
3832 
3833   /* This is somewhat ugly: the call_expr expander often emits instructions
3834      after the sibcall (to perform the function return).  These confuse the
3835      find_many_sub_basic_blocks code, so we need to get rid of them.  */
3836   last = NEXT_INSN (last);
3837   gcc_assert (BARRIER_P (last));
3838 
3839   *can_fallthru = false;
3840   while (NEXT_INSN (last))
3841     {
3842       /* For instance, the sqrt builtin expander expands an if with a
3843 	 sibcall in the then-branch and a label for the else-branch.  */
3844       if (LABEL_P (NEXT_INSN (last)))
3845 	{
3846 	  *can_fallthru = true;
3847 	  break;
3848 	}
3849       delete_insn (NEXT_INSN (last));
3850     }
3851 
3852   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3853 		 | EDGE_SIBCALL);
3854   e->probability += probability;
3855   e->count += count;
3856   BB_END (bb) = last;
3857   update_bb_for_insn (bb);
3858 
3859   if (NEXT_INSN (last))
3860     {
3861       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3862 
3863       last = BB_END (bb);
3864       if (BARRIER_P (last))
3865 	BB_END (bb) = PREV_INSN (last);
3866     }
3867 
3868   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3869 
3870   return bb;
3871 }
3872 
3873 /* Return the difference between the floor and the truncated result of
3874    a signed division by OP1 with remainder MOD.  */
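/* Illustrative worked case: -7 / 2 truncates to -3 (remainder -1) while
   the floor result is -4, so the adjustment is -1.  When the remainder
   is zero, or the dividend and divisor have the same sign, the
   adjustment is 0.  */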
3875 static rtx
3876 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3877 {
3878   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3879   return gen_rtx_IF_THEN_ELSE
3880     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3881      gen_rtx_IF_THEN_ELSE
3882      (mode, gen_rtx_LT (BImode,
3883 			gen_rtx_DIV (mode, op1, mod),
3884 			const0_rtx),
3885       constm1_rtx, const0_rtx),
3886      const0_rtx);
3887 }
3888 
3889 /* Return the difference between the ceil and the truncated result of
3890    a signed division by OP1 with remainder MOD.  */
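/* Illustrative worked case: 7 / 2 truncates to 3 (remainder 1) while
   the ceiling result is 4, so the adjustment is 1.  When the remainder
   is zero, or the dividend and divisor have opposite signs, the
   adjustment is 0.  */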
3891 static rtx
3892 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3893 {
3894   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3895   return gen_rtx_IF_THEN_ELSE
3896     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3897      gen_rtx_IF_THEN_ELSE
3898      (mode, gen_rtx_GT (BImode,
3899 			gen_rtx_DIV (mode, op1, mod),
3900 			const0_rtx),
3901       const1_rtx, const0_rtx),
3902      const0_rtx);
3903 }
3904 
3905 /* Return the difference between the ceil and the truncated result of
3906    an unsigned division by OP1 with remainder MOD.  */
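/* Illustrative worked case: 7 / 2 truncates to 3 (remainder 1) while
   the ceiling result is 4, so the adjustment is 1 whenever the
   remainder is nonzero, and 0 otherwise.  */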
3907 static rtx
3908 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3909 {
3910   /* (mod != 0 ? 1 : 0) */
3911   return gen_rtx_IF_THEN_ELSE
3912     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3913      const1_rtx, const0_rtx);
3914 }
3915 
3916 /* Return the difference between the rounded and the truncated result
3917    of a signed division by OP1 with remainder MOD.  Halfway cases are
3918    rounded away from zero, rather than to the nearest even number.  */
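/* Illustrative worked cases: 7 / 2 truncates to 3 (remainder 1) but
   rounds to 4, so the adjustment is 1; -7 / 2 truncates to -3 but
   rounds to -4, so the adjustment is -1; 7 / 3 already rounds to its
   truncated result 2, so the adjustment is 0.  */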
3919 static rtx
3920 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3921 {
3922   /* (abs (mod) >= abs (op1) - abs (mod)
3923       ? (op1 / mod > 0 ? 1 : -1)
3924       : 0) */
3925   return gen_rtx_IF_THEN_ELSE
3926     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3927 		       gen_rtx_MINUS (mode,
3928 				      gen_rtx_ABS (mode, op1),
3929 				      gen_rtx_ABS (mode, mod))),
3930      gen_rtx_IF_THEN_ELSE
3931      (mode, gen_rtx_GT (BImode,
3932 			gen_rtx_DIV (mode, op1, mod),
3933 			const0_rtx),
3934       const1_rtx, constm1_rtx),
3935      const0_rtx);
3936 }
3937 
3938 /* Return the difference between the rounded and the truncated result
3939    of an unsigned division by OP1 with remainder MOD.  Halfway cases
3940    are rounded away from zero, rather than to the nearest even
3941    number.  */
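/* Illustrative worked cases: 8 / 3 truncates to 2 (remainder 2) but
   rounds to 3, so the adjustment is 1; 7 / 3 (remainder 1) already
   rounds to its truncated result 2, so the adjustment is 0.  */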
3942 static rtx
3943 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3944 {
3945   /* (mod >= op1 - mod ? 1 : 0) */
3946   return gen_rtx_IF_THEN_ELSE
3947     (mode, gen_rtx_GE (BImode, mod,
3948 		       gen_rtx_MINUS (mode, op1, mod)),
3949      const1_rtx, const0_rtx);
3950 }
3951 
3952 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3953    any rtl.  */
3954 
3955 static rtx
3956 convert_debug_memory_address (machine_mode mode, rtx x,
3957 			      addr_space_t as)
3958 {
3959   machine_mode xmode = GET_MODE (x);
3960 
3961 #ifndef POINTERS_EXTEND_UNSIGNED
3962   gcc_assert (mode == Pmode
3963 	      || mode == targetm.addr_space.address_mode (as));
3964   gcc_assert (xmode == mode || xmode == VOIDmode);
3965 #else
3966   rtx temp;
3967 
3968   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3969 
3970   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3971     return x;
3972 
3973   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3974     x = lowpart_subreg (mode, x, xmode);
3975   else if (POINTERS_EXTEND_UNSIGNED > 0)
3976     x = gen_rtx_ZERO_EXTEND (mode, x);
3977   else if (!POINTERS_EXTEND_UNSIGNED)
3978     x = gen_rtx_SIGN_EXTEND (mode, x);
3979   else
3980     {
3981       switch (GET_CODE (x))
3982 	{
3983 	case SUBREG:
3984 	  if ((SUBREG_PROMOTED_VAR_P (x)
3985 	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3986 	       || (GET_CODE (SUBREG_REG (x)) == PLUS
3987 		   && REG_P (XEXP (SUBREG_REG (x), 0))
3988 		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3989 		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3990 	      && GET_MODE (SUBREG_REG (x)) == mode)
3991 	    return SUBREG_REG (x);
3992 	  break;
3993 	case LABEL_REF:
3994 	  temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
3995 	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3996 	  return temp;
3997 	case SYMBOL_REF:
3998 	  temp = shallow_copy_rtx (x);
3999 	  PUT_MODE (temp, mode);
4000 	  return temp;
4001 	case CONST:
4002 	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4003 	  if (temp)
4004 	    temp = gen_rtx_CONST (mode, temp);
4005 	  return temp;
4006 	case PLUS:
4007 	case MINUS:
4008 	  if (CONST_INT_P (XEXP (x, 1)))
4009 	    {
4010 	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4011 	      if (temp)
4012 		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4013 	    }
4014 	  break;
4015 	default:
4016 	  break;
4017 	}
4018       /* Don't know how to express ptr_extend as operation in debug info.  */
4019       return NULL;
4020     }
4021 #endif /* POINTERS_EXTEND_UNSIGNED */
4022 
4023   return x;
4024 }
4025 
4026 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4027    by avoid_deep_ter_for_debug.  */
4028 
4029 static hash_map<tree, tree> *deep_ter_debug_map;
4030 
4031 /* Split overly deep TER chains for debug stmts using debug temporaries.  */
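/* A hedged sketch of the idea (the GIMPLE below is only schematic and the
   names are invented for illustration): given a chain of single-use
   definitions such as

       _1 = x_0 + 1;
       _2 = _1 * 2;
       ...
       _8 = _7 - 3;
       # DEBUG d => _8

   forwarding every definition into the debug bind would build a very deep
   expression, so past a fixed depth an intermediate SSA name is instead
   bound to a debug temporary and the outer expression refers to that
   temporary, e.g.

       # DEBUG D#1 => _1
       # DEBUG d => ... D#1 ...  */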
4032 
4033 static void
4034 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4035 {
4036   use_operand_p use_p;
4037   ssa_op_iter iter;
4038   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4039     {
4040       tree use = USE_FROM_PTR (use_p);
4041       if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4042 	continue;
4043       gimple *g = get_gimple_for_ssa_name (use);
4044       if (g == NULL)
4045 	continue;
4046       if (depth > 6 && !stmt_ends_bb_p (g))
4047 	{
4048 	  if (deep_ter_debug_map == NULL)
4049 	    deep_ter_debug_map = new hash_map<tree, tree>;
4050 
4051 	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4052 	  if (vexpr != NULL)
4053 	    continue;
4054 	  vexpr = make_node (DEBUG_EXPR_DECL);
4055 	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4056 	  DECL_ARTIFICIAL (vexpr) = 1;
4057 	  TREE_TYPE (vexpr) = TREE_TYPE (use);
4058 	  SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4059 	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
4060 	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4061 	  avoid_deep_ter_for_debug (def_temp, 0);
4062 	}
4063       else
4064 	avoid_deep_ter_for_debug (g, depth + 1);
4065     }
4066 }
4067 
4068 /* Return an RTX equivalent to the value of the parameter DECL.  */
4069 
4070 static rtx
4071 expand_debug_parm_decl (tree decl)
4072 {
4073   rtx incoming = DECL_INCOMING_RTL (decl);
4074 
4075   if (incoming
4076       && GET_MODE (incoming) != BLKmode
4077       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4078 	  || (MEM_P (incoming)
4079 	      && REG_P (XEXP (incoming, 0))
4080 	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
4081     {
4082       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4083 
4084 #ifdef HAVE_window_save
4085       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4086 	 If the target machine has an explicit window save instruction, the
4087 	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
4088       if (REG_P (incoming)
4089 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4090 	incoming
4091 	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4092 				OUTGOING_REGNO (REGNO (incoming)), 0);
4093       else if (MEM_P (incoming))
4094 	{
4095 	  rtx reg = XEXP (incoming, 0);
4096 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4097 	    {
4098 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4099 	      incoming = replace_equiv_address_nv (incoming, reg);
4100 	    }
4101 	  else
4102 	    incoming = copy_rtx (incoming);
4103 	}
4104 #endif
4105 
4106       ENTRY_VALUE_EXP (rtl) = incoming;
4107       return rtl;
4108     }
4109 
4110   if (incoming
4111       && GET_MODE (incoming) != BLKmode
4112       && !TREE_ADDRESSABLE (decl)
4113       && MEM_P (incoming)
4114       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4115 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
4116 	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4117 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4118     return copy_rtx (incoming);
4119 
4120   return NULL_RTX;
4121 }
4122 
4123 /* Return an RTX equivalent to the value of the tree expression EXP.  */
4124 
4125 static rtx
4126 expand_debug_expr (tree exp)
4127 {
4128   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4129   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4130   machine_mode inner_mode = VOIDmode;
4131   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4132   addr_space_t as;
4133 
4134   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4135     {
4136     case tcc_expression:
4137       switch (TREE_CODE (exp))
4138 	{
4139 	case COND_EXPR:
4140 	case DOT_PROD_EXPR:
4141 	case SAD_EXPR:
4142 	case WIDEN_MULT_PLUS_EXPR:
4143 	case WIDEN_MULT_MINUS_EXPR:
4144 	case FMA_EXPR:
4145 	  goto ternary;
4146 
4147 	case TRUTH_ANDIF_EXPR:
4148 	case TRUTH_ORIF_EXPR:
4149 	case TRUTH_AND_EXPR:
4150 	case TRUTH_OR_EXPR:
4151 	case TRUTH_XOR_EXPR:
4152 	  goto binary;
4153 
4154 	case TRUTH_NOT_EXPR:
4155 	  goto unary;
4156 
4157 	default:
4158 	  break;
4159 	}
4160       break;
4161 
4162     ternary:
4163       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4164       if (!op2)
4165 	return NULL_RTX;
4166       /* Fall through.  */
4167 
4168     binary:
4169     case tcc_binary:
4170       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4171       if (!op1)
4172 	return NULL_RTX;
4173       switch (TREE_CODE (exp))
4174 	{
4175 	case LSHIFT_EXPR:
4176 	case RSHIFT_EXPR:
4177 	case LROTATE_EXPR:
4178 	case RROTATE_EXPR:
4179 	case WIDEN_LSHIFT_EXPR:
4180 	  /* Ensure second operand isn't wider than the first one.  */
4181 	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4182 	  if (SCALAR_INT_MODE_P (inner_mode))
4183 	    {
4184 	      machine_mode opmode = mode;
4185 	      if (VECTOR_MODE_P (mode))
4186 		opmode = GET_MODE_INNER (mode);
4187 	      if (SCALAR_INT_MODE_P (opmode)
4188 		  && (GET_MODE_PRECISION (opmode)
4189 		      < GET_MODE_PRECISION (inner_mode)))
4190 		op1 = lowpart_subreg (opmode, op1, inner_mode);
4191 	    }
4192 	  break;
4193 	default:
4194 	  break;
4195 	}
4196       /* Fall through.  */
4197 
4198     unary:
4199     case tcc_unary:
4200       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4201       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4202       if (!op0)
4203 	return NULL_RTX;
4204       break;
4205 
4206     case tcc_comparison:
4207       unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4208       goto binary;
4209 
4210     case tcc_type:
4211     case tcc_statement:
4212       gcc_unreachable ();
4213 
4214     case tcc_constant:
4215     case tcc_exceptional:
4216     case tcc_declaration:
4217     case tcc_reference:
4218     case tcc_vl_exp:
4219       break;
4220     }
4221 
4222   switch (TREE_CODE (exp))
4223     {
4224     case STRING_CST:
4225       if (!lookup_constant_def (exp))
4226 	{
4227 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
4228 	      != (size_t) TREE_STRING_LENGTH (exp))
4229 	    return NULL_RTX;
4230 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4231 	  op0 = gen_rtx_MEM (BLKmode, op0);
4232 	  set_mem_attributes (op0, exp, 0);
4233 	  return op0;
4234 	}
4235       /* Fall through.  */
4236 
4237     case INTEGER_CST:
4238     case REAL_CST:
4239     case FIXED_CST:
4240       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4241       return op0;
4242 
4243     case COMPLEX_CST:
4244       gcc_assert (COMPLEX_MODE_P (mode));
4245       op0 = expand_debug_expr (TREE_REALPART (exp));
4246       op1 = expand_debug_expr (TREE_IMAGPART (exp));
4247       return gen_rtx_CONCAT (mode, op0, op1);
4248 
4249     case DEBUG_EXPR_DECL:
4250       op0 = DECL_RTL_IF_SET (exp);
4251 
4252       if (op0)
4253 	return op0;
4254 
4255       op0 = gen_rtx_DEBUG_EXPR (mode);
4256       DEBUG_EXPR_TREE_DECL (op0) = exp;
4257       SET_DECL_RTL (exp, op0);
4258 
4259       return op0;
4260 
4261     case VAR_DECL:
4262     case PARM_DECL:
4263     case FUNCTION_DECL:
4264     case LABEL_DECL:
4265     case CONST_DECL:
4266     case RESULT_DECL:
4267       op0 = DECL_RTL_IF_SET (exp);
4268 
4269       /* This decl was probably optimized away.  */
4270       if (!op0)
4271 	{
4272 	  if (!VAR_P (exp)
4273 	      || DECL_EXTERNAL (exp)
4274 	      || !TREE_STATIC (exp)
4275 	      || !DECL_NAME (exp)
4276 	      || DECL_HARD_REGISTER (exp)
4277 	      || DECL_IN_CONSTANT_POOL (exp)
4278 	      || mode == VOIDmode)
4279 	    return NULL;
4280 
4281 	  op0 = make_decl_rtl_for_debug (exp);
4282 	  if (!MEM_P (op0)
4283 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4284 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4285 	    return NULL;
4286 	}
4287       else
4288 	op0 = copy_rtx (op0);
4289 
4290       if (GET_MODE (op0) == BLKmode
4291 	  /* If op0 is not BLKmode, but mode is, adjust_mode
4292 	     below would ICE.  While it is likely a FE bug,
4293 	     try to be robust here.  See PR43166.  */
4294 	  || mode == BLKmode
4295 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4296 	{
4297 	  gcc_assert (MEM_P (op0));
4298 	  op0 = adjust_address_nv (op0, mode, 0);
4299 	  return op0;
4300 	}
4301 
4302       /* Fall through.  */
4303 
4304     adjust_mode:
4305     case PAREN_EXPR:
4306     CASE_CONVERT:
4307       {
4308 	inner_mode = GET_MODE (op0);
4309 
4310 	if (mode == inner_mode)
4311 	  return op0;
4312 
4313 	if (inner_mode == VOIDmode)
4314 	  {
4315 	    if (TREE_CODE (exp) == SSA_NAME)
4316 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
4317 	    else
4318 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4319 	    if (mode == inner_mode)
4320 	      return op0;
4321 	  }
4322 
4323 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4324 	  {
4325 	    if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4326 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4327 	    else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4328 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4329 	    else
4330 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4331 	  }
4332 	else if (FLOAT_MODE_P (mode))
4333 	  {
4334 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
4335 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4336 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4337 	    else
4338 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4339 	  }
4340 	else if (FLOAT_MODE_P (inner_mode))
4341 	  {
4342 	    if (unsignedp)
4343 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4344 	    else
4345 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4346 	  }
4347 	else if (CONSTANT_P (op0)
4348 		 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
4349 	  op0 = lowpart_subreg (mode, op0, inner_mode);
4350 	else if (UNARY_CLASS_P (exp)
4351 		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4352 		 : unsignedp)
4353 	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4354 	else
4355 	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4356 
4357 	return op0;
4358       }
4359 
4360     case MEM_REF:
4361       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4362 	{
4363 	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4364 				     TREE_OPERAND (exp, 0),
4365 				     TREE_OPERAND (exp, 1));
4366 	  if (newexp)
4367 	    return expand_debug_expr (newexp);
4368 	}
4369       /* FALLTHROUGH */
4370     case INDIRECT_REF:
4371       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4372       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4373       if (!op0)
4374 	return NULL;
4375 
4376       if (TREE_CODE (exp) == MEM_REF)
4377 	{
4378 	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4379 	      || (GET_CODE (op0) == PLUS
4380 		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4381 	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
4382 	       Instead just use get_inner_reference.  */
4383 	    goto component_ref;
4384 
4385 	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4386 	  if (!op1 || !CONST_INT_P (op1))
4387 	    return NULL;
4388 
4389 	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4390 	}
4391 
4392       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4393 
4394       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4395 					  op0, as);
4396       if (op0 == NULL_RTX)
4397 	return NULL;
4398 
4399       op0 = gen_rtx_MEM (mode, op0);
4400       set_mem_attributes (op0, exp, 0);
4401       if (TREE_CODE (exp) == MEM_REF
4402 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4403 	set_mem_expr (op0, NULL_TREE);
4404       set_mem_addr_space (op0, as);
4405 
4406       return op0;
4407 
4408     case TARGET_MEM_REF:
4409       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4410 	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4411 	return NULL;
4412 
4413       op0 = expand_debug_expr
4414 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4415       if (!op0)
4416 	return NULL;
4417 
4418       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4419       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4420 					  op0, as);
4421       if (op0 == NULL_RTX)
4422 	return NULL;
4423 
4424       op0 = gen_rtx_MEM (mode, op0);
4425 
4426       set_mem_attributes (op0, exp, 0);
4427       set_mem_addr_space (op0, as);
4428 
4429       return op0;
4430 
4431     component_ref:
4432     case ARRAY_REF:
4433     case ARRAY_RANGE_REF:
4434     case COMPONENT_REF:
4435     case BIT_FIELD_REF:
4436     case REALPART_EXPR:
4437     case IMAGPART_EXPR:
4438     case VIEW_CONVERT_EXPR:
4439       {
4440 	machine_mode mode1;
4441 	HOST_WIDE_INT bitsize, bitpos;
4442 	tree offset;
4443 	int reversep, volatilep = 0;
4444 	tree tem
4445 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4446 				 &unsignedp, &reversep, &volatilep);
4447 	rtx orig_op0;
4448 
4449 	if (bitsize == 0)
4450 	  return NULL;
4451 
4452 	orig_op0 = op0 = expand_debug_expr (tem);
4453 
4454 	if (!op0)
4455 	  return NULL;
4456 
4457 	if (offset)
4458 	  {
4459 	    machine_mode addrmode, offmode;
4460 
4461 	    if (!MEM_P (op0))
4462 	      return NULL;
4463 
4464 	    op0 = XEXP (op0, 0);
4465 	    addrmode = GET_MODE (op0);
4466 	    if (addrmode == VOIDmode)
4467 	      addrmode = Pmode;
4468 
4469 	    op1 = expand_debug_expr (offset);
4470 	    if (!op1)
4471 	      return NULL;
4472 
4473 	    offmode = GET_MODE (op1);
4474 	    if (offmode == VOIDmode)
4475 	      offmode = TYPE_MODE (TREE_TYPE (offset));
4476 
4477 	    if (addrmode != offmode)
4478 	      op1 = lowpart_subreg (addrmode, op1, offmode);
4479 
4480 	    /* Don't use offset_address here, we don't need a
4481 	       recognizable address, and we don't want to generate
4482 	       code.  */
4483 	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4484 							  op0, op1));
4485 	  }
4486 
4487 	if (MEM_P (op0))
4488 	  {
4489 	    if (mode1 == VOIDmode)
4490 	      /* Bitfield.  */
4491 	      mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4492 	    if (bitpos >= BITS_PER_UNIT)
4493 	      {
4494 		op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4495 		bitpos %= BITS_PER_UNIT;
4496 	      }
4497 	    else if (bitpos < 0)
4498 	      {
4499 		HOST_WIDE_INT units
4500 		  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4501 		op0 = adjust_address_nv (op0, mode1, -units);
4502 		bitpos += units * BITS_PER_UNIT;
4503 	      }
4504 	    else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4505 	      op0 = adjust_address_nv (op0, mode, 0);
4506 	    else if (GET_MODE (op0) != mode1)
4507 	      op0 = adjust_address_nv (op0, mode1, 0);
4508 	    else
4509 	      op0 = copy_rtx (op0);
4510 	    if (op0 == orig_op0)
4511 	      op0 = shallow_copy_rtx (op0);
4512 	    set_mem_attributes (op0, exp, 0);
4513 	  }
4514 
4515 	if (bitpos == 0 && mode == GET_MODE (op0))
4516 	  return op0;
4517 
4518         if (bitpos < 0)
4519           return NULL;
4520 
4521 	if (GET_MODE (op0) == BLKmode)
4522 	  return NULL;
4523 
4524 	if ((bitpos % BITS_PER_UNIT) == 0
4525 	    && bitsize == GET_MODE_BITSIZE (mode1))
4526 	  {
4527 	    machine_mode opmode = GET_MODE (op0);
4528 
4529 	    if (opmode == VOIDmode)
4530 	      opmode = TYPE_MODE (TREE_TYPE (tem));
4531 
4532 	    /* This condition may hold if we're expanding the address
4533 	       right past the end of an array that turned out not to
4534 	       be addressable (i.e., the address was only computed in
4535 	       debug stmts).  The gen_subreg below would rightfully
4536 	       crash, and the address doesn't really exist, so just
4537 	       drop it.  */
4538 	    if (bitpos >= GET_MODE_BITSIZE (opmode))
4539 	      return NULL;
4540 
4541 	    if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4542 	      return simplify_gen_subreg (mode, op0, opmode,
4543 					  bitpos / BITS_PER_UNIT);
4544 	  }
4545 
4546 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4547 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
4548 				     ? SIGN_EXTRACT
4549 				     : ZERO_EXTRACT, mode,
4550 				     GET_MODE (op0) != VOIDmode
4551 				     ? GET_MODE (op0)
4552 				     : TYPE_MODE (TREE_TYPE (tem)),
4553 				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
4554       }
4555 
4556     case ABS_EXPR:
4557       return simplify_gen_unary (ABS, mode, op0, mode);
4558 
4559     case NEGATE_EXPR:
4560       return simplify_gen_unary (NEG, mode, op0, mode);
4561 
4562     case BIT_NOT_EXPR:
4563       return simplify_gen_unary (NOT, mode, op0, mode);
4564 
4565     case FLOAT_EXPR:
4566       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4567 									 0)))
4568 				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4569 				 inner_mode);
4570 
4571     case FIX_TRUNC_EXPR:
4572       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4573 				 inner_mode);
4574 
4575     case POINTER_PLUS_EXPR:
4576       /* For the rare target where pointers are not the same size as
4577 	 size_t, we need to check for mis-matched modes and correct
4578 	 the addend.  */
4579       if (op0 && op1
4580 	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4581 	  && GET_MODE (op0) != GET_MODE (op1))
4582 	{
4583 	  if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4584 	      /* If OP0 has a partial mode, then we must truncate, even if it has
4585 		 the same bitsize as OP1, because GCC's representation of partial
4586 		 modes is opaque.  */
4587 	      || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4588 		  && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4589 	    op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4590 				      GET_MODE (op1));
4591 	  else
4592 	    /* We always sign-extend, regardless of the signedness of
4593 	       the operand, because the operand is always unsigned
4594 	       here even if the original C expression is signed.  */
4595 	    op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4596 				      GET_MODE (op1));
4597 	}
4598       /* Fall through.  */
4599     case PLUS_EXPR:
4600       return simplify_gen_binary (PLUS, mode, op0, op1);
4601 
4602     case MINUS_EXPR:
4603       return simplify_gen_binary (MINUS, mode, op0, op1);
4604 
4605     case MULT_EXPR:
4606       return simplify_gen_binary (MULT, mode, op0, op1);
4607 
4608     case RDIV_EXPR:
4609     case TRUNC_DIV_EXPR:
4610     case EXACT_DIV_EXPR:
4611       if (unsignedp)
4612 	return simplify_gen_binary (UDIV, mode, op0, op1);
4613       else
4614 	return simplify_gen_binary (DIV, mode, op0, op1);
4615 
4616     case TRUNC_MOD_EXPR:
4617       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4618 
4619     case FLOOR_DIV_EXPR:
4620       if (unsignedp)
4621 	return simplify_gen_binary (UDIV, mode, op0, op1);
4622       else
4623 	{
4624 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4625 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4626 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4627 	  return simplify_gen_binary (PLUS, mode, div, adj);
4628 	}
4629 
4630     case FLOOR_MOD_EXPR:
4631       if (unsignedp)
4632 	return simplify_gen_binary (UMOD, mode, op0, op1);
4633       else
4634 	{
4635 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4636 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4637 	  adj = simplify_gen_unary (NEG, mode,
4638 				    simplify_gen_binary (MULT, mode, adj, op1),
4639 				    mode);
4640 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4641 	}
4642 
4643     case CEIL_DIV_EXPR:
4644       if (unsignedp)
4645 	{
4646 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4647 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4648 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4649 	  return simplify_gen_binary (PLUS, mode, div, adj);
4650 	}
4651       else
4652 	{
4653 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4654 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4655 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4656 	  return simplify_gen_binary (PLUS, mode, div, adj);
4657 	}
4658 
4659     case CEIL_MOD_EXPR:
4660       if (unsignedp)
4661 	{
4662 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4663 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4664 	  adj = simplify_gen_unary (NEG, mode,
4665 				    simplify_gen_binary (MULT, mode, adj, op1),
4666 				    mode);
4667 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4668 	}
4669       else
4670 	{
4671 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4672 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4673 	  adj = simplify_gen_unary (NEG, mode,
4674 				    simplify_gen_binary (MULT, mode, adj, op1),
4675 				    mode);
4676 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4677 	}
4678 
4679     case ROUND_DIV_EXPR:
4680       if (unsignedp)
4681 	{
4682 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4683 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4684 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4685 	  return simplify_gen_binary (PLUS, mode, div, adj);
4686 	}
4687       else
4688 	{
4689 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4690 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4691 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4692 	  return simplify_gen_binary (PLUS, mode, div, adj);
4693 	}
4694 
4695     case ROUND_MOD_EXPR:
4696       if (unsignedp)
4697 	{
4698 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4699 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4700 	  adj = simplify_gen_unary (NEG, mode,
4701 				    simplify_gen_binary (MULT, mode, adj, op1),
4702 				    mode);
4703 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4704 	}
4705       else
4706 	{
4707 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4708 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4709 	  adj = simplify_gen_unary (NEG, mode,
4710 				    simplify_gen_binary (MULT, mode, adj, op1),
4711 				    mode);
4712 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4713 	}
4714 
4715     case LSHIFT_EXPR:
4716       return simplify_gen_binary (ASHIFT, mode, op0, op1);
4717 
4718     case RSHIFT_EXPR:
4719       if (unsignedp)
4720 	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4721       else
4722 	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4723 
4724     case LROTATE_EXPR:
4725       return simplify_gen_binary (ROTATE, mode, op0, op1);
4726 
4727     case RROTATE_EXPR:
4728       return simplify_gen_binary (ROTATERT, mode, op0, op1);
4729 
4730     case MIN_EXPR:
4731       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4732 
4733     case MAX_EXPR:
4734       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4735 
4736     case BIT_AND_EXPR:
4737     case TRUTH_AND_EXPR:
4738       return simplify_gen_binary (AND, mode, op0, op1);
4739 
4740     case BIT_IOR_EXPR:
4741     case TRUTH_OR_EXPR:
4742       return simplify_gen_binary (IOR, mode, op0, op1);
4743 
4744     case BIT_XOR_EXPR:
4745     case TRUTH_XOR_EXPR:
4746       return simplify_gen_binary (XOR, mode, op0, op1);
4747 
4748     case TRUTH_ANDIF_EXPR:
4749       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4750 
4751     case TRUTH_ORIF_EXPR:
4752       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4753 
4754     case TRUTH_NOT_EXPR:
4755       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4756 
4757     case LT_EXPR:
4758       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4759 				      op0, op1);
4760 
4761     case LE_EXPR:
4762       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4763 				      op0, op1);
4764 
4765     case GT_EXPR:
4766       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4767 				      op0, op1);
4768 
4769     case GE_EXPR:
4770       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4771 				      op0, op1);
4772 
4773     case EQ_EXPR:
4774       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4775 
4776     case NE_EXPR:
4777       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4778 
4779     case UNORDERED_EXPR:
4780       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4781 
4782     case ORDERED_EXPR:
4783       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4784 
4785     case UNLT_EXPR:
4786       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4787 
4788     case UNLE_EXPR:
4789       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4790 
4791     case UNGT_EXPR:
4792       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4793 
4794     case UNGE_EXPR:
4795       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4796 
4797     case UNEQ_EXPR:
4798       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4799 
4800     case LTGT_EXPR:
4801       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4802 
4803     case COND_EXPR:
4804       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4805 
4806     case COMPLEX_EXPR:
4807       gcc_assert (COMPLEX_MODE_P (mode));
4808       if (GET_MODE (op0) == VOIDmode)
4809 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4810       if (GET_MODE (op1) == VOIDmode)
4811 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4812       return gen_rtx_CONCAT (mode, op0, op1);
4813 
4814     case CONJ_EXPR:
4815       if (GET_CODE (op0) == CONCAT)
4816 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4817 			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4818 						   XEXP (op0, 1),
4819 						   GET_MODE_INNER (mode)));
4820       else
4821 	{
4822 	  machine_mode imode = GET_MODE_INNER (mode);
4823 	  rtx re, im;
4824 
4825 	  if (MEM_P (op0))
4826 	    {
4827 	      re = adjust_address_nv (op0, imode, 0);
4828 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4829 	    }
4830 	  else
4831 	    {
4832 	      machine_mode ifmode = int_mode_for_mode (mode);
4833 	      machine_mode ihmode = int_mode_for_mode (imode);
4834 	      rtx halfsize;
4835 	      if (ifmode == BLKmode || ihmode == BLKmode)
4836 		return NULL;
4837 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4838 	      re = op0;
4839 	      if (mode != ifmode)
4840 		re = gen_rtx_SUBREG (ifmode, re, 0);
4841 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4842 	      if (imode != ihmode)
4843 		re = gen_rtx_SUBREG (imode, re, 0);
4844 	      im = copy_rtx (op0);
4845 	      if (mode != ifmode)
4846 		im = gen_rtx_SUBREG (ifmode, im, 0);
4847 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4848 	      if (imode != ihmode)
4849 		im = gen_rtx_SUBREG (imode, im, 0);
4850 	    }
4851 	  im = gen_rtx_NEG (imode, im);
4852 	  return gen_rtx_CONCAT (mode, re, im);
4853 	}
4854 
4855     case ADDR_EXPR:
4856       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4857       if (!op0 || !MEM_P (op0))
4858 	{
4859 	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4860 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4861 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4862 	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4863 		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4864 	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4865 
4866 	  if (handled_component_p (TREE_OPERAND (exp, 0)))
4867 	    {
4868 	      HOST_WIDE_INT bitoffset, bitsize, maxsize;
4869 	      bool reverse;
4870 	      tree decl
4871 		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4872 					   &bitsize, &maxsize, &reverse);
4873 	      if ((VAR_P (decl)
4874 		   || TREE_CODE (decl) == PARM_DECL
4875 		   || TREE_CODE (decl) == RESULT_DECL)
4876 		  && (!TREE_ADDRESSABLE (decl)
4877 		      || target_for_debug_bind (decl))
4878 		  && (bitoffset % BITS_PER_UNIT) == 0
4879 		  && bitsize > 0
4880 		  && bitsize == maxsize)
4881 		{
4882 		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4883 		  return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4884 		}
4885 	    }
4886 
4887 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4888 	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4889 		 == ADDR_EXPR)
4890 	    {
4891 	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4892 						     0));
4893 	      if (op0 != NULL
4894 		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4895 		      || (GET_CODE (op0) == PLUS
4896 			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4897 			  && CONST_INT_P (XEXP (op0, 1)))))
4898 		{
4899 		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4900 							 1));
4901 		  if (!op1 || !CONST_INT_P (op1))
4902 		    return NULL;
4903 
4904 		  return plus_constant (mode, op0, INTVAL (op1));
4905 		}
4906 	    }
4907 
4908 	  return NULL;
4909 	}
4910 
4911       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4912       op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4913 
4914       return op0;
4915 
4916     case VECTOR_CST:
4917       {
4918 	unsigned i;
4919 
4920 	op0 = gen_rtx_CONCATN
4921 	  (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4922 
4923 	for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4924 	  {
4925 	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4926 	    if (!op1)
4927 	      return NULL;
4928 	    XVECEXP (op0, 0, i) = op1;
4929 	  }
4930 
4931 	return op0;
4932       }
4933 
4934     case CONSTRUCTOR:
4935       if (TREE_CLOBBER_P (exp))
4936 	return NULL;
4937       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4938 	{
4939 	  unsigned i;
4940 	  tree val;
4941 
4942 	  op0 = gen_rtx_CONCATN
4943 	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4944 
4945 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4946 	    {
4947 	      op1 = expand_debug_expr (val);
4948 	      if (!op1)
4949 		return NULL;
4950 	      XVECEXP (op0, 0, i) = op1;
4951 	    }
4952 
4953 	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4954 	    {
4955 	      op1 = expand_debug_expr
4956 		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4957 
4958 	      if (!op1)
4959 		return NULL;
4960 
4961 	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4962 		XVECEXP (op0, 0, i) = op1;
4963 	    }
4964 
4965 	  return op0;
4966 	}
4967       else
4968 	goto flag_unsupported;
4969 
4970     case CALL_EXPR:
4971       /* ??? Maybe handle some builtins?  */
4972       return NULL;
4973 
4974     case SSA_NAME:
4975       {
4976 	gimple *g = get_gimple_for_ssa_name (exp);
4977 	if (g)
4978 	  {
4979 	    tree t = NULL_TREE;
4980 	    if (deep_ter_debug_map)
4981 	      {
4982 		tree *slot = deep_ter_debug_map->get (exp);
4983 		if (slot)
4984 		  t = *slot;
4985 	      }
4986 	    if (t == NULL_TREE)
4987 	      t = gimple_assign_rhs_to_tree (g);
4988 	    op0 = expand_debug_expr (t);
4989 	    if (!op0)
4990 	      return NULL;
4991 	  }
4992 	else
4993 	  {
4994 	    /* If this is a reference to the incoming value of a
4995 	       parameter that is never used in the code, or whose
4996 	       incoming value is never used in the code, use the
4997 	       PARM_DECL's DECL_RTL if set.  */
4998 	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
4999 		&& SSA_NAME_VAR (exp)
5000 		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5001 		&& has_zero_uses (exp))
5002 	      {
5003 		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5004 		if (op0)
5005 		  goto adjust_mode;
5006 		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5007 		if (op0)
5008 		  goto adjust_mode;
5009 	      }
5010 
5011 	    int part = var_to_partition (SA.map, exp);
5012 
5013 	    if (part == NO_PARTITION)
5014 	      return NULL;
5015 
5016 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5017 
5018 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
5019 	  }
5020 	goto adjust_mode;
5021       }
5022 
5023     case ERROR_MARK:
5024       return NULL;
5025 
5026     /* Vector stuff.  For most of these tree codes we have no rtl codes.  */
5027     case REALIGN_LOAD_EXPR:
5028     case REDUC_MAX_EXPR:
5029     case REDUC_MIN_EXPR:
5030     case REDUC_PLUS_EXPR:
5031     case VEC_COND_EXPR:
5032     case VEC_PACK_FIX_TRUNC_EXPR:
5033     case VEC_PACK_SAT_EXPR:
5034     case VEC_PACK_TRUNC_EXPR:
5035     case VEC_UNPACK_FLOAT_HI_EXPR:
5036     case VEC_UNPACK_FLOAT_LO_EXPR:
5037     case VEC_UNPACK_HI_EXPR:
5038     case VEC_UNPACK_LO_EXPR:
5039     case VEC_WIDEN_MULT_HI_EXPR:
5040     case VEC_WIDEN_MULT_LO_EXPR:
5041     case VEC_WIDEN_MULT_EVEN_EXPR:
5042     case VEC_WIDEN_MULT_ODD_EXPR:
5043     case VEC_WIDEN_LSHIFT_HI_EXPR:
5044     case VEC_WIDEN_LSHIFT_LO_EXPR:
5045     case VEC_PERM_EXPR:
5046       return NULL;
5047 
5048     /* Misc codes.  */
5049     case ADDR_SPACE_CONVERT_EXPR:
5050     case FIXED_CONVERT_EXPR:
5051     case OBJ_TYPE_REF:
5052     case WITH_SIZE_EXPR:
5053     case BIT_INSERT_EXPR:
5054       return NULL;
5055 
5056     case DOT_PROD_EXPR:
5057       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5058 	  && SCALAR_INT_MODE_P (mode))
5059 	{
5060 	  op0
5061 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5062 									  0)))
5063 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5064 				  inner_mode);
5065 	  op1
5066 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5067 									  1)))
5068 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5069 				  inner_mode);
5070 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5071 	  return simplify_gen_binary (PLUS, mode, op0, op2);
5072 	}
5073       return NULL;
5074 
5075     case WIDEN_MULT_EXPR:
5076     case WIDEN_MULT_PLUS_EXPR:
5077     case WIDEN_MULT_MINUS_EXPR:
5078       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5079 	  && SCALAR_INT_MODE_P (mode))
5080 	{
5081 	  inner_mode = GET_MODE (op0);
5082 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5083 	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5084 	  else
5085 	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5086 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5087 	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5088 	  else
5089 	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5090 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5091 	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5092 	    return op0;
5093 	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5094 	    return simplify_gen_binary (PLUS, mode, op0, op2);
5095 	  else
5096 	    return simplify_gen_binary (MINUS, mode, op2, op0);
5097 	}
5098       return NULL;
5099 
5100     case MULT_HIGHPART_EXPR:
5101       /* ??? Similar to the above.  */
5102       return NULL;
5103 
5104     case WIDEN_SUM_EXPR:
5105     case WIDEN_LSHIFT_EXPR:
5106       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5107 	  && SCALAR_INT_MODE_P (mode))
5108 	{
5109 	  op0
5110 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5111 									  0)))
5112 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5113 				  inner_mode);
5114 	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5115 				      ? ASHIFT : PLUS, mode, op0, op1);
5116 	}
5117       return NULL;
5118 
5119     case FMA_EXPR:
5120       return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
5121 
5122     default:
5123     flag_unsupported:
5124       if (flag_checking)
5125 	{
5126 	  debug_tree (exp);
5127 	  gcc_unreachable ();
5128 	}
5129       return NULL;
5130     }
5131 }
5132 
5133 /* Return an RTX equivalent to the source bind value of the tree expression
5134    EXP.  */
5135 
5136 static rtx
5137 expand_debug_source_expr (tree exp)
5138 {
5139   rtx op0 = NULL_RTX;
5140   machine_mode mode = VOIDmode, inner_mode;
5141 
5142   switch (TREE_CODE (exp))
5143     {
5144     case PARM_DECL:
5145       {
5146 	mode = DECL_MODE (exp);
5147 	op0 = expand_debug_parm_decl (exp);
5148 	if (op0)
5149 	  break;
5150 	/* See if this isn't an argument that has been completely
5151 	   optimized out.  */
5152 	if (!DECL_RTL_SET_P (exp)
5153 	    && !DECL_INCOMING_RTL (exp)
5154 	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
5155 	  {
5156 	    tree aexp = DECL_ORIGIN (exp);
5157 	    if (DECL_CONTEXT (aexp)
5158 		== DECL_ABSTRACT_ORIGIN (current_function_decl))
5159 	      {
5160 		vec<tree, va_gc> **debug_args;
5161 		unsigned int ix;
5162 		tree ddecl;
5163 		debug_args = decl_debug_args_lookup (current_function_decl);
5164 		if (debug_args != NULL)
5165 		  {
5166 		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5167 			 ix += 2)
5168 		      if (ddecl == aexp)
5169 			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5170 		  }
5171 	      }
5172 	  }
5173 	break;
5174       }
5175     default:
5176       break;
5177     }
5178 
5179   if (op0 == NULL_RTX)
5180     return NULL_RTX;
5181 
5182   inner_mode = GET_MODE (op0);
5183   if (mode == inner_mode)
5184     return op0;
5185 
5186   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5187     {
5188       if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
5189 	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5190       else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
5191 	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5192       else
5193 	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5194     }
5195   else if (FLOAT_MODE_P (mode))
5196     gcc_unreachable ();
5197   else if (FLOAT_MODE_P (inner_mode))
5198     {
5199       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5200 	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5201       else
5202 	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5203     }
5204   else if (CONSTANT_P (op0)
5205 	   || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
5206     op0 = lowpart_subreg (mode, op0, inner_mode);
5207   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5208     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5209   else
5210     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5211 
5212   return op0;
5213 }
5214 
5215 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5216    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5217    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
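
/* A minimal illustration, using hypothetical RTL rather than output from any
   particular target: if the location reaches nesting depth 4 at a
   sub-expression such as (mult (reg A) (reg B)), that sub-expression is
   replaced by a fresh DEBUG_EXPR D#1 and a debug bind
   D#1 => (mult (reg A) (reg B)) is emitted before INSN, so the remaining
   location stays shallow.  */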
5218 
5219 static void
5220 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5221 {
5222   rtx exp = *exp_p;
5223 
5224   if (exp == NULL_RTX)
5225     return;
5226 
5227   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5228     return;
5229 
5230   if (depth == 4)
5231     {
5232       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
5233       rtx dval = make_debug_expr_from_rtl (exp);
5234 
5235       /* Emit a debug bind insn before INSN.  */
5236       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5237 				       DEBUG_EXPR_TREE_DECL (dval), exp,
5238 				       VAR_INIT_STATUS_INITIALIZED);
5239 
5240       emit_debug_insn_before (bind, insn);
5241       *exp_p = dval;
5242       return;
5243     }
5244 
5245   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5246   int i, j;
5247   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5248     switch (*format_ptr++)
5249       {
5250       case 'e':
5251 	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5252 	break;
5253 
5254       case 'E':
5255       case 'V':
5256 	for (j = 0; j < XVECLEN (exp, i); j++)
5257 	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5258 	break;
5259 
5260       default:
5261 	break;
5262       }
5263 }
5264 
5265 /* Expand the _LOCs in debug insns.  We run this after expanding all
5266    regular insns, so that any variables referenced in the function
5267    will have their DECL_RTLs set.  */
5268 
5269 static void
5270 expand_debug_locations (void)
5271 {
5272   rtx_insn *insn;
5273   rtx_insn *last = get_last_insn ();
5274   int save_strict_alias = flag_strict_aliasing;
5275 
5276   /* New alias sets created while setting up memory attributes cause
5277      -fcompare-debug failures, even though they don't bring about any
5278      codegen changes.  */
5279   flag_strict_aliasing = 0;
5280 
5281   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5282     if (DEBUG_INSN_P (insn))
5283       {
5284 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5285 	rtx val;
5286 	rtx_insn *prev_insn, *insn2;
5287 	machine_mode mode;
5288 
5289 	if (value == NULL_TREE)
5290 	  val = NULL_RTX;
5291 	else
5292 	  {
5293 	    if (INSN_VAR_LOCATION_STATUS (insn)
5294 		== VAR_INIT_STATUS_UNINITIALIZED)
5295 	      val = expand_debug_source_expr (value);
5296 	    /* The avoid_deep_ter_for_debug function inserts
5297 	       debug bind stmts after an SSA_NAME definition, with the
5298 	       SSA_NAME as the whole bind location.  Temporarily disable
5299 	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5300 	       being defined by this DEBUG_INSN.  */
5301 	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5302 	      {
5303 		tree *slot = deep_ter_debug_map->get (value);
5304 		if (slot)
5305 		  {
5306 		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
5307 		      *slot = NULL_TREE;
5308 		    else
5309 		      slot = NULL;
5310 		  }
5311 		val = expand_debug_expr (value);
5312 		if (slot)
5313 		  *slot = INSN_VAR_LOCATION_DECL (insn);
5314 	      }
5315 	    else
5316 	      val = expand_debug_expr (value);
5317 	    gcc_assert (last == get_last_insn ());
5318 	  }
5319 
5320 	if (!val)
5321 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
5322 	else
5323 	  {
5324 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
5325 
5326 	    gcc_assert (mode == GET_MODE (val)
5327 			|| (GET_MODE (val) == VOIDmode
5328 			    && (CONST_SCALAR_INT_P (val)
5329 				|| GET_CODE (val) == CONST_FIXED
5330 				|| GET_CODE (val) == LABEL_REF)));
5331 	  }
5332 
5333 	INSN_VAR_LOCATION_LOC (insn) = val;
5334 	prev_insn = PREV_INSN (insn);
5335 	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5336 	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5337       }
5338 
5339   flag_strict_aliasing = save_strict_alias;
5340 }
5341 
5342 /* Swap the operands of commutative operations so that the more
5343    expensive operand is expanded first.  */
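
/* A minimal sketch of the heuristic on hypothetical GIMPLE (names and costs
   are illustrative only, not taken from a real dump):

       t1_3 = a_1 + 1;      <- cost 1
       t2_4 = b_2 * c_5;    <- cost 1
       t3_6 = t2_4 * d_7;   <- cost 1 plus the cost of t2_4's definition
       x_8  = t1_3 + t3_6;

   Because the chain defining t3_6 is costlier than the one defining t1_3,
   the operands of x_8 are swapped so that t3_6 is expanded first.  */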
5344 
5345 static void
5346 reorder_operands (basic_block bb)
5347 {
5348   unsigned int *lattice;  /* Hold cost of each statement.  */
5349   unsigned int i = 0, n = 0;
5350   gimple_stmt_iterator gsi;
5351   gimple_seq stmts;
5352   gimple *stmt;
5353   bool swap;
5354   tree op0, op1;
5355   ssa_op_iter iter;
5356   use_operand_p use_p;
5357   gimple *def0, *def1;
5358 
5359   /* Compute cost of each statement using estimate_num_insns.  */
5360   stmts = bb_seq (bb);
5361   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5362     {
5363       stmt = gsi_stmt (gsi);
5364       if (!is_gimple_debug (stmt))
5365         gimple_set_uid (stmt, n++);
5366     }
5367   lattice = XNEWVEC (unsigned int, n);
5368   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5369     {
5370       unsigned cost;
5371       stmt = gsi_stmt (gsi);
5372       if (is_gimple_debug (stmt))
5373 	continue;
5374       cost = estimate_num_insns (stmt, &eni_size_weights);
5375       lattice[i] = cost;
5376       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5377 	{
5378 	  tree use = USE_FROM_PTR (use_p);
5379 	  gimple *def_stmt;
5380 	  if (TREE_CODE (use) != SSA_NAME)
5381 	    continue;
5382 	  def_stmt = get_gimple_for_ssa_name (use);
5383 	  if (!def_stmt)
5384 	    continue;
5385 	  lattice[i] += lattice[gimple_uid (def_stmt)];
5386 	}
5387       i++;
5388       if (!is_gimple_assign (stmt)
5389 	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5390 	continue;
5391       op0 = gimple_op (stmt, 1);
5392       op1 = gimple_op (stmt, 2);
5393       if (TREE_CODE (op0) != SSA_NAME
5394 	  || TREE_CODE (op1) != SSA_NAME)
5395 	continue;
5396       /* Swap operands if the second one is more expensive.  */
5397       def0 = get_gimple_for_ssa_name (op0);
5398       def1 = get_gimple_for_ssa_name (op1);
5399       if (!def1)
5400 	continue;
5401       swap = false;
5402       if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5403 	swap = true;
5404       if (swap)
5405 	{
5406 	  if (dump_file && (dump_flags & TDF_DETAILS))
5407 	    {
5408 	      fprintf (dump_file, "Swap operands in stmt:\n");
5409 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5410 	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5411 		       def0 ? lattice[gimple_uid (def0)] : 0,
5412 		       lattice[gimple_uid (def1)]);
5413 	    }
5414 	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5415 			     gimple_assign_rhs2_ptr (stmt));
5416 	}
5417     }
5418   XDELETE (lattice);
5419 }
5420 
5421 /* Expand basic block BB from GIMPLE trees to RTL.  */
5422 
5423 static basic_block
5424 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5425 {
5426   gimple_stmt_iterator gsi;
5427   gimple_seq stmts;
5428   gimple *stmt = NULL;
5429   rtx_note *note;
5430   rtx_insn *last;
5431   edge e;
5432   edge_iterator ei;
5433 
5434   if (dump_file)
5435     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5436 	     bb->index);
5437 
5438   /* Note that since we are now transitioning from GIMPLE to RTL, we
5439      cannot use the gsi_*_bb() routines because they expect the basic
5440      block to be in GIMPLE, instead of RTL.  Therefore, we need to
5441      access the BB sequence directly.  */
5442   if (optimize)
5443     reorder_operands (bb);
5444   stmts = bb_seq (bb);
5445   bb->il.gimple.seq = NULL;
5446   bb->il.gimple.phi_nodes = NULL;
5447   rtl_profile_for_bb (bb);
5448   init_rtl_bb_info (bb);
5449   bb->flags |= BB_RTL;
5450 
5451   /* Remove the RETURN_EXPR if we may fall through to the exit
5452      instead.  */
5453   gsi = gsi_last (stmts);
5454   if (!gsi_end_p (gsi)
5455       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5456     {
5457       greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5458 
5459       gcc_assert (single_succ_p (bb));
5460       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5461 
5462       if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5463 	  && !gimple_return_retval (ret_stmt))
5464 	{
5465 	  gsi_remove (&gsi, false);
5466 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5467 	}
5468     }
5469 
5470   gsi = gsi_start (stmts);
5471   if (!gsi_end_p (gsi))
5472     {
5473       stmt = gsi_stmt (gsi);
5474       if (gimple_code (stmt) != GIMPLE_LABEL)
5475 	stmt = NULL;
5476     }
5477 
5478   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5479 
5480   if (stmt || elt)
5481     {
5482       last = get_last_insn ();
5483 
5484       if (stmt)
5485 	{
5486 	  expand_gimple_stmt (stmt);
5487 	  gsi_next (&gsi);
5488 	}
5489 
5490       if (elt)
5491 	emit_label (*elt);
5492 
5493       /* Java emits line number notes at the top of labels.
5494 	 ??? Make this go away once line number notes are obsoleted.  */
5495       BB_HEAD (bb) = NEXT_INSN (last);
5496       if (NOTE_P (BB_HEAD (bb)))
5497 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5498       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5499 
5500       maybe_dump_rtl_for_gimple_stmt (stmt, last);
5501     }
5502   else
5503     BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5504 
5505   NOTE_BASIC_BLOCK (note) = bb;
5506 
5507   for (; !gsi_end_p (gsi); gsi_next (&gsi))
5508     {
5509       basic_block new_bb;
5510 
5511       stmt = gsi_stmt (gsi);
5512 
5513       /* If this statement is a non-debug one, and we generate debug
5514 	 insns, then this one might be the last real use of a TERed
5515 	 SSA_NAME, but where there are still some debug uses further
5516 	 down.  Expanding the current SSA name in such further debug
5517 	 uses by their RHS might lead to wrong debug info, as coalescing
5518 	 might make the operands of such RHS be placed into the same
5519 	 pseudo as something else.  Like so:
5520 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
5521 	   use(a_1);
5522 	   a_2 = ...
5523            #DEBUG ... => a_1
5524 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5525 	 If we would now expand a_1 by its RHS (a_0 + 1) in the debug use,
5526 	 the write to a_2 would actually have clobbered the place which
5527 	 formerly held a_0.
5528 
5529 	 So, instead of that, we recognize the situation, and generate
5530 	 debug temporaries at the last real use of TERed SSA names:
5531 	   a_1 = a_0 + 1;
5532            #DEBUG #D1 => a_1
5533 	   use(a_1);
5534 	   a_2 = ...
5535            #DEBUG ... => #D1
5536 	 */
5537       if (MAY_HAVE_DEBUG_INSNS
5538 	  && SA.values
5539 	  && !is_gimple_debug (stmt))
5540 	{
5541 	  ssa_op_iter iter;
5542 	  tree op;
5543 	  gimple *def;
5544 
5545 	  location_t sloc = curr_insn_location ();
5546 
5547 	  /* Look for SSA names that have their last use here (TERed
5548 	     names always have only one real use).  */
5549 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5550 	    if ((def = get_gimple_for_ssa_name (op)))
5551 	      {
5552 		imm_use_iterator imm_iter;
5553 		use_operand_p use_p;
5554 		bool have_debug_uses = false;
5555 
5556 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5557 		  {
5558 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
5559 		      {
5560 			have_debug_uses = true;
5561 			break;
5562 		      }
5563 		  }
5564 
5565 		if (have_debug_uses)
5566 		  {
5567 		    /* OP is a TERed SSA name, with DEF its defining
5568 		       statement, and where OP is used in further debug
5569 		       instructions.  Generate a debug temporary, and
5570 		       replace all uses of OP in debug insns with that
5571 		       temporary.  */
5572 		    gimple *debugstmt;
5573 		    tree value = gimple_assign_rhs_to_tree (def);
5574 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
5575 		    rtx val;
5576 		    machine_mode mode;
5577 
5578 		    set_curr_insn_location (gimple_location (def));
5579 
5580 		    DECL_ARTIFICIAL (vexpr) = 1;
5581 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
5582 		    if (DECL_P (value))
5583 		      mode = DECL_MODE (value);
5584 		    else
5585 		      mode = TYPE_MODE (TREE_TYPE (value));
5586 		    SET_DECL_MODE (vexpr, mode);
5587 
5588 		    val = gen_rtx_VAR_LOCATION
5589 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5590 
5591 		    emit_debug_insn (val);
5592 
5593 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5594 		      {
5595 			if (!gimple_debug_bind_p (debugstmt))
5596 			  continue;
5597 
5598 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5599 			  SET_USE (use_p, vexpr);
5600 
5601 			update_stmt (debugstmt);
5602 		      }
5603 		  }
5604 	      }
5605 	  set_curr_insn_location (sloc);
5606 	}
5607 
5608       currently_expanding_gimple_stmt = stmt;
5609 
5610       /* Expand this statement, then evaluate the resulting RTL and
5611 	 fixup the CFG accordingly.  */
5612       if (gimple_code (stmt) == GIMPLE_COND)
5613 	{
5614 	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5615 	  if (new_bb)
5616 	    return new_bb;
5617 	}
5618       else if (gimple_debug_bind_p (stmt))
5619 	{
5620 	  location_t sloc = curr_insn_location ();
5621 	  gimple_stmt_iterator nsi = gsi;
5622 
5623 	  for (;;)
5624 	    {
5625 	      tree var = gimple_debug_bind_get_var (stmt);
5626 	      tree value;
5627 	      rtx val;
5628 	      machine_mode mode;
5629 
5630 	      if (TREE_CODE (var) != DEBUG_EXPR_DECL
5631 		  && TREE_CODE (var) != LABEL_DECL
5632 		  && !target_for_debug_bind (var))
5633 		goto delink_debug_stmt;
5634 
5635 	      if (gimple_debug_bind_has_value_p (stmt))
5636 		value = gimple_debug_bind_get_value (stmt);
5637 	      else
5638 		value = NULL_TREE;
5639 
5640 	      last = get_last_insn ();
5641 
5642 	      set_curr_insn_location (gimple_location (stmt));
5643 
5644 	      if (DECL_P (var))
5645 		mode = DECL_MODE (var);
5646 	      else
5647 		mode = TYPE_MODE (TREE_TYPE (var));
5648 
5649 	      val = gen_rtx_VAR_LOCATION
5650 		(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5651 
5652 	      emit_debug_insn (val);
5653 
5654 	      if (dump_file && (dump_flags & TDF_DETAILS))
5655 		{
5656 		  /* We can't dump the insn with a TREE where an RTX
5657 		     is expected.  */
5658 		  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5659 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
5660 		  PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5661 		}
5662 
5663 	    delink_debug_stmt:
5664 	      /* In order not to generate too many debug temporaries,
5665 	         we delink all uses of debug statements we already expanded.
5666 		 Therefore debug statements between definition and real
5667 		 use of TERed SSA names will continue to use the SSA name,
5668 		 and not be replaced with debug temps.  */
5669 	      delink_stmt_imm_use (stmt);
5670 
5671 	      gsi = nsi;
5672 	      gsi_next (&nsi);
5673 	      if (gsi_end_p (nsi))
5674 		break;
5675 	      stmt = gsi_stmt (nsi);
5676 	      if (!gimple_debug_bind_p (stmt))
5677 		break;
5678 	    }
5679 
5680 	  set_curr_insn_location (sloc);
5681 	}
5682       else if (gimple_debug_source_bind_p (stmt))
5683 	{
5684 	  location_t sloc = curr_insn_location ();
5685 	  tree var = gimple_debug_source_bind_get_var (stmt);
5686 	  tree value = gimple_debug_source_bind_get_value (stmt);
5687 	  rtx val;
5688 	  machine_mode mode;
5689 
5690 	  last = get_last_insn ();
5691 
5692 	  set_curr_insn_location (gimple_location (stmt));
5693 
5694 	  mode = DECL_MODE (var);
5695 
5696 	  val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5697 				      VAR_INIT_STATUS_UNINITIALIZED);
5698 
5699 	  emit_debug_insn (val);
5700 
5701 	  if (dump_file && (dump_flags & TDF_DETAILS))
5702 	    {
5703 	      /* We can't dump the insn with a TREE where an RTX
5704 		 is expected.  */
5705 	      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5706 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5707 	      PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5708 	    }
5709 
5710 	  set_curr_insn_location (sloc);
5711 	}
5712       else
5713 	{
5714 	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
5715 	  if (call_stmt
5716 	      && gimple_call_tail_p (call_stmt)
5717 	      && disable_tail_calls)
5718 	    gimple_call_set_tail (call_stmt, false);
5719 
5720 	  if (call_stmt && gimple_call_tail_p (call_stmt))
5721 	    {
5722 	      bool can_fallthru;
5723 	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5724 	      if (new_bb)
5725 		{
5726 		  if (can_fallthru)
5727 		    bb = new_bb;
5728 		  else
5729 		    return new_bb;
5730 		}
5731 	    }
5732 	  else
5733 	    {
5734 	      def_operand_p def_p;
5735 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5736 
5737 	      if (def_p != NULL)
5738 		{
5739 		  /* Ignore this stmt if it is in the list of
5740 		     replaceable expressions.  */
5741 		  if (SA.values
5742 		      && bitmap_bit_p (SA.values,
5743 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5744 		    continue;
5745 		}
5746 	      last = expand_gimple_stmt (stmt);
5747 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5748 	    }
5749 	}
5750     }
5751 
5752   currently_expanding_gimple_stmt = NULL;
5753 
5754   /* Expand implicit goto and convert goto_locus.  */
5755   FOR_EACH_EDGE (e, ei, bb->succs)
5756     {
5757       if (e->goto_locus != UNKNOWN_LOCATION)
5758 	set_curr_insn_location (e->goto_locus);
5759       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5760 	{
5761 	  emit_jump (label_rtx_for_bb (e->dest));
5762 	  e->flags &= ~EDGE_FALLTHRU;
5763 	}
5764     }
5765 
5766   /* Expanded RTL can create a jump in the last instruction of a block.
5767      Later this jump might be assumed to be a jump to the successor and break edge insertion.
5768      We need to insert a dummy move to prevent this.  PR41440. */
5769   if (single_succ_p (bb)
5770       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5771       && (last = get_last_insn ())
5772       && (JUMP_P (last)
5773 	  || (DEBUG_INSN_P (last)
5774 	      && JUMP_P (prev_nondebug_insn (last)))))
5775     {
5776       rtx dummy = gen_reg_rtx (SImode);
5777       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5778     }
5779 
5780   do_pending_stack_adjust ();
5781 
5782   /* Find the block tail.  The last insn in the block is the insn
5783      before a barrier and/or table jump insn.  */
5784   last = get_last_insn ();
5785   if (BARRIER_P (last))
5786     last = PREV_INSN (last);
5787   if (JUMP_TABLE_DATA_P (last))
5788     last = PREV_INSN (PREV_INSN (last));
5789   BB_END (bb) = last;
5790 
5791   update_bb_for_insn (bb);
5792 
5793   return bb;
5794 }
5795 
5796 
5797 /* Create a basic block for initialization code.  */
5798 
5799 static basic_block
5800 construct_init_block (void)
5801 {
5802   basic_block init_block, first_block;
5803   edge e = NULL;
5804   int flags;
5805 
5806   /* Multiple entry points not supported yet.  */
5807   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5808   init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5809   init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5810   ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5811   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5812 
5813   e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5814 
5815   /* When the entry edge points to the first basic block, we don't need
5816      a jump; otherwise we have to jump to the proper target.  */
5817   if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5818     {
5819       tree label = gimple_block_label (e->dest);
5820 
5821       emit_jump (jump_target_rtx (label));
5822       flags = 0;
5823     }
5824   else
5825     flags = EDGE_FALLTHRU;
5826 
5827   init_block = create_basic_block (NEXT_INSN (get_insns ()),
5828 				   get_last_insn (),
5829 				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
5830   init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5831   init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5832   add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5833   if (e)
5834     {
5835       first_block = e->dest;
5836       redirect_edge_succ (e, init_block);
5837       e = make_edge (init_block, first_block, flags);
5838     }
5839   else
5840     e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5841   e->probability = REG_BR_PROB_BASE;
5842   e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5843 
5844   update_bb_for_insn (init_block);
5845   return init_block;
5846 }
5847 
5848 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5849    found in the block tree.  */
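
/* For illustration, given a hypothetical block tree B0 -> { B1 -> { B2 }, B3 }
   with B0 passed in at level 0: B0 gets BLOCK_NUMBER 0, its subblocks B1 and
   B3 (linked through BLOCK_CHAIN) get 1, and B2 gets 2.  */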
5850 
5851 static void
5852 set_block_levels (tree block, int level)
5853 {
5854   while (block)
5855     {
5856       BLOCK_NUMBER (block) = level;
5857       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5858       block = BLOCK_CHAIN (block);
5859     }
5860 }
5861 
5862 /* Create a block containing landing pads and similar stuff.  */
5863 
5864 static void
5865 construct_exit_block (void)
5866 {
5867   rtx_insn *head = get_last_insn ();
5868   rtx_insn *end;
5869   basic_block exit_block;
5870   edge e, e2;
5871   unsigned ix;
5872   edge_iterator ei;
5873   basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5874   rtx_insn *orig_end = BB_END (prev_bb);
5875 
5876   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5877 
5878   /* Make sure the locus is set to the end of the function, so that
5879      epilogue line numbers and warnings are set properly.  */
5880   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5881     input_location = cfun->function_end_locus;
5882 
5883   /* Generate rtl for function exit.  */
5884   expand_function_end ();
5885 
5886   end = get_last_insn ();
5887   if (head == end)
5888     return;
5889   /* While emitting the function end we could move the end of the last
5890      basic block.  */
5891   BB_END (prev_bb) = orig_end;
5892   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5893     head = NEXT_INSN (head);
5894   /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5895      bb frequency counting will be confused.  Any instructions before that
5896      label are emitted for the case where PREV_BB falls through into the
5897      exit block, so append those instructions to prev_bb in that case.  */
5898   if (NEXT_INSN (head) != return_label)
5899     {
5900       while (NEXT_INSN (head) != return_label)
5901 	{
5902 	  if (!NOTE_P (NEXT_INSN (head)))
5903 	    BB_END (prev_bb) = NEXT_INSN (head);
5904 	  head = NEXT_INSN (head);
5905 	}
5906     }
5907   exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5908   exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5909   exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5910   add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5911 
5912   ix = 0;
5913   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5914     {
5915       e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5916       if (!(e->flags & EDGE_ABNORMAL))
5917 	redirect_edge_succ (e, exit_block);
5918       else
5919 	ix++;
5920     }
5921 
5922   e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5923   e->probability = REG_BR_PROB_BASE;
5924   e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5925   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5926     if (e2 != e)
5927       {
5928 	e->count -= e2->count;
5929 	exit_block->count -= e2->count;
5930 	exit_block->frequency -= EDGE_FREQUENCY (e2);
5931       }
5932   if (e->count < 0)
5933     e->count = 0;
5934   if (exit_block->count < 0)
5935     exit_block->count = 0;
5936   if (exit_block->frequency < 0)
5937     exit_block->frequency = 0;
5938   update_bb_for_insn (exit_block);
5939 }
5940 
5941 /* Helper function for discover_nonconstant_array_refs.
5942    Look for ARRAY_REF nodes with non-constant indexes and mark the
5943    base declarations of such references addressable.  */
5944 
5945 static tree
5946 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5947 				   void *data ATTRIBUTE_UNUSED)
5948 {
5949   tree t = *tp;
5950 
5951   if (IS_TYPE_OR_DECL_P (t))
5952     *walk_subtrees = 0;
5953   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5954     {
5955       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5956 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5957 	      && (!TREE_OPERAND (t, 2)
5958 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5959 	     || (TREE_CODE (t) == COMPONENT_REF
5960 		 && (!TREE_OPERAND (t, 2)
5961 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5962 	     || TREE_CODE (t) == BIT_FIELD_REF
5963 	     || TREE_CODE (t) == REALPART_EXPR
5964 	     || TREE_CODE (t) == IMAGPART_EXPR
5965 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
5966 	     || CONVERT_EXPR_P (t))
5967 	t = TREE_OPERAND (t, 0);
5968 
5969       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5970 	{
5971 	  t = get_base_address (t);
5972 	  if (t && DECL_P (t)
5973 	      && DECL_MODE (t) != BLKmode)
5974 	    TREE_ADDRESSABLE (t) = 1;
5975 	}
5976 
5977       *walk_subtrees = 0;
5978     }
5979 
5980   return NULL_TREE;
5981 }
5982 
5983 /* RTL expansion is not able to compile array references with variable
5984    offsets for arrays stored in a single register.  Discover such
5985    expressions and mark variables as addressable to avoid this
5986    scenario.  */
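
/* For illustration (hypothetical user code, not part of this file):

       int v[2];
       ...
       use (v[i]);   <- i is not a compile-time constant

   On targets where such a small array could otherwise be kept in a single
   register, the non-constant index makes us set TREE_ADDRESSABLE on v so
   that v gets a memory location instead.  */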
5987 
5988 static void
5989 discover_nonconstant_array_refs (void)
5990 {
5991   basic_block bb;
5992   gimple_stmt_iterator gsi;
5993 
5994   FOR_EACH_BB_FN (bb, cfun)
5995     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5996       {
5997 	gimple *stmt = gsi_stmt (gsi);
5998 	if (!is_gimple_debug (stmt))
5999 	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6000       }
6001 }
6002 
6003 /* This function sets crtl->args.internal_arg_pointer to a virtual
6004    register if DRAP is needed.  The local register allocator will replace
6005    virtual_incoming_args_rtx with the virtual register.  */
6006 
6007 static void
6008 expand_stack_alignment (void)
6009 {
6010   rtx drap_rtx;
6011   unsigned int preferred_stack_boundary;
6012 
6013   if (! SUPPORTS_STACK_ALIGNMENT)
6014     return;
6015 
6016   if (cfun->calls_alloca
6017       || cfun->has_nonlocal_label
6018       || crtl->has_nonlocal_goto)
6019     crtl->need_drap = true;
6020 
6021   /* Call update_stack_boundary here again to update incoming stack
6022      boundary.  It may set incoming stack alignment to a different
6023      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
6024      use the minimum incoming stack alignment to check if it is OK
6025      to perform sibcall optimization since sibcall optimization will
6026      only align the outgoing stack to incoming stack boundary.  */
6027   if (targetm.calls.update_stack_boundary)
6028     targetm.calls.update_stack_boundary ();
6029 
6030   /* The incoming stack frame has to be aligned at least at
6031      parm_stack_boundary.  */
6032   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6033 
6034   /* Update crtl->stack_alignment_estimated and use it later to align
6035      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
6036      exceptions since callgraph doesn't collect incoming stack alignment
6037      in this case.  */
6038   if (cfun->can_throw_non_call_exceptions
6039       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6040     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6041   else
6042     preferred_stack_boundary = crtl->preferred_stack_boundary;
6043   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6044     crtl->stack_alignment_estimated = preferred_stack_boundary;
6045   if (preferred_stack_boundary > crtl->stack_alignment_needed)
6046     crtl->stack_alignment_needed = preferred_stack_boundary;
6047 
6048   gcc_assert (crtl->stack_alignment_needed
6049 	      <= crtl->stack_alignment_estimated);
6050 
6051   crtl->stack_realign_needed
6052     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6053   crtl->stack_realign_tried = crtl->stack_realign_needed;
6054 
6055   crtl->stack_realign_processed = true;
6056 
6057   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6058      alignment.  */
6059   gcc_assert (targetm.calls.get_drap_rtx != NULL);
6060   drap_rtx = targetm.calls.get_drap_rtx ();
6061 
6062   /* stack_realign_drap and drap_rtx must match.  */
6063   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6064 
6065   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
6066   if (NULL != drap_rtx)
6067     {
6068       crtl->args.internal_arg_pointer = drap_rtx;
6069 
6070       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6071          needed. */
6072       fixup_tail_calls ();
6073     }
6074 }
6075 
6076 
6077 static void
6078 expand_main_function (void)
6079 {
6080 #if (defined(INVOKE__main)				\
6081      || (!defined(HAS_INIT_SECTION)			\
6082 	 && !defined(INIT_SECTION_ASM_OP)		\
6083 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6084   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
6085 #endif
6086 }
6087 
6088 
6089 /* Expand code to initialize the stack_protect_guard.  This is invoked at
6090    the beginning of a function to be protected.  */
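
/* Stated loosely, the code emitted below copies the guard value into the
   slot named by crtl->stack_protect_guard, either through the target's
   stack_protect_set pattern (which avoids leaking the guard value through a
   register) or, failing that, through a plain move.  */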
6091 
6092 static void
6093 stack_protect_prologue (void)
6094 {
6095   tree guard_decl = targetm.stack_protect_guard ();
6096   rtx x, y;
6097 
6098   x = expand_normal (crtl->stack_protect_guard);
6099   if (guard_decl)
6100     y = expand_normal (guard_decl);
6101   else
6102     y = const0_rtx;
6103 
6104   /* Allow the target to copy from Y to X without leaking Y into a
6105      register.  */
6106   if (targetm.have_stack_protect_set ())
6107     if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6108       {
6109 	emit_insn (insn);
6110 	return;
6111       }
6112 
6113   /* Otherwise do a straight move.  */
6114   emit_move_insn (x, y);
6115 }
6116 
6117 /* Translate the intermediate representation contained in the CFG
6118    from GIMPLE trees to RTL.
6119 
6120    We do conversion per basic block and preserve/update the tree CFG.
6121    This implies we have to do some magic as the CFG can simultaneously
6122    consist of basic blocks containing RTL and GIMPLE trees.  This can
6123    confuse the CFG hooks, so be careful to not manipulate CFG during
6124    the expansion.  */
6125 
6126 namespace {
6127 
6128 const pass_data pass_data_expand =
6129 {
6130   RTL_PASS, /* type */
6131   "expand", /* name */
6132   OPTGROUP_NONE, /* optinfo_flags */
6133   TV_EXPAND, /* tv_id */
6134   ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6135     | PROP_gimple_lcx
6136     | PROP_gimple_lvec
6137     | PROP_gimple_lva), /* properties_required */
6138   PROP_rtl, /* properties_provided */
6139   ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6140   0, /* todo_flags_start */
6141   0, /* todo_flags_finish */
6142 };
6143 
6144 class pass_expand : public rtl_opt_pass
6145 {
6146 public:
6147   pass_expand (gcc::context *ctxt)
6148     : rtl_opt_pass (pass_data_expand, ctxt)
6149   {}
6150 
6151   /* opt_pass methods: */
6152   virtual unsigned int execute (function *);
6153 
6154 }; // class pass_expand
6155 
6156 unsigned int
6157 pass_expand::execute (function *fun)
6158 {
6159   basic_block bb, init_block;
6160   edge_iterator ei;
6161   edge e;
6162   rtx_insn *var_seq, *var_ret_seq;
6163   unsigned i;
6164 
6165   timevar_push (TV_OUT_OF_SSA);
6166   rewrite_out_of_ssa (&SA);
6167   timevar_pop (TV_OUT_OF_SSA);
6168   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6169 
6170   if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
6171     {
6172       gimple_stmt_iterator gsi;
6173       FOR_EACH_BB_FN (bb, cfun)
6174 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6175 	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
6176 	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6177     }
6178 
6179   /* Make sure all values used by the optimization passes have sane
6180      defaults.  */
6181   reg_renumber = 0;
6182 
6183   /* Some backends want to know that we are expanding to RTL.  */
6184   currently_expanding_to_rtl = 1;
6185   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
6186   free_dominance_info (CDI_DOMINATORS);
6187 
6188   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6189 
6190   if (chkp_function_instrumented_p (current_function_decl))
6191     chkp_reset_rtl_bounds ();
6192 
6193   insn_locations_init ();
6194   if (!DECL_IS_BUILTIN (current_function_decl))
6195     {
6196       /* Eventually, all FEs should explicitly set function_start_locus.  */
6197       if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6198 	set_curr_insn_location
6199 	  (DECL_SOURCE_LOCATION (current_function_decl));
6200       else
6201 	set_curr_insn_location (fun->function_start_locus);
6202     }
6203   else
6204     set_curr_insn_location (UNKNOWN_LOCATION);
6205   prologue_location = curr_insn_location ();
6206 
6207 #ifdef INSN_SCHEDULING
6208   init_sched_attrs ();
6209 #endif
6210 
6211   /* Make sure first insn is a note even if we don't want linenums.
6212      This makes sure the first insn will never be deleted.
6213      Also, final expects a note to appear there.  */
6214   emit_note (NOTE_INSN_DELETED);
6215 
6216   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
6217   discover_nonconstant_array_refs ();
6218 
6219   targetm.expand_to_rtl_hook ();
6220   crtl->init_stack_alignment ();
6221   fun->cfg->max_jumptable_ents = 0;
6222 
6223   /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
6224      of the function section at expansion time to predict the distance of calls.  */
6225   resolve_unique_section (current_function_decl, 0, flag_function_sections);
6226 
6227   /* Expand the variables recorded during gimple lowering.  */
6228   timevar_push (TV_VAR_EXPAND);
6229   start_sequence ();
6230 
6231   var_ret_seq = expand_used_vars ();
6232 
6233   var_seq = get_insns ();
6234   end_sequence ();
6235   timevar_pop (TV_VAR_EXPAND);
6236 
6237   /* Honor stack protection warnings.  */
6238   if (warn_stack_protect)
6239     {
6240       if (fun->calls_alloca)
6241 	warning (OPT_Wstack_protector,
6242 		 "stack protector not protecting local variables: "
6243 		 "variable length buffer");
6244       if (has_short_buffer && !crtl->stack_protect_guard)
6245 	warning (OPT_Wstack_protector,
6246 		 "stack protector not protecting function: "
6247 		 "all local arrays are less than %d bytes long",
6248 		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6249     }
6250 
6251   /* Set up parameters and prepare for return, for the function.  */
6252   expand_function_start (current_function_decl);
6253 
6254   /* If we emitted any instructions for setting up the variables,
6255      emit them before the FUNCTION_START note.  */
6256   if (var_seq)
6257     {
6258       emit_insn_before (var_seq, parm_birth_insn);
6259 
6260       /* In expand_function_end we'll insert the alloca save/restore
6261 	 before parm_birth_insn.  We've just inserted an alloca call.
6262 	 Adjust the pointer to match.  */
6263       parm_birth_insn = var_seq;
6264     }
6265 
6266   /* Now propagate the RTL assignment of each partition to the
6267      underlying var of each SSA_NAME.  */
6268   tree name;
6269 
6270   FOR_EACH_SSA_NAME (i, name, cfun)
6271     {
6272       /* We might have generated new SSA names in
6273 	 update_alias_info_with_stack_vars.  They will have a NULL
6274 	 defining statement, and won't be part of the partitioning,
6275 	 so ignore those.  */
6276       if (!SSA_NAME_DEF_STMT (name))
6277 	continue;
6278 
6279       adjust_one_expanded_partition_var (name);
6280     }
6281 
6282   /* Clean up the RTL of variables that straddle multiple
6283      partitions, and check that the rtl of any PARM_DECLs that are not
6284      cleaned up is that of their default defs.  */
6285   FOR_EACH_SSA_NAME (i, name, cfun)
6286     {
6287       int part;
6288 
6289       /* We might have generated new SSA names in
6290 	 update_alias_info_with_stack_vars.  They will have a NULL
6291 	 defining statement, and won't be part of the partitioning,
6292 	 so ignore those.  */
6293       if (!SSA_NAME_DEF_STMT (name))
6294 	continue;
6295       part = var_to_partition (SA.map, name);
6296       if (part == NO_PARTITION)
6297 	continue;
6298 
6299       /* If this decl was marked as living in multiple places, reset
6300 	 this now to NULL.  */
6301       tree var = SSA_NAME_VAR (name);
6302       if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6303 	SET_DECL_RTL (var, NULL);
6304       /* Check that the pseudos chosen by assign_parms are those of
6305 	 the corresponding default defs.  */
6306       else if (SSA_NAME_IS_DEFAULT_DEF (name)
6307 	       && (TREE_CODE (var) == PARM_DECL
6308 		   || TREE_CODE (var) == RESULT_DECL))
6309 	{
6310 	  rtx in = DECL_RTL_IF_SET (var);
6311 	  gcc_assert (in);
6312 	  rtx out = SA.partition_to_pseudo[part];
6313 	  gcc_assert (in == out);
6314 
6315 	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6316 	     those expected by debug backends for each parm and for
6317 	     the result.  This is particularly important for stabs,
6318 	     whose register elimination from parm's DECL_RTL may cause
6319 	     -fcompare-debug differences as SET_DECL_RTL changes reg's
6320 	     attrs.  So, make sure the RTL already has the parm as the
6321 	     EXPR, so that it won't change.  */
6322 	  SET_DECL_RTL (var, NULL_RTX);
6323 	  if (MEM_P (in))
6324 	    set_mem_attributes (in, var, true);
6325 	  SET_DECL_RTL (var, in);
6326 	}
6327     }
6328 
6329   /* If this function is `main', emit a call to `__main'
6330      to run global initializers, etc.  */
6331   if (DECL_NAME (current_function_decl)
6332       && MAIN_NAME_P (DECL_NAME (current_function_decl))
6333       && DECL_FILE_SCOPE_P (current_function_decl))
6334     expand_main_function ();
6335 
6336   /* Initialize the stack_protect_guard field.  This must happen after the
6337      call to __main (if any) so that the external decl is initialized.  */
6338   if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6339     stack_protect_prologue ();
6340 
6341   expand_phi_nodes (&SA);
6342 
6343   /* Release any stale SSA redirection data.  */
6344   redirect_edge_var_map_empty ();
6345 
6346   /* Register rtl specific functions for cfg.  */
6347   rtl_register_cfg_hooks ();
6348 
6349   init_block = construct_init_block ();
6350 
6351   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleared from the
6352      remaining edges later.  */
6353   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6354     e->flags &= ~EDGE_EXECUTABLE;
6355 
6356   lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6357   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6358 		  next_bb)
6359     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6360 
6361   if (MAY_HAVE_DEBUG_INSNS)
6362     expand_debug_locations ();
6363 
6364   if (deep_ter_debug_map)
6365     {
6366       delete deep_ter_debug_map;
6367       deep_ter_debug_map = NULL;
6368     }
6369 
6370   /* Free stuff we no longer need after GIMPLE optimizations.  */
6371   free_dominance_info (CDI_DOMINATORS);
6372   free_dominance_info (CDI_POST_DOMINATORS);
6373   delete_tree_cfg_annotations (fun);
6374 
6375   timevar_push (TV_OUT_OF_SSA);
6376   finish_out_of_ssa (&SA);
6377   timevar_pop (TV_OUT_OF_SSA);
6378 
6379   timevar_push (TV_POST_EXPAND);
6380   /* We are no longer in SSA form.  */
6381   fun->gimple_df->in_ssa_p = false;
6382   loops_state_clear (LOOP_CLOSED_SSA);
6383 
6384   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6385      conservatively to true until they are all profile aware.  */
6386   delete lab_rtx_for_bb;
6387   free_histograms (fun);
6388 
6389   construct_exit_block ();
6390   insn_locations_finalize ();
6391 
6392   if (var_ret_seq)
6393     {
6394       rtx_insn *after = return_label;
6395       rtx_insn *next = NEXT_INSN (after);
6396       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6397 	after = next;
6398       emit_insn_after (var_ret_seq, after);
6399     }
6400 
6401   /* Zap the tree EH table.  */
6402   set_eh_throw_stmt_table (fun, NULL);
6403 
6404   /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6405      to split edges, which edge insertions might do.  */
6406   rebuild_jump_labels (get_insns ());
6407 
6408   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6409 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6410     {
6411       edge e;
6412       edge_iterator ei;
6413       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6414 	{
6415 	  if (e->insns.r)
6416 	    {
6417 	      rebuild_jump_labels_chain (e->insns.r);
6418 	      /* Put insns after parm birth, but before
6419 		 NOTE_INSN_FUNCTION_BEG.  */
6420 	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6421 		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6422 		{
6423 		  rtx_insn *insns = e->insns.r;
6424 		  e->insns.r = NULL;
6425 		  if (NOTE_P (parm_birth_insn)
6426 		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6427 		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6428 		  else
6429 		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6430 		}
6431 	      else
6432 		commit_one_edge_insertion (e);
6433 	    }
6434 	  else
6435 	    ei_next (&ei);
6436 	}
6437     }
6438 
6439   /* We're done expanding trees to RTL.  */
6440   currently_expanding_to_rtl = 0;
6441 
6442   flush_mark_addressable_queue ();
6443 
6444   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6445 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6446     {
6447       edge e;
6448       edge_iterator ei;
6449       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6450 	{
6451 	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
6452 	  e->flags &= ~EDGE_EXECUTABLE;
6453 
6454 	  /* At the moment not all abnormal edges match the RTL
6455 	     representation.  It is safe to remove them here as
6456 	     find_many_sub_basic_blocks will rediscover them.
6457 	     In the future we should get this fixed properly.  */
6458 	  if ((e->flags & EDGE_ABNORMAL)
6459 	      && !(e->flags & EDGE_SIBCALL))
6460 	    remove_edge (e);
6461 	  else
6462 	    ei_next (&ei);
6463 	}
6464     }
6465 
6466   auto_sbitmap blocks (last_basic_block_for_fn (fun));
6467   bitmap_ones (blocks);
6468   find_many_sub_basic_blocks (blocks);
6469   purge_all_dead_edges ();
6470 
6471   /* After initial rtl generation, call back to finish generating
6472      exception support code.  We need to do this before cleaning up
6473      the CFG as the code does not expect dead landing pads.  */
6474   if (fun->eh->region_tree != NULL)
6475     finish_eh_generation ();
6476 
6477   /* Call expand_stack_alignment after finishing all
6478      updates to crtl->preferred_stack_boundary.  */
6479   expand_stack_alignment ();
6480 
6481   /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6482      function.  */
6483   if (crtl->tail_call_emit)
6484     fixup_tail_calls ();
6485 
6486   /* Remove unreachable blocks; otherwise we cannot compute dominators,
6487      which are needed for loop state verification.  As a side-effect
6488      this also compacts blocks.
6489      ???  We cannot remove trivially dead insns here as for example
6490      the DRAP reg on i?86 is not magically live at this point.
6491      gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
6492   cleanup_cfg (CLEANUP_NO_INSN_DEL);
6493 
6494   checking_verify_flow_info ();
6495 
6496   /* Initialize pseudos allocated for hard registers.  */
6497   emit_initial_value_sets ();
6498 
6499   /* And finally unshare all RTL.  */
6500   unshare_all_rtl ();
6501 
6502   /* There's no need to defer outputting this function any more; we
6503      know we want to output it.  */
6504   DECL_DEFER_OUTPUT (current_function_decl) = 0;
6505 
6506   /* Now that we're done expanding trees to RTL, we shouldn't have any
6507      more CONCATs anywhere.  */
6508   generating_concat_p = 0;
6509 
6510   if (dump_file)
6511     {
6512       fprintf (dump_file,
6513 	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6514       /* And the pass manager will dump RTL for us.  */
6515     }
6516 
6517   /* If we're emitting a nested function, make sure its parent gets
6518      emitted as well.  Doing otherwise confuses debug info.  */
6519     {
6520       tree parent;
6521       for (parent = DECL_CONTEXT (current_function_decl);
6522 	   parent != NULL_TREE;
6523 	   parent = get_containing_scope (parent))
6524 	if (TREE_CODE (parent) == FUNCTION_DECL)
6525 	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6526     }
6527 
6528   /* We are now committed to emitting code for this function.  Do any
6529      preparation, such as emitting abstract debug info for the inline
6530      function before it gets mangled by optimization.  */
6531   if (cgraph_function_possibly_inlined_p (current_function_decl))
6532     (*debug_hooks->outlining_inline_function) (current_function_decl);
6533 
6534   TREE_ASM_WRITTEN (current_function_decl) = 1;
6535 
6536   /* After expanding, the return labels are no longer needed. */
6537   return_label = NULL;
6538   naked_return_label = NULL;
6539 
6540   /* After expanding, the tm_restart map is no longer needed.  */
6541   if (fun->gimple_df->tm_restart)
6542     fun->gimple_df->tm_restart = NULL;
6543 
6544   /* Tag the blocks with a depth number so that change_scope can find
6545      the common parent easily.  */
6546   set_block_levels (DECL_INITIAL (fun->decl), 0);
6547   default_rtl_profile ();
6548 
6549   timevar_pop (TV_POST_EXPAND);
6550 
6551   return 0;
6552 }
6553 
6554 } // anon namespace
6555 
6556 rtl_opt_pass *
6557 make_pass_expand (gcc::context *ctxt)
6558 {
6559   return new pass_expand (ctxt);
6560 }
6561