xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cfgexpand.c (revision 82d56013d7b633d116a93943de88e08335357a7c)
1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004-2019 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber.  */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "params.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
69 #include "cfgloop.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
71 #include "stringpool.h"
72 #include "attribs.h"
73 #include "asan.h"
74 #include "tree-ssa-address.h"
75 #include "output.h"
76 #include "builtins.h"
77 
78 /* Some systems use __main in a way incompatible with its use in gcc; in these
79    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80    give the same symbol without quotes for an alternative entry point.  You
81    must define both, or neither.  */
82 #ifndef NAME__MAIN
83 #define NAME__MAIN "__main"
84 #endif
85 
86 /* This variable holds information helping the rewriting of SSA trees
87    into RTL.  */
88 struct ssaexpand SA;
89 
90 /* This variable holds the currently expanded gimple statement for purposes
91    of communicating the profile info to the builtin expanders.  */
92 gimple *currently_expanding_gimple_stmt;
93 
94 static rtx expand_debug_expr (tree);
95 
96 static bool defer_stack_allocation (tree, bool);
97 
98 static void record_alignment_for_reg_var (unsigned int);
99 
100 /* Return an expression tree corresponding to the RHS of GIMPLE
101    statement STMT.  */
102 
103 tree
104 gimple_assign_rhs_to_tree (gimple *stmt)
105 {
106   tree t;
107   enum gimple_rhs_class grhs_class;
108 
109   grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
110 
111   if (grhs_class == GIMPLE_TERNARY_RHS)
112     t = build3 (gimple_assign_rhs_code (stmt),
113 		TREE_TYPE (gimple_assign_lhs (stmt)),
114 		gimple_assign_rhs1 (stmt),
115 		gimple_assign_rhs2 (stmt),
116 		gimple_assign_rhs3 (stmt));
117   else if (grhs_class == GIMPLE_BINARY_RHS)
118     t = build2 (gimple_assign_rhs_code (stmt),
119 		TREE_TYPE (gimple_assign_lhs (stmt)),
120 		gimple_assign_rhs1 (stmt),
121 		gimple_assign_rhs2 (stmt));
122   else if (grhs_class == GIMPLE_UNARY_RHS)
123     t = build1 (gimple_assign_rhs_code (stmt),
124 		TREE_TYPE (gimple_assign_lhs (stmt)),
125 		gimple_assign_rhs1 (stmt));
126   else if (grhs_class == GIMPLE_SINGLE_RHS)
127     {
128       t = gimple_assign_rhs1 (stmt);
129       /* Avoid modifying this tree in place below.  */
130       if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
131 	   && gimple_location (stmt) != EXPR_LOCATION (t))
132 	  || (gimple_block (stmt)
133 	      && currently_expanding_to_rtl
134 	      && EXPR_P (t)))
135 	t = copy_node (t);
136     }
137   else
138     gcc_unreachable ();
139 
140   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
141     SET_EXPR_LOCATION (t, gimple_location (stmt));
142 
143   return t;
144 }
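/* A minimal, compiled-out usage sketch (not GCC code) of the function
   above: for a GIMPLE_BINARY_RHS assignment it hands back a freshly built
   build2 tree typed like the LHS.  The temporary and the constant built
   below are hypothetical and assume a context in which temporaries can be
   created; the sketch only illustrates the binary-RHS path.  */
#if 0
static void
sketch_gimple_assign_rhs_to_tree (void)
{
  tree tmp = create_tmp_var (integer_type_node);	/* int tmp;  */
  tree one = build_int_cst (integer_type_node, 1);
  /* tmp = tmp + 1;  -- classified as GIMPLE_BINARY_RHS.  */
  gassign *stmt = gimple_build_assign (tmp, PLUS_EXPR, tmp, one);
  tree rhs = gimple_assign_rhs_to_tree (stmt);
  /* RHS is a new PLUS_EXPR whose type is TREE_TYPE of the LHS.  */
  gcc_assert (TREE_CODE (rhs) == PLUS_EXPR
	      && TREE_TYPE (rhs) == integer_type_node);
}
#endif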
145 
146 
147 #ifndef STACK_ALIGNMENT_NEEDED
148 #define STACK_ALIGNMENT_NEEDED 1
149 #endif
150 
151 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
152 
153 /* Choose either CUR or NEXT as the leader DECL for a partition.
154    Prefer ignored decls, to simplify debug dumps and reduce the
155    ambiguity that arises from the same user variable being in multiple
156    partitions (this is less likely for compiler-introduced temps).  */
157 
158 static tree
159 leader_merge (tree cur, tree next)
160 {
161   if (cur == NULL || cur == next)
162     return next;
163 
164   if (DECL_P (cur) && DECL_IGNORED_P (cur))
165     return cur;
166 
167   if (DECL_P (next) && DECL_IGNORED_P (next))
168     return next;
169 
170   return cur;
171 }
172 
173 /* Associate declaration T with storage space X.  If T is not an
174    SSA name this is exactly SET_DECL_RTL, otherwise make the
175    partition of T associated with X.  */
176 static inline void
177 set_rtl (tree t, rtx x)
178 {
179   gcc_checking_assert (!x
180 		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
181 		       || (use_register_for_decl (t)
182 			   ? (REG_P (x)
183 			      || (GET_CODE (x) == CONCAT
184 				  && (REG_P (XEXP (x, 0))
185 				      || SUBREG_P (XEXP (x, 0)))
186 				  && (REG_P (XEXP (x, 1))
187 				      || SUBREG_P (XEXP (x, 1))))
188 			      /* We need to accept PARALLELs for RESULT_DECLs
189 				 because of vector types with BLKmode returned
190 				 in multiple registers, but they are supposed
191 				 to be uncoalesced.  */
192 			      || (GET_CODE (x) == PARALLEL
193 				  && SSAVAR (t)
194 				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
195 				  && (GET_MODE (x) == BLKmode
196 				      || !flag_tree_coalesce_vars)))
197 			   : (MEM_P (x) || x == pc_rtx
198 			      || (GET_CODE (x) == CONCAT
199 				  && MEM_P (XEXP (x, 0))
200 				  && MEM_P (XEXP (x, 1))))));
201   /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
202      RESULT_DECLs has the expected mode.  For memory, we accept
203      unpromoted modes, since that's what we're likely to get.  For
204      PARM_DECLs and RESULT_DECLs, we'll have been called by
205      set_parm_rtl, which will give us the default def, so we don't
206      have to compute it ourselves.  For RESULT_DECLs, we accept mode
207      mismatches too, as long as we have BLKmode or are not coalescing
208      across variables, so that we don't reject BLKmode PARALLELs or
209      unpromoted REGs.  */
210   gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
211 		       || (SSAVAR (t)
212 			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
213 			   && (promote_ssa_mode (t, NULL) == BLKmode
214 			       || !flag_tree_coalesce_vars))
215 		       || !use_register_for_decl (t)
216 		       || GET_MODE (x) == promote_ssa_mode (t, NULL));
217 
218   if (x)
219     {
220       bool skip = false;
221       tree cur = NULL_TREE;
222       rtx xm = x;
223 
224     retry:
225       if (MEM_P (xm))
226 	cur = MEM_EXPR (xm);
227       else if (REG_P (xm))
228 	cur = REG_EXPR (xm);
229       else if (SUBREG_P (xm))
230 	{
231 	  gcc_assert (subreg_lowpart_p (xm));
232 	  xm = SUBREG_REG (xm);
233 	  goto retry;
234 	}
235       else if (GET_CODE (xm) == CONCAT)
236 	{
237 	  xm = XEXP (xm, 0);
238 	  goto retry;
239 	}
240       else if (GET_CODE (xm) == PARALLEL)
241 	{
242 	  xm = XVECEXP (xm, 0, 0);
243 	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
244 	  xm = XEXP (xm, 0);
245 	  goto retry;
246 	}
247       else if (xm == pc_rtx)
248 	skip = true;
249       else
250 	gcc_unreachable ();
251 
252       tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
253 
254       if (cur != next)
255 	{
256 	  if (MEM_P (x))
257 	    set_mem_attributes (x,
258 				next && TREE_CODE (next) == SSA_NAME
259 				? TREE_TYPE (next)
260 				: next, true);
261 	  else
262 	    set_reg_attrs_for_decl_rtl (next, x);
263 	}
264     }
265 
266   if (TREE_CODE (t) == SSA_NAME)
267     {
268       int part = var_to_partition (SA.map, t);
269       if (part != NO_PARTITION)
270 	{
271 	  if (SA.partition_to_pseudo[part])
272 	    gcc_assert (SA.partition_to_pseudo[part] == x);
273 	  else if (x != pc_rtx)
274 	    SA.partition_to_pseudo[part] = x;
275 	}
276       /* For the benefit of debug information at -O0 (where
277          vartracking doesn't run) record the place also in the base
278          DECL.  For PARMs and RESULTs, do so only when setting the
279          default def.  */
280       if (x && x != pc_rtx && SSA_NAME_VAR (t)
281 	  && (VAR_P (SSA_NAME_VAR (t))
282 	      || SSA_NAME_IS_DEFAULT_DEF (t)))
283 	{
284 	  tree var = SSA_NAME_VAR (t);
285 	  /* If we don't yet have something recorded, just record it now.  */
286 	  if (!DECL_RTL_SET_P (var))
287 	    SET_DECL_RTL (var, x);
288 	  /* If we have it set already to "multiple places" don't
289 	     change this.  */
290 	  else if (DECL_RTL (var) == pc_rtx)
291 	    ;
292 	  /* If we have something recorded and it's not the same place
293 	     as we want to record now, we have multiple partitions for the
294 	     same base variable, with different places.  We can't just
295 	     randomly chose one, hence we have to say that we don't know.
296 	     This only happens with optimization, and there var-tracking
297 	     will figure out the right thing.  */
298 	  else if (DECL_RTL (var) != x)
299 	    SET_DECL_RTL (var, pc_rtx);
300 	}
301     }
302   else
303     SET_DECL_RTL (t, x);
304 }
305 
306 /* This structure holds data relevant to one variable that will be
307    placed in a stack slot.  */
308 struct stack_var
309 {
310   /* The Variable.  */
311   tree decl;
312 
313   /* Initially, the size of the variable.  Later, the size of the partition,
314      if this variable becomes its partition's representative.  */
315   poly_uint64 size;
316 
317   /* The *byte* alignment required for this variable.  Or as, with the
318      size, the alignment for this partition.  */
319   unsigned int alignb;
320 
321   /* The partition representative.  */
322   size_t representative;
323 
324   /* The next stack variable in the partition, or EOC.  */
325   size_t next;
326 
327   /* The numbers of conflicting stack variables.  */
328   bitmap conflicts;
329 };
330 
331 #define EOC  ((size_t)-1)
332 
333 /* We have an array of such objects while deciding allocation.  */
334 static struct stack_var *stack_vars;
335 static size_t stack_vars_alloc;
336 static size_t stack_vars_num;
337 static hash_map<tree, size_t> *decl_to_stack_part;
338 
339 /* Conflict bitmaps go on this obstack.  This allows us to destroy
340    all of them in one big sweep.  */
341 static bitmap_obstack stack_var_bitmap_obstack;
342 
343 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
344    is non-decreasing.  */
345 static size_t *stack_vars_sorted;
346 
347 /* The phase of the stack frame.  This is the known misalignment of
348    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
349    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
350 static int frame_phase;
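/* A compiled-out numeric sketch (not GCC code) of the invariant above,
   with a hypothetical 16-byte preferred stack boundary: the phase is the
   fixed amount that, added to frame_offset, lands on the boundary.  */
#if 0
#include <assert.h>

static void
sketch_frame_phase (void)
{
  long long boundary = 16;	/* bytes, hypothetical value.  */
  long long off = -24;		/* some frame_offset mid-frame.  */
  long long phase = 8;		/* the known misalignment.  */
  assert ((off + phase) % boundary == 0);
}
#endif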
351 
352 /* Used during expand_used_vars to remember if we saw any decls for
353    which we'd like to enable stack smashing protection.  */
354 static bool has_protected_decls;
355 
356 /* Used during expand_used_vars.  Remember if we saw a character buffer
357    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
358 static bool has_short_buffer;
359 
360 /* Compute the byte alignment to use for DECL.  Ignore any alignment
361    we cannot honor given the expected alignment of the stack boundary.  */
362 
363 static unsigned int
364 align_local_variable (tree decl)
365 {
366   unsigned int align;
367 
368   if (TREE_CODE (decl) == SSA_NAME)
369     align = TYPE_ALIGN (TREE_TYPE (decl));
370   else
371     {
372       align = LOCAL_DECL_ALIGNMENT (decl);
373       SET_DECL_ALIGN (decl, align);
374     }
375   return align / BITS_PER_UNIT;
376 }
377 
378 /* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
379    round down otherwise.  Return the aligned BASE value.  */
380 
381 static inline unsigned HOST_WIDE_INT
382 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
383 {
384   return align_up ? (base + align - 1) & -align : base & -align;
385 }
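/* A compiled-out sketch (not GCC code) of the rounding arithmetic in
   align_base, using plain unsigned long long in place of HOST_WIDE_INT and
   assuming ALIGN is a power of two, so "& -align" clears the low bits.  */
#if 0
#include <assert.h>

static unsigned long long
sketch_align_base (unsigned long long base, unsigned long long align,
		   int align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}

static void
sketch_align_base_examples (void)
{
  assert (sketch_align_base (37, 16, 1) == 48);	/* 37 rounded up to 16.  */
  assert (sketch_align_base (37, 16, 0) == 32);	/* 37 rounded down.  */
  assert (sketch_align_base (48, 16, 1) == 48);	/* already aligned.  */
}
#endif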
386 
387 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
388    Return the frame offset.  */
389 
390 static poly_int64
391 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
392 {
393   poly_int64 offset, new_frame_offset;
394 
395   if (FRAME_GROWS_DOWNWARD)
396     {
397       new_frame_offset
398 	= aligned_lower_bound (frame_offset - frame_phase - size,
399 			       align) + frame_phase;
400       offset = new_frame_offset;
401     }
402   else
403     {
404       new_frame_offset
405 	= aligned_upper_bound (frame_offset - frame_phase,
406 			       align) + frame_phase;
407       offset = new_frame_offset;
408       new_frame_offset += size;
409     }
410   frame_offset = new_frame_offset;
411 
412   if (frame_offset_overflow (frame_offset, cfun->decl))
413     frame_offset = offset = 0;
414 
415   return offset;
416 }
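/* A compiled-out, simplified model (not GCC code) of how the allocator
   above advances frame_offset when FRAME_GROWS_DOWNWARD, using plain
   long long instead of poly_int64, assuming two's-complement arithmetic
   and a power-of-two ALIGN, and ignoring frame_phase and the overflow
   check.  */
#if 0
#include <assert.h>

static long long sketch_frame_offset = 0;

/* Allocate SIZE bytes at byte alignment ALIGN on a downward-growing frame.  */
static long long
sketch_alloc_downward (long long size, long long align)
{
  long long offset = (sketch_frame_offset - size) & -align;  /* round down.  */
  sketch_frame_offset = offset;
  return offset;
}

static void
sketch_alloc_examples (void)
{
  assert (sketch_alloc_downward (4, 4) == -4);	  /* an int lands at -4.  */
  assert (sketch_alloc_downward (10, 8) == -16);  /* 10 bytes, 8-aligned.  */
  assert (sketch_alloc_downward (1, 1) == -17);	  /* a char just below.  */
}
#endif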
417 
418 /* Accumulate DECL into STACK_VARS.  */
419 
420 static void
421 add_stack_var (tree decl)
422 {
423   struct stack_var *v;
424 
425   if (stack_vars_num >= stack_vars_alloc)
426     {
427       if (stack_vars_alloc)
428 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
429       else
430 	stack_vars_alloc = 32;
431       stack_vars
432 	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
433     }
434   if (!decl_to_stack_part)
435     decl_to_stack_part = new hash_map<tree, size_t>;
436 
437   v = &stack_vars[stack_vars_num];
438   decl_to_stack_part->put (decl, stack_vars_num);
439 
440   v->decl = decl;
441   tree size = TREE_CODE (decl) == SSA_NAME
442     ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
443     : DECL_SIZE_UNIT (decl);
444   v->size = tree_to_poly_uint64 (size);
445   /* Ensure that all variables have size, so that &a != &b for any two
446      variables that are simultaneously live.  */
447   if (known_eq (v->size, 0U))
448     v->size = 1;
449   v->alignb = align_local_variable (decl);
450   /* An alignment of zero can mightily confuse us later.  */
451   gcc_assert (v->alignb != 0);
452 
453   /* All variables are initially in their own partition.  */
454   v->representative = stack_vars_num;
455   v->next = EOC;
456 
457   /* All variables initially conflict with no other.  */
458   v->conflicts = NULL;
459 
460   /* Ensure that this decl doesn't get put onto the list twice.  */
461   set_rtl (decl, pc_rtx);
462 
463   stack_vars_num++;
464 }
465 
466 /* Make the decls associated with luid's X and Y conflict.  */
467 
468 static void
469 add_stack_var_conflict (size_t x, size_t y)
470 {
471   struct stack_var *a = &stack_vars[x];
472   struct stack_var *b = &stack_vars[y];
473   if (x == y)
474     return;
475   if (!a->conflicts)
476     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
477   if (!b->conflicts)
478     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
479   bitmap_set_bit (a->conflicts, y);
480   bitmap_set_bit (b->conflicts, x);
481 }
482 
483 /* Check whether the decls associated with luid's X and Y conflict.  */
484 
485 static bool
486 stack_var_conflict_p (size_t x, size_t y)
487 {
488   struct stack_var *a = &stack_vars[x];
489   struct stack_var *b = &stack_vars[y];
490   if (x == y)
491     return false;
492   /* Partitions containing an SSA name result from gimple registers
493      with things like unsupported modes.  They are top-level and
494      hence conflict with everything else.  */
495   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
496     return true;
497 
498   if (!a->conflicts || !b->conflicts)
499     return false;
500   return bitmap_bit_p (a->conflicts, y);
501 }
502 
503 /* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched
504    by add_stack_var, enter its partition number into bitmap DATA.  */
505 
506 static bool
507 visit_op (gimple *, tree op, tree, void *data)
508 {
509   bitmap active = (bitmap)data;
510   op = get_base_address (op);
511   if (op
512       && DECL_P (op)
513       && DECL_RTL_IF_SET (op) == pc_rtx)
514     {
515       size_t *v = decl_to_stack_part->get (op);
516       if (v)
517 	bitmap_set_bit (active, *v);
518     }
519   return false;
520 }
521 
522 /* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched
523    by add_stack_var, record conflicts between it and all other currently
524    active partitions from bitmap DATA.  */
525 
526 static bool
527 visit_conflict (gimple *, tree op, tree, void *data)
528 {
529   bitmap active = (bitmap)data;
530   op = get_base_address (op);
531   if (op
532       && DECL_P (op)
533       && DECL_RTL_IF_SET (op) == pc_rtx)
534     {
535       size_t *v = decl_to_stack_part->get (op);
536       if (v && bitmap_set_bit (active, *v))
537 	{
538 	  size_t num = *v;
539 	  bitmap_iterator bi;
540 	  unsigned i;
541 	  gcc_assert (num < stack_vars_num);
542 	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
543 	    add_stack_var_conflict (num, i);
544 	}
545     }
546   return false;
547 }
548 
549 /* Helper routine for add_scope_conflicts, calculating the active partitions
550    at the end of BB, leaving the result in WORK.  We're called to generate
551    conflicts when FOR_CONFLICT is true, otherwise we're just tracking
552    liveness.  */
553 
554 static void
555 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
556 {
557   edge e;
558   edge_iterator ei;
559   gimple_stmt_iterator gsi;
560   walk_stmt_load_store_addr_fn visit;
561 
562   bitmap_clear (work);
563   FOR_EACH_EDGE (e, ei, bb->preds)
564     bitmap_ior_into (work, (bitmap)e->src->aux);
565 
566   visit = visit_op;
567 
568   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
569     {
570       gimple *stmt = gsi_stmt (gsi);
571       walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
572     }
573   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
574     {
575       gimple *stmt = gsi_stmt (gsi);
576 
577       if (gimple_clobber_p (stmt))
578 	{
579 	  tree lhs = gimple_assign_lhs (stmt);
580 	  size_t *v;
581 	  /* Nested function lowering might introduce LHSs
582 	     that are COMPONENT_REFs.  */
583 	  if (!VAR_P (lhs))
584 	    continue;
585 	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
586 	      && (v = decl_to_stack_part->get (lhs)))
587 	    bitmap_clear_bit (work, *v);
588 	}
589       else if (!is_gimple_debug (stmt))
590 	{
591 	  if (for_conflict
592 	      && visit == visit_op)
593 	    {
594 	      /* If this is the first real instruction in this BB we need
595 	         to add conflicts for everything live at this point now.
596 		 Unlike classical liveness for named objects we can't
597 		 rely on seeing a def/use of the names we're interested in.
598 		 There might merely be indirect loads/stores.  We'd not add any
599 		 conflicts for such partitions.  */
600 	      bitmap_iterator bi;
601 	      unsigned i;
602 	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
603 		{
604 		  struct stack_var *a = &stack_vars[i];
605 		  if (!a->conflicts)
606 		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
607 		  bitmap_ior_into (a->conflicts, work);
608 		}
609 	      visit = visit_conflict;
610 	    }
611 	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
612 	}
613     }
614 }
615 
616 /* Generate stack partition conflicts between all partitions that are
617    simultaneously live.  */
618 
619 static void
620 add_scope_conflicts (void)
621 {
622   basic_block bb;
623   bool changed;
624   bitmap work = BITMAP_ALLOC (NULL);
625   int *rpo;
626   int n_bbs;
627 
628   /* We approximate the live range of a stack variable by taking the first
629      mention of its name as starting point(s), and by the end-of-scope
630      death clobber added by gimplify as ending point(s) of the range.
631      This overapproximates when, for instance, an address-taken operation
632      has been moved upward without also moving a dereference of it upward.
633      But it is conservatively correct, as a variable can never hold values
634      before its name is mentioned at least once.
635 
636      We then do a mostly classical bitmap liveness algorithm.  */
637 
638   FOR_ALL_BB_FN (bb, cfun)
639     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
640 
641   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
642   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
643 
644   changed = true;
645   while (changed)
646     {
647       int i;
648       changed = false;
649       for (i = 0; i < n_bbs; i++)
650 	{
651 	  bitmap active;
652 	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
653 	  active = (bitmap)bb->aux;
654 	  add_scope_conflicts_1 (bb, work, false);
655 	  if (bitmap_ior_into (active, work))
656 	    changed = true;
657 	}
658     }
659 
660   FOR_EACH_BB_FN (bb, cfun)
661     add_scope_conflicts_1 (bb, work, true);
662 
663   free (rpo);
664   BITMAP_FREE (work);
665   FOR_ALL_BB_FN (bb, cfun)
666     BITMAP_FREE (bb->aux);
667 }
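/* A compiled-out toy model (not GCC code) of the dataflow solved above:
   each block's live set is the union of its predecessors' sets plus the
   variables first mentioned in the block, minus those whose end-of-scope
   clobber is seen, iterated until nothing changes.  The CFG, the variables
   and the gen/kill sets below are hypothetical.  */
#if 0
#include <assert.h>

static void
sketch_scope_liveness (void)
{
  /* Diamond CFG: 0 -> {1,2}, {1,2} -> 3.  One bit per tracked variable.  */
  unsigned out[4] = { 0, 0, 0, 0 };
  unsigned gen[4]  = { 0, 1u << 0, 1u << 1, 0 };  /* A born in 1, B in 2.  */
  unsigned kill[4] = { 0, 0, 0, 1u << 0 };	  /* A clobbered in 3.  */
  int changed = 1;

  while (changed)
    {
      changed = 0;
      for (int bb = 0; bb < 4; bb++)
	{
	  unsigned in = 0;
	  if (bb == 1 || bb == 2)
	    in = out[0];
	  else if (bb == 3)
	    in = out[1] | out[2];
	  unsigned o = (in | gen[bb]) & ~kill[bb];
	  if (o != out[bb])
	    out[bb] = o, changed = 1;
	}
    }

  /* Both A and B flow into block 3, so their partitions conflict there.  */
  assert (((out[1] | out[2]) & 3u) == 3u);
  assert (out[3] == 1u << 1);	/* only B survives A's clobber.  */
}
#endif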
668 
669 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
670    sorting an array of indices by the properties of the object.  */
671 
672 static int
673 stack_var_cmp (const void *a, const void *b)
674 {
675   size_t ia = *(const size_t *)a;
676   size_t ib = *(const size_t *)b;
677   unsigned int aligna = stack_vars[ia].alignb;
678   unsigned int alignb = stack_vars[ib].alignb;
679   poly_int64 sizea = stack_vars[ia].size;
680   poly_int64 sizeb = stack_vars[ib].size;
681   tree decla = stack_vars[ia].decl;
682   tree declb = stack_vars[ib].decl;
683   bool largea, largeb;
684   unsigned int uida, uidb;
685 
686   /* Primary compare on "large" alignment.  Large comes first.  */
687   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
688   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
689   if (largea != largeb)
690     return (int)largeb - (int)largea;
691 
692   /* Secondary compare on size, decreasing.  */
693   int diff = compare_sizes_for_sort (sizeb, sizea);
694   if (diff != 0)
695     return diff;
696 
697   /* Tertiary compare on true alignment, decreasing.  */
698   if (aligna < alignb)
699     return -1;
700   if (aligna > alignb)
701     return 1;
702 
703   /* Final compare on ID for sort stability, increasing.
704      Two SSA names are compared by their version, SSA names come before
705      non-SSA names, and two normal decls are compared by their DECL_UID.  */
706   if (TREE_CODE (decla) == SSA_NAME)
707     {
708       if (TREE_CODE (declb) == SSA_NAME)
709 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
710       else
711 	return -1;
712     }
713   else if (TREE_CODE (declb) == SSA_NAME)
714     return 1;
715   else
716     uida = DECL_UID (decla), uidb = DECL_UID (declb);
717   if (uida < uidb)
718     return 1;
719   if (uida > uidb)
720     return -1;
721   return 0;
722 }
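/* A compiled-out sketch (not GCC code) of the two main sort keys above,
   applied with qsort to hypothetical entries: "large" alignment entries
   come first, and within each class sizes decrease.  The alignment and
   UID tie-breaks of the real comparator are omitted.  */
#if 0
#include <assert.h>
#include <stdlib.h>

struct sketch_entry { int large_p; long size; };

static int
sketch_entry_cmp (const void *pa, const void *pb)
{
  const struct sketch_entry *a = (const struct sketch_entry *) pa;
  const struct sketch_entry *b = (const struct sketch_entry *) pb;
  if (a->large_p != b->large_p)
    return b->large_p - a->large_p;		/* "large" first.  */
  if (a->size != b->size)
    return a->size < b->size ? 1 : -1;		/* size, decreasing.  */
  return 0;
}

static void
sketch_entry_cmp_example (void)
{
  struct sketch_entry v[4] = { { 0, 16 }, { 1, 8 }, { 0, 32 }, { 1, 64 } };
  qsort (v, 4, sizeof v[0], sketch_entry_cmp);
  assert (v[0].large_p && v[0].size == 64);
  assert (v[1].large_p && v[1].size == 8);
  assert (!v[2].large_p && v[2].size == 32);
  assert (!v[3].large_p && v[3].size == 16);
}
#endif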
723 
724 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
725 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
726 
727 /* If the points-to solution *PT points to variables that are in a partition
728    together with other variables, add all partition members to the pointed-to
729    variables bitmap.  */
730 
731 static void
732 add_partitioned_vars_to_ptset (struct pt_solution *pt,
733 			       part_hashmap *decls_to_partitions,
734 			       hash_set<bitmap> *visited, bitmap temp)
735 {
736   bitmap_iterator bi;
737   unsigned i;
738   bitmap *part;
739 
740   if (pt->anything
741       || pt->vars == NULL
742       /* The pointed-to vars bitmap is shared, it is enough to
743 	 visit it once.  */
744       || visited->add (pt->vars))
745     return;
746 
747   bitmap_clear (temp);
748 
749   /* By using a temporary bitmap to store all members of the partitions
750      we have to add, we make sure to visit each of the partitions only
751      once.  */
752   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
753     if ((!temp
754 	 || !bitmap_bit_p (temp, i))
755 	&& (part = decls_to_partitions->get (i)))
756       bitmap_ior_into (temp, *part);
757   if (!bitmap_empty_p (temp))
758     bitmap_ior_into (pt->vars, temp);
759 }
760 
761 /* Update points-to sets based on partition info, so we can use them on RTL.
762    The bitmaps representing stack partitions will be saved until expand,
763    where partitioned decls used as bases in memory expressions will be
764    rewritten.  */
765 
766 static void
767 update_alias_info_with_stack_vars (void)
768 {
769   part_hashmap *decls_to_partitions = NULL;
770   size_t i, j;
771   tree var = NULL_TREE;
772 
773   for (i = 0; i < stack_vars_num; i++)
774     {
775       bitmap part = NULL;
776       tree name;
777       struct ptr_info_def *pi;
778 
779       /* Not interested in partitions with a single variable.  */
780       if (stack_vars[i].representative != i
781           || stack_vars[i].next == EOC)
782         continue;
783 
784       if (!decls_to_partitions)
785 	{
786 	  decls_to_partitions = new part_hashmap;
787 	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
788 	}
789 
790       /* Create an SSA_NAME that points to the partition for use
791          as base during alias-oracle queries on RTL for bases that
792 	 have been partitioned.  */
793       if (var == NULL_TREE)
794 	var = create_tmp_var (ptr_type_node);
795       name = make_ssa_name (var);
796 
797       /* Create bitmaps representing partitions.  They will be used for
798          points-to sets later, so use GGC alloc.  */
799       part = BITMAP_GGC_ALLOC ();
800       for (j = i; j != EOC; j = stack_vars[j].next)
801 	{
802 	  tree decl = stack_vars[j].decl;
803 	  unsigned int uid = DECL_PT_UID (decl);
804 	  bitmap_set_bit (part, uid);
805 	  decls_to_partitions->put (uid, part);
806 	  cfun->gimple_df->decls_to_pointers->put (decl, name);
807 	  if (TREE_ADDRESSABLE (decl))
808 	    TREE_ADDRESSABLE (name) = 1;
809 	}
810 
811       /* Make the SSA name point to all partition members.  */
812       pi = get_ptr_info (name);
813       pt_solution_set (&pi->pt, part, false);
814     }
815 
816   /* Make all points-to sets that contain one member of a partition
817      contain all members of the partition.  */
818   if (decls_to_partitions)
819     {
820       unsigned i;
821       tree name;
822       hash_set<bitmap> visited;
823       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
824 
825       FOR_EACH_SSA_NAME (i, name, cfun)
826 	{
827 	  struct ptr_info_def *pi;
828 
829 	  if (POINTER_TYPE_P (TREE_TYPE (name))
830 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
831 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
832 					   &visited, temp);
833 	}
834 
835       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
836 				     decls_to_partitions, &visited, temp);
837 
838       delete decls_to_partitions;
839       BITMAP_FREE (temp);
840     }
841 }
842 
843 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
844    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
845    Merge them into a single partition A.  */
846 
847 static void
848 union_stack_vars (size_t a, size_t b)
849 {
850   struct stack_var *vb = &stack_vars[b];
851   bitmap_iterator bi;
852   unsigned u;
853 
854   gcc_assert (stack_vars[b].next == EOC);
855   /* Add B to A's partition.  */
856   stack_vars[b].next = stack_vars[a].next;
857   stack_vars[b].representative = a;
858   stack_vars[a].next = b;
859 
860   /* Update the required alignment of partition A to account for B.  */
861   if (stack_vars[a].alignb < stack_vars[b].alignb)
862     stack_vars[a].alignb = stack_vars[b].alignb;
863 
864   /* Update the interference graph and merge the conflicts.  */
865   if (vb->conflicts)
866     {
867       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
868 	add_stack_var_conflict (a, stack_vars[u].representative);
869       BITMAP_FREE (vb->conflicts);
870     }
871 }
872 
873 /* A subroutine of expand_used_vars.  Binpack the variables into
874    partitions constrained by the interference graph.  The overall
875    algorithm used is as follows:
876 
877 	Sort the objects by size in descending order.
878 	For each object A {
879 	  S = size(A)
880 	  O = 0
881 	  loop {
882 	    Look for the largest non-conflicting object B with size <= S.
883 	    UNION (A, B)
884 	  }
885 	}
886 */
887 
888 static void
889 partition_stack_vars (void)
890 {
891   size_t si, sj, n = stack_vars_num;
892 
893   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
894   for (si = 0; si < n; ++si)
895     stack_vars_sorted[si] = si;
896 
897   if (n == 1)
898     return;
899 
900   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
901 
902   for (si = 0; si < n; ++si)
903     {
904       size_t i = stack_vars_sorted[si];
905       unsigned int ialign = stack_vars[i].alignb;
906       poly_int64 isize = stack_vars[i].size;
907 
908       /* Ignore objects that aren't partition representatives. If we
909          see a var that is not a partition representative, it must
910          have been merged earlier.  */
911       if (stack_vars[i].representative != i)
912         continue;
913 
914       for (sj = si + 1; sj < n; ++sj)
915 	{
916 	  size_t j = stack_vars_sorted[sj];
917 	  unsigned int jalign = stack_vars[j].alignb;
918 	  poly_int64 jsize = stack_vars[j].size;
919 
920 	  /* Ignore objects that aren't partition representatives.  */
921 	  if (stack_vars[j].representative != j)
922 	    continue;
923 
924 	  /* Do not mix objects of "small" (supported) alignment
925 	     and "large" (unsupported) alignment.  */
926 	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
927 	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
928 	    break;
929 
930 	  /* For Address Sanitizer do not mix objects with different
931 	     sizes, as the shorter vars wouldn't be adequately protected.
932 	     Don't do that for "large" (unsupported) alignment objects,
933 	     those aren't protected anyway.  */
934 	  if (asan_sanitize_stack_p ()
935 	      && maybe_ne (isize, jsize)
936 	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
937 	    break;
938 
939 	  /* Ignore conflicting objects.  */
940 	  if (stack_var_conflict_p (i, j))
941 	    continue;
942 
943 	  /* UNION the objects, placing J at OFFSET.  */
944 	  union_stack_vars (i, j);
945 	}
946     }
947 
948   update_alias_info_with_stack_vars ();
949 }
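/* A compiled-out toy run (not GCC code) of the greedy packing above, on
   three hypothetical variables already sorted by decreasing size: A (32
   bytes), C (16) and B (8).  A and C are simultaneously live and hence
   conflict, so only B can join A's partition; C keeps its own.  Conflict
   merging and alignment classes are ignored for brevity.  */
#if 0
#include <assert.h>

static void
sketch_partition (void)
{
  enum { N = 3 };
  /* sorted[0] = A, sorted[1] = C, sorted[2] = B.  */
  int conflict[N][N] = { { 0, 1, 0 },
			 { 1, 0, 0 },
			 { 0, 0, 0 } };
  int repr[N] = { 0, 1, 2 };	/* each variable starts on its own.  */

  for (int i = 0; i < N; i++)
    {
      if (repr[i] != i)
	continue;		/* already merged into someone else.  */
      for (int j = i + 1; j < N; j++)
	if (repr[j] == j && !conflict[i][j])
	  repr[j] = i;		/* UNION (i, j).  */
    }

  assert (repr[0] == 0);	/* A represents {A, B}.  */
  assert (repr[1] == 1);	/* C could not be merged.  */
  assert (repr[2] == 0);	/* B shares A's stack slot.  */
}
#endif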
950 
951 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
952 
953 static void
954 dump_stack_var_partition (void)
955 {
956   size_t si, i, j, n = stack_vars_num;
957 
958   for (si = 0; si < n; ++si)
959     {
960       i = stack_vars_sorted[si];
961 
962       /* Skip variables that aren't partition representatives, for now.  */
963       if (stack_vars[i].representative != i)
964 	continue;
965 
966       fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
967       print_dec (stack_vars[i].size, dump_file);
968       fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
969 
970       for (j = i; j != EOC; j = stack_vars[j].next)
971 	{
972 	  fputc ('\t', dump_file);
973 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
974 	}
975       fputc ('\n', dump_file);
976     }
977 }
978 
979 /* Assign rtl to DECL at BASE + OFFSET.  */
980 
981 static void
982 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
983 			 poly_int64 offset)
984 {
985   unsigned align;
986   rtx x;
987 
988   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
989   gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
990 
991   x = plus_constant (Pmode, base, offset);
992   x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
993 		   ? TYPE_MODE (TREE_TYPE (decl))
994 		   : DECL_MODE (SSAVAR (decl)), x);
995 
996   if (TREE_CODE (decl) != SSA_NAME)
997     {
998       /* Set the alignment we actually gave this decl if it isn't an SSA
999          name.  If it is, we generate stack slots only accidentally, so it
1000 	 isn't as important; we'll simply use the alignment already set.  */
1001       if (base == virtual_stack_vars_rtx)
1002 	offset -= frame_phase;
1003       align = known_alignment (offset);
1004       align *= BITS_PER_UNIT;
1005       if (align == 0 || align > base_align)
1006 	align = base_align;
1007 
1008       /* One would think that we could assert that we're not decreasing
1009 	 alignment here, but (at least) the i386 port does exactly this
1010 	 via the MINIMUM_ALIGNMENT hook.  */
1011 
1012       SET_DECL_ALIGN (decl, align);
1013       DECL_USER_ALIGN (decl) = 0;
1014     }
1015 
1016   set_rtl (decl, x);
1017 }
1018 
1019 struct stack_vars_data
1020 {
1021   /* Vector of offset pairs, always end of some padding followed
1022      by start of the padding that needs Address Sanitizer protection.
1023      The vector is in reverse order: highest-offset pairs come first.  */
1024   auto_vec<HOST_WIDE_INT> asan_vec;
1025 
1026   /* Vector of partition representative decls in between the paddings.  */
1027   auto_vec<tree> asan_decl_vec;
1028 
1029   /* Base pseudo register for Address Sanitizer protected automatic vars.  */
1030   rtx asan_base;
1031 
1032   /* Alignment needed for the Address Sanitizer protected automatic vars.  */
1033   unsigned int asan_alignb;
1034 };
1035 
1036 /* A subroutine of expand_used_vars.  Give each partition representative
1037    a unique location within the stack frame.  Update each partition member
1038    with that location.  */
1039 
1040 static void
1041 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1042 {
1043   size_t si, i, j, n = stack_vars_num;
1044   poly_uint64 large_size = 0, large_alloc = 0;
1045   rtx large_base = NULL;
1046   unsigned large_align = 0;
1047   bool large_allocation_done = false;
1048   tree decl;
1049 
1050   /* Determine if there are any variables requiring "large" alignment.
1051      Since these are dynamically allocated, we only process these if
1052      no predicate is involved.  */
1053   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1054   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1055     {
1056       /* Find the total size of these variables.  */
1057       for (si = 0; si < n; ++si)
1058 	{
1059 	  unsigned alignb;
1060 
1061 	  i = stack_vars_sorted[si];
1062 	  alignb = stack_vars[i].alignb;
1063 
1064 	  /* All "large" alignment decls come before all "small" alignment
1065 	     decls, but "large" alignment decls are not sorted based on
1066 	     their alignment.  Increase large_align to track the largest
1067 	     required alignment.  */
1068 	  if ((alignb * BITS_PER_UNIT) > large_align)
1069 	    large_align = alignb * BITS_PER_UNIT;
1070 
1071 	  /* Stop when we get to the first decl with "small" alignment.  */
1072 	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1073 	    break;
1074 
1075 	  /* Skip variables that aren't partition representatives.  */
1076 	  if (stack_vars[i].representative != i)
1077 	    continue;
1078 
1079 	  /* Skip variables that have already had rtl assigned.  See also
1080 	     add_stack_var where we perpetrate this pc_rtx hack.  */
1081 	  decl = stack_vars[i].decl;
1082 	  if (TREE_CODE (decl) == SSA_NAME
1083 	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1084 	      : DECL_RTL (decl) != pc_rtx)
1085 	    continue;
1086 
1087 	  large_size = aligned_upper_bound (large_size, alignb);
1088 	  large_size += stack_vars[i].size;
1089 	}
1090     }
1091 
1092   for (si = 0; si < n; ++si)
1093     {
1094       rtx base;
1095       unsigned base_align, alignb;
1096       poly_int64 offset;
1097 
1098       i = stack_vars_sorted[si];
1099 
1100       /* Skip variables that aren't partition representatives, for now.  */
1101       if (stack_vars[i].representative != i)
1102 	continue;
1103 
1104       /* Skip variables that have already had rtl assigned.  See also
1105 	 add_stack_var where we perpetrate this pc_rtx hack.  */
1106       decl = stack_vars[i].decl;
1107       if (TREE_CODE (decl) == SSA_NAME
1108 	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1109 	  : DECL_RTL (decl) != pc_rtx)
1110 	continue;
1111 
1112       /* Check the predicate to see whether this variable should be
1113 	 allocated in this pass.  */
1114       if (pred && !pred (i))
1115 	continue;
1116 
1117       alignb = stack_vars[i].alignb;
1118       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1119 	{
1120 	  base = virtual_stack_vars_rtx;
1121 	  /* ASAN description strings don't yet have a syntax for expressing
1122 	     polynomial offsets.  */
1123 	  HOST_WIDE_INT prev_offset;
1124 	  if (asan_sanitize_stack_p ()
1125 	      && pred
1126 	      && frame_offset.is_constant (&prev_offset)
1127 	      && stack_vars[i].size.is_constant ())
1128 	    {
1129 	      if (data->asan_vec.is_empty ())
1130 		{
1131 		  alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE);
1132 		  prev_offset = frame_offset.to_constant ();
1133 		}
1134 	      prev_offset = align_base (prev_offset,
1135 					ASAN_MIN_RED_ZONE_SIZE,
1136 					!FRAME_GROWS_DOWNWARD);
1137 	      tree repr_decl = NULL_TREE;
1138 	      unsigned HOST_WIDE_INT size
1139 		= asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
1140 	      if (data->asan_vec.is_empty ())
1141 		size = MAX (size, ASAN_RED_ZONE_SIZE);
1142 
1143 	      unsigned HOST_WIDE_INT alignment = MAX (alignb,
1144 						      ASAN_MIN_RED_ZONE_SIZE);
1145 	      offset = alloc_stack_frame_space (size, alignment);
1146 
1147 	      data->asan_vec.safe_push (prev_offset);
1148 	      /* Allocating a constant amount of space from a constant
1149 		 starting offset must give a constant result.  */
1150 	      data->asan_vec.safe_push ((offset + stack_vars[i].size)
1151 					.to_constant ());
1152 	      /* Find the best representative of the partition.
1153 		 Prefer decls with a DECL_NAME, and better yet those
1154 		 satisfying the asan_protect_stack_decl predicate.  */
1155 	      for (j = i; j != EOC; j = stack_vars[j].next)
1156 		if (asan_protect_stack_decl (stack_vars[j].decl)
1157 		    && DECL_NAME (stack_vars[j].decl))
1158 		  {
1159 		    repr_decl = stack_vars[j].decl;
1160 		    break;
1161 		  }
1162 		else if (repr_decl == NULL_TREE
1163 			 && DECL_P (stack_vars[j].decl)
1164 			 && DECL_NAME (stack_vars[j].decl))
1165 		  repr_decl = stack_vars[j].decl;
1166 	      if (repr_decl == NULL_TREE)
1167 		repr_decl = stack_vars[i].decl;
1168 	      data->asan_decl_vec.safe_push (repr_decl);
1169 
1170 	      /* Make sure a representative is unpoisoned if another
1171 		 variable in the partition is handled by
1172 		 use-after-scope sanitization.  */
1173 	      if (asan_handled_variables != NULL
1174 		  && !asan_handled_variables->contains (repr_decl))
1175 		{
1176 		  for (j = i; j != EOC; j = stack_vars[j].next)
1177 		    if (asan_handled_variables->contains (stack_vars[j].decl))
1178 		      break;
1179 		  if (j != EOC)
1180 		    asan_handled_variables->add (repr_decl);
1181 		}
1182 
1183 	      data->asan_alignb = MAX (data->asan_alignb, alignb);
1184 	      if (data->asan_base == NULL)
1185 		data->asan_base = gen_reg_rtx (Pmode);
1186 	      base = data->asan_base;
1187 
1188 	      if (!STRICT_ALIGNMENT)
1189 		base_align = crtl->max_used_stack_slot_alignment;
1190 	      else
1191 		base_align = MAX (crtl->max_used_stack_slot_alignment,
1192 				  GET_MODE_ALIGNMENT (SImode)
1193 				  << ASAN_SHADOW_SHIFT);
1194 	    }
1195 	  else
1196 	    {
1197 	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1198 	      base_align = crtl->max_used_stack_slot_alignment;
1199 	    }
1200 	}
1201       else
1202 	{
1203 	  /* Large alignment is only processed in the last pass.  */
1204 	  if (pred)
1205 	    continue;
1206 
1207 	  /* If there were any variables requiring "large" alignment, allocate
1208 	     space.  */
1209 	  if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1210 	    {
1211 	      poly_int64 loffset;
1212 	      rtx large_allocsize;
1213 
1214 	      large_allocsize = gen_int_mode (large_size, Pmode);
1215 	      get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1216 	      loffset = alloc_stack_frame_space
1217 		(rtx_to_poly_int64 (large_allocsize),
1218 		 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1219 	      large_base = get_dynamic_stack_base (loffset, large_align);
1220 	      large_allocation_done = true;
1221 	    }
1222 	  gcc_assert (large_base != NULL);
1223 
1224 	  large_alloc = aligned_upper_bound (large_alloc, alignb);
1225 	  offset = large_alloc;
1226 	  large_alloc += stack_vars[i].size;
1227 
1228 	  base = large_base;
1229 	  base_align = large_align;
1230 	}
1231 
1232       /* Create rtl for each variable based on their location within the
1233 	 partition.  */
1234       for (j = i; j != EOC; j = stack_vars[j].next)
1235 	{
1236 	  expand_one_stack_var_at (stack_vars[j].decl,
1237 				   base, base_align,
1238 				   offset);
1239 	}
1240     }
1241 
1242   gcc_assert (known_eq (large_alloc, large_size));
1243 }
1244 
1245 /* Take into account all sizes of partitions and reset DECL_RTLs.  */
1246 static poly_uint64
1247 account_stack_vars (void)
1248 {
1249   size_t si, j, i, n = stack_vars_num;
1250   poly_uint64 size = 0;
1251 
1252   for (si = 0; si < n; ++si)
1253     {
1254       i = stack_vars_sorted[si];
1255 
1256       /* Skip variables that aren't partition representatives, for now.  */
1257       if (stack_vars[i].representative != i)
1258 	continue;
1259 
1260       size += stack_vars[i].size;
1261       for (j = i; j != EOC; j = stack_vars[j].next)
1262 	set_rtl (stack_vars[j].decl, NULL);
1263     }
1264   return size;
1265 }
1266 
1267 /* Record the RTL assignment X for the default def of PARM.  */
1268 
1269 extern void
1270 set_parm_rtl (tree parm, rtx x)
1271 {
1272   gcc_assert (TREE_CODE (parm) == PARM_DECL
1273 	      || TREE_CODE (parm) == RESULT_DECL);
1274 
1275   if (x && !MEM_P (x))
1276     {
1277       unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1278 					      TYPE_MODE (TREE_TYPE (parm)),
1279 					      TYPE_ALIGN (TREE_TYPE (parm)));
1280 
1281       /* If the variable alignment is very large we'll dynamically
1282 	 allocate it, which means that the in-frame portion is just a
1283 	 pointer.  ??? We've got a pseudo for sure here; do we
1284 	 actually dynamically allocate its spilling area if needed?
1285 	 ??? Isn't it a problem when Pmode alignment also exceeds
1286 	 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
1287       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1288 	align = GET_MODE_ALIGNMENT (Pmode);
1289 
1290       record_alignment_for_reg_var (align);
1291     }
1292 
1293   tree ssa = ssa_default_def (cfun, parm);
1294   if (!ssa)
1295     return set_rtl (parm, x);
1296 
1297   int part = var_to_partition (SA.map, ssa);
1298   gcc_assert (part != NO_PARTITION);
1299 
1300   bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1301   gcc_assert (changed);
1302 
1303   set_rtl (ssa, x);
1304   gcc_assert (DECL_RTL (parm) == x);
1305 }
1306 
1307 /* A subroutine of expand_one_var.  Called to immediately assign rtl
1308    to a variable to be allocated in the stack frame.  */
1309 
1310 static void
1311 expand_one_stack_var_1 (tree var)
1312 {
1313   poly_uint64 size;
1314   poly_int64 offset;
1315   unsigned byte_align;
1316 
1317   if (TREE_CODE (var) == SSA_NAME)
1318     {
1319       tree type = TREE_TYPE (var);
1320       size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1321       byte_align = TYPE_ALIGN_UNIT (type);
1322     }
1323   else
1324     {
1325       size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1326       byte_align = align_local_variable (var);
1327     }
1328 
1329   /* We handle highly aligned variables in expand_stack_vars.  */
1330   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1331 
1332   offset = alloc_stack_frame_space (size, byte_align);
1333 
1334   expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1335 			   crtl->max_used_stack_slot_alignment, offset);
1336 }
1337 
1338 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1339    already assigned some MEM.  */
1340 
1341 static void
1342 expand_one_stack_var (tree var)
1343 {
1344   if (TREE_CODE (var) == SSA_NAME)
1345     {
1346       int part = var_to_partition (SA.map, var);
1347       if (part != NO_PARTITION)
1348 	{
1349 	  rtx x = SA.partition_to_pseudo[part];
1350 	  gcc_assert (x);
1351 	  gcc_assert (MEM_P (x));
1352 	  return;
1353 	}
1354     }
1355 
1356   return expand_one_stack_var_1 (var);
1357 }
1358 
1359 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1360    that will reside in a hard register.  */
1361 
1362 static void
1363 expand_one_hard_reg_var (tree var)
1364 {
1365   rest_of_decl_compilation (var, 0, 0);
1366 }
1367 
1368 /* Record the alignment requirements of some variable assigned to a
1369    pseudo.  */
1370 
1371 static void
1372 record_alignment_for_reg_var (unsigned int align)
1373 {
1374   if (SUPPORTS_STACK_ALIGNMENT
1375       && crtl->stack_alignment_estimated < align)
1376     {
1377       /* stack_alignment_estimated shouldn't change after stack
1378          realign decision is made.  */
1379       gcc_assert (!crtl->stack_realign_processed);
1380       crtl->stack_alignment_estimated = align;
1381     }
1382 
1383   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1384      So here we only make sure stack_alignment_needed >= align.  */
1385   if (crtl->stack_alignment_needed < align)
1386     crtl->stack_alignment_needed = align;
1387   if (crtl->max_used_stack_slot_alignment < align)
1388     crtl->max_used_stack_slot_alignment = align;
1389 }
1390 
1391 /* Create RTL for an SSA partition.  */
1392 
1393 static void
1394 expand_one_ssa_partition (tree var)
1395 {
1396   int part = var_to_partition (SA.map, var);
1397   gcc_assert (part != NO_PARTITION);
1398 
1399   if (SA.partition_to_pseudo[part])
1400     return;
1401 
1402   unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1403 					  TYPE_MODE (TREE_TYPE (var)),
1404 					  TYPE_ALIGN (TREE_TYPE (var)));
1405 
1406   /* If the variable alignment is very large we'll dynamically allocate
1407      it, which means that the in-frame portion is just a pointer.  */
1408   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1409     align = GET_MODE_ALIGNMENT (Pmode);
1410 
1411   record_alignment_for_reg_var (align);
1412 
1413   if (!use_register_for_decl (var))
1414     {
1415       if (defer_stack_allocation (var, true))
1416 	add_stack_var (var);
1417       else
1418 	expand_one_stack_var_1 (var);
1419       return;
1420     }
1421 
1422   machine_mode reg_mode = promote_ssa_mode (var, NULL);
1423   rtx x = gen_reg_rtx (reg_mode);
1424 
1425   set_rtl (var, x);
1426 
1427   /* For a promoted variable, X will not be used directly but wrapped in a
1428      SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1429      will assume that its upper bits can be inferred from its lower bits.
1430      Therefore, if X isn't initialized on every path from the entry, then
1431      we must do it manually in order to fulfill the above assumption.  */
1432   if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1433       && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1434     emit_move_insn (x, CONST0_RTX (reg_mode));
1435 }
1436 
1437 /* Record the association between the RTL generated for partition PART
1438    and the underlying variable of the SSA_NAME VAR.  */
1439 
1440 static void
1441 adjust_one_expanded_partition_var (tree var)
1442 {
1443   if (!var)
1444     return;
1445 
1446   tree decl = SSA_NAME_VAR (var);
1447 
1448   int part = var_to_partition (SA.map, var);
1449   if (part == NO_PARTITION)
1450     return;
1451 
1452   rtx x = SA.partition_to_pseudo[part];
1453 
1454   gcc_assert (x);
1455 
1456   set_rtl (var, x);
1457 
1458   if (!REG_P (x))
1459     return;
1460 
1461   /* Note if the object is a user variable.  */
1462   if (decl && !DECL_ARTIFICIAL (decl))
1463     mark_user_reg (x);
1464 
1465   if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1466     mark_reg_pointer (x, get_pointer_alignment (var));
1467 }
1468 
1469 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1470    that will reside in a pseudo register.  */
1471 
1472 static void
1473 expand_one_register_var (tree var)
1474 {
1475   if (TREE_CODE (var) == SSA_NAME)
1476     {
1477       int part = var_to_partition (SA.map, var);
1478       if (part != NO_PARTITION)
1479 	{
1480 	  rtx x = SA.partition_to_pseudo[part];
1481 	  gcc_assert (x);
1482 	  gcc_assert (REG_P (x));
1483 	  return;
1484 	}
1485       gcc_unreachable ();
1486     }
1487 
1488   tree decl = var;
1489   tree type = TREE_TYPE (decl);
1490   machine_mode reg_mode = promote_decl_mode (decl, NULL);
1491   rtx x = gen_reg_rtx (reg_mode);
1492 
1493   set_rtl (var, x);
1494 
1495   /* Note if the object is a user variable.  */
1496   if (!DECL_ARTIFICIAL (decl))
1497     mark_user_reg (x);
1498 
1499   if (POINTER_TYPE_P (type))
1500     mark_reg_pointer (x, get_pointer_alignment (var));
1501 }
1502 
1503 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1504    has some associated error, e.g. its type is error-mark.  We just need
1505    to pick something that won't crash the rest of the compiler.  */
1506 
1507 static void
1508 expand_one_error_var (tree var)
1509 {
1510   machine_mode mode = DECL_MODE (var);
1511   rtx x;
1512 
1513   if (mode == BLKmode)
1514     x = gen_rtx_MEM (BLKmode, const0_rtx);
1515   else if (mode == VOIDmode)
1516     x = const0_rtx;
1517   else
1518     x = gen_reg_rtx (mode);
1519 
1520   SET_DECL_RTL (var, x);
1521 }
1522 
1523 /* A subroutine of expand_one_var.  VAR is a variable that will be
1524    allocated to the local stack frame.  Return true if we wish to
1525    add VAR to STACK_VARS so that it will be coalesced with other
1526    variables.  Return false to allocate VAR immediately.
1527 
1528    This function is used to reduce the number of variables considered
1529    for coalescing, which reduces the size of the quadratic problem.  */
1530 
1531 static bool
1532 defer_stack_allocation (tree var, bool toplevel)
1533 {
1534   tree size_unit = TREE_CODE (var) == SSA_NAME
1535     ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1536     : DECL_SIZE_UNIT (var);
1537   poly_uint64 size;
1538 
1539   /* Whether the variable is small enough for immediate allocation not to be
1540      a problem with regard to the frame size.  */
1541   bool smallish
1542     = (poly_int_tree_p (size_unit, &size)
1543        && (estimated_poly_value (size)
1544 	   < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
1545 
1546   /* If stack protection is enabled, *all* stack variables must be deferred,
1547      so that we can re-order the strings to the top of the frame.
1548      Similarly for Address Sanitizer.  */
1549   if (flag_stack_protect || asan_sanitize_stack_p ())
1550     return true;
1551 
1552   unsigned int align = TREE_CODE (var) == SSA_NAME
1553     ? TYPE_ALIGN (TREE_TYPE (var))
1554     : DECL_ALIGN (var);
1555 
1556   /* We handle "large" alignment via dynamic allocation.  We want to handle
1557      this extra complication in only one place, so defer them.  */
1558   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1559     return true;
1560 
1561   bool ignored = TREE_CODE (var) == SSA_NAME
1562     ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1563     : DECL_IGNORED_P (var);
1564 
1565   /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1566      might be detached from their block and appear at toplevel when we reach
1567      here.  We want to coalesce them with variables from other blocks when
1568      the immediate contribution to the frame size would be noticeable.  */
1569   if (toplevel && optimize > 0 && ignored && !smallish)
1570     return true;
1571 
1572   /* Variables declared in the outermost scope automatically conflict
1573      with every other variable.  The only reason to want to defer them
1574      at all is that, after sorting, we can more efficiently pack
1575      small variables in the stack frame.  Continue to defer at -O2.  */
1576   if (toplevel && optimize < 2)
1577     return false;
1578 
1579   /* Without optimization, *most* variables are allocated from the
1580      stack, which makes the quadratic problem large exactly when we
1581      want compilation to proceed as quickly as possible.  On the
1582      other hand, we don't want the function's stack frame size to
1583      get completely out of hand.  So we avoid adding scalars and
1584      "small" aggregates to the list at all.  */
1585   if (optimize == 0 && smallish)
1586     return false;
1587 
1588   return true;
1589 }
1590 
1591 /* A subroutine of expand_used_vars.  Expand one variable according to
1592    its flavor.  Variables to be placed on the stack are not actually
1593    expanded yet, merely recorded.
1594    When REALLY_EXPAND is false, only add stack values to be allocated.
1595    Return stack usage this variable is supposed to take.
1596 */
1597 
1598 static poly_uint64
1599 expand_one_var (tree var, bool toplevel, bool really_expand)
1600 {
1601   unsigned int align = BITS_PER_UNIT;
1602   tree origvar = var;
1603 
1604   var = SSAVAR (var);
1605 
1606   if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1607     {
1608       if (is_global_var (var))
1609 	return 0;
1610 
1611       /* Because we don't know if VAR will be in a register or on the stack,
1612 	 we conservatively assume it will be on the stack even if VAR is
1613 	 eventually put into a register after the RA pass.  For non-automatic
1614 	 variables, which won't be on the stack, we collect the alignment of
1615 	 the type and ignore user-specified alignment.  Similarly for
1616 	 SSA_NAMEs for which use_register_for_decl returns true.  */
1617       if (TREE_STATIC (var)
1618 	  || DECL_EXTERNAL (var)
1619 	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1620 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1621 				   TYPE_MODE (TREE_TYPE (var)),
1622 				   TYPE_ALIGN (TREE_TYPE (var)));
1623       else if (DECL_HAS_VALUE_EXPR_P (var)
1624 	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1625 	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1626 	   or variables which were assigned a stack slot already by
1627 	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1628 	   changed from the offset chosen to it.  */
1629 	align = crtl->stack_alignment_estimated;
1630       else
1631 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1632 
1633       /* If the variable alignment is very large we'll dynamically allocate
1634 	 it, which means that the in-frame portion is just a pointer.  */
1635       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1636 	align = GET_MODE_ALIGNMENT (Pmode);
1637     }
1638 
1639   record_alignment_for_reg_var (align);
1640 
1641   poly_uint64 size;
1642   if (TREE_CODE (origvar) == SSA_NAME)
1643     {
1644       gcc_assert (!VAR_P (var)
1645 		  || (!DECL_EXTERNAL (var)
1646 		      && !DECL_HAS_VALUE_EXPR_P (var)
1647 		      && !TREE_STATIC (var)
1648 		      && TREE_TYPE (var) != error_mark_node
1649 		      && !DECL_HARD_REGISTER (var)
1650 		      && really_expand));
1651     }
1652   if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1653     ;
1654   else if (DECL_EXTERNAL (var))
1655     ;
1656   else if (DECL_HAS_VALUE_EXPR_P (var))
1657     ;
1658   else if (TREE_STATIC (var))
1659     ;
1660   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1661     ;
1662   else if (TREE_TYPE (var) == error_mark_node)
1663     {
1664       if (really_expand)
1665         expand_one_error_var (var);
1666     }
1667   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1668     {
1669       if (really_expand)
1670 	{
1671 	  expand_one_hard_reg_var (var);
1672 	  if (!DECL_HARD_REGISTER (var))
1673 	    /* Invalid register specification.  */
1674 	    expand_one_error_var (var);
1675 	}
1676     }
1677   else if (use_register_for_decl (var))
1678     {
1679       if (really_expand)
1680         expand_one_register_var (origvar);
1681     }
1682   else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1683 	   || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1684     {
1685       /* Reject variables which cover more than half of the address-space.  */
1686       if (really_expand)
1687 	{
1688 	  if (DECL_NONLOCAL_FRAME (var))
1689 	    error_at (DECL_SOURCE_LOCATION (current_function_decl),
1690 		      "total size of local objects is too large");
1691 	  else
1692 	    error_at (DECL_SOURCE_LOCATION (var),
1693 		      "size of variable %q+D is too large", var);
1694 	  expand_one_error_var (var);
1695 	}
1696     }
1697   else if (defer_stack_allocation (var, toplevel))
1698     add_stack_var (origvar);
1699   else
1700     {
1701       if (really_expand)
1702         {
1703           if (lookup_attribute ("naked",
1704                                 DECL_ATTRIBUTES (current_function_decl)))
1705             error ("cannot allocate stack for variable %q+D, naked function.",
1706                    var);
1707 
1708           expand_one_stack_var (origvar);
1709         }
1710       return size;
1711     }
1712   return 0;
1713 }
1714 
1715 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1716    expanding variables.  Those variables that can be put into registers
1717    are allocated pseudos; those that can't are put on the stack.
1718 
1719    TOPLEVEL is true if this is the outermost BLOCK.  */
1720 
1721 static void
1722 expand_used_vars_for_block (tree block, bool toplevel)
1723 {
1724   tree t;
1725 
1726   /* Expand all variables at this level.  */
1727   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1728     if (TREE_USED (t)
1729         && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1730 	    || !DECL_NONSHAREABLE (t)))
1731       expand_one_var (t, toplevel, true);
1732 
1733   /* Expand all variables at containing levels.  */
1734   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1735     expand_used_vars_for_block (t, false);
1736 }
1737 
1738 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1739    and clear TREE_USED on all local variables.  */
1740 
1741 static void
1742 clear_tree_used (tree block)
1743 {
1744   tree t;
1745 
1746   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1747     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1748     if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1749 	|| !DECL_NONSHAREABLE (t))
1750       TREE_USED (t) = 0;
1751 
1752   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1753     clear_tree_used (t);
1754 }
1755 
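/* Values of flag_stack_protect, corresponding to the -fstack-protector
   family of options: -fstack-protector (DEFAULT), -fstack-protector-all
   (ALL), -fstack-protector-strong (STRONG) and -fstack-protector-explicit
   (EXPLICIT).  */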
1756 enum {
1757   SPCT_FLAG_DEFAULT = 1,
1758   SPCT_FLAG_ALL = 2,
1759   SPCT_FLAG_STRONG = 3,
1760   SPCT_FLAG_EXPLICIT = 4
1761 };
1762 
1763 /* Examine TYPE and determine a bit mask of the following features.  */
1764 
1765 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1766 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1767 #define SPCT_HAS_ARRAY			4
1768 #define SPCT_HAS_AGGREGATE		8
1769 
1770 static unsigned int
1771 stack_protect_classify_type (tree type)
1772 {
1773   unsigned int ret = 0;
1774   tree t;
1775 
1776   switch (TREE_CODE (type))
1777     {
1778     case ARRAY_TYPE:
1779       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1780       if (t == char_type_node
1781 	  || t == signed_char_type_node
1782 	  || t == unsigned_char_type_node)
1783 	{
1784 	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1785 	  unsigned HOST_WIDE_INT len;
1786 
1787 	  if (!TYPE_SIZE_UNIT (type)
1788 	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1789 	    len = max;
1790 	  else
1791 	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1792 
1793 	  if (len == 0)
1794 	    ret = SPCT_HAS_ARRAY;
1795 	  else if (len < max)
1796 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1797 	  else
1798 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1799 	}
1800       else
1801 	ret = SPCT_HAS_ARRAY;
1802       break;
1803 
1804     case UNION_TYPE:
1805     case QUAL_UNION_TYPE:
1806     case RECORD_TYPE:
1807       ret = SPCT_HAS_AGGREGATE;
1808       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1809 	if (TREE_CODE (t) == FIELD_DECL)
1810 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1811       break;
1812 
1813     default:
1814       break;
1815     }
1816 
1817   return ret;
1818 }
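
/* For example, with the default --param ssp-buffer-size=8, a local
   "char buf[4]" classifies as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char buf[64]" as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, and a
   non-character array such as "int v[4]" as just SPCT_HAS_ARRAY.  */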
1819 
1820 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1821    part of the local stack frame.  Remember if we ever return nonzero for
1822    any variable in this function.  The return value is the phase number in
1823    which the variable should be allocated.  */
1824 
1825 static int
1826 stack_protect_decl_phase (tree decl)
1827 {
1828   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1829   int ret = 0;
1830 
1831   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1832     has_short_buffer = true;
1833 
1834   if (flag_stack_protect == SPCT_FLAG_ALL
1835       || flag_stack_protect == SPCT_FLAG_STRONG
1836       || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1837 	  && lookup_attribute ("stack_protect",
1838 			       DECL_ATTRIBUTES (current_function_decl))))
1839     {
1840       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1841 	  && !(bits & SPCT_HAS_AGGREGATE))
1842 	ret = 1;
1843       else if (bits & SPCT_HAS_ARRAY)
1844 	ret = 2;
1845     }
1846   else
1847     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1848 
1849   if (ret)
1850     has_protected_decls = true;
1851 
1852   return ret;
1853 }
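
/* Consequently, under -fstack-protector-strong a character buffer is
   allocated in phase 1 (the vulnerable upper part of the frame) and other
   arrays in phase 2, while plain -fstack-protector only segregates
   character buffers of at least --param ssp-buffer-size bytes.  */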
1854 
1855 /* Two helper routines that check for phase 1 and phase 2.  These are used
1856    as callbacks for expand_stack_vars.  */
1857 
1858 static bool
1859 stack_protect_decl_phase_1 (size_t i)
1860 {
1861   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1862 }
1863 
1864 static bool
1865 stack_protect_decl_phase_2 (size_t i)
1866 {
1867   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1868 }
1869 
1870 /* A helper function that checks for the asan phase (with stack protector
1871    it is phase 3).  This is used as a callback for expand_stack_vars.
1872    Returns true if any of the vars in the partition need to be protected.  */
1873 
1874 static bool
1875 asan_decl_phase_3 (size_t i)
1876 {
1877   while (i != EOC)
1878     {
1879       if (asan_protect_stack_decl (stack_vars[i].decl))
1880 	return true;
1881       i = stack_vars[i].next;
1882     }
1883   return false;
1884 }
1885 
1886 /* Ensure that variables in different stack protection phases conflict
1887    so that they are not merged and share the same stack slot.  */
1888 
1889 static void
1890 add_stack_protection_conflicts (void)
1891 {
1892   size_t i, j, n = stack_vars_num;
1893   unsigned char *phase;
1894 
1895   phase = XNEWVEC (unsigned char, n);
1896   for (i = 0; i < n; ++i)
1897     phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1898 
1899   for (i = 0; i < n; ++i)
1900     {
1901       unsigned char ph_i = phase[i];
1902       for (j = i + 1; j < n; ++j)
1903 	if (ph_i != phase[j])
1904 	  add_stack_var_conflict (i, j);
1905     }
1906 
1907   XDELETEVEC (phase);
1908 }
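
/* Note that the pairwise walk above is quadratic in the number of stack
   variables; it only runs when some form of stack protection is in effect
   for this function.  */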
1909 
1910 /* Create a decl for the guard at the top of the stack frame.  */
1911 
1912 static void
1913 create_stack_guard (void)
1914 {
1915   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1916 			   VAR_DECL, NULL, ptr_type_node);
1917   TREE_THIS_VOLATILE (guard) = 1;
1918   TREE_USED (guard) = 1;
1919   expand_one_stack_var (guard);
1920   crtl->stack_protect_guard = guard;
1921 }
1922 
1923 /* Prepare for expanding variables.  */
1924 static void
1925 init_vars_expansion (void)
1926 {
1927   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1928   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1929 
1930   /* A map from decl to stack partition.  */
1931   decl_to_stack_part = new hash_map<tree, size_t>;
1932 
1933   /* Initialize local stack smashing state.  */
1934   has_protected_decls = false;
1935   has_short_buffer = false;
1936 }
1937 
1938 /* Free up stack variable graph data.  */
1939 static void
1940 fini_vars_expansion (void)
1941 {
1942   bitmap_obstack_release (&stack_var_bitmap_obstack);
1943   if (stack_vars)
1944     XDELETEVEC (stack_vars);
1945   if (stack_vars_sorted)
1946     XDELETEVEC (stack_vars_sorted);
1947   stack_vars = NULL;
1948   stack_vars_sorted = NULL;
1949   stack_vars_alloc = stack_vars_num = 0;
1950   delete decl_to_stack_part;
1951   decl_to_stack_part = NULL;
1952 }
1953 
1954 /* Make a fair guess for the size of the stack frame of the function
1955    in NODE.  This doesn't have to be exact, the result is only used in
1956    the inline heuristics.  So we don't want to run the full stack var
1957    packing algorithm (which is quadratic in the number of stack vars).
1958    Instead, we calculate the total size of all stack vars.  This turns
1959    out to be a pretty fair estimate -- packing of stack vars doesn't
1960    happen very often.  */
1961 
1962 HOST_WIDE_INT
1963 estimated_stack_frame_size (struct cgraph_node *node)
1964 {
1965   poly_int64 size = 0;
1966   size_t i;
1967   tree var;
1968   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1969 
1970   push_cfun (fn);
1971 
1972   init_vars_expansion ();
1973 
1974   FOR_EACH_LOCAL_DECL (fn, i, var)
1975     if (auto_var_in_fn_p (var, fn->decl))
1976       size += expand_one_var (var, true, false);
1977 
1978   if (stack_vars_num > 0)
1979     {
1980       /* Fake sorting the stack vars for account_stack_vars ().  */
1981       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1982       for (i = 0; i < stack_vars_num; ++i)
1983 	stack_vars_sorted[i] = i;
1984       size += account_stack_vars ();
1985     }
1986 
1987   fini_vars_expansion ();
1988   pop_cfun ();
1989   return estimated_poly_value (size);
1990 }
1991 
1992 /* Helper routine to check if a record or union contains an array field. */
1993 
1994 static int
1995 record_or_union_type_has_array_p (const_tree tree_type)
1996 {
1997   tree fields = TYPE_FIELDS (tree_type);
1998   tree f;
1999 
2000   for (f = fields; f; f = DECL_CHAIN (f))
2001     if (TREE_CODE (f) == FIELD_DECL)
2002       {
2003 	tree field_type = TREE_TYPE (f);
2004 	if (RECORD_OR_UNION_TYPE_P (field_type)
2005 	    && record_or_union_type_has_array_p (field_type))
2006 	  return 1;
2007 	if (TREE_CODE (field_type) == ARRAY_TYPE)
2008 	  return 1;
2009       }
2010   return 0;
2011 }
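
/* For example, "struct S { int n; char buf[8]; }" contains an array field
   and yields 1, while "struct T { int a, b; }" yields 0.  */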
2012 
2013 /* Check if the current function has local referenced variables that
2014    have their addresses taken, contain an array, or are arrays.  */
2015 
2016 static bool
2017 stack_protect_decl_p ()
2018 {
2019   unsigned i;
2020   tree var;
2021 
2022   FOR_EACH_LOCAL_DECL (cfun, i, var)
2023     if (!is_global_var (var))
2024       {
2025 	tree var_type = TREE_TYPE (var);
2026 	if (VAR_P (var)
2027 	    && (TREE_CODE (var_type) == ARRAY_TYPE
2028 		|| TREE_ADDRESSABLE (var)
2029 		|| (RECORD_OR_UNION_TYPE_P (var_type)
2030 		    && record_or_union_type_has_array_p (var_type))))
2031 	  return true;
2032       }
2033   return false;
2034 }
2035 
2036 /* Check if the current function has calls that use a return slot.  */
2037 
2038 static bool
2039 stack_protect_return_slot_p ()
2040 {
2041   basic_block bb;
2042 
2043   FOR_ALL_BB_FN (bb, cfun)
2044     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2045 	 !gsi_end_p (gsi); gsi_next (&gsi))
2046       {
2047 	gimple *stmt = gsi_stmt (gsi);
2048 	/* This assumes that calls to internal-only functions never
2049 	   use a return slot.  */
2050 	if (is_gimple_call (stmt)
2051 	    && !gimple_call_internal_p (stmt)
2052 	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2053 				  gimple_call_fndecl (stmt)))
2054 	  return true;
2055       }
2056   return false;
2057 }
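
/* A call uses a return slot when its value is returned in memory rather
   than in registers, e.g. a call returning a sufficiently large aggregate
   by value; aggregate_value_p makes that determination from the call's
   return type.  */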
2058 
2059 /* Expand all variables used in the function.  */
2060 
2061 static rtx_insn *
2062 expand_used_vars (void)
2063 {
2064   tree var, outer_block = DECL_INITIAL (current_function_decl);
2065   auto_vec<tree> maybe_local_decls;
2066   rtx_insn *var_end_seq = NULL;
2067   unsigned i;
2068   unsigned len;
2069   bool gen_stack_protect_signal = false;
2070 
2071   /* Compute the phase of the stack frame for this function.  */
2072   {
2073     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2074     int off = targetm.starting_frame_offset () % align;
2075     frame_phase = off ? align - off : 0;
2076   }
2077 
2078   /* Set TREE_USED on all variables in the local_decls.  */
2079   FOR_EACH_LOCAL_DECL (cfun, i, var)
2080     TREE_USED (var) = 1;
2081   /* Clear TREE_USED on all variables associated with a block scope.  */
2082   clear_tree_used (DECL_INITIAL (current_function_decl));
2083 
2084   init_vars_expansion ();
2085 
2086   if (targetm.use_pseudo_pic_reg ())
2087     pic_offset_table_rtx = gen_reg_rtx (Pmode);
2088 
2089   for (i = 0; i < SA.map->num_partitions; i++)
2090     {
2091       if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2092 	continue;
2093 
2094       tree var = partition_to_var (SA.map, i);
2095 
2096       gcc_assert (!virtual_operand_p (var));
2097 
2098       expand_one_ssa_partition (var);
2099     }
2100 
2101   if (flag_stack_protect == SPCT_FLAG_STRONG)
2102       gen_stack_protect_signal
2103 	= stack_protect_decl_p () || stack_protect_return_slot_p ();
2104 
2105   /* At this point all variables on the local_decls with TREE_USED
2106      set are not associated with any block scope.  Lay them out.  */
2107 
2108   len = vec_safe_length (cfun->local_decls);
2109   FOR_EACH_LOCAL_DECL (cfun, i, var)
2110     {
2111       bool expand_now = false;
2112 
2113       /* Expanded above already.  */
2114       if (is_gimple_reg (var))
2115 	{
2116 	  TREE_USED (var) = 0;
2117 	  goto next;
2118 	}
2119       /* We didn't set a block for static or extern because it's hard
2120 	 to tell the difference between a global variable (re)declared
2121 	 in a local scope, and one that's really declared there to
2122 	 begin with.  And it doesn't really matter much, since we're
2123 	 not giving them stack space.  Expand them now.  */
2124       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2125 	expand_now = true;
2126 
2127       /* Expand variables not associated with any block now.  Those created by
2128 	 the optimizers could be live anywhere in the function.  Those that
2129 	 could possibly have been scoped originally and detached from their
2130 	 block will have their allocation deferred so we coalesce them with
2131 	 others when optimization is enabled.  */
2132       else if (TREE_USED (var))
2133 	expand_now = true;
2134 
2135       /* Finally, mark all variables on the list as used.  We'll use
2136 	 this in a moment when we expand those associated with scopes.  */
2137       TREE_USED (var) = 1;
2138 
2139       if (expand_now)
2140 	expand_one_var (var, true, true);
2141 
2142     next:
2143       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2144 	{
2145 	  rtx rtl = DECL_RTL_IF_SET (var);
2146 
2147 	  /* Keep artificial non-ignored vars in cfun->local_decls
2148 	     chain until instantiate_decls.  */
2149 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2150 	    add_local_decl (cfun, var);
2151 	  else if (rtl == NULL_RTX)
2152 	    /* If rtl isn't set yet, which can happen e.g. with
2153 	       -fstack-protector, retry before returning from this
2154 	       function.  */
2155 	    maybe_local_decls.safe_push (var);
2156 	}
2157     }
2158 
2159   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2160 
2161      +-----------------+-----------------+
2162      | ...processed... | ...duplicates...|
2163      +-----------------+-----------------+
2164                        ^
2165 		       +-- LEN points here.
2166 
2167      We just want the duplicates, as those are the artificial
2168      non-ignored vars that we want to keep until instantiate_decls.
2169      Move them down and truncate the array.  */
2170   if (!vec_safe_is_empty (cfun->local_decls))
2171     cfun->local_decls->block_remove (0, len);
2172 
2173   /* At this point, all variables within the block tree with TREE_USED
2174      set are actually used by the optimized function.  Lay them out.  */
2175   expand_used_vars_for_block (outer_block, true);
2176 
2177   if (stack_vars_num > 0)
2178     {
2179       add_scope_conflicts ();
2180 
2181       /* If stack protection is enabled, we don't share space between
2182 	 vulnerable data and non-vulnerable data.  */
2183       if (flag_stack_protect != 0
2184 	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2185 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2186 		  && lookup_attribute ("stack_protect",
2187 				       DECL_ATTRIBUTES (current_function_decl)))))
2188 	add_stack_protection_conflicts ();
2189 
2190       /* Now that we have collected all stack variables, and have computed a
2191 	 minimal interference graph, attempt to save some stack space.  */
2192       partition_stack_vars ();
2193       if (dump_file)
2194 	dump_stack_var_partition ();
2195     }
2196 
2197   switch (flag_stack_protect)
2198     {
2199     case SPCT_FLAG_ALL:
2200       create_stack_guard ();
2201       break;
2202 
2203     case SPCT_FLAG_STRONG:
2204       if (gen_stack_protect_signal
2205 	  || cfun->calls_alloca || has_protected_decls
2206 	  || lookup_attribute ("stack_protect",
2207 			       DECL_ATTRIBUTES (current_function_decl)))
2208 	create_stack_guard ();
2209       break;
2210 
2211     case SPCT_FLAG_DEFAULT:
2212       if (cfun->calls_alloca || has_protected_decls
2213 	  || lookup_attribute ("stack_protect",
2214 			       DECL_ATTRIBUTES (current_function_decl)))
2215 	create_stack_guard ();
2216       break;
2217 
2218     case SPCT_FLAG_EXPLICIT:
2219       if (lookup_attribute ("stack_protect",
2220 			    DECL_ATTRIBUTES (current_function_decl)))
2221 	create_stack_guard ();
2222       break;
2223     default:
2224       ;
2225     }
2226 
2227   /* Assign rtl to each variable based on these partitions.  */
2228   if (stack_vars_num > 0)
2229     {
2230       struct stack_vars_data data;
2231 
2232       data.asan_base = NULL_RTX;
2233       data.asan_alignb = 0;
2234 
2235       /* Reorder decls to be protected by iterating over the variables
2236 	 array multiple times, and allocating out of each phase in turn.  */
2237       /* ??? We could probably integrate this into the qsort we did
2238 	 earlier, such that we naturally see these variables first,
2239 	 and thus naturally allocate things in the right order.  */
2240       if (has_protected_decls)
2241 	{
2242 	  /* Phase 1 contains only character arrays.  */
2243 	  expand_stack_vars (stack_protect_decl_phase_1, &data);
2244 
2245 	  /* Phase 2 contains other kinds of arrays.  */
2246 	  if (flag_stack_protect == SPCT_FLAG_ALL
2247 	      || flag_stack_protect == SPCT_FLAG_STRONG
2248 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2249 		  && lookup_attribute ("stack_protect",
2250 				       DECL_ATTRIBUTES (current_function_decl))))
2251 	    expand_stack_vars (stack_protect_decl_phase_2, &data);
2252 	}
2253 
2254       if (asan_sanitize_stack_p ())
2255 	/* Phase 3, any partitions that need asan protection
2256 	   in addition to phase 1 and 2.  */
2257 	expand_stack_vars (asan_decl_phase_3, &data);
2258 
2259       /* ASAN description strings don't yet have a syntax for expressing
2260 	 polynomial offsets.  */
2261       HOST_WIDE_INT prev_offset;
2262       if (!data.asan_vec.is_empty ()
2263 	  && frame_offset.is_constant (&prev_offset))
2264 	{
2265 	  HOST_WIDE_INT offset, sz, redzonesz;
2266 	  redzonesz = ASAN_RED_ZONE_SIZE;
2267 	  sz = data.asan_vec[0] - prev_offset;
2268 	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2269 	      && data.asan_alignb <= 4096
2270 	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2271 	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2272 			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2273 	  /* Allocating a constant amount of space from a constant
2274 	     starting offset must give a constant result.  */
2275 	  offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2276 		    .to_constant ());
2277 	  data.asan_vec.safe_push (prev_offset);
2278 	  data.asan_vec.safe_push (offset);
2279 	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
2280 	  if (STRICT_ALIGNMENT)
2281 	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2282 				      << ASAN_SHADOW_SHIFT)
2283 				     / BITS_PER_UNIT, 1);
2284 
2285 	  var_end_seq
2286 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
2287 					  data.asan_base,
2288 					  data.asan_alignb,
2289 					  data.asan_vec.address (),
2290 					  data.asan_decl_vec.address (),
2291 					  data.asan_vec.length ());
2292 	}
2293 
2294       expand_stack_vars (NULL, &data);
2295     }
2296 
2297   if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2298     var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2299 					      virtual_stack_vars_rtx,
2300 					      var_end_seq);
2301 
2302   fini_vars_expansion ();
2303 
2304   /* If there were any artificial non-ignored vars without rtl
2305      found earlier, see if deferred stack allocation hasn't assigned
2306      rtl to them.  */
2307   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2308     {
2309       rtx rtl = DECL_RTL_IF_SET (var);
2310 
2311       /* Keep artificial non-ignored vars in cfun->local_decls
2312 	 chain until instantiate_decls.  */
2313       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2314 	add_local_decl (cfun, var);
2315     }
2316 
2317   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
2318   if (STACK_ALIGNMENT_NEEDED)
2319     {
2320       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2321       if (FRAME_GROWS_DOWNWARD)
2322 	frame_offset = aligned_lower_bound (frame_offset, align);
2323       else
2324 	frame_offset = aligned_upper_bound (frame_offset, align);
2325     }
2326 
2327   return var_end_seq;
2328 }
2329 
2330 
2331 /* If we need to produce a detailed dump, print the tree representation
2332    for STMT to the dump file.  SINCE is the last RTX after which the RTL
2333    generated for STMT should have been appended.  */
2334 
2335 static void
2336 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2337 {
2338   if (dump_file && (dump_flags & TDF_DETAILS))
2339     {
2340       fprintf (dump_file, "\n;; ");
2341       print_gimple_stmt (dump_file, stmt, 0,
2342 			 TDF_SLIM | (dump_flags & TDF_LINENO));
2343       fprintf (dump_file, "\n");
2344 
2345       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2346     }
2347 }
2348 
2349 /* Maps the blocks that do not contain tree labels to rtx labels.  */
2350 
2351 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2352 
2353 /* Returns the label_rtx expression for a label starting basic block BB.  */
2354 
2355 static rtx_code_label *
2356 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2357 {
2358   gimple_stmt_iterator gsi;
2359   tree lab;
2360 
2361   if (bb->flags & BB_RTL)
2362     return block_label (bb);
2363 
2364   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2365   if (elt)
2366     return *elt;
2367 
2368   /* Find the tree label if it is present.  */
2369 
2370   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2371     {
2372       glabel *lab_stmt;
2373 
2374       lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2375       if (!lab_stmt)
2376 	break;
2377 
2378       lab = gimple_label_label (lab_stmt);
2379       if (DECL_NONLOCAL (lab))
2380 	break;
2381 
2382       return jump_target_rtx (lab);
2383     }
2384 
2385   rtx_code_label *l = gen_label_rtx ();
2386   lab_rtx_for_bb->put (bb, l);
2387   return l;
2388 }
2389 
2390 
2391 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
2392    of a basic block where we just expanded the conditional at the end,
2393    possibly clean up the CFG and instruction sequence.  LAST is the
2394    last instruction before the just emitted jump sequence.  */
2395 
2396 static void
2397 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2398 {
2399   /* Special case: when jumpif decides that the condition is
2400      trivial it emits an unconditional jump (and the necessary
2401      barrier).  But we still have two edges, the fallthru one is
2402      wrong.  purge_dead_edges would clean this up later.  Unfortunately
2403      we have to insert insns (and split edges) before
2404      find_many_sub_basic_blocks and hence before purge_dead_edges.
2405      But splitting edges might create new blocks which depend on the
2406      fact that if there are two edges there's no barrier.  So the
2407      barrier would get lost and verify_flow_info would ICE.  Instead
2408      of auditing all edge splitters to care for the barrier (which
2409      normally isn't there in a cleaned CFG), fix it here.  */
2410   if (BARRIER_P (get_last_insn ()))
2411     {
2412       rtx_insn *insn;
2413       remove_edge (e);
2414       /* Now, we have a single successor block, if we have insns to
2415 	 insert on the remaining edge we potentially will insert
2416 	 it at the end of this block (if the dest block isn't feasible)
2417 	 in order to avoid splitting the edge.  This insertion will take
2418 	 place in front of the last jump.  But we might have emitted
2419 	 multiple jumps (conditional and one unconditional) to the
2420 	 same destination.  Inserting in front of the last one then
2421 	 is a problem.  See PR 40021.  We fix this by deleting all
2422 	 jumps except the last unconditional one.  */
2423       insn = PREV_INSN (get_last_insn ());
2424       /* Make sure we have an unconditional jump.  Otherwise we're
2425 	 confused.  */
2426       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2427       for (insn = PREV_INSN (insn); insn != last;)
2428 	{
2429 	  insn = PREV_INSN (insn);
2430 	  if (JUMP_P (NEXT_INSN (insn)))
2431 	    {
2432 	      if (!any_condjump_p (NEXT_INSN (insn)))
2433 		{
2434 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2435 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2436 		}
2437 	      delete_insn (NEXT_INSN (insn));
2438 	    }
2439 	}
2440     }
2441 }
2442 
2443 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
2444    Returns a new basic block if we've terminated the current basic
2445    block and created a new one.  */
2446 
2447 static basic_block
2448 expand_gimple_cond (basic_block bb, gcond *stmt)
2449 {
2450   basic_block new_bb, dest;
2451   edge true_edge;
2452   edge false_edge;
2453   rtx_insn *last2, *last;
2454   enum tree_code code;
2455   tree op0, op1;
2456 
2457   code = gimple_cond_code (stmt);
2458   op0 = gimple_cond_lhs (stmt);
2459   op1 = gimple_cond_rhs (stmt);
2460   /* We're sometimes presented with such code:
2461        D.123_1 = x < y;
2462        if (D.123_1 != 0)
2463          ...
2464      This would expand to two comparisons which then later might
2465      be cleaned up by combine.  But some pattern matchers like if-conversion
2466      work better when there's only one compare, so make up for this
2467      here as a special exception if TER would have made the same change.  */
2468   if (SA.values
2469       && TREE_CODE (op0) == SSA_NAME
2470       && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2471       && TREE_CODE (op1) == INTEGER_CST
2472       && ((gimple_cond_code (stmt) == NE_EXPR
2473 	   && integer_zerop (op1))
2474 	  || (gimple_cond_code (stmt) == EQ_EXPR
2475 	      && integer_onep (op1)))
2476       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2477     {
2478       gimple *second = SSA_NAME_DEF_STMT (op0);
2479       if (gimple_code (second) == GIMPLE_ASSIGN)
2480 	{
2481 	  enum tree_code code2 = gimple_assign_rhs_code (second);
2482 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
2483 	    {
2484 	      code = code2;
2485 	      op0 = gimple_assign_rhs1 (second);
2486 	      op1 = gimple_assign_rhs2 (second);
2487 	    }
2488 	  /* If jumps are cheap and the target does not support conditional
2489 	     compare, turn some more codes into jumpy sequences.  */
2490 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2491 		   && targetm.gen_ccmp_first == NULL)
2492 	    {
2493 	      if ((code2 == BIT_AND_EXPR
2494 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2495 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2496 		  || code2 == TRUTH_AND_EXPR)
2497 		{
2498 		  code = TRUTH_ANDIF_EXPR;
2499 		  op0 = gimple_assign_rhs1 (second);
2500 		  op1 = gimple_assign_rhs2 (second);
2501 		}
2502 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2503 		{
2504 		  code = TRUTH_ORIF_EXPR;
2505 		  op0 = gimple_assign_rhs1 (second);
2506 		  op1 = gimple_assign_rhs2 (second);
2507 		}
2508 	    }
2509 	}
2510     }
2511 
2512   /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
2513      into (x - C2) * C3 < C4.  */
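  /* A rough sketch of the transform (the actual constants are chosen by
     maybe_optimize_mod_cmp): for a 32-bit unsigned x, "x % 3 == 0" can
     become "x * 0xaaaaaaabU <= 0x55555555U", i.e. a multiplication by the
     modular inverse of 3 followed by a single comparison.  */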
2514   if ((code == EQ_EXPR || code == NE_EXPR)
2515       && TREE_CODE (op0) == SSA_NAME
2516       && TREE_CODE (op1) == INTEGER_CST)
2517     code = maybe_optimize_mod_cmp (code, &op0, &op1);
2518 
2519   last2 = last = get_last_insn ();
2520 
2521   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2522   set_curr_insn_location (gimple_location (stmt));
2523 
2524   /* These flags have no purpose in RTL land.  */
2525   true_edge->flags &= ~EDGE_TRUE_VALUE;
2526   false_edge->flags &= ~EDGE_FALSE_VALUE;
2527 
2528   /* We can either have a pure conditional jump with one fallthru edge or
2529      two-way jump that needs to be decomposed into two basic blocks.  */
2530   if (false_edge->dest == bb->next_bb)
2531     {
2532       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2533 		true_edge->probability);
2534       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2535       if (true_edge->goto_locus != UNKNOWN_LOCATION)
2536 	set_curr_insn_location (true_edge->goto_locus);
2537       false_edge->flags |= EDGE_FALLTHRU;
2538       maybe_cleanup_end_of_block (false_edge, last);
2539       return NULL;
2540     }
2541   if (true_edge->dest == bb->next_bb)
2542     {
2543       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2544 		   false_edge->probability);
2545       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2546       if (false_edge->goto_locus != UNKNOWN_LOCATION)
2547 	set_curr_insn_location (false_edge->goto_locus);
2548       true_edge->flags |= EDGE_FALLTHRU;
2549       maybe_cleanup_end_of_block (true_edge, last);
2550       return NULL;
2551     }
2552 
2553   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2554 	    true_edge->probability);
2555   last = get_last_insn ();
2556   if (false_edge->goto_locus != UNKNOWN_LOCATION)
2557     set_curr_insn_location (false_edge->goto_locus);
2558   emit_jump (label_rtx_for_bb (false_edge->dest));
2559 
2560   BB_END (bb) = last;
2561   if (BARRIER_P (BB_END (bb)))
2562     BB_END (bb) = PREV_INSN (BB_END (bb));
2563   update_bb_for_insn (bb);
2564 
2565   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2566   dest = false_edge->dest;
2567   redirect_edge_succ (false_edge, new_bb);
2568   false_edge->flags |= EDGE_FALLTHRU;
2569   new_bb->count = false_edge->count ();
2570   loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2571   add_bb_to_loop (new_bb, loop);
2572   if (loop->latch == bb
2573       && loop->header == dest)
2574     loop->latch = new_bb;
2575   make_single_succ_edge (new_bb, dest, 0);
2576   if (BARRIER_P (BB_END (new_bb)))
2577     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2578   update_bb_for_insn (new_bb);
2579 
2580   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2581 
2582   if (true_edge->goto_locus != UNKNOWN_LOCATION)
2583     {
2584       set_curr_insn_location (true_edge->goto_locus);
2585       true_edge->goto_locus = curr_insn_location ();
2586     }
2587 
2588   return new_bb;
2589 }
2590 
2591 /* Mark all calls that can have a transaction restart.  */
2592 
2593 static void
2594 mark_transaction_restart_calls (gimple *stmt)
2595 {
2596   struct tm_restart_node dummy;
2597   tm_restart_node **slot;
2598 
2599   if (!cfun->gimple_df->tm_restart)
2600     return;
2601 
2602   dummy.stmt = stmt;
2603   slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2604   if (slot)
2605     {
2606       struct tm_restart_node *n = *slot;
2607       tree list = n->label_or_list;
2608       rtx_insn *insn;
2609 
2610       for (insn = next_real_insn (get_last_insn ());
2611 	   !CALL_P (insn);
2612 	   insn = next_real_insn (insn))
2613 	continue;
2614 
2615       if (TREE_CODE (list) == LABEL_DECL)
2616 	add_reg_note (insn, REG_TM, label_rtx (list));
2617       else
2618 	for (; list ; list = TREE_CHAIN (list))
2619 	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2620     }
2621 }
2622 
2623 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2624    statement STMT.  */
2625 
2626 static void
2627 expand_call_stmt (gcall *stmt)
2628 {
2629   tree exp, decl, lhs;
2630   bool builtin_p;
2631   size_t i;
2632 
2633   if (gimple_call_internal_p (stmt))
2634     {
2635       expand_internal_call (stmt);
2636       return;
2637     }
2638 
2639   /* If this is a call to a built-in function and it has no effect other
2640      than setting the lhs, try to implement it using an internal function
2641      instead.  */
2642   decl = gimple_call_fndecl (stmt);
2643   if (gimple_call_lhs (stmt)
2644       && !gimple_has_side_effects (stmt)
2645       && (optimize || (decl && called_as_built_in (decl))))
2646     {
2647       internal_fn ifn = replacement_internal_fn (stmt);
2648       if (ifn != IFN_LAST)
2649 	{
2650 	  expand_internal_call (ifn, stmt);
2651 	  return;
2652 	}
2653     }
2654 
2655   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2656 
2657   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2658   builtin_p = decl && fndecl_built_in_p (decl);
2659 
2660   /* If this is not a builtin function, the function type through which the
2661      call is made may be different from the type of the function.  */
2662   if (!builtin_p)
2663     CALL_EXPR_FN (exp)
2664       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2665 		      CALL_EXPR_FN (exp));
2666 
2667   TREE_TYPE (exp) = gimple_call_return_type (stmt);
2668   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2669 
2670   for (i = 0; i < gimple_call_num_args (stmt); i++)
2671     {
2672       tree arg = gimple_call_arg (stmt, i);
2673       gimple *def;
2674       /* TER addresses into arguments of builtin functions so we have a
2675 	 chance to infer more correct alignment information.  See PR39954.  */
2676       if (builtin_p
2677 	  && TREE_CODE (arg) == SSA_NAME
2678 	  && (def = get_gimple_for_ssa_name (arg))
2679 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2680 	arg = gimple_assign_rhs1 (def);
2681       CALL_EXPR_ARG (exp, i) = arg;
2682     }
2683 
2684   if (gimple_has_side_effects (stmt))
2685     TREE_SIDE_EFFECTS (exp) = 1;
2686 
2687   if (gimple_call_nothrow_p (stmt))
2688     TREE_NOTHROW (exp) = 1;
2689 
2690   if (gimple_no_warning_p (stmt))
2691     TREE_NO_WARNING (exp) = 1;
2692 
2693   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2694   CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2695   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2696   if (decl
2697       && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
2698       && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2699     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2700   else
2701     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2702   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2703   CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2704   SET_EXPR_LOCATION (exp, gimple_location (stmt));
2705 
2706   /* Ensure RTL is created for debug args.  */
2707   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2708     {
2709       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2710       unsigned int ix;
2711       tree dtemp;
2712 
2713       if (debug_args)
2714 	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2715 	  {
2716 	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2717 	    expand_debug_expr (dtemp);
2718 	  }
2719     }
2720 
2721   rtx_insn *before_call = get_last_insn ();
2722   lhs = gimple_call_lhs (stmt);
2723   if (lhs)
2724     expand_assignment (lhs, exp, false);
2725   else
2726     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2727 
2728   /* If the gimple call is an indirect call and has 'nocf_check'
2729      attribute find a generated CALL insn to mark it as no
2730      control-flow verification is needed.  */
2731   if (gimple_call_nocf_check_p (stmt)
2732       && !gimple_call_fndecl (stmt))
2733     {
2734       rtx_insn *last = get_last_insn ();
2735       while (!CALL_P (last)
2736 	     && last != before_call)
2737 	last = PREV_INSN (last);
2738 
2739       if (last != before_call)
2740 	add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2741     }
2742 
2743   mark_transaction_restart_calls (stmt);
2744 }
2745 
2746 
2747 /* Generate RTL for an asm statement (explicit assembler code).
2748    STRING is a STRING_CST node containing the assembler code text,
2749    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
2750    insn is volatile; don't optimize it.  */
2751 
2752 static void
2753 expand_asm_loc (tree string, int vol, location_t locus)
2754 {
2755   rtx body;
2756 
2757   body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2758 				ggc_strdup (TREE_STRING_POINTER (string)),
2759 				locus);
2760 
2761   MEM_VOLATILE_P (body) = vol;
2762 
2763   /* Non-empty basic ASM implicitly clobbers memory.  */
2764   if (TREE_STRING_LENGTH (string) != 0)
2765     {
2766       rtx asm_op, clob;
2767       unsigned i, nclobbers;
2768       auto_vec<rtx> input_rvec, output_rvec;
2769       auto_vec<const char *> constraints;
2770       auto_vec<rtx> clobber_rvec;
2771       HARD_REG_SET clobbered_regs;
2772       CLEAR_HARD_REG_SET (clobbered_regs);
2773 
2774       clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2775       clobber_rvec.safe_push (clob);
2776 
2777       if (targetm.md_asm_adjust)
2778 	targetm.md_asm_adjust (output_rvec, input_rvec,
2779 			       constraints, clobber_rvec,
2780 			       clobbered_regs);
2781 
2782       asm_op = body;
2783       nclobbers = clobber_rvec.length ();
2784       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2785 
2786       XVECEXP (body, 0, 0) = asm_op;
2787       for (i = 0; i < nclobbers; i++)
2788 	XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2789     }
2790 
2791   emit_insn (body);
2792 }
2793 
2794 /* Return the number of times character C occurs in string S.  */
2795 static int
2796 n_occurrences (int c, const char *s)
2797 {
2798   int n = 0;
2799   while (*s)
2800     n += (*s++ == c);
2801   return n;
2802 }
2803 
2804 /* A subroutine of expand_asm_operands.  Check that all operands have
2805    the same number of alternatives.  Return true if so.  */
2806 
2807 static bool
2808 check_operand_nalternatives (const vec<const char *> &constraints)
2809 {
2810   unsigned len = constraints.length();
2811   if (len > 0)
2812     {
2813       int nalternatives = n_occurrences (',', constraints[0]);
2814 
2815       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2816 	{
2817 	  error ("too many alternatives in %<asm%>");
2818 	  return false;
2819 	}
2820 
2821       for (unsigned i = 1; i < len; ++i)
2822 	if (n_occurrences (',', constraints[i]) != nalternatives)
2823 	  {
2824 	    error ("operand constraints for %<asm%> differ "
2825 		   "in number of alternatives");
2826 	    return false;
2827 	  }
2828     }
2829   return true;
2830 }
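
/* For example, the constraint strings "r,m" and "g,g" each describe two
   alternatives and may be mixed, whereas combining "r,m" with "r" is
   diagnosed above.  */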
2831 
2832 /* Check for overlap between registers marked in CLOBBERED_REGS and
2833    anything inappropriate in T.  Emit an error and return true if a
2834    conflict is found, false if everything is ok.  */
2835 
2836 static bool
2837 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2838 {
2839   /* Conflicts between asm-declared register variables and the clobber
2840      list are not allowed.  */
2841   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2842 
2843   if (overlap)
2844     {
2845       error ("asm-specifier for variable %qE conflicts with asm clobber list",
2846 	     DECL_NAME (overlap));
2847 
2848       /* Reset registerness to stop multiple errors emitted for a single
2849 	 variable.  */
2850       DECL_REGISTER (overlap) = 0;
2851       return true;
2852     }
2853 
2854   return false;
2855 }
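
/* For example, on a target with a register named "r10", using a variable
   declared "register int x asm ("r10")" as an asm operand while also
   listing "r10" in the asm's clobbers is diagnosed here.  */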
2856 
2857 /* Check that the given REGNO spanning NREGS is a valid
2858    asm clobber operand.  Some HW registers cannot be
2859    saved/restored, hence they should not be clobbered by
2860    asm statements.  */
2861 static bool
2862 asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
2863 {
2864   bool is_valid = true;
2865   HARD_REG_SET regset;
2866 
2867   CLEAR_HARD_REG_SET (regset);
2868 
2869   add_range_to_hard_reg_set (&regset, regno, nregs);
2870 
2871   /* Clobbering the PIC register is an error.  */
2872   if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
2873       && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
2874     {
2875       /* ??? Diagnose during gimplification?  */
2876       error ("PIC register clobbered by %qs in %<asm%>", regname);
2877       is_valid = false;
2878     }
2879   /* Clobbering the stack pointer register is deprecated.  GCC expects
2880      the value of the stack pointer after an asm statement to be the same
2881      as it was before, so no asm can validly clobber the stack pointer in
2882      the usual sense.  Adding the stack pointer to the clobber list has
2883      traditionally had some undocumented and somewhat obscure side-effects.  */
2884   if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM)
2885       && warning (OPT_Wdeprecated, "listing the stack pointer register"
2886 		  " %qs in a clobber list is deprecated", regname))
2887     inform (input_location, "the value of the stack pointer after an %<asm%>"
2888 	    " statement must be the same as it was before the statement");
2889 
2890   return is_valid;
2891 }
2892 
2893 /* Generate RTL for an asm statement with arguments.
2894    STRING is the instruction template.
2895    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2896    Each output or input has an expression in the TREE_VALUE and
2897    a tree list in TREE_PURPOSE which in turn contains a constraint
2898    name in TREE_VALUE (or NULL_TREE) and a constraint string
2899    in TREE_PURPOSE.
2900    CLOBBERS is a list of STRING_CST nodes each naming a hard register
2901    that is clobbered by this insn.
2902 
2903    LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2904    should be the fallthru basic block of the asm goto.
2905 
2906    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2907    Some elements of OUTPUTS may be replaced with trees representing temporary
2908    values.  The caller should copy those temporary values to the originally
2909    specified lvalues.
2910 
2911    VOL nonzero means the insn is volatile; don't optimize it.  */
2912 
2913 static void
2914 expand_asm_stmt (gasm *stmt)
2915 {
2916   class save_input_location
2917   {
2918     location_t old;
2919 
2920   public:
2921     explicit save_input_location(location_t where)
2922     {
2923       old = input_location;
2924       input_location = where;
2925     }
2926 
2927     ~save_input_location()
2928     {
2929       input_location = old;
2930     }
2931   };
2932 
2933   location_t locus = gimple_location (stmt);
2934 
2935   if (gimple_asm_input_p (stmt))
2936     {
2937       const char *s = gimple_asm_string (stmt);
2938       tree string = build_string (strlen (s), s);
2939       expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2940       return;
2941     }
2942 
2943   /* There are some legacy diagnostics in here, and this also avoids a
2944      sixth parameter to targetm.md_asm_adjust.  */
2945   save_input_location s_i_l(locus);
2946 
2947   unsigned noutputs = gimple_asm_noutputs (stmt);
2948   unsigned ninputs = gimple_asm_ninputs (stmt);
2949   unsigned nlabels = gimple_asm_nlabels (stmt);
2950   unsigned i;
2951 
2952   /* ??? Diagnose during gimplification?  */
2953   if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2954     {
2955       error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2956       return;
2957     }
2958 
2959   auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2960   auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2961   auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2962 
2963   /* Copy the gimple vectors into new vectors that we can manipulate.  */
2964 
2965   output_tvec.safe_grow (noutputs);
2966   input_tvec.safe_grow (ninputs);
2967   constraints.safe_grow (noutputs + ninputs);
2968 
2969   for (i = 0; i < noutputs; ++i)
2970     {
2971       tree t = gimple_asm_output_op (stmt, i);
2972       output_tvec[i] = TREE_VALUE (t);
2973       constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2974     }
2975   for (i = 0; i < ninputs; i++)
2976     {
2977       tree t = gimple_asm_input_op (stmt, i);
2978       input_tvec[i] = TREE_VALUE (t);
2979       constraints[i + noutputs]
2980 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2981     }
2982 
2983   /* ??? Diagnose during gimplification?  */
2984   if (! check_operand_nalternatives (constraints))
2985     return;
2986 
2987   /* Count the number of meaningful clobbered registers, ignoring what
2988      we would ignore later.  */
2989   auto_vec<rtx> clobber_rvec;
2990   HARD_REG_SET clobbered_regs;
2991   CLEAR_HARD_REG_SET (clobbered_regs);
2992 
2993   if (unsigned n = gimple_asm_nclobbers (stmt))
2994     {
2995       clobber_rvec.reserve (n);
2996       for (i = 0; i < n; i++)
2997 	{
2998 	  tree t = gimple_asm_clobber_op (stmt, i);
2999           const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
3000 	  int nregs, j;
3001 
3002 	  j = decode_reg_name_and_count (regname, &nregs);
3003 	  if (j < 0)
3004 	    {
3005 	      if (j == -2)
3006 		{
3007 		  /* ??? Diagnose during gimplification?  */
3008 		  error ("unknown register name %qs in %<asm%>", regname);
3009 		}
3010 	      else if (j == -4)
3011 		{
3012 		  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
3013 		  clobber_rvec.safe_push (x);
3014 		}
3015 	      else
3016 		{
3017 		  /* Otherwise we should have -1 == empty string
3018 		     or -3 == cc, which is not a register.  */
3019 		  gcc_assert (j == -1 || j == -3);
3020 		}
3021 	    }
3022 	  else
3023 	    for (int reg = j; reg < j + nregs; reg++)
3024 	      {
3025 		if (!asm_clobber_reg_is_valid (reg, nregs, regname))
3026 		  return;
3027 
3028 	        SET_HARD_REG_BIT (clobbered_regs, reg);
3029 	        rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
3030 		clobber_rvec.safe_push (x);
3031 	      }
3032 	}
3033     }
3034   unsigned nclobbers = clobber_rvec.length();
3035 
3036   /* First pass over inputs and outputs checks validity and sets
3037      mark_addressable if needed.  */
3038   /* ??? Diagnose during gimplification?  */
3039 
3040   for (i = 0; i < noutputs; ++i)
3041     {
3042       tree val = output_tvec[i];
3043       tree type = TREE_TYPE (val);
3044       const char *constraint;
3045       bool is_inout;
3046       bool allows_reg;
3047       bool allows_mem;
3048 
3049       /* Try to parse the output constraint.  If that fails, there's
3050 	 no point in going further.  */
3051       constraint = constraints[i];
3052       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3053 				    &allows_mem, &allows_reg, &is_inout))
3054 	return;
3055 
3056       /* If the output is a hard register, verify it doesn't conflict with
3057 	 any other operand's possible hard register use.  */
3058       if (DECL_P (val)
3059 	  && REG_P (DECL_RTL (val))
3060 	  && HARD_REGISTER_P (DECL_RTL (val)))
3061 	{
3062 	  unsigned j, output_hregno = REGNO (DECL_RTL (val));
3063 	  bool early_clobber_p = strchr (constraints[i], '&') != NULL;
3064 	  unsigned long match;
3065 
3066 	  /* Verify the other outputs do not use the same hard register.  */
3067 	  for (j = i + 1; j < noutputs; ++j)
3068 	    if (DECL_P (output_tvec[j])
3069 		&& REG_P (DECL_RTL (output_tvec[j]))
3070 		&& HARD_REGISTER_P (DECL_RTL (output_tvec[j]))
3071 		&& output_hregno == REGNO (DECL_RTL (output_tvec[j])))
3072 	      error ("invalid hard register usage between output operands");
3073 
3074 	  /* Verify matching constraint operands use the same hard register
3075 	     and that the non-matching constraint operands do not use the same
3076 	     hard register if the output is an early clobber operand.  */
3077 	  for (j = 0; j < ninputs; ++j)
3078 	    if (DECL_P (input_tvec[j])
3079 		&& REG_P (DECL_RTL (input_tvec[j]))
3080 		&& HARD_REGISTER_P (DECL_RTL (input_tvec[j])))
3081 	      {
3082 		unsigned input_hregno = REGNO (DECL_RTL (input_tvec[j]));
3083 		switch (*constraints[j + noutputs])
3084 		  {
3085 		  case '0':  case '1':  case '2':  case '3':  case '4':
3086 		  case '5':  case '6':  case '7':  case '8':  case '9':
3087 		    match = strtoul (constraints[j + noutputs], NULL, 10);
3088 		    break;
3089 		  default:
3090 		    match = ULONG_MAX;
3091 		    break;
3092 		  }
3093 		if (i == match
3094 		    && output_hregno != input_hregno)
3095 		  error ("invalid hard register usage between output operand "
3096 			 "and matching constraint operand");
3097 		else if (early_clobber_p
3098 			 && i != match
3099 			 && output_hregno == input_hregno)
3100 		  error ("invalid hard register usage between earlyclobber "
3101 			 "operand and input operand");
3102 	      }
3103 	}
3104 
3105       if (! allows_reg
3106 	  && (allows_mem
3107 	      || is_inout
3108 	      || (DECL_P (val)
3109 		  && REG_P (DECL_RTL (val))
3110 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3111 	mark_addressable (val);
3112     }
3113 
3114   for (i = 0; i < ninputs; ++i)
3115     {
3116       bool allows_reg, allows_mem;
3117       const char *constraint;
3118 
3119       constraint = constraints[i + noutputs];
3120       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3121 				    constraints.address (),
3122 				    &allows_mem, &allows_reg))
3123 	return;
3124 
3125       if (! allows_reg && allows_mem)
3126 	mark_addressable (input_tvec[i]);
3127     }
3128 
3129   /* Second pass evaluates arguments.  */
3130 
3131   /* Make sure stack is consistent for asm goto.  */
3132   if (nlabels > 0)
3133     do_pending_stack_adjust ();
3134   int old_generating_concat_p = generating_concat_p;
3135 
3136   /* Vector of RTX's of evaluated output operands.  */
3137   auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3138   auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3139   rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3140 
3141   output_rvec.safe_grow (noutputs);
3142 
3143   for (i = 0; i < noutputs; ++i)
3144     {
3145       tree val = output_tvec[i];
3146       tree type = TREE_TYPE (val);
3147       bool is_inout, allows_reg, allows_mem, ok;
3148       rtx op;
3149 
3150       ok = parse_output_constraint (&constraints[i], i, ninputs,
3151 				    noutputs, &allows_mem, &allows_reg,
3152 				    &is_inout);
3153       gcc_assert (ok);
3154 
3155       /* If an output operand is not a decl or indirect ref and our constraint
3156 	 allows a register, make a temporary to act as an intermediate.
3157 	 Make the asm insn write into that, then we will copy it to
3158 	 the real output operand.  Likewise for promoted variables.  */
3159 
3160       generating_concat_p = 0;
3161 
3162       if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3163 	  || (DECL_P (val)
3164 	      && (allows_mem || REG_P (DECL_RTL (val)))
3165 	      && ! (REG_P (DECL_RTL (val))
3166 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3167 	  || ! allows_reg
3168 	  || is_inout
3169 	  || TREE_ADDRESSABLE (type))
3170 	{
3171 	  op = expand_expr (val, NULL_RTX, VOIDmode,
3172 			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3173 	  if (MEM_P (op))
3174 	    op = validize_mem (op);
3175 
3176 	  if (! allows_reg && !MEM_P (op))
3177 	    error ("output number %d not directly addressable", i);
3178 	  if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3179 	      || GET_CODE (op) == CONCAT)
3180 	    {
3181 	      rtx old_op = op;
3182 	      op = gen_reg_rtx (GET_MODE (op));
3183 
3184 	      generating_concat_p = old_generating_concat_p;
3185 
3186 	      if (is_inout)
3187 		emit_move_insn (op, old_op);
3188 
3189 	      push_to_sequence2 (after_rtl_seq, after_rtl_end);
3190 	      emit_move_insn (old_op, op);
3191 	      after_rtl_seq = get_insns ();
3192 	      after_rtl_end = get_last_insn ();
3193 	      end_sequence ();
3194 	    }
3195 	}
3196       else
3197 	{
3198 	  op = assign_temp (type, 0, 1);
3199 	  op = validize_mem (op);
3200 	  if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3201 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3202 
3203 	  generating_concat_p = old_generating_concat_p;
3204 
3205 	  push_to_sequence2 (after_rtl_seq, after_rtl_end);
3206 	  expand_assignment (val, make_tree (type, op), false);
3207 	  after_rtl_seq = get_insns ();
3208 	  after_rtl_end = get_last_insn ();
3209 	  end_sequence ();
3210 	}
3211       output_rvec[i] = op;
3212 
3213       if (is_inout)
3214 	inout_opnum.safe_push (i);
3215     }
3216 
3217   auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3218   auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3219 
3220   input_rvec.safe_grow (ninputs);
3221   input_mode.safe_grow (ninputs);
3222 
3223   generating_concat_p = 0;
3224 
3225   for (i = 0; i < ninputs; ++i)
3226     {
3227       tree val = input_tvec[i];
3228       tree type = TREE_TYPE (val);
3229       bool allows_reg, allows_mem, ok;
3230       const char *constraint;
3231       rtx op;
3232 
3233       constraint = constraints[i + noutputs];
3234       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3235 				   constraints.address (),
3236 				   &allows_mem, &allows_reg);
3237       gcc_assert (ok);
3238 
3239       /* EXPAND_INITIALIZER will not generate code for valid initializer
3240 	 constants, but will still generate code for other types of operand.
3241 	 This is the behavior we want for constant constraints.  */
3242       op = expand_expr (val, NULL_RTX, VOIDmode,
3243 			allows_reg ? EXPAND_NORMAL
3244 			: allows_mem ? EXPAND_MEMORY
3245 			: EXPAND_INITIALIZER);
3246 
3247       /* Never pass a CONCAT to an ASM.  */
3248       if (GET_CODE (op) == CONCAT)
3249 	op = force_reg (GET_MODE (op), op);
3250       else if (MEM_P (op))
3251 	op = validize_mem (op);
3252 
3253       if (asm_operand_ok (op, constraint, NULL) <= 0)
3254 	{
3255 	  if (allows_reg && TYPE_MODE (type) != BLKmode)
3256 	    op = force_reg (TYPE_MODE (type), op);
3257 	  else if (!allows_mem)
3258 	    warning (0, "asm operand %d probably doesn%'t match constraints",
3259 		     i + noutputs);
3260 	  else if (MEM_P (op))
3261 	    {
3262 	      /* We won't recognize either volatile memory or memory
3263 		 with a queued address as a memory_operand available
3264 		 at this point.  Ignore it: clearly this *is* a memory operand.  */
3265 	    }
3266 	  else
3267 	    gcc_unreachable ();
3268 	}
3269       input_rvec[i] = op;
3270       input_mode[i] = TYPE_MODE (type);
3271     }
3272 
3273   /* For in-out operands, copy output rtx to input rtx.  */
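  /* Each output written with a "+" constraint was recorded in inout_opnum
     above; it is now also appended as an extra input whose constraint is
     simply the decimal index of the matching output.  */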
3274   unsigned ninout = inout_opnum.length();
3275   for (i = 0; i < ninout; i++)
3276     {
3277       int j = inout_opnum[i];
3278       rtx o = output_rvec[j];
3279 
3280       input_rvec.safe_push (o);
3281       input_mode.safe_push (GET_MODE (o));
3282 
3283       char buffer[16];
3284       sprintf (buffer, "%d", j);
3285       constraints.safe_push (ggc_strdup (buffer));
3286     }
3287   ninputs += ninout;
3288 
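  /* Illustration only (a hedged aside, not part of the algorithm above):
     an operand declared with a '+' constraint, e.g.

	 asm ("incl %0" : "+r" (x));

     is in-out.  The loop above represents it by pushing the output rtx
     again as an extra input whose constraint is just the decimal number
     of the matching output ("0" here), so later passes see a matched
     output/input pair.  */
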
3289   /* Sometimes we wish to automatically clobber registers across an asm.
3290      A case in point is when the i386 backend moved from cc0 to a hard reg --
3291      maintaining source-level compatibility means automatically clobbering
3292      the flags register.  */
3293   rtx_insn *after_md_seq = NULL;
3294   if (targetm.md_asm_adjust)
3295     after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3296 					  constraints, clobber_rvec,
3297 					  clobbered_regs);
3298 
3299   /* Do not allow the hook to change the output and input count,
3300      lest it mess up the operand numbering.  */
3301   gcc_assert (output_rvec.length() == noutputs);
3302   gcc_assert (input_rvec.length() == ninputs);
3303   gcc_assert (constraints.length() == noutputs + ninputs);
3304 
3305   /* But it certainly can adjust the clobbers.  */
3306   nclobbers = clobber_rvec.length();
3307 
3308   /* Third pass checks for easy conflicts.  */
3309   /* ??? Why are we doing this on trees instead of rtx?  */
3310 
3311   bool clobber_conflict_found = 0;
3312   for (i = 0; i < noutputs; ++i)
3313     if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3314 	clobber_conflict_found = 1;
3315   for (i = 0; i < ninputs - ninout; ++i)
3316     if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3317 	clobber_conflict_found = 1;
3318 
3319   /* Make vectors for the expression-rtx, constraint strings,
3320      and named operands.  */
3321 
3322   rtvec argvec = rtvec_alloc (ninputs);
3323   rtvec constraintvec = rtvec_alloc (ninputs);
3324   rtvec labelvec = rtvec_alloc (nlabels);
3325 
3326   rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3327 				    : GET_MODE (output_rvec[0])),
3328 				   ggc_strdup (gimple_asm_string (stmt)),
3329 				   "", 0, argvec, constraintvec,
3330 				   labelvec, locus);
3331   MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3332 
3333   for (i = 0; i < ninputs; ++i)
3334     {
3335       ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3336       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3337 	= gen_rtx_ASM_INPUT_loc (input_mode[i],
3338 				 constraints[i + noutputs],
3339 				 locus);
3340     }
3341 
3342   /* Copy labels to the vector.  */
3343   rtx_code_label *fallthru_label = NULL;
3344   if (nlabels > 0)
3345     {
3346       basic_block fallthru_bb = NULL;
3347       edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3348       if (fallthru)
3349 	fallthru_bb = fallthru->dest;
3350 
3351       for (i = 0; i < nlabels; ++i)
3352 	{
3353 	  tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3354 	  rtx_insn *r;
3355 	  /* If asm goto has any labels in the fallthru basic block, use
3356 	     a label that we emit immediately after the asm goto.  Expansion
3357 	     may insert further instructions into the same basic block after
3358 	     asm goto and if we don't do this, insertion of instructions on
3359 	     the fallthru edge might misbehave.  See PR58670.  */
3360 	  if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
3361 	    {
3362 	      if (fallthru_label == NULL_RTX)
3363 	        fallthru_label = gen_label_rtx ();
3364 	      r = fallthru_label;
3365 	    }
3366 	  else
3367 	    r = label_rtx (label);
3368 	  ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3369 	}
3370     }
3371 
3372   /* Now, for each output, construct an rtx
3373      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3374 			       ARGVEC CONSTRAINTS OPNAMES))
3375      If there is more than one, put them inside a PARALLEL.  */
3376 
3377   if (nlabels > 0 && nclobbers == 0)
3378     {
3379       gcc_assert (noutputs == 0);
3380       emit_jump_insn (body);
3381     }
3382   else if (noutputs == 0 && nclobbers == 0)
3383     {
3384       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
3385       emit_insn (body);
3386     }
3387   else if (noutputs == 1 && nclobbers == 0)
3388     {
3389       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3390       emit_insn (gen_rtx_SET (output_rvec[0], body));
3391     }
3392   else
3393     {
3394       rtx obody = body;
3395       int num = noutputs;
3396 
3397       if (num == 0)
3398 	num = 1;
3399 
3400       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3401 
3402       /* For each output operand, store a SET.  */
3403       for (i = 0; i < noutputs; ++i)
3404 	{
3405 	  rtx src, o = output_rvec[i];
3406 	  if (i == 0)
3407 	    {
3408 	      ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3409 	      src = obody;
3410 	    }
3411 	  else
3412 	    {
3413 	      src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3414 					  ASM_OPERANDS_TEMPLATE (obody),
3415 					  constraints[i], i, argvec,
3416 					  constraintvec, labelvec, locus);
3417 	      MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3418 	    }
3419 	  XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3420 	}
3421 
3422       /* If there are no outputs (but there are some clobbers)
3423 	 store the bare ASM_OPERANDS into the PARALLEL.  */
3424       if (i == 0)
3425 	XVECEXP (body, 0, i++) = obody;
3426 
3427       /* Store (clobber REG) for each clobbered register specified.  */
3428       for (unsigned j = 0; j < nclobbers; ++j)
3429 	{
3430 	  rtx clobbered_reg = clobber_rvec[j];
3431 
3432 	  /* Sanity-check for any overlap between the clobbers and the
3433 	     inputs and outputs that has not been handled.  Such overlap
3434 	     should have been detected and reported above.  */
3435 	  if (!clobber_conflict_found && REG_P (clobbered_reg))
3436 	    {
3437 	      /* We test the old body (obody) contents to avoid
3438 		 tripping over the under-construction body.  */
3439 	      for (unsigned k = 0; k < noutputs; ++k)
3440 		if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3441 		  internal_error ("asm clobber conflict with output operand");
3442 
3443 	      for (unsigned k = 0; k < ninputs - ninout; ++k)
3444 		if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3445 		  internal_error ("asm clobber conflict with input operand");
3446 	    }
3447 
3448 	  XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3449 	}
3450 
3451       if (nlabels > 0)
3452 	emit_jump_insn (body);
3453       else
3454 	emit_insn (body);
3455     }
3456 
3457   generating_concat_p = old_generating_concat_p;
3458 
3459   if (fallthru_label)
3460     emit_label (fallthru_label);
3461 
3462   if (after_md_seq)
3463     emit_insn (after_md_seq);
3464   if (after_rtl_seq)
3465     emit_insn (after_rtl_seq);
3466 
3467   free_temp_slots ();
3468   crtl->has_asm_statement = 1;
3469 }
3470 
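/* A hedged sketch (for illustration; nothing below is emitted verbatim)
   of the RTL shape constructed above: for an asm such as

	asm volatile ("foo %1, %0" : "=r" (x) : "r" (y) : "cc");

   the body is roughly

	(parallel [(set (reg x)
			(asm_operands/v "foo %1, %0" "=r" 0 [(reg y)]
					[(asm_input "r")] []))
		   (clobber (reg flags))])

   whereas the no-output and the single-output, no-clobber cases are
   emitted as a bare ASM_OPERANDS or a single SET, as handled above.  */
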
3471 /* Emit code to jump to the address
3472    specified by the pointer expression EXP.  */
3473 
3474 static void
3475 expand_computed_goto (tree exp)
3476 {
3477   rtx x = expand_normal (exp);
3478 
3479   do_pending_stack_adjust ();
3480   emit_indirect_jump (x);
3481 }
3482 
3483 /* Generate RTL code for a `goto' statement with target label LABEL.
3484    LABEL should be a LABEL_DECL tree node that was or will later be
3485    defined with `expand_label'.  */
3486 
3487 static void
3488 expand_goto (tree label)
3489 {
3490   if (flag_checking)
3491     {
3492       /* Check for a nonlocal goto to a containing function.  Should have
3493 	 gotten translated to __builtin_nonlocal_goto.  */
3494       tree context = decl_function_context (label);
3495       gcc_assert (!context || context == current_function_decl);
3496     }
3497 
3498   emit_jump (jump_target_rtx (label));
3499 }
3500 
3501 /* Output a return with no value.  */
3502 
3503 static void
3504 expand_null_return_1 (void)
3505 {
3506   clear_pending_stack_adjust ();
3507   do_pending_stack_adjust ();
3508   emit_jump (return_label);
3509 }
3510 
3511 /* Generate RTL to return from the current function, with no value.
3512    (That is, we do not do anything about returning any value.)  */
3513 
3514 void
3515 expand_null_return (void)
3516 {
3517   /* If this function was declared to return a value, but we
3518      didn't, clobber the return registers so that they are not
3519      propagated live to the rest of the function.  */
3520   clobber_return_register ();
3521 
3522   expand_null_return_1 ();
3523 }
3524 
3525 /* Generate RTL to return from the current function, with value VAL.  */
3526 
3527 static void
3528 expand_value_return (rtx val)
3529 {
3530   /* Copy the value to the return location unless it's already there.  */
3531 
3532   tree decl = DECL_RESULT (current_function_decl);
3533   rtx return_reg = DECL_RTL (decl);
3534   if (return_reg != val)
3535     {
3536       tree funtype = TREE_TYPE (current_function_decl);
3537       tree type = TREE_TYPE (decl);
3538       int unsignedp = TYPE_UNSIGNED (type);
3539       machine_mode old_mode = DECL_MODE (decl);
3540       machine_mode mode;
3541       if (DECL_BY_REFERENCE (decl))
3542         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3543       else
3544         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3545 
3546       if (mode != old_mode)
3547 	val = convert_modes (mode, old_mode, val, unsignedp);
3548 
3549       if (GET_CODE (return_reg) == PARALLEL)
3550 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3551       else
3552 	emit_move_insn (return_reg, val);
3553     }
3554 
3555   expand_null_return_1 ();
3556 }
3557 
3558 /* Generate RTL to evaluate the expression RETVAL and return it
3559    from the current function.  */
3560 
3561 static void
3562 expand_return (tree retval)
3563 {
3564   rtx result_rtl;
3565   rtx val = 0;
3566   tree retval_rhs;
3567 
3568   /* If function wants no value, give it none.  */
3569   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3570     {
3571       expand_normal (retval);
3572       expand_null_return ();
3573       return;
3574     }
3575 
3576   if (retval == error_mark_node)
3577     {
3578       /* Treat this like a return of no value from a function that
3579 	 returns a value.  */
3580       expand_null_return ();
3581       return;
3582     }
3583   else if ((TREE_CODE (retval) == MODIFY_EXPR
3584 	    || TREE_CODE (retval) == INIT_EXPR)
3585 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3586     retval_rhs = TREE_OPERAND (retval, 1);
3587   else
3588     retval_rhs = retval;
3589 
3590   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3591 
3592   /* If we are returning the RESULT_DECL, then the value has already
3593      been stored into it, so we don't have to do anything special.  */
3594   if (TREE_CODE (retval_rhs) == RESULT_DECL)
3595     expand_value_return (result_rtl);
3596 
3597   /* If the result is an aggregate that is being returned in one (or more)
3598      registers, load the registers here.  */
3599 
3600   else if (retval_rhs != 0
3601 	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3602 	   && REG_P (result_rtl))
3603     {
3604       val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3605       if (val)
3606 	{
3607 	  /* Use the mode of the result value on the return register.  */
3608 	  PUT_MODE (result_rtl, GET_MODE (val));
3609 	  expand_value_return (val);
3610 	}
3611       else
3612 	expand_null_return ();
3613     }
3614   else if (retval_rhs != 0
3615 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3616 	   && (REG_P (result_rtl)
3617 	       || (GET_CODE (result_rtl) == PARALLEL)))
3618     {
3619       /* Compute the return value into a temporary (usually a pseudo reg).  */
3620       val
3621 	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3622       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3623       val = force_not_mem (val);
3624       expand_value_return (val);
3625     }
3626   else
3627     {
3628       /* No hard reg used; calculate value into hard return reg.  */
3629       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3630       expand_value_return (result_rtl);
3631     }
3632 }
3633 
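/* Hedged example for the BLKmode-in-register path in expand_return above:
   on a target that returns small aggregates in registers, e.g.

	struct s { char c[3]; };
	struct s f (void) { struct s v = { { 1, 2, 3 } }; return v; }

   the return value has BLKmode while DECL_RTL of the RESULT_DECL is a REG,
   so copy_blkmode_to_reg packs the bytes into a register of the return
   register's mode before expand_value_return is called.  */
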
3634 /* Expand a clobber of LHS.  If LHS is stored in a multi-part
3635    register, tell the rtl optimizers that its value is no longer
3636    needed.  */
3637 
3638 static void
3639 expand_clobber (tree lhs)
3640 {
3641   if (DECL_P (lhs))
3642     {
3643       rtx decl_rtl = DECL_RTL_IF_SET (lhs);
3644       if (decl_rtl && REG_P (decl_rtl))
3645 	{
3646 	  machine_mode decl_mode = GET_MODE (decl_rtl);
3647 	  if (maybe_gt (GET_MODE_SIZE (decl_mode),
3648 			REGMODE_NATURAL_SIZE (decl_mode)))
3649 	    emit_clobber (decl_rtl);
3650 	}
3651     }
3652 }
3653 
3654 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3655    STMT that doesn't require special handling for outgoing edges.  That
3656    is, no tailcalls and no GIMPLE_COND.  */
3657 
3658 static void
3659 expand_gimple_stmt_1 (gimple *stmt)
3660 {
3661   tree op0;
3662 
3663   set_curr_insn_location (gimple_location (stmt));
3664 
3665   switch (gimple_code (stmt))
3666     {
3667     case GIMPLE_GOTO:
3668       op0 = gimple_goto_dest (stmt);
3669       if (TREE_CODE (op0) == LABEL_DECL)
3670 	expand_goto (op0);
3671       else
3672 	expand_computed_goto (op0);
3673       break;
3674     case GIMPLE_LABEL:
3675       expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3676       break;
3677     case GIMPLE_NOP:
3678     case GIMPLE_PREDICT:
3679       break;
3680     case GIMPLE_SWITCH:
3681       {
3682 	gswitch *swtch = as_a <gswitch *> (stmt);
3683 	if (gimple_switch_num_labels (swtch) == 1)
3684 	  expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3685 	else
3686 	  expand_case (swtch);
3687       }
3688       break;
3689     case GIMPLE_ASM:
3690       expand_asm_stmt (as_a <gasm *> (stmt));
3691       break;
3692     case GIMPLE_CALL:
3693       expand_call_stmt (as_a <gcall *> (stmt));
3694       break;
3695 
3696     case GIMPLE_RETURN:
3697       {
3698 	op0 = gimple_return_retval (as_a <greturn *> (stmt));
3699 
3700 	if (op0 && op0 != error_mark_node)
3701 	  {
3702 	    tree result = DECL_RESULT (current_function_decl);
3703 
3704 	    /* If we are not returning the current function's RESULT_DECL,
3705 	       build an assignment to it.  */
3706 	    if (op0 != result)
3707 	      {
3708 		/* I believe that a function's RESULT_DECL is unique.  */
3709 		gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3710 
3711 		/* ??? We'd like to use simply expand_assignment here,
3712 		   but this fails if the value is of BLKmode but the return
3713 		   decl is a register.  expand_return has special handling
3714 		   for this combination, which eventually should move
3715 		   to common code.  See comments there.  Until then, let's
3716 		   build a modify expression :-/  */
3717 		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3718 			      result, op0);
3719 	      }
3720 	  }
3721 
3722 	if (!op0)
3723 	  expand_null_return ();
3724 	else
3725 	  expand_return (op0);
3726       }
3727       break;
3728 
3729     case GIMPLE_ASSIGN:
3730       {
3731 	gassign *assign_stmt = as_a <gassign *> (stmt);
3732 	tree lhs = gimple_assign_lhs (assign_stmt);
3733 
3734 	/* Tree expand used to fiddle with |= and &= of two bitfield
3735 	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
3736 	   of binary assigns must be a gimple reg.  */
3737 
3738 	if (TREE_CODE (lhs) != SSA_NAME
3739 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
3740 	       == GIMPLE_SINGLE_RHS)
3741 	  {
3742 	    tree rhs = gimple_assign_rhs1 (assign_stmt);
3743 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3744 			== GIMPLE_SINGLE_RHS);
3745 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3746 		/* Do not put locations on possibly shared trees.  */
3747 		&& !is_gimple_min_invariant (rhs))
3748 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3749 	    if (TREE_CLOBBER_P (rhs))
3750 	      /* This is a clobber to mark the going out of scope for
3751 		 this LHS.  */
3752 	      expand_clobber (lhs);
3753 	    else
3754 	      expand_assignment (lhs, rhs,
3755 				 gimple_assign_nontemporal_move_p (
3756 				   assign_stmt));
3757 	  }
3758 	else
3759 	  {
3760 	    rtx target, temp;
3761 	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3762 	    struct separate_ops ops;
3763 	    bool promoted = false;
3764 
3765 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3766 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3767 	      promoted = true;
3768 
3769 	    ops.code = gimple_assign_rhs_code (assign_stmt);
3770 	    ops.type = TREE_TYPE (lhs);
3771 	    switch (get_gimple_rhs_class (ops.code))
3772 	      {
3773 		case GIMPLE_TERNARY_RHS:
3774 		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
3775 		  /* Fallthru */
3776 		case GIMPLE_BINARY_RHS:
3777 		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
3778 		  /* Fallthru */
3779 		case GIMPLE_UNARY_RHS:
3780 		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
3781 		  break;
3782 		default:
3783 		  gcc_unreachable ();
3784 	      }
3785 	    ops.location = gimple_location (stmt);
3786 
3787 	    /* If we want to use a nontemporal store, force the value into
3788 	       a register first.  If we store into a promoted register,
3789 	       don't expand directly to TARGET.  */
3790 	    temp = nontemporal || promoted ? NULL_RTX : target;
3791 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3792 				       EXPAND_NORMAL);
3793 
3794 	    if (temp == target)
3795 	      ;
3796 	    else if (promoted)
3797 	      {
3798 		int unsignedp = SUBREG_PROMOTED_SIGN (target);
3799 		/* If TEMP is a VOIDmode constant, use convert_modes to make
3800 		   sure that we properly convert it.  */
3801 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3802 		  {
3803 		    temp = convert_modes (GET_MODE (target),
3804 					  TYPE_MODE (ops.type),
3805 					  temp, unsignedp);
3806 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3807 					  GET_MODE (target), temp, unsignedp);
3808 		  }
3809 
3810 		convert_move (SUBREG_REG (target), temp, unsignedp);
3811 	      }
3812 	    else if (nontemporal && emit_storent_insn (target, temp))
3813 	      ;
3814 	    else
3815 	      {
3816 		temp = force_operand (temp, target);
3817 		if (temp != target)
3818 		  emit_move_insn (target, temp);
3819 	      }
3820 	  }
3821       }
3822       break;
3823 
3824     default:
3825       gcc_unreachable ();
3826     }
3827 }
3828 
3829 /* Expand one gimple statement STMT and return the last RTL instruction
3830    before any of the newly generated ones.
3831 
3832    In addition to generating the necessary RTL instructions this also
3833    sets REG_EH_REGION notes if necessary and sets the current source
3834    location for diagnostics.  */
3835 
3836 static rtx_insn *
3837 expand_gimple_stmt (gimple *stmt)
3838 {
3839   location_t saved_location = input_location;
3840   rtx_insn *last = get_last_insn ();
3841   int lp_nr;
3842 
3843   gcc_assert (cfun);
3844 
3845   /* We need to save and restore the current source location so that errors
3846      discovered during expansion are emitted with the right location.  But
3847      it would be better if the diagnostic routines used the source location
3848      embedded in the tree nodes rather than globals.  */
3849   if (gimple_has_location (stmt))
3850     input_location = gimple_location (stmt);
3851 
3852   expand_gimple_stmt_1 (stmt);
3853 
3854   /* Free any temporaries used to evaluate this statement.  */
3855   free_temp_slots ();
3856 
3857   input_location = saved_location;
3858 
3859   /* Mark all insns that may trap.  */
3860   lp_nr = lookup_stmt_eh_lp (stmt);
3861   if (lp_nr)
3862     {
3863       rtx_insn *insn;
3864       for (insn = next_real_insn (last); insn;
3865 	   insn = next_real_insn (insn))
3866 	{
3867 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3868 	      /* If we want exceptions for non-call insns, any
3869 		 may_trap_p instruction may throw.  */
3870 	      && GET_CODE (PATTERN (insn)) != CLOBBER
3871 	      && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH
3872 	      && GET_CODE (PATTERN (insn)) != USE
3873 	      && insn_could_throw_p (insn))
3874 	    make_reg_eh_region_note (insn, 0, lp_nr);
3875 	}
3876     }
3877 
3878   return last;
3879 }
3880 
3881 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
3882    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
3883    generated a tail call (something that might be denied by the ABI
3884    rules governing the call; see calls.c).
3885 
3886    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3887    execution can still reach the rest of BB.  The case here is __builtin_sqrt,
3888    where the NaN result goes through the external function (with a
3889    tailcall) and the normal result happens via a sqrt instruction.  */
3890 
3891 static basic_block
3892 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3893 {
3894   rtx_insn *last2, *last;
3895   edge e;
3896   edge_iterator ei;
3897   profile_probability probability;
3898 
3899   last2 = last = expand_gimple_stmt (stmt);
3900 
3901   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3902     if (CALL_P (last) && SIBLING_CALL_P (last))
3903       goto found;
3904 
3905   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3906 
3907   *can_fallthru = true;
3908   return NULL;
3909 
3910  found:
3911   /* ??? Wouldn't it be better to just reset any pending stack adjust?
3912      Any instructions emitted here are about to be deleted.  */
3913   do_pending_stack_adjust ();
3914 
3915   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
3916   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
3917      EH or abnormal edges, we shouldn't have created a tail call in
3918      the first place.  So it seems to me we should just be removing
3919      all edges here, or redirecting the existing fallthru edge to
3920      the exit block.  */
3921 
3922   probability = profile_probability::never ();
3923 
3924   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3925     {
3926       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3927 	{
3928 	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3929 	    e->dest->count -= e->count ();
3930 	  probability += e->probability;
3931 	  remove_edge (e);
3932 	}
3933       else
3934 	ei_next (&ei);
3935     }
3936 
3937   /* This is somewhat ugly: the call_expr expander often emits instructions
3938      after the sibcall (to perform the function return).  These confuse the
3939      find_many_sub_basic_blocks code, so we need to get rid of them.  */
3940   last = NEXT_INSN (last);
3941   gcc_assert (BARRIER_P (last));
3942 
3943   *can_fallthru = false;
3944   while (NEXT_INSN (last))
3945     {
3946       /* For instance, the sqrt builtin expander may expand an if-then-else
3947 	 with the sibcall in the `then' arm and a label for the `else' arm.  */
3948       if (LABEL_P (NEXT_INSN (last)))
3949 	{
3950 	  *can_fallthru = true;
3951 	  break;
3952 	}
3953       delete_insn (NEXT_INSN (last));
3954     }
3955 
3956   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3957 		 | EDGE_SIBCALL);
3958   e->probability = probability;
3959   BB_END (bb) = last;
3960   update_bb_for_insn (bb);
3961 
3962   if (NEXT_INSN (last))
3963     {
3964       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3965 
3966       last = BB_END (bb);
3967       if (BARRIER_P (last))
3968 	BB_END (bb) = PREV_INSN (last);
3969     }
3970 
3971   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3972 
3973   return bb;
3974 }
3975 
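/* Hedged illustration of the conditional-tail-call case documented above:
   with errno-setting math, something like

	return sqrt (x);

   can expand to an inline sqrt instruction on the fast path and a sibling
   call to the library sqrt on the error/NaN path; the sibcall ends the
   block while the fast path falls through, which is why *CAN_FALLTHRU may
   be true even though a tail call was generated.  */
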
3976 /* Return the difference between the floor and the truncated result of
3977    a signed division by OP1 with remainder MOD.  */
3978 static rtx
3979 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3980 {
3981   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3982   return gen_rtx_IF_THEN_ELSE
3983     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3984      gen_rtx_IF_THEN_ELSE
3985      (mode, gen_rtx_LT (BImode,
3986 			gen_rtx_DIV (mode, op1, mod),
3987 			const0_rtx),
3988       constm1_rtx, const0_rtx),
3989      const0_rtx);
3990 }
3991 
3992 /* Return the difference between the ceil and the truncated result of
3993    a signed division by OP1 with remainder MOD.  */
3994 static rtx
3995 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3996 {
3997   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3998   return gen_rtx_IF_THEN_ELSE
3999     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4000      gen_rtx_IF_THEN_ELSE
4001      (mode, gen_rtx_GT (BImode,
4002 			gen_rtx_DIV (mode, op1, mod),
4003 			const0_rtx),
4004       const1_rtx, const0_rtx),
4005      const0_rtx);
4006 }
4007 
4008 /* Return the difference between the ceil and the truncated result of
4009    an unsigned division by OP1 with remainder MOD.  */
4010 static rtx
4011 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
4012 {
4013   /* (mod != 0 ? 1 : 0) */
4014   return gen_rtx_IF_THEN_ELSE
4015     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4016      const1_rtx, const0_rtx);
4017 }
4018 
4019 /* Return the difference between the rounded and the truncated result
4020    of a signed division by OP1 with remainder MOD.  Halfway cases are
4021    rounded away from zero, rather than to the nearest even number.  */
4022 static rtx
4023 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4024 {
4025   /* (abs (mod) >= abs (op1) - abs (mod)
4026       ? (op1 / mod > 0 ? 1 : -1)
4027       : 0) */
4028   return gen_rtx_IF_THEN_ELSE
4029     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
4030 		       gen_rtx_MINUS (mode,
4031 				      gen_rtx_ABS (mode, op1),
4032 				      gen_rtx_ABS (mode, mod))),
4033      gen_rtx_IF_THEN_ELSE
4034      (mode, gen_rtx_GT (BImode,
4035 			gen_rtx_DIV (mode, op1, mod),
4036 			const0_rtx),
4037       const1_rtx, constm1_rtx),
4038      const0_rtx);
4039 }
4040 
4041 /* Return the difference between the rounded and the truncated result
4042    of an unsigned division by OP1 with remainder MOD.  Halfway cases
4043    are rounded away from zero, rather than to the nearest even
4044    number.  */
4045 static rtx
4046 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
4047 {
4048   /* (mod >= op1 - mod ? 1 : 0) */
4049   return gen_rtx_IF_THEN_ELSE
4050     (mode, gen_rtx_GE (BImode, mod,
4051 		       gen_rtx_MINUS (mode, op1, mod)),
4052      const1_rtx, const0_rtx);
4053 }
4054 
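/* A minimal sketch, for illustration only (deliberately not compiled):
   the adjustments built as RTL by the helpers above mirror the following
   scalar arithmetic, shown here for the floor case.  E.g. -7 / 2 truncates
   to -3 with remainder -1; the adjustment is -1, giving the floor value -4.  */
#if 0
static int
floor_div_example (int op0, int op1)
{
  int div = op0 / op1;	/* truncated quotient */
  int mod = op0 % op1;	/* remainder; takes the sign of op0 when nonzero */
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0), as in floor_sdiv_adjust.  */
  int adj = (mod != 0 && op1 / mod < 0) ? -1 : 0;
  return div + adj;	/* floor_div_example (-7, 2) == -4.  */
}
#endif
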
4055 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
4056    any rtl.  */
4057 
4058 static rtx
4059 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4060 			      addr_space_t as)
4061 {
4062 #ifndef POINTERS_EXTEND_UNSIGNED
4063   gcc_assert (mode == Pmode
4064 	      || mode == targetm.addr_space.address_mode (as));
4065   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4066 #else
4067   rtx temp;
4068 
4069   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4070 
4071   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4072     return x;
4073 
4074   /* X must have some form of address mode already.  */
4075   scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4076   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4077     x = lowpart_subreg (mode, x, xmode);
4078   else if (POINTERS_EXTEND_UNSIGNED > 0)
4079     x = gen_rtx_ZERO_EXTEND (mode, x);
4080   else if (!POINTERS_EXTEND_UNSIGNED)
4081     x = gen_rtx_SIGN_EXTEND (mode, x);
4082   else
4083     {
4084       switch (GET_CODE (x))
4085 	{
4086 	case SUBREG:
4087 	  if ((SUBREG_PROMOTED_VAR_P (x)
4088 	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4089 	       || (GET_CODE (SUBREG_REG (x)) == PLUS
4090 		   && REG_P (XEXP (SUBREG_REG (x), 0))
4091 		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4092 		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4093 	      && GET_MODE (SUBREG_REG (x)) == mode)
4094 	    return SUBREG_REG (x);
4095 	  break;
4096 	case LABEL_REF:
4097 	  temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4098 	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4099 	  return temp;
4100 	case SYMBOL_REF:
4101 	  temp = shallow_copy_rtx (x);
4102 	  PUT_MODE (temp, mode);
4103 	  return temp;
4104 	case CONST:
4105 	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4106 	  if (temp)
4107 	    temp = gen_rtx_CONST (mode, temp);
4108 	  return temp;
4109 	case PLUS:
4110 	case MINUS:
4111 	  if (CONST_INT_P (XEXP (x, 1)))
4112 	    {
4113 	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4114 	      if (temp)
4115 		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4116 	    }
4117 	  break;
4118 	default:
4119 	  break;
4120 	}
4121       /* Don't know how to express ptr_extend as an operation in debug info.  */
4122       return NULL;
4123     }
4124 #endif /* POINTERS_EXTEND_UNSIGNED */
4125 
4126   return x;
4127 }
4128 
4129 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4130    by avoid_deep_ter_for_debug.  */
4131 
4132 static hash_map<tree, tree> *deep_ter_debug_map;
4133 
4134 /* Split overly deep TER chains for debug stmts by using debug temporaries.  */
4135 
4136 static void
4137 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4138 {
4139   use_operand_p use_p;
4140   ssa_op_iter iter;
4141   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4142     {
4143       tree use = USE_FROM_PTR (use_p);
4144       if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4145 	continue;
4146       gimple *g = get_gimple_for_ssa_name (use);
4147       if (g == NULL)
4148 	continue;
4149       if (depth > 6 && !stmt_ends_bb_p (g))
4150 	{
4151 	  if (deep_ter_debug_map == NULL)
4152 	    deep_ter_debug_map = new hash_map<tree, tree>;
4153 
4154 	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4155 	  if (vexpr != NULL)
4156 	    continue;
4157 	  vexpr = make_node (DEBUG_EXPR_DECL);
4158 	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4159 	  DECL_ARTIFICIAL (vexpr) = 1;
4160 	  TREE_TYPE (vexpr) = TREE_TYPE (use);
4161 	  SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4162 	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
4163 	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4164 	  avoid_deep_ter_for_debug (def_temp, 0);
4165 	}
4166       else
4167 	avoid_deep_ter_for_debug (g, depth + 1);
4168     }
4169 }
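/* Hedged illustration of the splitting above: with TER, a use chain like

	_1 = a + b;  _2 = _1 * c;  _3 = _2 - d;  ...

   would otherwise be substituted into a single, ever deeper debug
   expression.  Once the recursion depth exceeds the threshold checked
   above, the intermediate SSA value is instead bound to an artificial
   DEBUG_EXPR_DECL (printed as D#n in dumps) right after its defining
   statement, and the deeper uses refer to that debug temporary.  */
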
4170 
4171 /* Return an RTX equivalent to the value of the parameter DECL.  */
4172 
4173 static rtx
4174 expand_debug_parm_decl (tree decl)
4175 {
4176   rtx incoming = DECL_INCOMING_RTL (decl);
4177 
4178   if (incoming
4179       && GET_MODE (incoming) != BLKmode
4180       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4181 	  || (MEM_P (incoming)
4182 	      && REG_P (XEXP (incoming, 0))
4183 	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
4184     {
4185       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4186 
4187 #ifdef HAVE_window_save
4188       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4189 	 If the target machine has an explicit window save instruction, the
4190 	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
4191       if (REG_P (incoming)
4192 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4193 	incoming
4194 	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4195 				OUTGOING_REGNO (REGNO (incoming)), 0);
4196       else if (MEM_P (incoming))
4197 	{
4198 	  rtx reg = XEXP (incoming, 0);
4199 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4200 	    {
4201 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4202 	      incoming = replace_equiv_address_nv (incoming, reg);
4203 	    }
4204 	  else
4205 	    incoming = copy_rtx (incoming);
4206 	}
4207 #endif
4208 
4209       ENTRY_VALUE_EXP (rtl) = incoming;
4210       return rtl;
4211     }
4212 
4213   if (incoming
4214       && GET_MODE (incoming) != BLKmode
4215       && !TREE_ADDRESSABLE (decl)
4216       && MEM_P (incoming)
4217       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4218 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
4219 	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4220 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4221     return copy_rtx (incoming);
4222 
4223   return NULL_RTX;
4224 }
4225 
4226 /* Return an RTX equivalent to the value of the tree expression EXP.  */
4227 
4228 static rtx
4229 expand_debug_expr (tree exp)
4230 {
4231   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4232   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4233   machine_mode inner_mode = VOIDmode;
4234   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4235   addr_space_t as;
4236   scalar_int_mode op0_mode, op1_mode, addr_mode;
4237 
4238   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4239     {
4240     case tcc_expression:
4241       switch (TREE_CODE (exp))
4242 	{
4243 	case COND_EXPR:
4244 	case DOT_PROD_EXPR:
4245 	case SAD_EXPR:
4246 	case WIDEN_MULT_PLUS_EXPR:
4247 	case WIDEN_MULT_MINUS_EXPR:
4248 	  goto ternary;
4249 
4250 	case TRUTH_ANDIF_EXPR:
4251 	case TRUTH_ORIF_EXPR:
4252 	case TRUTH_AND_EXPR:
4253 	case TRUTH_OR_EXPR:
4254 	case TRUTH_XOR_EXPR:
4255 	  goto binary;
4256 
4257 	case TRUTH_NOT_EXPR:
4258 	  goto unary;
4259 
4260 	default:
4261 	  break;
4262 	}
4263       break;
4264 
4265     ternary:
4266       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4267       if (!op2)
4268 	return NULL_RTX;
4269       /* Fall through.  */
4270 
4271     binary:
4272     case tcc_binary:
4273       if (mode == BLKmode)
4274 	return NULL_RTX;
4275       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4276       if (!op1)
4277 	return NULL_RTX;
4278       switch (TREE_CODE (exp))
4279 	{
4280 	case LSHIFT_EXPR:
4281 	case RSHIFT_EXPR:
4282 	case LROTATE_EXPR:
4283 	case RROTATE_EXPR:
4284 	case WIDEN_LSHIFT_EXPR:
4285 	  /* Ensure second operand isn't wider than the first one.  */
4286 	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4287 	  if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4288 	      && (GET_MODE_UNIT_PRECISION (mode)
4289 		  < GET_MODE_PRECISION (op1_mode)))
4290 	    op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4291 	  break;
4292 	default:
4293 	  break;
4294 	}
4295       /* Fall through.  */
4296 
4297     unary:
4298     case tcc_unary:
4299       if (mode == BLKmode)
4300 	return NULL_RTX;
4301       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4302       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4303       if (!op0)
4304 	return NULL_RTX;
4305       break;
4306 
4307     case tcc_comparison:
4308       unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4309       goto binary;
4310 
4311     case tcc_type:
4312     case tcc_statement:
4313       gcc_unreachable ();
4314 
4315     case tcc_constant:
4316     case tcc_exceptional:
4317     case tcc_declaration:
4318     case tcc_reference:
4319     case tcc_vl_exp:
4320       break;
4321     }
4322 
4323   switch (TREE_CODE (exp))
4324     {
4325     case STRING_CST:
4326       if (!lookup_constant_def (exp))
4327 	{
4328 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
4329 	      != (size_t) TREE_STRING_LENGTH (exp))
4330 	    return NULL_RTX;
4331 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4332 	  op0 = gen_rtx_MEM (BLKmode, op0);
4333 	  set_mem_attributes (op0, exp, 0);
4334 	  return op0;
4335 	}
4336       /* Fall through.  */
4337 
4338     case INTEGER_CST:
4339     case REAL_CST:
4340     case FIXED_CST:
4341       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4342       return op0;
4343 
4344     case POLY_INT_CST:
4345       return immed_wide_int_const (poly_int_cst_value (exp), mode);
4346 
4347     case COMPLEX_CST:
4348       gcc_assert (COMPLEX_MODE_P (mode));
4349       op0 = expand_debug_expr (TREE_REALPART (exp));
4350       op1 = expand_debug_expr (TREE_IMAGPART (exp));
4351       return gen_rtx_CONCAT (mode, op0, op1);
4352 
4353     case DEBUG_EXPR_DECL:
4354       op0 = DECL_RTL_IF_SET (exp);
4355 
4356       if (op0)
4357 	return op0;
4358 
4359       op0 = gen_rtx_DEBUG_EXPR (mode);
4360       DEBUG_EXPR_TREE_DECL (op0) = exp;
4361       SET_DECL_RTL (exp, op0);
4362 
4363       return op0;
4364 
4365     case VAR_DECL:
4366     case PARM_DECL:
4367     case FUNCTION_DECL:
4368     case LABEL_DECL:
4369     case CONST_DECL:
4370     case RESULT_DECL:
4371       op0 = DECL_RTL_IF_SET (exp);
4372 
4373       /* This decl was probably optimized away.  */
4374       if (!op0
4375 	  /* At least label RTXen are sometimes replaced by
4376 	     NOTE_INSN_DELETED_LABEL.  Any notes here are not
4377 	     handled by copy_rtx.  */
4378 	  || NOTE_P (op0))
4379 	{
4380 	  if (!VAR_P (exp)
4381 	      || DECL_EXTERNAL (exp)
4382 	      || !TREE_STATIC (exp)
4383 	      || !DECL_NAME (exp)
4384 	      || DECL_HARD_REGISTER (exp)
4385 	      || DECL_IN_CONSTANT_POOL (exp)
4386 	      || mode == VOIDmode)
4387 	    return NULL;
4388 
4389 	  op0 = make_decl_rtl_for_debug (exp);
4390 	  if (!MEM_P (op0)
4391 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4392 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4393 	    return NULL;
4394 	}
4395       else
4396 	op0 = copy_rtx (op0);
4397 
4398       if (GET_MODE (op0) == BLKmode
4399 	  /* If op0 is not BLKmode, but mode is, adjust_mode
4400 	     below would ICE.  While it is likely a FE bug,
4401 	     try to be robust here.  See PR43166.  */
4402 	  || mode == BLKmode
4403 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4404 	{
4405 	  gcc_assert (MEM_P (op0));
4406 	  op0 = adjust_address_nv (op0, mode, 0);
4407 	  return op0;
4408 	}
4409 
4410       /* Fall through.  */
4411 
4412     adjust_mode:
4413     case PAREN_EXPR:
4414     CASE_CONVERT:
4415       {
4416 	inner_mode = GET_MODE (op0);
4417 
4418 	if (mode == inner_mode)
4419 	  return op0;
4420 
4421 	if (inner_mode == VOIDmode)
4422 	  {
4423 	    if (TREE_CODE (exp) == SSA_NAME)
4424 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
4425 	    else
4426 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4427 	    if (mode == inner_mode)
4428 	      return op0;
4429 	  }
4430 
4431 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4432 	  {
4433 	    if (GET_MODE_UNIT_BITSIZE (mode)
4434 		== GET_MODE_UNIT_BITSIZE (inner_mode))
4435 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4436 	    else if (GET_MODE_UNIT_BITSIZE (mode)
4437 		     < GET_MODE_UNIT_BITSIZE (inner_mode))
4438 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4439 	    else
4440 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4441 	  }
4442 	else if (FLOAT_MODE_P (mode))
4443 	  {
4444 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
4445 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4446 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4447 	    else
4448 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4449 	  }
4450 	else if (FLOAT_MODE_P (inner_mode))
4451 	  {
4452 	    if (unsignedp)
4453 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4454 	    else
4455 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4456 	  }
4457 	else if (GET_MODE_UNIT_PRECISION (mode)
4458 		 == GET_MODE_UNIT_PRECISION (inner_mode))
4459 	  op0 = lowpart_subreg (mode, op0, inner_mode);
4460 	else if (GET_MODE_UNIT_PRECISION (mode)
4461 		 < GET_MODE_UNIT_PRECISION (inner_mode))
4462 	  op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4463 	else if (UNARY_CLASS_P (exp)
4464 		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4465 		 : unsignedp)
4466 	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4467 	else
4468 	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4469 
4470 	return op0;
4471       }
4472 
4473     case MEM_REF:
4474       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4475 	{
4476 	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4477 				     TREE_OPERAND (exp, 0),
4478 				     TREE_OPERAND (exp, 1));
4479 	  if (newexp)
4480 	    return expand_debug_expr (newexp);
4481 	}
4482       /* FALLTHROUGH */
4483     case INDIRECT_REF:
4484       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4485       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4486       if (!op0)
4487 	return NULL;
4488 
4489       if (TREE_CODE (exp) == MEM_REF)
4490 	{
4491 	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4492 	      || (GET_CODE (op0) == PLUS
4493 		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4494 	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
4495 	       Instead just use get_inner_reference.  */
4496 	    goto component_ref;
4497 
4498 	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4499 	  poly_int64 offset;
4500 	  if (!op1 || !poly_int_rtx_p (op1, &offset))
4501 	    return NULL;
4502 
4503 	  op0 = plus_constant (inner_mode, op0, offset);
4504 	}
4505 
4506       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4507 
4508       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4509 					  op0, as);
4510       if (op0 == NULL_RTX)
4511 	return NULL;
4512 
4513       op0 = gen_rtx_MEM (mode, op0);
4514       set_mem_attributes (op0, exp, 0);
4515       if (TREE_CODE (exp) == MEM_REF
4516 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4517 	set_mem_expr (op0, NULL_TREE);
4518       set_mem_addr_space (op0, as);
4519 
4520       return op0;
4521 
4522     case TARGET_MEM_REF:
4523       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4524 	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4525 	return NULL;
4526 
4527       op0 = expand_debug_expr
4528 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4529       if (!op0)
4530 	return NULL;
4531 
4532       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4533       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4534 					  op0, as);
4535       if (op0 == NULL_RTX)
4536 	return NULL;
4537 
4538       op0 = gen_rtx_MEM (mode, op0);
4539 
4540       set_mem_attributes (op0, exp, 0);
4541       set_mem_addr_space (op0, as);
4542 
4543       return op0;
4544 
4545     component_ref:
4546     case ARRAY_REF:
4547     case ARRAY_RANGE_REF:
4548     case COMPONENT_REF:
4549     case BIT_FIELD_REF:
4550     case REALPART_EXPR:
4551     case IMAGPART_EXPR:
4552     case VIEW_CONVERT_EXPR:
4553       {
4554 	machine_mode mode1;
4555 	poly_int64 bitsize, bitpos;
4556 	tree offset;
4557 	int reversep, volatilep = 0;
4558 	tree tem
4559 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4560 				 &unsignedp, &reversep, &volatilep);
4561 	rtx orig_op0;
4562 
4563 	if (known_eq (bitsize, 0))
4564 	  return NULL;
4565 
4566 	orig_op0 = op0 = expand_debug_expr (tem);
4567 
4568 	if (!op0)
4569 	  return NULL;
4570 
4571 	if (offset)
4572 	  {
4573 	    machine_mode addrmode, offmode;
4574 
4575 	    if (!MEM_P (op0))
4576 	      return NULL;
4577 
4578 	    op0 = XEXP (op0, 0);
4579 	    addrmode = GET_MODE (op0);
4580 	    if (addrmode == VOIDmode)
4581 	      addrmode = Pmode;
4582 
4583 	    op1 = expand_debug_expr (offset);
4584 	    if (!op1)
4585 	      return NULL;
4586 
4587 	    offmode = GET_MODE (op1);
4588 	    if (offmode == VOIDmode)
4589 	      offmode = TYPE_MODE (TREE_TYPE (offset));
4590 
4591 	    if (addrmode != offmode)
4592 	      op1 = lowpart_subreg (addrmode, op1, offmode);
4593 
4594 	    /* Don't use offset_address here; we don't need a
4595 	       recognizable address, and we don't want to generate
4596 	       code.  */
4597 	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4598 							  op0, op1));
4599 	  }
4600 
4601 	if (MEM_P (op0))
4602 	  {
4603 	    if (mode1 == VOIDmode)
4604 	      {
4605 		if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4606 		  return NULL;
4607 		/* Bitfield.  */
4608 		mode1 = smallest_int_mode_for_size (bitsize);
4609 	      }
4610 	    poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4611 	    if (maybe_ne (bytepos, 0))
4612 	      {
4613 		op0 = adjust_address_nv (op0, mode1, bytepos);
4614 		bitpos = num_trailing_bits (bitpos);
4615 	      }
4616 	    else if (known_eq (bitpos, 0)
4617 		     && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4618 	      op0 = adjust_address_nv (op0, mode, 0);
4619 	    else if (GET_MODE (op0) != mode1)
4620 	      op0 = adjust_address_nv (op0, mode1, 0);
4621 	    else
4622 	      op0 = copy_rtx (op0);
4623 	    if (op0 == orig_op0)
4624 	      op0 = shallow_copy_rtx (op0);
4625 	    set_mem_attributes (op0, exp, 0);
4626 	  }
4627 
4628 	if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4629 	  return op0;
4630 
4631 	if (maybe_lt (bitpos, 0))
4632           return NULL;
4633 
4634 	if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4635 	  return NULL;
4636 
4637 	poly_int64 bytepos;
4638 	if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4639 	    && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4640 	  {
4641 	    machine_mode opmode = GET_MODE (op0);
4642 
4643 	    if (opmode == VOIDmode)
4644 	      opmode = TYPE_MODE (TREE_TYPE (tem));
4645 
4646 	    /* This condition may hold if we're expanding the address
4647 	       right past the end of an array that turned out not to
4648 	       be addressable (i.e., the address was only computed in
4649 	       debug stmts).  The simplify_gen_subreg below would rightfully
4650 	       crash, and the address doesn't really exist, so just
4651 	       drop it.  */
4652 	    if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4653 	      return NULL;
4654 
4655 	    if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4656 	      return simplify_gen_subreg (mode, op0, opmode, bytepos);
4657 	  }
4658 
4659 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4660 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
4661 				     ? SIGN_EXTRACT
4662 				     : ZERO_EXTRACT, mode,
4663 				     GET_MODE (op0) != VOIDmode
4664 				     ? GET_MODE (op0)
4665 				     : TYPE_MODE (TREE_TYPE (tem)),
4666 				     op0, gen_int_mode (bitsize, word_mode),
4667 				     gen_int_mode (bitpos, word_mode));
4668       }
4669 
4670     case ABS_EXPR:
4671     case ABSU_EXPR:
4672       return simplify_gen_unary (ABS, mode, op0, mode);
4673 
4674     case NEGATE_EXPR:
4675       return simplify_gen_unary (NEG, mode, op0, mode);
4676 
4677     case BIT_NOT_EXPR:
4678       return simplify_gen_unary (NOT, mode, op0, mode);
4679 
4680     case FLOAT_EXPR:
4681       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4682 									 0)))
4683 				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4684 				 inner_mode);
4685 
4686     case FIX_TRUNC_EXPR:
4687       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4688 				 inner_mode);
4689 
4690     case POINTER_PLUS_EXPR:
4691       /* For the rare target where pointers are not the same size as
4692 	 size_t, we need to check for mis-matched modes and correct
4693 	 the addend.  */
4694       if (op0 && op1
4695 	  && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4696 	  && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4697 	  && op0_mode != op1_mode)
4698 	{
4699 	  if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4700 	      /* If OP0 is a partial mode, then we must truncate, even
4701 		 if it has the same bitsize as OP1, as GCC's
4702 		 representation of partial modes is opaque.  */
4703 	      || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4704 		  && (GET_MODE_BITSIZE (op0_mode)
4705 		      == GET_MODE_BITSIZE (op1_mode))))
4706 	    op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4707 	  else
4708 	    /* We always sign-extend, regardless of the signedness of
4709 	       the operand, because the operand is always unsigned
4710 	       here even if the original C expression is signed.  */
4711 	    op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4712 	}
4713       /* Fall through.  */
4714     case PLUS_EXPR:
4715       return simplify_gen_binary (PLUS, mode, op0, op1);
4716 
4717     case MINUS_EXPR:
4718     case POINTER_DIFF_EXPR:
4719       return simplify_gen_binary (MINUS, mode, op0, op1);
4720 
4721     case MULT_EXPR:
4722       return simplify_gen_binary (MULT, mode, op0, op1);
4723 
4724     case RDIV_EXPR:
4725     case TRUNC_DIV_EXPR:
4726     case EXACT_DIV_EXPR:
4727       if (unsignedp)
4728 	return simplify_gen_binary (UDIV, mode, op0, op1);
4729       else
4730 	return simplify_gen_binary (DIV, mode, op0, op1);
4731 
4732     case TRUNC_MOD_EXPR:
4733       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4734 
4735     case FLOOR_DIV_EXPR:
4736       if (unsignedp)
4737 	return simplify_gen_binary (UDIV, mode, op0, op1);
4738       else
4739 	{
4740 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4741 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4742 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4743 	  return simplify_gen_binary (PLUS, mode, div, adj);
4744 	}
4745 
4746     case FLOOR_MOD_EXPR:
4747       if (unsignedp)
4748 	return simplify_gen_binary (UMOD, mode, op0, op1);
4749       else
4750 	{
4751 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4752 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4753 	  adj = simplify_gen_unary (NEG, mode,
4754 				    simplify_gen_binary (MULT, mode, adj, op1),
4755 				    mode);
4756 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4757 	}
4758 
4759     case CEIL_DIV_EXPR:
4760       if (unsignedp)
4761 	{
4762 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4763 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4764 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4765 	  return simplify_gen_binary (PLUS, mode, div, adj);
4766 	}
4767       else
4768 	{
4769 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4770 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4771 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4772 	  return simplify_gen_binary (PLUS, mode, div, adj);
4773 	}
4774 
4775     case CEIL_MOD_EXPR:
4776       if (unsignedp)
4777 	{
4778 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4779 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4780 	  adj = simplify_gen_unary (NEG, mode,
4781 				    simplify_gen_binary (MULT, mode, adj, op1),
4782 				    mode);
4783 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4784 	}
4785       else
4786 	{
4787 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4788 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4789 	  adj = simplify_gen_unary (NEG, mode,
4790 				    simplify_gen_binary (MULT, mode, adj, op1),
4791 				    mode);
4792 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4793 	}
4794 
4795     case ROUND_DIV_EXPR:
4796       if (unsignedp)
4797 	{
4798 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4799 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4800 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4801 	  return simplify_gen_binary (PLUS, mode, div, adj);
4802 	}
4803       else
4804 	{
4805 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4806 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4807 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4808 	  return simplify_gen_binary (PLUS, mode, div, adj);
4809 	}
4810 
4811     case ROUND_MOD_EXPR:
4812       if (unsignedp)
4813 	{
4814 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4815 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4816 	  adj = simplify_gen_unary (NEG, mode,
4817 				    simplify_gen_binary (MULT, mode, adj, op1),
4818 				    mode);
4819 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4820 	}
4821       else
4822 	{
4823 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4824 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4825 	  adj = simplify_gen_unary (NEG, mode,
4826 				    simplify_gen_binary (MULT, mode, adj, op1),
4827 				    mode);
4828 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4829 	}
4830 
4831     case LSHIFT_EXPR:
4832       return simplify_gen_binary (ASHIFT, mode, op0, op1);
4833 
4834     case RSHIFT_EXPR:
4835       if (unsignedp)
4836 	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4837       else
4838 	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4839 
4840     case LROTATE_EXPR:
4841       return simplify_gen_binary (ROTATE, mode, op0, op1);
4842 
4843     case RROTATE_EXPR:
4844       return simplify_gen_binary (ROTATERT, mode, op0, op1);
4845 
4846     case MIN_EXPR:
4847       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4848 
4849     case MAX_EXPR:
4850       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4851 
4852     case BIT_AND_EXPR:
4853     case TRUTH_AND_EXPR:
4854       return simplify_gen_binary (AND, mode, op0, op1);
4855 
4856     case BIT_IOR_EXPR:
4857     case TRUTH_OR_EXPR:
4858       return simplify_gen_binary (IOR, mode, op0, op1);
4859 
4860     case BIT_XOR_EXPR:
4861     case TRUTH_XOR_EXPR:
4862       return simplify_gen_binary (XOR, mode, op0, op1);
4863 
4864     case TRUTH_ANDIF_EXPR:
4865       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4866 
4867     case TRUTH_ORIF_EXPR:
4868       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4869 
4870     case TRUTH_NOT_EXPR:
4871       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4872 
4873     case LT_EXPR:
4874       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4875 				      op0, op1);
4876 
4877     case LE_EXPR:
4878       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4879 				      op0, op1);
4880 
4881     case GT_EXPR:
4882       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4883 				      op0, op1);
4884 
4885     case GE_EXPR:
4886       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4887 				      op0, op1);
4888 
4889     case EQ_EXPR:
4890       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4891 
4892     case NE_EXPR:
4893       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4894 
4895     case UNORDERED_EXPR:
4896       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4897 
4898     case ORDERED_EXPR:
4899       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4900 
4901     case UNLT_EXPR:
4902       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4903 
4904     case UNLE_EXPR:
4905       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4906 
4907     case UNGT_EXPR:
4908       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4909 
4910     case UNGE_EXPR:
4911       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4912 
4913     case UNEQ_EXPR:
4914       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4915 
4916     case LTGT_EXPR:
4917       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4918 
4919     case COND_EXPR:
4920       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4921 
4922     case COMPLEX_EXPR:
4923       gcc_assert (COMPLEX_MODE_P (mode));
4924       if (GET_MODE (op0) == VOIDmode)
4925 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4926       if (GET_MODE (op1) == VOIDmode)
4927 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4928       return gen_rtx_CONCAT (mode, op0, op1);
4929 
4930     case CONJ_EXPR:
4931       if (GET_CODE (op0) == CONCAT)
4932 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4933 			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4934 						   XEXP (op0, 1),
4935 						   GET_MODE_INNER (mode)));
4936       else
4937 	{
4938 	  scalar_mode imode = GET_MODE_INNER (mode);
4939 	  rtx re, im;
4940 
4941 	  if (MEM_P (op0))
4942 	    {
4943 	      re = adjust_address_nv (op0, imode, 0);
4944 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4945 	    }
4946 	  else
4947 	    {
4948 	      scalar_int_mode ifmode;
4949 	      scalar_int_mode ihmode;
4950 	      rtx halfsize;
4951 	      if (!int_mode_for_mode (mode).exists (&ifmode)
4952 		  || !int_mode_for_mode (imode).exists (&ihmode))
4953 		return NULL;
4954 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4955 	      re = op0;
4956 	      if (mode != ifmode)
4957 		re = gen_rtx_SUBREG (ifmode, re, 0);
4958 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4959 	      if (imode != ihmode)
4960 		re = gen_rtx_SUBREG (imode, re, 0);
4961 	      im = copy_rtx (op0);
4962 	      if (mode != ifmode)
4963 		im = gen_rtx_SUBREG (ifmode, im, 0);
4964 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4965 	      if (imode != ihmode)
4966 		im = gen_rtx_SUBREG (imode, im, 0);
4967 	    }
4968 	  im = gen_rtx_NEG (imode, im);
4969 	  return gen_rtx_CONCAT (mode, re, im);
4970 	}
4971 
4972     case ADDR_EXPR:
4973       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4974       if (!op0 || !MEM_P (op0))
4975 	{
4976 	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4977 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4978 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4979 	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4980 		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4981 	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4982 
4983 	  if (handled_component_p (TREE_OPERAND (exp, 0)))
4984 	    {
4985 	      poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4986 	      bool reverse;
4987 	      tree decl
4988 		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4989 					   &bitsize, &maxsize, &reverse);
4990 	      if ((VAR_P (decl)
4991 		   || TREE_CODE (decl) == PARM_DECL
4992 		   || TREE_CODE (decl) == RESULT_DECL)
4993 		  && (!TREE_ADDRESSABLE (decl)
4994 		      || target_for_debug_bind (decl))
4995 		  && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4996 		  && known_gt (bitsize, 0)
4997 		  && known_eq (bitsize, maxsize))
4998 		{
4999 		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
5000 		  return plus_constant (mode, base, byteoffset);
5001 		}
5002 	    }
5003 
5004 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
5005 	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5006 		 == ADDR_EXPR)
5007 	    {
5008 	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5009 						     0));
5010 	      if (op0 != NULL
5011 		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
5012 		      || (GET_CODE (op0) == PLUS
5013 			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
5014 			  && CONST_INT_P (XEXP (op0, 1)))))
5015 		{
5016 		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5017 							 1));
5018 		  poly_int64 offset;
5019 		  if (!op1 || !poly_int_rtx_p (op1, &offset))
5020 		    return NULL;
5021 
5022 		  return plus_constant (mode, op0, offset);
5023 		}
5024 	    }
5025 
5026 	  return NULL;
5027 	}
5028 
5029       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
5030       addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5031       op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
5032 
5033       return op0;
5034 
5035     case VECTOR_CST:
5036       {
5037 	unsigned HOST_WIDE_INT i, nelts;
5038 
5039 	if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
5040 	  return NULL;
5041 
5042 	op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5043 
5044 	for (i = 0; i < nelts; ++i)
5045 	  {
5046 	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
5047 	    if (!op1)
5048 	      return NULL;
5049 	    XVECEXP (op0, 0, i) = op1;
5050 	  }
5051 
5052 	return op0;
5053       }
5054 
5055     case CONSTRUCTOR:
5056       if (TREE_CLOBBER_P (exp))
5057 	return NULL;
5058       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5059 	{
5060 	  unsigned i;
5061 	  unsigned HOST_WIDE_INT nelts;
5062 	  tree val;
5063 
5064 	  if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5065 	    goto flag_unsupported;
5066 
5067 	  op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5068 
5069 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5070 	    {
5071 	      op1 = expand_debug_expr (val);
5072 	      if (!op1)
5073 		return NULL;
5074 	      XVECEXP (op0, 0, i) = op1;
5075 	    }
5076 
5077 	  if (i < nelts)
5078 	    {
5079 	      op1 = expand_debug_expr
5080 		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5081 
5082 	      if (!op1)
5083 		return NULL;
5084 
5085 	      for (; i < nelts; i++)
5086 		XVECEXP (op0, 0, i) = op1;
5087 	    }
5088 
5089 	  return op0;
5090 	}
5091       else
5092 	goto flag_unsupported;
5093 
5094     case CALL_EXPR:
5095       /* ??? Maybe handle some builtins?  */
5096       return NULL;
5097 
5098     case SSA_NAME:
5099       {
5100 	gimple *g = get_gimple_for_ssa_name (exp);
5101 	if (g)
5102 	  {
5103 	    tree t = NULL_TREE;
5104 	    if (deep_ter_debug_map)
5105 	      {
5106 		tree *slot = deep_ter_debug_map->get (exp);
5107 		if (slot)
5108 		  t = *slot;
5109 	      }
5110 	    if (t == NULL_TREE)
5111 	      t = gimple_assign_rhs_to_tree (g);
5112 	    op0 = expand_debug_expr (t);
5113 	    if (!op0)
5114 	      return NULL;
5115 	  }
5116 	else
5117 	  {
5118 	    /* If this is a reference to the incoming value of a
5119 	       parameter that is never used in the code, or whose
5120 	       incoming value is never used in the code, use the
5121 	       PARM_DECL's DECL_RTL if set.  */
5122 	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
5123 		&& SSA_NAME_VAR (exp)
5124 		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5125 		&& has_zero_uses (exp))
5126 	      {
5127 		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5128 		if (op0)
5129 		  goto adjust_mode;
5130 		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5131 		if (op0)
5132 		  goto adjust_mode;
5133 	      }
5134 
5135 	    int part = var_to_partition (SA.map, exp);
5136 
5137 	    if (part == NO_PARTITION)
5138 	      return NULL;
5139 
5140 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5141 
5142 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
5143 	  }
5144 	goto adjust_mode;
5145       }
5146 
5147     case ERROR_MARK:
5148       return NULL;
5149 
5150     /* Vector stuff.  For most of the codes we don't have rtl codes.  */
5151     case REALIGN_LOAD_EXPR:
5152     case VEC_COND_EXPR:
5153     case VEC_PACK_FIX_TRUNC_EXPR:
5154     case VEC_PACK_FLOAT_EXPR:
5155     case VEC_PACK_SAT_EXPR:
5156     case VEC_PACK_TRUNC_EXPR:
5157     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
5158     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
5159     case VEC_UNPACK_FLOAT_HI_EXPR:
5160     case VEC_UNPACK_FLOAT_LO_EXPR:
5161     case VEC_UNPACK_HI_EXPR:
5162     case VEC_UNPACK_LO_EXPR:
5163     case VEC_WIDEN_MULT_HI_EXPR:
5164     case VEC_WIDEN_MULT_LO_EXPR:
5165     case VEC_WIDEN_MULT_EVEN_EXPR:
5166     case VEC_WIDEN_MULT_ODD_EXPR:
5167     case VEC_WIDEN_LSHIFT_HI_EXPR:
5168     case VEC_WIDEN_LSHIFT_LO_EXPR:
5169     case VEC_PERM_EXPR:
5170     case VEC_DUPLICATE_EXPR:
5171     case VEC_SERIES_EXPR:
5172       return NULL;
5173 
5174     /* Misc codes.  */
5175     case ADDR_SPACE_CONVERT_EXPR:
5176     case FIXED_CONVERT_EXPR:
5177     case OBJ_TYPE_REF:
5178     case WITH_SIZE_EXPR:
5179     case BIT_INSERT_EXPR:
5180       return NULL;
5181 
5182     case DOT_PROD_EXPR:
5183       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5184 	  && SCALAR_INT_MODE_P (mode))
5185 	{
5186 	  op0
5187 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5188 									  0)))
5189 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5190 				  inner_mode);
5191 	  op1
5192 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5193 									  1)))
5194 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5195 				  inner_mode);
5196 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5197 	  return simplify_gen_binary (PLUS, mode, op0, op2);
5198 	}
5199       return NULL;
5200 
5201     case WIDEN_MULT_EXPR:
5202     case WIDEN_MULT_PLUS_EXPR:
5203     case WIDEN_MULT_MINUS_EXPR:
5204       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5205 	  && SCALAR_INT_MODE_P (mode))
5206 	{
5207 	  inner_mode = GET_MODE (op0);
5208 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5209 	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5210 	  else
5211 	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5212 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5213 	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5214 	  else
5215 	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5216 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5217 	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5218 	    return op0;
5219 	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5220 	    return simplify_gen_binary (PLUS, mode, op0, op2);
5221 	  else
5222 	    return simplify_gen_binary (MINUS, mode, op2, op0);
5223 	}
5224       return NULL;
5225 
5226     case MULT_HIGHPART_EXPR:
5227       /* ??? Similar to the above.  */
5228       return NULL;
5229 
5230     case WIDEN_SUM_EXPR:
5231     case WIDEN_LSHIFT_EXPR:
5232       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5233 	  && SCALAR_INT_MODE_P (mode))
5234 	{
5235 	  op0
5236 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5237 									  0)))
5238 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5239 				  inner_mode);
5240 	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5241 				      ? ASHIFT : PLUS, mode, op0, op1);
5242 	}
5243       return NULL;
5244 
5245     default:
5246     flag_unsupported:
5247       if (flag_checking)
5248 	{
5249 	  debug_tree (exp);
5250 	  gcc_unreachable ();
5251 	}
5252       return NULL;
5253     }
5254 }
5255 
5256 /* Return an RTX equivalent to the source bind value of the tree expression
5257    EXP.  */
5258 
5259 static rtx
5260 expand_debug_source_expr (tree exp)
5261 {
5262   rtx op0 = NULL_RTX;
5263   machine_mode mode = VOIDmode, inner_mode;
5264 
5265   switch (TREE_CODE (exp))
5266     {
5267     case VAR_DECL:
5268       if (DECL_ABSTRACT_ORIGIN (exp))
5269 	return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
5270       break;
5271     case PARM_DECL:
5272       {
5273 	mode = DECL_MODE (exp);
5274 	op0 = expand_debug_parm_decl (exp);
5275 	if (op0)
5276 	   break;
5277 	/* See whether this is an argument that has been completely
5278 	   optimized out.  */
5279 	if (!DECL_RTL_SET_P (exp)
5280 	    && !DECL_INCOMING_RTL (exp)
5281 	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
5282 	  {
5283 	    tree aexp = DECL_ORIGIN (exp);
5284 	    if (DECL_CONTEXT (aexp)
5285 		== DECL_ABSTRACT_ORIGIN (current_function_decl))
5286 	      {
5287 		vec<tree, va_gc> **debug_args;
5288 		unsigned int ix;
5289 		tree ddecl;
5290 		debug_args = decl_debug_args_lookup (current_function_decl);
5291 		if (debug_args != NULL)
5292 		  {
5293 		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5294 			 ix += 2)
5295 		      if (ddecl == aexp)
5296 			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5297 		  }
5298 	      }
5299 	  }
5300 	break;
5301       }
5302     default:
5303       break;
5304     }
5305 
5306   if (op0 == NULL_RTX)
5307     return NULL_RTX;
5308 
5309   inner_mode = GET_MODE (op0);
5310   if (mode == inner_mode)
5311     return op0;
5312 
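  /* OP0 was expanded in INNER_MODE but the bind wants MODE, so emit an
     explicit conversion: a subreg or FLOAT_TRUNCATE/FLOAT_EXTEND between
     floating-point modes, FIX/UNSIGNED_FIX from floating point to integer,
     and a lowpart subreg, TRUNCATE or ZERO_EXTEND/SIGN_EXTEND between
     integer modes of different precision.  */
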
5313   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5314     {
5315       if (GET_MODE_UNIT_BITSIZE (mode)
5316 	  == GET_MODE_UNIT_BITSIZE (inner_mode))
5317 	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5318       else if (GET_MODE_UNIT_BITSIZE (mode)
5319 	       < GET_MODE_UNIT_BITSIZE (inner_mode))
5320 	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5321       else
5322 	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5323     }
5324   else if (FLOAT_MODE_P (mode))
5325     gcc_unreachable ();
5326   else if (FLOAT_MODE_P (inner_mode))
5327     {
5328       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5329 	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5330       else
5331 	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5332     }
5333   else if (GET_MODE_UNIT_PRECISION (mode)
5334 	   == GET_MODE_UNIT_PRECISION (inner_mode))
5335     op0 = lowpart_subreg (mode, op0, inner_mode);
5336   else if (GET_MODE_UNIT_PRECISION (mode)
5337 	   < GET_MODE_UNIT_PRECISION (inner_mode))
5338     op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5339   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5340     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5341   else
5342     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5343 
5344   return op0;
5345 }
5346 
5347 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5348    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5349    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
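/* For instance, if a location ends up as deeply nested arithmetic such as
     (plus (mult (plus (mult (plus (reg A) (reg B)) (reg C)) (reg D)) (reg E))
	   (reg F))
   the subexpression at the fifth level, (plus (reg A) (reg B)), is moved
   into a fresh DEBUG_EXPR D1, a bind "D1 => (plus (reg A) (reg B))" is
   emitted just before INSN, and the location above then refers to D1.  */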
5350 
5351 static void
5352 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5353 {
5354   rtx exp = *exp_p;
5355 
5356   if (exp == NULL_RTX)
5357     return;
5358 
5359   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5360     return;
5361 
5362   if (depth == 4)
5363     {
5364       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
5365       rtx dval = make_debug_expr_from_rtl (exp);
5366 
5367       /* Emit a debug bind insn before INSN.  */
5368       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5369 				       DEBUG_EXPR_TREE_DECL (dval), exp,
5370 				       VAR_INIT_STATUS_INITIALIZED);
5371 
5372       emit_debug_insn_before (bind, insn);
5373       *exp_p = dval;
5374       return;
5375     }
5376 
5377   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5378   int i, j;
5379   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5380     switch (*format_ptr++)
5381       {
5382       case 'e':
5383 	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5384 	break;
5385 
5386       case 'E':
5387       case 'V':
5388 	for (j = 0; j < XVECLEN (exp, i); j++)
5389 	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5390 	break;
5391 
5392       default:
5393 	break;
5394       }
5395 }
5396 
5397 /* Expand the _LOCs in debug insns.  We run this after expanding all
5398    regular insns, so that any variables referenced in the function
5399    will have their DECL_RTLs set.  */
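/* For instance, a bind such as "# DEBUG x => y_1 + 1" becomes a DEBUG_INSN
   whose VAR_LOCATION holds the RTL computed for y_1 + 1; if the value
   cannot be expanded, the location degrades to UNKNOWN_VAR_LOC instead of
   the bind being dropped.  */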
5400 
5401 static void
5402 expand_debug_locations (void)
5403 {
5404   rtx_insn *insn;
5405   rtx_insn *last = get_last_insn ();
5406   int save_strict_alias = flag_strict_aliasing;
5407 
5408   /* New alias sets while setting up memory attributes cause
5409      -fcompare-debug failures, even though they don't bring about any
5410      codegen changes.  */
5411   flag_strict_aliasing = 0;
5412 
5413   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5414     if (DEBUG_BIND_INSN_P (insn))
5415       {
5416 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5417 	rtx val;
5418 	rtx_insn *prev_insn, *insn2;
5419 	machine_mode mode;
5420 
5421 	if (value == NULL_TREE)
5422 	  val = NULL_RTX;
5423 	else
5424 	  {
5425 	    if (INSN_VAR_LOCATION_STATUS (insn)
5426 		== VAR_INIT_STATUS_UNINITIALIZED)
5427 	      val = expand_debug_source_expr (value);
5428 	    /* The avoid_deep_ter_for_debug function inserts
5429 	       debug bind stmts after SSA_NAME definition, with the
5430 	       SSA_NAME as the whole bind location.  Temporarily disable
5431 	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5432 	       being defined in this DEBUG_INSN.  */
5433 	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5434 	      {
5435 		tree *slot = deep_ter_debug_map->get (value);
5436 		if (slot)
5437 		  {
5438 		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
5439 		      *slot = NULL_TREE;
5440 		    else
5441 		      slot = NULL;
5442 		  }
5443 		val = expand_debug_expr (value);
5444 		if (slot)
5445 		  *slot = INSN_VAR_LOCATION_DECL (insn);
5446 	      }
5447 	    else
5448 	      val = expand_debug_expr (value);
5449 	    gcc_assert (last == get_last_insn ());
5450 	  }
5451 
5452 	if (!val)
5453 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
5454 	else
5455 	  {
5456 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
5457 
5458 	    gcc_assert (mode == GET_MODE (val)
5459 			|| (GET_MODE (val) == VOIDmode
5460 			    && (CONST_SCALAR_INT_P (val)
5461 				|| GET_CODE (val) == CONST_FIXED
5462 				|| GET_CODE (val) == LABEL_REF)));
5463 	  }
5464 
5465 	INSN_VAR_LOCATION_LOC (insn) = val;
5466 	prev_insn = PREV_INSN (insn);
5467 	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5468 	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5469       }
5470 
5471   flag_strict_aliasing = save_strict_alias;
5472 }
5473 
5474 /* Swap the operands of commutative operations so that the more
5475    expensive operand is expanded first.  */
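/* For instance, given
     t_1 = <cheap>;
     t_2 = <expensive TERed chain>;
     x_3 = t_1 + t_2;
   the addition becomes x_3 = t_2 + t_1, so that the costlier operand (as
   measured by estimate_num_insns accumulated over the TERed definition
   chains) is expanded first.  */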
5476 
5477 static void
5478 reorder_operands (basic_block bb)
5479 {
5480   unsigned int *lattice;  /* Hold cost of each statement.  */
5481   unsigned int i = 0, n = 0;
5482   gimple_stmt_iterator gsi;
5483   gimple_seq stmts;
5484   gimple *stmt;
5485   bool swap;
5486   tree op0, op1;
5487   ssa_op_iter iter;
5488   use_operand_p use_p;
5489   gimple *def0, *def1;
5490 
5491   /* Compute cost of each statement using estimate_num_insns.  */
5492   stmts = bb_seq (bb);
5493   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5494     {
5495       stmt = gsi_stmt (gsi);
5496       if (!is_gimple_debug (stmt))
5497         gimple_set_uid (stmt, n++);
5498     }
5499   lattice = XNEWVEC (unsigned int, n);
5500   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5501     {
5502       unsigned cost;
5503       stmt = gsi_stmt (gsi);
5504       if (is_gimple_debug (stmt))
5505 	continue;
5506       cost = estimate_num_insns (stmt, &eni_size_weights);
5507       lattice[i] = cost;
5508       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5509 	{
5510 	  tree use = USE_FROM_PTR (use_p);
5511 	  gimple *def_stmt;
5512 	  if (TREE_CODE (use) != SSA_NAME)
5513 	    continue;
5514 	  def_stmt = get_gimple_for_ssa_name (use);
5515 	  if (!def_stmt)
5516 	    continue;
5517 	  lattice[i] += lattice[gimple_uid (def_stmt)];
5518 	}
5519       i++;
5520       if (!is_gimple_assign (stmt)
5521 	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5522 	continue;
5523       op0 = gimple_op (stmt, 1);
5524       op1 = gimple_op (stmt, 2);
5525       if (TREE_CODE (op0) != SSA_NAME
5526 	  || TREE_CODE (op1) != SSA_NAME)
5527 	continue;
5528       /* Swap operands if the second one is more expensive.  */
5529       def0 = get_gimple_for_ssa_name (op0);
5530       def1 = get_gimple_for_ssa_name (op1);
5531       if (!def1)
5532 	continue;
5533       swap = false;
5534       if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5535 	swap = true;
5536       if (swap)
5537 	{
5538 	  if (dump_file && (dump_flags & TDF_DETAILS))
5539 	    {
5540 	      fprintf (dump_file, "Swap operands in stmt:\n");
5541 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5542 	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5543 		       def0 ? lattice[gimple_uid (def0)] : 0,
5544 		       lattice[gimple_uid (def1)]);
5545 	    }
5546 	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5547 			     gimple_assign_rhs2_ptr (stmt));
5548 	}
5549     }
5550   XDELETE (lattice);
5551 }
5552 
5553 /* Expand basic block BB from GIMPLE trees to RTL.  */
5554 
5555 static basic_block
5556 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5557 {
5558   gimple_stmt_iterator gsi;
5559   gimple_seq stmts;
5560   gimple *stmt = NULL;
5561   rtx_note *note = NULL;
5562   rtx_insn *last;
5563   edge e;
5564   edge_iterator ei;
5565 
5566   if (dump_file)
5567     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5568 	     bb->index);
5569 
5570   /* Note that since we are now transitioning from GIMPLE to RTL, we
5571      cannot use the gsi_*_bb() routines because they expect the basic
5572      block to be in GIMPLE, instead of RTL.  Therefore, we need to
5573      access the BB sequence directly.  */
5574   if (optimize)
5575     reorder_operands (bb);
5576   stmts = bb_seq (bb);
5577   bb->il.gimple.seq = NULL;
5578   bb->il.gimple.phi_nodes = NULL;
5579   rtl_profile_for_bb (bb);
5580   init_rtl_bb_info (bb);
5581   bb->flags |= BB_RTL;
5582 
5583   /* Remove the RETURN_EXPR if we may fall through to the exit
5584      instead.  */
5585   gsi = gsi_last (stmts);
5586   if (!gsi_end_p (gsi)
5587       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5588     {
5589       greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5590 
5591       gcc_assert (single_succ_p (bb));
5592       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5593 
5594       if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5595 	  && !gimple_return_retval (ret_stmt))
5596 	{
5597 	  gsi_remove (&gsi, false);
5598 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5599 	}
5600     }
5601 
5602   gsi = gsi_start (stmts);
5603   if (!gsi_end_p (gsi))
5604     {
5605       stmt = gsi_stmt (gsi);
5606       if (gimple_code (stmt) != GIMPLE_LABEL)
5607 	stmt = NULL;
5608     }
5609 
5610   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5611 
5612   if (stmt || elt)
5613     {
5614       gcc_checking_assert (!note);
5615       last = get_last_insn ();
5616 
5617       if (stmt)
5618 	{
5619 	  expand_gimple_stmt (stmt);
5620 	  gsi_next (&gsi);
5621 	}
5622 
5623       if (elt)
5624 	emit_label (*elt);
5625 
5626       BB_HEAD (bb) = NEXT_INSN (last);
5627       if (NOTE_P (BB_HEAD (bb)))
5628 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5629       gcc_assert (LABEL_P (BB_HEAD (bb)));
5630       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5631 
5632       maybe_dump_rtl_for_gimple_stmt (stmt, last);
5633     }
5634   else
5635     BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5636 
5637   if (note)
5638     NOTE_BASIC_BLOCK (note) = bb;
5639 
5640   for (; !gsi_end_p (gsi); gsi_next (&gsi))
5641     {
5642       basic_block new_bb;
5643 
5644       stmt = gsi_stmt (gsi);
5645 
5646       /* If this statement is a non-debug one, and we generate debug
5647 	 insns, then this one might be the last real use of a TERed
5648 	 SSA_NAME, but where there are still some debug uses further
5649 	 down.  Expanding the current SSA name in such further debug
5650 	 uses by their RHS might lead to wrong debug info, as coalescing
5651 	 might make the operands of such RHS be placed into the same
5652 	 pseudo as something else.  Like so:
5653 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
5654 	   use(a_1);
5655 	   a_2 = ...
5656            #DEBUG ... => a_1
5657 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5658 	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5659 	 the write to a_2 would actually have clobbered the place which
5660 	 formerly held a_0.
5661 
5662 	 So, instead of that, we recognize the situation, and generate
5663 	 debug temporaries at the last real use of TERed SSA names:
5664 	   a_1 = a_0 + 1;
5665            #DEBUG #D1 => a_1
5666 	   use(a_1);
5667 	   a_2 = ...
5668            #DEBUG ... => #D1
5669 	 */
5670       if (MAY_HAVE_DEBUG_BIND_INSNS
5671 	  && SA.values
5672 	  && !is_gimple_debug (stmt))
5673 	{
5674 	  ssa_op_iter iter;
5675 	  tree op;
5676 	  gimple *def;
5677 
5678 	  location_t sloc = curr_insn_location ();
5679 
5680 	  /* Look for SSA names that have their last use here (TERed
5681 	     names always have only one real use).  */
5682 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5683 	    if ((def = get_gimple_for_ssa_name (op)))
5684 	      {
5685 		imm_use_iterator imm_iter;
5686 		use_operand_p use_p;
5687 		bool have_debug_uses = false;
5688 
5689 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5690 		  {
5691 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
5692 		      {
5693 			have_debug_uses = true;
5694 			break;
5695 		      }
5696 		  }
5697 
5698 		if (have_debug_uses)
5699 		  {
5700 		    /* OP is a TERed SSA name, with DEF its defining
5701 		       statement, and where OP is used in further debug
5702 		       instructions.  Generate a debug temporary, and
5703 		       replace all uses of OP in debug insns with that
5704 		       temporary.  */
5705 		    gimple *debugstmt;
5706 		    tree value = gimple_assign_rhs_to_tree (def);
5707 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
5708 		    rtx val;
5709 		    machine_mode mode;
5710 
5711 		    set_curr_insn_location (gimple_location (def));
5712 
5713 		    DECL_ARTIFICIAL (vexpr) = 1;
5714 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
5715 		    if (DECL_P (value))
5716 		      mode = DECL_MODE (value);
5717 		    else
5718 		      mode = TYPE_MODE (TREE_TYPE (value));
5719 		    SET_DECL_MODE (vexpr, mode);
5720 
5721 		    val = gen_rtx_VAR_LOCATION
5722 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5723 
5724 		    emit_debug_insn (val);
5725 
5726 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5727 		      {
5728 			if (!gimple_debug_bind_p (debugstmt))
5729 			  continue;
5730 
5731 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5732 			  SET_USE (use_p, vexpr);
5733 
5734 			update_stmt (debugstmt);
5735 		      }
5736 		  }
5737 	      }
5738 	  set_curr_insn_location (sloc);
5739 	}
5740 
5741       currently_expanding_gimple_stmt = stmt;
5742 
5743       /* Expand this statement, then evaluate the resulting RTL and
5744 	 fixup the CFG accordingly.  */
5745       if (gimple_code (stmt) == GIMPLE_COND)
5746 	{
5747 	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5748 	  if (new_bb)
5749 	    return new_bb;
5750 	}
5751       else if (is_gimple_debug (stmt))
5752 	{
5753 	  location_t sloc = curr_insn_location ();
5754 	  gimple_stmt_iterator nsi = gsi;
5755 
5756 	  for (;;)
5757 	    {
5758 	      tree var;
5759 	      tree value = NULL_TREE;
5760 	      rtx val = NULL_RTX;
5761 	      machine_mode mode;
5762 
5763 	      if (!gimple_debug_nonbind_marker_p (stmt))
5764 		{
5765 		  if (gimple_debug_bind_p (stmt))
5766 		    {
5767 		      var = gimple_debug_bind_get_var (stmt);
5768 
5769 		      if (TREE_CODE (var) != DEBUG_EXPR_DECL
5770 			  && TREE_CODE (var) != LABEL_DECL
5771 			  && !target_for_debug_bind (var))
5772 			goto delink_debug_stmt;
5773 
5774 		      if (DECL_P (var))
5775 			mode = DECL_MODE (var);
5776 		      else
5777 			mode = TYPE_MODE (TREE_TYPE (var));
5778 
5779 		      if (gimple_debug_bind_has_value_p (stmt))
5780 			value = gimple_debug_bind_get_value (stmt);
5781 
5782 		      val = gen_rtx_VAR_LOCATION
5783 			(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5784 		    }
5785 		  else if (gimple_debug_source_bind_p (stmt))
5786 		    {
5787 		      var = gimple_debug_source_bind_get_var (stmt);
5788 
5789 		      value = gimple_debug_source_bind_get_value (stmt);
5790 
5791 		      mode = DECL_MODE (var);
5792 
5793 		      val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5794 						  VAR_INIT_STATUS_UNINITIALIZED);
5795 		    }
5796 		  else
5797 		    gcc_unreachable ();
5798 		}
5799 	      /* If this function was first compiled with markers
5800 		 enabled, but they're now disabled (e.g. LTO), drop
5801 		 them on the floor.  */
5802 	      else if (gimple_debug_nonbind_marker_p (stmt)
5803 		       && !MAY_HAVE_DEBUG_MARKER_INSNS)
5804 		goto delink_debug_stmt;
5805 	      else if (gimple_debug_begin_stmt_p (stmt))
5806 		val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5807 	      else if (gimple_debug_inline_entry_p (stmt))
5808 		{
5809 		  tree block = gimple_block (stmt);
5810 
5811 		  if (block)
5812 		    val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5813 		  else
5814 		    goto delink_debug_stmt;
5815 		}
5816 	      else
5817 		gcc_unreachable ();
5818 
5819 	      last = get_last_insn ();
5820 
5821 	      set_curr_insn_location (gimple_location (stmt));
5822 
5823 	      emit_debug_insn (val);
5824 
5825 	      if (dump_file && (dump_flags & TDF_DETAILS))
5826 		{
5827 		  /* We can't dump the insn with a TREE where an RTX
5828 		     is expected.  */
5829 		  if (GET_CODE (val) == VAR_LOCATION)
5830 		    {
5831 		      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5832 		      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5833 		    }
5834 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
5835 		  if (GET_CODE (val) == VAR_LOCATION)
5836 		    PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5837 		}
5838 
5839 	    delink_debug_stmt:
5840 	      /* In order not to generate too many debug temporaries,
5841 	         we delink all uses of debug statements we already expanded.
5842 		 Therefore debug statements between definition and real
5843 		 use of TERed SSA names will continue to use the SSA name,
5844 		 and not be replaced with debug temps.  */
5845 	      delink_stmt_imm_use (stmt);
5846 
5847 	      gsi = nsi;
5848 	      gsi_next (&nsi);
5849 	      if (gsi_end_p (nsi))
5850 		break;
5851 	      stmt = gsi_stmt (nsi);
5852 	      if (!is_gimple_debug (stmt))
5853 		break;
5854 	    }
5855 
5856 	  set_curr_insn_location (sloc);
5857 	}
5858       else
5859 	{
5860 	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
5861 	  if (call_stmt
5862 	      && gimple_call_tail_p (call_stmt)
5863 	      && disable_tail_calls)
5864 	    gimple_call_set_tail (call_stmt, false);
5865 
5866 	  if (call_stmt && gimple_call_tail_p (call_stmt))
5867 	    {
5868 	      bool can_fallthru;
5869 	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5870 	      if (new_bb)
5871 		{
5872 		  if (can_fallthru)
5873 		    bb = new_bb;
5874 		  else
5875 		    return new_bb;
5876 		}
5877 	    }
5878 	  else
5879 	    {
5880 	      def_operand_p def_p;
5881 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5882 
5883 	      if (def_p != NULL)
5884 		{
5885 		  /* Ignore this stmt if it is in the list of
5886 		     replaceable expressions.  */
5887 		  if (SA.values
5888 		      && bitmap_bit_p (SA.values,
5889 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5890 		    continue;
5891 		}
5892 	      last = expand_gimple_stmt (stmt);
5893 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5894 	    }
5895 	}
5896     }
5897 
5898   currently_expanding_gimple_stmt = NULL;
5899 
5900   /* Expand implicit goto and convert goto_locus.  */
5901   FOR_EACH_EDGE (e, ei, bb->succs)
5902     {
5903       if (e->goto_locus != UNKNOWN_LOCATION)
5904 	set_curr_insn_location (e->goto_locus);
5905       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5906 	{
5907 	  emit_jump (label_rtx_for_bb (e->dest));
5908 	  e->flags &= ~EDGE_FALLTHRU;
5909 	}
5910     }
5911 
5912   /* Expanded RTL can create a jump in the last instruction of a block.
5913      This might later be assumed to be a jump to the successor and break
5914      edge insertion.  We need to insert a dummy move to prevent this.  PR41440.  */
5915   if (single_succ_p (bb)
5916       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5917       && (last = get_last_insn ())
5918       && (JUMP_P (last)
5919 	  || (DEBUG_INSN_P (last)
5920 	      && JUMP_P (prev_nondebug_insn (last)))))
5921     {
5922       rtx dummy = gen_reg_rtx (SImode);
5923       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5924     }
5925 
5926   do_pending_stack_adjust ();
5927 
5928   /* Find the block tail.  The last insn in the block is the insn
5929      before a barrier and/or table jump insn.  */
5930   last = get_last_insn ();
5931   if (BARRIER_P (last))
5932     last = PREV_INSN (last);
5933   if (JUMP_TABLE_DATA_P (last))
5934     last = PREV_INSN (PREV_INSN (last));
5935   if (BARRIER_P (last))
5936     last = PREV_INSN (last);
5937   BB_END (bb) = last;
5938 
5939   update_bb_for_insn (bb);
5940 
5941   return bb;
5942 }
5943 
5944 
5945 /* Create a basic block for initialization code.  */
5946 
5947 static basic_block
5948 construct_init_block (void)
5949 {
5950   basic_block init_block, first_block;
5951   edge e = NULL;
5952   int flags;
5953 
5954   /* Multiple entry points not supported yet.  */
5955   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5956   init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5957   init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5958   ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5959   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5960 
5961   e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5962 
5963   /* When the entry edge points to the first basic block, we don't need a
5964      jump; otherwise we have to jump to the proper target.  */
5965   if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5966     {
5967       tree label = gimple_block_label (e->dest);
5968 
5969       emit_jump (jump_target_rtx (label));
5970       flags = 0;
5971     }
5972   else
5973     flags = EDGE_FALLTHRU;
5974 
5975   init_block = create_basic_block (NEXT_INSN (get_insns ()),
5976 				   get_last_insn (),
5977 				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
5978   init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5979   add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5980   if (e)
5981     {
5982       first_block = e->dest;
5983       redirect_edge_succ (e, init_block);
5984       e = make_single_succ_edge (init_block, first_block, flags);
5985     }
5986   else
5987     e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5988 			       EDGE_FALLTHRU);
5989 
5990   update_bb_for_insn (init_block);
5991   return init_block;
5992 }
5993 
5994 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5995    found in the block tree.  */
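/* E.g. the function's outermost BLOCK gets level 0, a scope nested directly
   inside it gets level 1, a scope inside that gets level 2, and so on;
   sibling scopes share a level.  */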
5996 
5997 static void
5998 set_block_levels (tree block, int level)
5999 {
6000   while (block)
6001     {
6002       BLOCK_NUMBER (block) = level;
6003       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
6004       block = BLOCK_CHAIN (block);
6005     }
6006 }
6007 
6008 /* Create a block containing landing pads and similar stuff.  */
6009 
6010 static void
6011 construct_exit_block (void)
6012 {
6013   rtx_insn *head = get_last_insn ();
6014   rtx_insn *end;
6015   basic_block exit_block;
6016   edge e, e2;
6017   unsigned ix;
6018   edge_iterator ei;
6019   basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
6020   rtx_insn *orig_end = BB_END (prev_bb);
6021 
6022   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6023 
6024   /* Make sure the locus is set to the end of the function, so that
6025      epilogue line numbers and warnings are set properly.  */
6026   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
6027     input_location = cfun->function_end_locus;
6028 
6029   /* Generate rtl for function exit.  */
6030   expand_function_end ();
6031 
6032   end = get_last_insn ();
6033   if (head == end)
6034     return;
6035   /* While emitting the function end we could move the end of the last basic
6036      block.  */
6037   BB_END (prev_bb) = orig_end;
6038   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
6039     head = NEXT_INSN (head);
6040   /* But make sure exit_block starts with RETURN_LABEL, otherwise the
6041      bb count accounting will be confused.  Any instructions before that
6042      label are emitted for the case where PREV_BB falls through into the
6043      exit block, so append those instructions to prev_bb in that case.  */
6044   if (NEXT_INSN (head) != return_label)
6045     {
6046       while (NEXT_INSN (head) != return_label)
6047 	{
6048 	  if (!NOTE_P (NEXT_INSN (head)))
6049 	    BB_END (prev_bb) = NEXT_INSN (head);
6050 	  head = NEXT_INSN (head);
6051 	}
6052     }
6053   exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
6054   exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
6055   add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6056 
6057   ix = 0;
6058   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
6059     {
6060       e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6061       if (!(e->flags & EDGE_ABNORMAL))
6062 	redirect_edge_succ (e, exit_block);
6063       else
6064 	ix++;
6065     }
6066 
6067   e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6068 			     EDGE_FALLTHRU);
6069   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6070     if (e2 != e)
6071       {
6072 	exit_block->count -= e2->count ();
6073       }
6074   update_bb_for_insn (exit_block);
6075 }
6076 
6077 /* Helper function for discover_nonconstant_array_refs.
6078    Look for ARRAY_REF nodes with non-constant indexes and mark them
6079    addressable.  */
6080 
6081 static tree
6082 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6083 				   void *data ATTRIBUTE_UNUSED)
6084 {
6085   tree t = *tp;
6086 
6087   if (IS_TYPE_OR_DECL_P (t))
6088     *walk_subtrees = 0;
6089   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6090     {
6091       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6092 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6093 	      && (!TREE_OPERAND (t, 2)
6094 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6095 	     || (TREE_CODE (t) == COMPONENT_REF
6096 		 && (!TREE_OPERAND (t,2)
6097 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6098 	     || TREE_CODE (t) == BIT_FIELD_REF
6099 	     || TREE_CODE (t) == REALPART_EXPR
6100 	     || TREE_CODE (t) == IMAGPART_EXPR
6101 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
6102 	     || CONVERT_EXPR_P (t))
6103 	t = TREE_OPERAND (t, 0);
6104 
6105       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6106 	{
6107 	  t = get_base_address (t);
6108 	  if (t && DECL_P (t)
6109               && DECL_MODE (t) != BLKmode)
6110 	    TREE_ADDRESSABLE (t) = 1;
6111 	}
6112 
6113       *walk_subtrees = 0;
6114     }
6115 
6116   return NULL_TREE;
6117 }
6118 
6119 /* RTL expansion is not able to compile array references with variable
6120    offsets for arrays stored in a single register.  Discover such
6121    expressions and mark variables as addressable to avoid this
6122    scenario.  */
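/* A typical case, assuming the aggregate would otherwise be given a
   non-BLKmode (single-register) mode:

     struct vec { int e[2]; } v;
     ...
     return v.e[i];

   where I is not a compile-time constant.  Marking V addressable forces it
   into memory so the variable index can be expanded as an ordinary address
   computation.  */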
6123 
6124 static void
6125 discover_nonconstant_array_refs (void)
6126 {
6127   basic_block bb;
6128   gimple_stmt_iterator gsi;
6129 
6130   FOR_EACH_BB_FN (bb, cfun)
6131     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6132       {
6133 	gimple *stmt = gsi_stmt (gsi);
6134 	if (!is_gimple_debug (stmt))
6135 	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6136       }
6137 }
6138 
6139 /* This function sets crtl->args.internal_arg_pointer to a virtual
6140    register if DRAP is needed.  The local register allocator will replace
6141    virtual_incoming_args_rtx with the virtual register.  */
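/* (DRAP is the dynamic realign argument pointer: when the stack must be
   realigned beyond the incoming boundary, incoming arguments can no longer
   be addressed from the realigned stack or frame pointer, so a separate
   register preserves the original argument pointer and
   crtl->args.internal_arg_pointer is redirected to it.)  */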
6142 
6143 static void
6144 expand_stack_alignment (void)
6145 {
6146   rtx drap_rtx;
6147   unsigned int preferred_stack_boundary;
6148 
6149   if (! SUPPORTS_STACK_ALIGNMENT)
6150     return;
6151 
6152   if (cfun->calls_alloca
6153       || cfun->has_nonlocal_label
6154       || crtl->has_nonlocal_goto)
6155     crtl->need_drap = true;
6156 
6157   /* Call update_stack_boundary here again to update incoming stack
6158      boundary.  It may set incoming stack alignment to a different
6159      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
6160      use the minimum incoming stack alignment to check if it is OK
6161      to perform sibcall optimization since sibcall optimization will
6162      only align the outgoing stack to incoming stack boundary.  */
6163   if (targetm.calls.update_stack_boundary)
6164     targetm.calls.update_stack_boundary ();
6165 
6166   /* The incoming stack frame has to be aligned at least at
6167      parm_stack_boundary.  */
6168   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6169 
6170   /* Update crtl->stack_alignment_estimated and use it later to align
6171      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
6172      exceptions since callgraph doesn't collect incoming stack alignment
6173      in this case.  */
6174   if (cfun->can_throw_non_call_exceptions
6175       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6176     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6177   else
6178     preferred_stack_boundary = crtl->preferred_stack_boundary;
6179   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6180     crtl->stack_alignment_estimated = preferred_stack_boundary;
6181   if (preferred_stack_boundary > crtl->stack_alignment_needed)
6182     crtl->stack_alignment_needed = preferred_stack_boundary;
6183 
6184   gcc_assert (crtl->stack_alignment_needed
6185 	      <= crtl->stack_alignment_estimated);
6186 
6187   crtl->stack_realign_needed
6188     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6189   crtl->stack_realign_tried = crtl->stack_realign_needed;
6190 
6191   crtl->stack_realign_processed = true;
6192 
6193   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6194      alignment.  */
6195   gcc_assert (targetm.calls.get_drap_rtx != NULL);
6196   drap_rtx = targetm.calls.get_drap_rtx ();
6197 
6198   /* stack_realign_drap and drap_rtx must match.  */
6199   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6200 
6201   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
6202   if (drap_rtx != NULL)
6203     {
6204       crtl->args.internal_arg_pointer = drap_rtx;
6205 
6206       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6207          needed. */
6208       fixup_tail_calls ();
6209     }
6210 }
6211 
6212 
6213 static void
6214 expand_main_function (void)
6215 {
6216 #if (defined(INVOKE__main)				\
6217      || (!defined(HAS_INIT_SECTION)			\
6218 	 && !defined(INIT_SECTION_ASM_OP)		\
6219 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6220   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6221 #endif
6222 }
6223 
6224 
6225 /* Expand code to initialize the stack_protect_guard.  This is invoked at
6226    the beginning of a function to be protected.  */
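/* Conceptually the code emitted here is just

     <guard slot in this frame> = <global or TLS guard value>;

   with the target given a chance, via the stack_protect_combined_set and
   stack_protect_set patterns, to do the copy without exposing the guard
   value in a scratch register.  */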
6227 
6228 static void
6229 stack_protect_prologue (void)
6230 {
6231   tree guard_decl = targetm.stack_protect_guard ();
6232   rtx x, y;
6233 
6234   crtl->stack_protect_guard_decl = guard_decl;
6235   x = expand_normal (crtl->stack_protect_guard);
6236 
6237   if (targetm.have_stack_protect_combined_set () && guard_decl)
6238     {
6239       gcc_assert (DECL_P (guard_decl));
6240       y = DECL_RTL (guard_decl);
6241 
6242       /* Allow the target to compute address of Y and copy it to X without
6243 	 leaking Y into a register.  This combined address + copy pattern
6244 	 allows the target to prevent spilling of any intermediate results by
6245 	 splitting it after register allocation.  */
6246       if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
6247 	{
6248 	  emit_insn (insn);
6249 	  return;
6250 	}
6251     }
6252 
6253   if (guard_decl)
6254     y = expand_normal (guard_decl);
6255   else
6256     y = const0_rtx;
6257 
6258   /* Allow the target to copy from Y to X without leaking Y into a
6259      register.  */
6260   if (targetm.have_stack_protect_set ())
6261     if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6262       {
6263 	emit_insn (insn);
6264 	return;
6265       }
6266 
6267   /* Otherwise do a straight move.  */
6268   emit_move_insn (x, y);
6269 }
6270 
6271 /* Translate the intermediate representation contained in the CFG
6272    from GIMPLE trees to RTL.
6273 
6274    We do conversion per basic block and preserve/update the tree CFG.
6275    This implies we have to do some magic as the CFG can simultaneously
6276    consist of basic blocks containing RTL and GIMPLE trees.  This can
6277    confuse the CFG hooks, so be careful to not manipulate CFG during
6278    the expansion.  */
6279 
6280 namespace {
6281 
6282 const pass_data pass_data_expand =
6283 {
6284   RTL_PASS, /* type */
6285   "expand", /* name */
6286   OPTGROUP_NONE, /* optinfo_flags */
6287   TV_EXPAND, /* tv_id */
6288   ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6289     | PROP_gimple_lcx
6290     | PROP_gimple_lvec
6291     | PROP_gimple_lva), /* properties_required */
6292   PROP_rtl, /* properties_provided */
6293   ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6294   0, /* todo_flags_start */
6295   0, /* todo_flags_finish */
6296 };
6297 
6298 class pass_expand : public rtl_opt_pass
6299 {
6300 public:
6301   pass_expand (gcc::context *ctxt)
6302     : rtl_opt_pass (pass_data_expand, ctxt)
6303   {}
6304 
6305   /* opt_pass methods: */
6306   virtual unsigned int execute (function *);
6307 
6308 }; // class pass_expand
6309 
6310 unsigned int
6311 pass_expand::execute (function *fun)
6312 {
6313   basic_block bb, init_block;
6314   edge_iterator ei;
6315   edge e;
6316   rtx_insn *var_seq, *var_ret_seq;
6317   unsigned i;
6318 
6319   timevar_push (TV_OUT_OF_SSA);
6320   rewrite_out_of_ssa (&SA);
6321   timevar_pop (TV_OUT_OF_SSA);
6322   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6323 
6324   if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6325     {
6326       gimple_stmt_iterator gsi;
6327       FOR_EACH_BB_FN (bb, cfun)
6328 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6329 	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
6330 	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6331     }
6332 
6333   /* Make sure all values used by the optimization passes have sane
6334      defaults.  */
6335   reg_renumber = 0;
6336 
6337   /* Some backends want to know that we are expanding to RTL.  */
6338   currently_expanding_to_rtl = 1;
6339   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
6340   free_dominance_info (CDI_DOMINATORS);
6341 
6342   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6343 
6344   insn_locations_init ();
6345   if (!DECL_IS_BUILTIN (current_function_decl))
6346     {
6347       /* Eventually, all FEs should explicitly set function_start_locus.  */
6348       if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6349 	set_curr_insn_location
6350 	  (DECL_SOURCE_LOCATION (current_function_decl));
6351       else
6352 	set_curr_insn_location (fun->function_start_locus);
6353     }
6354   else
6355     set_curr_insn_location (UNKNOWN_LOCATION);
6356   prologue_location = curr_insn_location ();
6357 
6358 #ifdef INSN_SCHEDULING
6359   init_sched_attrs ();
6360 #endif
6361 
6362   /* Make sure first insn is a note even if we don't want linenums.
6363      This makes sure the first insn will never be deleted.
6364      Also, final expects a note to appear there.  */
6365   emit_note (NOTE_INSN_DELETED);
6366 
6367   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
6368   discover_nonconstant_array_refs ();
6369 
6370   targetm.expand_to_rtl_hook ();
6371   crtl->init_stack_alignment ();
6372   fun->cfg->max_jumptable_ents = 0;
6373 
6374   /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
6375      of the function section at expansion time to predict the distance of calls.  */
6376   resolve_unique_section (current_function_decl, 0, flag_function_sections);
6377 
6378   /* Expand the variables recorded during gimple lowering.  */
6379   timevar_push (TV_VAR_EXPAND);
6380   start_sequence ();
6381 
6382   var_ret_seq = expand_used_vars ();
6383 
6384   var_seq = get_insns ();
6385   end_sequence ();
6386   timevar_pop (TV_VAR_EXPAND);
6387 
6388   /* Honor stack protection warnings.  */
6389   if (warn_stack_protect)
6390     {
6391       if (fun->calls_alloca)
6392 	warning (OPT_Wstack_protector,
6393 		 "stack protector not protecting local variables: "
6394 		 "variable length buffer");
6395       if (has_short_buffer && !crtl->stack_protect_guard)
6396 	warning (OPT_Wstack_protector,
6397 		 "stack protector not protecting function: "
6398 		 "all local arrays are less than %d bytes long",
6399 		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6400     }
6401 
6402   /* Set up parameters and prepare for return, for the function.  */
6403   expand_function_start (current_function_decl);
6404 
6405   /* If we emitted any instructions for setting up the variables,
6406      emit them before the FUNCTION_START note.  */
6407   if (var_seq)
6408     {
6409       emit_insn_before (var_seq, parm_birth_insn);
6410 
6411       /* In expand_function_end we'll insert the alloca save/restore
6412 	 before parm_birth_insn.  We've just inserted an alloca call.
6413 	 Adjust the pointer to match.  */
6414       parm_birth_insn = var_seq;
6415     }
6416 
6417   /* Now propagate the RTL assignment of each partition to the
6418      underlying var of each SSA_NAME.  */
6419   tree name;
6420 
6421   FOR_EACH_SSA_NAME (i, name, cfun)
6422     {
6423       /* We might have generated new SSA names in
6424 	 update_alias_info_with_stack_vars.  They will have a NULL
6425 	 defining statement, and won't be part of the partitioning,
6426 	 so ignore those.  */
6427       if (!SSA_NAME_DEF_STMT (name))
6428 	continue;
6429 
6430       adjust_one_expanded_partition_var (name);
6431     }
6432 
6433   /* Clean up RTL of variables that straddle across multiple
6434      partitions, and check that the rtl of any PARM_DECLs that are not
6435      cleaned up is that of their default defs.  */
6436   FOR_EACH_SSA_NAME (i, name, cfun)
6437     {
6438       int part;
6439 
6440       /* We might have generated new SSA names in
6441 	 update_alias_info_with_stack_vars.  They will have a NULL
6442 	 defining statement, and won't be part of the partitioning,
6443 	 so ignore those.  */
6444       if (!SSA_NAME_DEF_STMT (name))
6445 	continue;
6446       part = var_to_partition (SA.map, name);
6447       if (part == NO_PARTITION)
6448 	continue;
6449 
6450       /* If this decl was marked as living in multiple places, reset
6451 	 this now to NULL.  */
6452       tree var = SSA_NAME_VAR (name);
6453       if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6454 	SET_DECL_RTL (var, NULL);
6455       /* Check that the pseudos chosen by assign_parms are those of
6456 	 the corresponding default defs.  */
6457       else if (SSA_NAME_IS_DEFAULT_DEF (name)
6458 	       && (TREE_CODE (var) == PARM_DECL
6459 		   || TREE_CODE (var) == RESULT_DECL))
6460 	{
6461 	  rtx in = DECL_RTL_IF_SET (var);
6462 	  gcc_assert (in);
6463 	  rtx out = SA.partition_to_pseudo[part];
6464 	  gcc_assert (in == out);
6465 
6466 	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6467 	     those expected by debug backends for each parm and for
6468 	     the result.  This is particularly important for stabs,
6469 	     whose register elimination from parm's DECL_RTL may cause
6470 	     -fcompare-debug differences as SET_DECL_RTL changes reg's
6471 	     attrs.  So, make sure the RTL already has the parm as the
6472 	     EXPR, so that it won't change.  */
6473 	  SET_DECL_RTL (var, NULL_RTX);
6474 	  if (MEM_P (in))
6475 	    set_mem_attributes (in, var, true);
6476 	  SET_DECL_RTL (var, in);
6477 	}
6478     }
6479 
6480   /* If this function is `main', emit a call to `__main'
6481      to run global initializers, etc.  */
6482   if (DECL_NAME (current_function_decl)
6483       && MAIN_NAME_P (DECL_NAME (current_function_decl))
6484       && DECL_FILE_SCOPE_P (current_function_decl))
6485     expand_main_function ();
6486 
6487   /* Initialize the stack_protect_guard field.  This must happen after the
6488      call to __main (if any) so that the external decl is initialized.  */
6489   if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6490     stack_protect_prologue ();
6491 
6492   expand_phi_nodes (&SA);
6493 
6494   /* Release any stale SSA redirection data.  */
6495   redirect_edge_var_map_empty ();
6496 
6497   /* Register rtl specific functions for cfg.  */
6498   rtl_register_cfg_hooks ();
6499 
6500   init_block = construct_init_block ();
6501 
6502   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleared from the
6503      remaining edges later.  */
6504   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6505     e->flags &= ~EDGE_EXECUTABLE;
6506 
6507   /* If the function has too many markers, drop them while expanding.  */
6508   if (cfun->debug_marker_count
6509       >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6510     cfun->debug_nonbind_markers = false;
6511 
6512   lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6513   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6514 		  next_bb)
6515     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6516 
6517   if (MAY_HAVE_DEBUG_BIND_INSNS)
6518     expand_debug_locations ();
6519 
6520   if (deep_ter_debug_map)
6521     {
6522       delete deep_ter_debug_map;
6523       deep_ter_debug_map = NULL;
6524     }
6525 
6526   /* Free stuff we no longer need after GIMPLE optimizations.  */
6527   free_dominance_info (CDI_DOMINATORS);
6528   free_dominance_info (CDI_POST_DOMINATORS);
6529   delete_tree_cfg_annotations (fun);
6530 
6531   timevar_push (TV_OUT_OF_SSA);
6532   finish_out_of_ssa (&SA);
6533   timevar_pop (TV_OUT_OF_SSA);
6534 
6535   timevar_push (TV_POST_EXPAND);
6536   /* We are no longer in SSA form.  */
6537   fun->gimple_df->in_ssa_p = false;
6538   loops_state_clear (LOOP_CLOSED_SSA);
6539 
6540   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6541      conservatively to true until they are all profile aware.  */
6542   delete lab_rtx_for_bb;
6543   free_histograms (fun);
6544 
6545   construct_exit_block ();
6546   insn_locations_finalize ();
6547 
6548   if (var_ret_seq)
6549     {
6550       rtx_insn *after = return_label;
6551       rtx_insn *next = NEXT_INSN (after);
6552       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6553 	after = next;
6554       emit_insn_after (var_ret_seq, after);
6555     }
6556 
6557   /* Zap the tree EH table.  */
6558   set_eh_throw_stmt_table (fun, NULL);
6559 
6560   /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6561      split edges which edge insertions might do.  */
6562   rebuild_jump_labels (get_insns ());
6563 
6564   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6565 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6566     {
6567       edge e;
6568       edge_iterator ei;
6569       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6570 	{
6571 	  if (e->insns.r)
6572 	    {
6573 	      rebuild_jump_labels_chain (e->insns.r);
6574 	      /* Put insns after parm birth, but before
6575 		 NOTE_INSN_FUNCTION_BEG.  */
6576 	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6577 		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6578 		{
6579 		  rtx_insn *insns = e->insns.r;
6580 		  e->insns.r = NULL;
6581 		  if (NOTE_P (parm_birth_insn)
6582 		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6583 		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6584 		  else
6585 		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6586 		}
6587 	      else
6588 		commit_one_edge_insertion (e);
6589 	    }
6590 	  else
6591 	    ei_next (&ei);
6592 	}
6593     }
6594 
6595   /* We're done expanding trees to RTL.  */
6596   currently_expanding_to_rtl = 0;
6597 
6598   flush_mark_addressable_queue ();
6599 
6600   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6601 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6602     {
6603       edge e;
6604       edge_iterator ei;
6605       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6606 	{
6607 	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
6608 	  e->flags &= ~EDGE_EXECUTABLE;
6609 
6610 	  /* At the moment not all abnormal edges match the RTL
6611 	     representation.  It is safe to remove them here as
6612 	     find_many_sub_basic_blocks will rediscover them.
6613 	     In the future we should get this fixed properly.  */
6614 	  if ((e->flags & EDGE_ABNORMAL)
6615 	      && !(e->flags & EDGE_SIBCALL))
6616 	    remove_edge (e);
6617 	  else
6618 	    ei_next (&ei);
6619 	}
6620     }
6621 
6622   auto_sbitmap blocks (last_basic_block_for_fn (fun));
6623   bitmap_ones (blocks);
6624   find_many_sub_basic_blocks (blocks);
6625   purge_all_dead_edges ();
6626 
6627   /* After initial rtl generation, call back to finish generating
6628      exception support code.  We need to do this before cleaning up
6629      the CFG as the code does not expect dead landing pads.  */
6630   if (fun->eh->region_tree != NULL)
6631     finish_eh_generation ();
6632 
6633   /* Call expand_stack_alignment after finishing all
6634      updates to crtl->preferred_stack_boundary.  */
6635   expand_stack_alignment ();
6636 
6637   /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6638      function.  */
6639   if (crtl->tail_call_emit)
6640     fixup_tail_calls ();
6641 
6642   /* BB subdivision may have created basic blocks that are only reachable
6643      from unlikely bbs but not marked as such in the profile.  */
6644   if (optimize)
6645     propagate_unlikely_bbs_forward ();
6646 
6647   /* Remove unreachable blocks, otherwise we cannot compute dominators
6648      which are needed for loop state verification.  As a side-effect
6649      this also compacts blocks.
6650      ???  We cannot remove trivially dead insns here as for example
6651      the DRAP reg on i?86 is not magically live at this point.
6652      gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
6653   cleanup_cfg (CLEANUP_NO_INSN_DEL);
6654 
6655   checking_verify_flow_info ();
6656 
6657   /* Initialize pseudos allocated for hard registers.  */
6658   emit_initial_value_sets ();
6659 
6660   /* And finally unshare all RTL.  */
6661   unshare_all_rtl ();
6662 
6663   /* There's no need to defer outputting this function any more; we
6664      know we want to output it.  */
6665   DECL_DEFER_OUTPUT (current_function_decl) = 0;
6666 
6667   /* Now that we're done expanding trees to RTL, we shouldn't have any
6668      more CONCATs anywhere.  */
6669   generating_concat_p = 0;
6670 
6671   if (dump_file)
6672     {
6673       fprintf (dump_file,
6674 	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6675       /* And the pass manager will dump RTL for us.  */
6676     }
6677 
6678   /* If we're emitting a nested function, make sure its parent gets
6679      emitted as well.  Doing otherwise confuses debug info.  */
6680     {
6681       tree parent;
6682       for (parent = DECL_CONTEXT (current_function_decl);
6683 	   parent != NULL_TREE;
6684 	   parent = get_containing_scope (parent))
6685 	if (TREE_CODE (parent) == FUNCTION_DECL)
6686 	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6687     }
6688 
6689   TREE_ASM_WRITTEN (current_function_decl) = 1;
6690 
6691   /* After expanding, the return labels are no longer needed. */
6692   return_label = NULL;
6693   naked_return_label = NULL;
6694 
6695   /* After expanding, the tm_restart map is no longer needed.  */
6696   if (fun->gimple_df->tm_restart)
6697     fun->gimple_df->tm_restart = NULL;
6698 
6699   /* Tag the blocks with a depth number so that change_scope can find
6700      the common parent easily.  */
6701   set_block_levels (DECL_INITIAL (fun->decl), 0);
6702   default_rtl_profile ();
6703 
6704   /* For -dx discard loops now, otherwise IL verify in clean_state will
6705      ICE.  */
6706   if (rtl_dump_and_exit)
6707     {
6708       cfun->curr_properties &= ~PROP_loops;
6709       loop_optimizer_finalize ();
6710     }
6711 
6712   timevar_pop (TV_POST_EXPAND);
6713 
6714   return 0;
6715 }
6716 
6717 } // anon namespace
6718 
6719 rtl_opt_pass *
6720 make_pass_expand (gcc::context *ctxt)
6721 {
6722   return new pass_expand (ctxt);
6723 }
6724