xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cfgexpand.c (revision 8feb0f0b7eaff0608f8350bbfa3098827b4bb91b)
1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber.  */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "tree-inline.h"
65 #include "value-prof.h"
66 #include "tree-ssa-live.h"
67 #include "tree-outof-ssa.h"
68 #include "cfgloop.h"
69 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
70 #include "stringpool.h"
71 #include "attribs.h"
72 #include "asan.h"
73 #include "tree-ssa-address.h"
74 #include "output.h"
75 #include "builtins.h"
76 
77 /* Some systems use __main in a way incompatible with its use in gcc, in these
78    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79    give the same symbol without quotes for an alternative entry point.  You
80    must define both, or neither.  */
81 #ifndef NAME__MAIN
82 #define NAME__MAIN "__main"
83 #endif
84 
85 /* This variable holds information helping the rewriting of SSA trees
86    into RTL.  */
87 struct ssaexpand SA;
88 
89 /* This variable holds the currently expanded gimple statement for purposes
90    of comminucating the profile info to the builtin expanders.  */
91 gimple *currently_expanding_gimple_stmt;
92 
93 static rtx expand_debug_expr (tree);
94 
95 static bool defer_stack_allocation (tree, bool);
96 
97 static void record_alignment_for_reg_var (unsigned int);
98 
99 /* Return an expression tree corresponding to the RHS of GIMPLE
100    statement STMT.  */
101 
102 tree
gimple_assign_rhs_to_tree(gimple * stmt)103 gimple_assign_rhs_to_tree (gimple *stmt)
104 {
105   tree t;
106   switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
107     {
108     case GIMPLE_TERNARY_RHS:
109       t = build3 (gimple_assign_rhs_code (stmt),
110 		  TREE_TYPE (gimple_assign_lhs (stmt)),
111 		  gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt),
112 		  gimple_assign_rhs3 (stmt));
113       break;
114     case GIMPLE_BINARY_RHS:
115       t = build2 (gimple_assign_rhs_code (stmt),
116 		  TREE_TYPE (gimple_assign_lhs (stmt)),
117 		  gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
118       break;
119     case GIMPLE_UNARY_RHS:
120       t = build1 (gimple_assign_rhs_code (stmt),
121 		  TREE_TYPE (gimple_assign_lhs (stmt)),
122 		  gimple_assign_rhs1 (stmt));
123       break;
124     case GIMPLE_SINGLE_RHS:
125       {
126 	t = gimple_assign_rhs1 (stmt);
127 	/* Avoid modifying this tree in place below.  */
128 	if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
129 	     && gimple_location (stmt) != EXPR_LOCATION (t))
130 	    || (gimple_block (stmt) && currently_expanding_to_rtl
131 		&& EXPR_P (t)))
132 	  t = copy_node (t);
133 	break;
134       }
135     default:
136       gcc_unreachable ();
137     }
138 
139   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
140     SET_EXPR_LOCATION (t, gimple_location (stmt));
141 
142   return t;
143 }
144 
145 
146 #ifndef STACK_ALIGNMENT_NEEDED
147 #define STACK_ALIGNMENT_NEEDED 1
148 #endif
149 
150 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
151 
152 /* Choose either CUR or NEXT as the leader DECL for a partition.
153    Prefer ignored decls, to simplify debug dumps and reduce ambiguity
154    out of the same user variable being in multiple partitions (this is
155    less likely for compiler-introduced temps).  */
156 
157 static tree
leader_merge(tree cur,tree next)158 leader_merge (tree cur, tree next)
159 {
160   if (cur == NULL || cur == next)
161     return next;
162 
163   if (DECL_P (cur) && DECL_IGNORED_P (cur))
164     return cur;
165 
166   if (DECL_P (next) && DECL_IGNORED_P (next))
167     return next;
168 
169   return cur;
170 }
171 
172 /* Associate declaration T with storage space X.  If T is no
173    SSA name this is exactly SET_DECL_RTL, otherwise make the
174    partition of T associated with X.  */
175 static inline void
set_rtl(tree t,rtx x)176 set_rtl (tree t, rtx x)
177 {
178   gcc_checking_assert (!x
179 		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
180 		       || (use_register_for_decl (t)
181 			   ? (REG_P (x)
182 			      || (GET_CODE (x) == CONCAT
183 				  && (REG_P (XEXP (x, 0))
184 				      || SUBREG_P (XEXP (x, 0)))
185 				  && (REG_P (XEXP (x, 1))
186 				      || SUBREG_P (XEXP (x, 1))))
187 			      /* We need to accept PARALLELs for RESUT_DECLs
188 				 because of vector types with BLKmode returned
189 				 in multiple registers, but they are supposed
190 				 to be uncoalesced.  */
191 			      || (GET_CODE (x) == PARALLEL
192 				  && SSAVAR (t)
193 				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
194 				  && (GET_MODE (x) == BLKmode
195 				      || !flag_tree_coalesce_vars)))
196 			   : (MEM_P (x) || x == pc_rtx
197 			      || (GET_CODE (x) == CONCAT
198 				  && MEM_P (XEXP (x, 0))
199 				  && MEM_P (XEXP (x, 1))))));
200   /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
201      RESULT_DECLs has the expected mode.  For memory, we accept
202      unpromoted modes, since that's what we're likely to get.  For
203      PARM_DECLs and RESULT_DECLs, we'll have been called by
204      set_parm_rtl, which will give us the default def, so we don't
205      have to compute it ourselves.  For RESULT_DECLs, we accept mode
206      mismatches too, as long as we have BLKmode or are not coalescing
207      across variables, so that we don't reject BLKmode PARALLELs or
208      unpromoted REGs.  */
209   gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
210 		       || (SSAVAR (t)
211 			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
212 			   && (promote_ssa_mode (t, NULL) == BLKmode
213 			       || !flag_tree_coalesce_vars))
214 		       || !use_register_for_decl (t)
215 		       || GET_MODE (x) == promote_ssa_mode (t, NULL));
216 
217   if (x)
218     {
219       bool skip = false;
220       tree cur = NULL_TREE;
221       rtx xm = x;
222 
223     retry:
224       if (MEM_P (xm))
225 	cur = MEM_EXPR (xm);
226       else if (REG_P (xm))
227 	cur = REG_EXPR (xm);
228       else if (SUBREG_P (xm))
229 	{
230 	  gcc_assert (subreg_lowpart_p (xm));
231 	  xm = SUBREG_REG (xm);
232 	  goto retry;
233 	}
234       else if (GET_CODE (xm) == CONCAT)
235 	{
236 	  xm = XEXP (xm, 0);
237 	  goto retry;
238 	}
239       else if (GET_CODE (xm) == PARALLEL)
240 	{
241 	  xm = XVECEXP (xm, 0, 0);
242 	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
243 	  xm = XEXP (xm, 0);
244 	  goto retry;
245 	}
246       else if (xm == pc_rtx)
247 	skip = true;
248       else
249 	gcc_unreachable ();
250 
251       tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
252 
253       if (cur != next)
254 	{
255 	  if (MEM_P (x))
256 	    set_mem_attributes (x,
257 				next && TREE_CODE (next) == SSA_NAME
258 				? TREE_TYPE (next)
259 				: next, true);
260 	  else
261 	    set_reg_attrs_for_decl_rtl (next, x);
262 	}
263     }
264 
265   if (TREE_CODE (t) == SSA_NAME)
266     {
267       int part = var_to_partition (SA.map, t);
268       if (part != NO_PARTITION)
269 	{
270 	  if (SA.partition_to_pseudo[part])
271 	    gcc_assert (SA.partition_to_pseudo[part] == x);
272 	  else if (x != pc_rtx)
273 	    SA.partition_to_pseudo[part] = x;
274 	}
275       /* For the benefit of debug information at -O0 (where
276          vartracking doesn't run) record the place also in the base
277          DECL.  For PARMs and RESULTs, do so only when setting the
278          default def.  */
279       if (x && x != pc_rtx && SSA_NAME_VAR (t)
280 	  && (VAR_P (SSA_NAME_VAR (t))
281 	      || SSA_NAME_IS_DEFAULT_DEF (t)))
282 	{
283 	  tree var = SSA_NAME_VAR (t);
284 	  /* If we don't yet have something recorded, just record it now.  */
285 	  if (!DECL_RTL_SET_P (var))
286 	    SET_DECL_RTL (var, x);
287 	  /* If we have it set already to "multiple places" don't
288 	     change this.  */
289 	  else if (DECL_RTL (var) == pc_rtx)
290 	    ;
291 	  /* If we have something recorded and it's not the same place
292 	     as we want to record now, we have multiple partitions for the
293 	     same base variable, with different places.  We can't just
294 	     randomly chose one, hence we have to say that we don't know.
295 	     This only happens with optimization, and there var-tracking
296 	     will figure out the right thing.  */
297 	  else if (DECL_RTL (var) != x)
298 	    SET_DECL_RTL (var, pc_rtx);
299 	}
300     }
301   else
302     SET_DECL_RTL (t, x);
303 }
304 
305 /* This structure holds data relevant to one variable that will be
306    placed in a stack slot.  */
307 class stack_var
308 {
309 public:
310   /* The Variable.  */
311   tree decl;
312 
313   /* Initially, the size of the variable.  Later, the size of the partition,
314      if this variable becomes it's partition's representative.  */
315   poly_uint64 size;
316 
317   /* The *byte* alignment required for this variable.  Or as, with the
318      size, the alignment for this partition.  */
319   unsigned int alignb;
320 
321   /* The partition representative.  */
322   size_t representative;
323 
324   /* The next stack variable in the partition, or EOC.  */
325   size_t next;
326 
327   /* The numbers of conflicting stack variables.  */
328   bitmap conflicts;
329 };
330 
331 #define EOC  ((size_t)-1)
332 
333 /* We have an array of such objects while deciding allocation.  */
334 static class stack_var *stack_vars;
335 static size_t stack_vars_alloc;
336 static size_t stack_vars_num;
337 static hash_map<tree, size_t> *decl_to_stack_part;
338 
339 /* Conflict bitmaps go on this obstack.  This allows us to destroy
340    all of them in one big sweep.  */
341 static bitmap_obstack stack_var_bitmap_obstack;
342 
343 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
344    is non-decreasing.  */
345 static size_t *stack_vars_sorted;
346 
347 /* The phase of the stack frame.  This is the known misalignment of
348    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
349    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
350 static int frame_phase;
351 
352 /* Used during expand_used_vars to remember if we saw any decls for
353    which we'd like to enable stack smashing protection.  */
354 static bool has_protected_decls;
355 
356 /* Used during expand_used_vars.  Remember if we say a character buffer
357    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
358 static bool has_short_buffer;
359 
360 /* Compute the byte alignment to use for DECL.  Ignore alignment
361    we can't do with expected alignment of the stack boundary.  */
362 
363 static unsigned int
align_local_variable(tree decl,bool really_expand)364 align_local_variable (tree decl, bool really_expand)
365 {
366   unsigned int align;
367 
368   if (TREE_CODE (decl) == SSA_NAME)
369     align = TYPE_ALIGN (TREE_TYPE (decl));
370   else
371     {
372       align = LOCAL_DECL_ALIGNMENT (decl);
373       /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
374 	 That is done before IPA and could bump alignment based on host
375 	 backend even for offloaded code which wants different
376 	 LOCAL_DECL_ALIGNMENT.  */
377       if (really_expand)
378 	SET_DECL_ALIGN (decl, align);
379     }
380   return align / BITS_PER_UNIT;
381 }
382 
383 /* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
384    down otherwise.  Return truncated BASE value.  */
385 
386 static inline unsigned HOST_WIDE_INT
align_base(HOST_WIDE_INT base,unsigned HOST_WIDE_INT align,bool align_up)387 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
388 {
389   return align_up ? (base + align - 1) & -align : base & -align;
390 }
391 
392 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
393    Return the frame offset.  */
394 
395 static poly_int64
alloc_stack_frame_space(poly_int64 size,unsigned HOST_WIDE_INT align)396 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
397 {
398   poly_int64 offset, new_frame_offset;
399 
400   if (FRAME_GROWS_DOWNWARD)
401     {
402       new_frame_offset
403 	= aligned_lower_bound (frame_offset - frame_phase - size,
404 			       align) + frame_phase;
405       offset = new_frame_offset;
406     }
407   else
408     {
409       new_frame_offset
410 	= aligned_upper_bound (frame_offset - frame_phase,
411 			       align) + frame_phase;
412       offset = new_frame_offset;
413       new_frame_offset += size;
414     }
415   frame_offset = new_frame_offset;
416 
417   if (frame_offset_overflow (frame_offset, cfun->decl))
418     frame_offset = offset = 0;
419 
420   return offset;
421 }
422 
423 /* Accumulate DECL into STACK_VARS.  */
424 
425 static void
add_stack_var(tree decl,bool really_expand)426 add_stack_var (tree decl, bool really_expand)
427 {
428   class stack_var *v;
429 
430   if (stack_vars_num >= stack_vars_alloc)
431     {
432       if (stack_vars_alloc)
433 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
434       else
435 	stack_vars_alloc = 32;
436       stack_vars
437 	= XRESIZEVEC (class stack_var, stack_vars, stack_vars_alloc);
438     }
439   if (!decl_to_stack_part)
440     decl_to_stack_part = new hash_map<tree, size_t>;
441 
442   v = &stack_vars[stack_vars_num];
443   decl_to_stack_part->put (decl, stack_vars_num);
444 
445   v->decl = decl;
446   tree size = TREE_CODE (decl) == SSA_NAME
447     ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
448     : DECL_SIZE_UNIT (decl);
449   v->size = tree_to_poly_uint64 (size);
450   /* Ensure that all variables have size, so that &a != &b for any two
451      variables that are simultaneously live.  */
452   if (known_eq (v->size, 0U))
453     v->size = 1;
454   v->alignb = align_local_variable (decl, really_expand);
455   /* An alignment of zero can mightily confuse us later.  */
456   gcc_assert (v->alignb != 0);
457 
458   /* All variables are initially in their own partition.  */
459   v->representative = stack_vars_num;
460   v->next = EOC;
461 
462   /* All variables initially conflict with no other.  */
463   v->conflicts = NULL;
464 
465   /* Ensure that this decl doesn't get put onto the list twice.  */
466   set_rtl (decl, pc_rtx);
467 
468   stack_vars_num++;
469 }
470 
471 /* Make the decls associated with luid's X and Y conflict.  */
472 
473 static void
add_stack_var_conflict(size_t x,size_t y)474 add_stack_var_conflict (size_t x, size_t y)
475 {
476   class stack_var *a = &stack_vars[x];
477   class stack_var *b = &stack_vars[y];
478   if (x == y)
479     return;
480   if (!a->conflicts)
481     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
482   if (!b->conflicts)
483     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
484   bitmap_set_bit (a->conflicts, y);
485   bitmap_set_bit (b->conflicts, x);
486 }
487 
488 /* Check whether the decls associated with luid's X and Y conflict.  */
489 
490 static bool
stack_var_conflict_p(size_t x,size_t y)491 stack_var_conflict_p (size_t x, size_t y)
492 {
493   class stack_var *a = &stack_vars[x];
494   class stack_var *b = &stack_vars[y];
495   if (x == y)
496     return false;
497   /* Partitions containing an SSA name result from gimple registers
498      with things like unsupported modes.  They are top-level and
499      hence conflict with everything else.  */
500   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
501     return true;
502 
503   if (!a->conflicts || !b->conflicts)
504     return false;
505   return bitmap_bit_p (a->conflicts, y);
506 }
507 
508 /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
509    enter its partition number into bitmap DATA.  */
510 
511 static bool
visit_op(gimple *,tree op,tree,void * data)512 visit_op (gimple *, tree op, tree, void *data)
513 {
514   bitmap active = (bitmap)data;
515   op = get_base_address (op);
516   if (op
517       && DECL_P (op)
518       && DECL_RTL_IF_SET (op) == pc_rtx)
519     {
520       size_t *v = decl_to_stack_part->get (op);
521       if (v)
522 	bitmap_set_bit (active, *v);
523     }
524   return false;
525 }
526 
527 /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
528    record conflicts between it and all currently active other partitions
529    from bitmap DATA.  */
530 
531 static bool
visit_conflict(gimple *,tree op,tree,void * data)532 visit_conflict (gimple *, tree op, tree, void *data)
533 {
534   bitmap active = (bitmap)data;
535   op = get_base_address (op);
536   if (op
537       && DECL_P (op)
538       && DECL_RTL_IF_SET (op) == pc_rtx)
539     {
540       size_t *v = decl_to_stack_part->get (op);
541       if (v && bitmap_set_bit (active, *v))
542 	{
543 	  size_t num = *v;
544 	  bitmap_iterator bi;
545 	  unsigned i;
546 	  gcc_assert (num < stack_vars_num);
547 	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
548 	    add_stack_var_conflict (num, i);
549 	}
550     }
551   return false;
552 }
553 
554 /* Helper routine for add_scope_conflicts, calculating the active partitions
555    at the end of BB, leaving the result in WORK.  We're called to generate
556    conflicts when FOR_CONFLICT is true, otherwise we're just tracking
557    liveness.  */
558 
559 static void
add_scope_conflicts_1(basic_block bb,bitmap work,bool for_conflict)560 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
561 {
562   edge e;
563   edge_iterator ei;
564   gimple_stmt_iterator gsi;
565   walk_stmt_load_store_addr_fn visit;
566 
567   bitmap_clear (work);
568   FOR_EACH_EDGE (e, ei, bb->preds)
569     bitmap_ior_into (work, (bitmap)e->src->aux);
570 
571   visit = visit_op;
572 
573   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
574     {
575       gimple *stmt = gsi_stmt (gsi);
576       walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
577     }
578   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
579     {
580       gimple *stmt = gsi_stmt (gsi);
581 
582       if (gimple_clobber_p (stmt))
583 	{
584 	  tree lhs = gimple_assign_lhs (stmt);
585 	  size_t *v;
586 	  /* Nested function lowering might introduce LHSs
587 	     that are COMPONENT_REFs.  */
588 	  if (!VAR_P (lhs))
589 	    continue;
590 	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
591 	      && (v = decl_to_stack_part->get (lhs)))
592 	    bitmap_clear_bit (work, *v);
593 	}
594       else if (!is_gimple_debug (stmt))
595 	{
596 	  if (for_conflict
597 	      && visit == visit_op)
598 	    {
599 	      /* If this is the first real instruction in this BB we need
600 	         to add conflicts for everything live at this point now.
601 		 Unlike classical liveness for named objects we can't
602 		 rely on seeing a def/use of the names we're interested in.
603 		 There might merely be indirect loads/stores.  We'd not add any
604 		 conflicts for such partitions.  */
605 	      bitmap_iterator bi;
606 	      unsigned i;
607 	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
608 		{
609 		  class stack_var *a = &stack_vars[i];
610 		  if (!a->conflicts)
611 		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
612 		  bitmap_ior_into (a->conflicts, work);
613 		}
614 	      visit = visit_conflict;
615 	    }
616 	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
617 	}
618     }
619 }
620 
621 /* Generate stack partition conflicts between all partitions that are
622    simultaneously live.  */
623 
624 static void
add_scope_conflicts(void)625 add_scope_conflicts (void)
626 {
627   basic_block bb;
628   bool changed;
629   bitmap work = BITMAP_ALLOC (NULL);
630   int *rpo;
631   int n_bbs;
632 
633   /* We approximate the live range of a stack variable by taking the first
634      mention of its name as starting point(s), and by the end-of-scope
635      death clobber added by gimplify as ending point(s) of the range.
636      This overapproximates in the case we for instance moved an address-taken
637      operation upward, without also moving a dereference to it upwards.
638      But it's conservatively correct as a variable never can hold values
639      before its name is mentioned at least once.
640 
641      We then do a mostly classical bitmap liveness algorithm.  */
642 
643   FOR_ALL_BB_FN (bb, cfun)
644     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
645 
646   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
647   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
648 
649   changed = true;
650   while (changed)
651     {
652       int i;
653       changed = false;
654       for (i = 0; i < n_bbs; i++)
655 	{
656 	  bitmap active;
657 	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
658 	  active = (bitmap)bb->aux;
659 	  add_scope_conflicts_1 (bb, work, false);
660 	  if (bitmap_ior_into (active, work))
661 	    changed = true;
662 	}
663     }
664 
665   FOR_EACH_BB_FN (bb, cfun)
666     add_scope_conflicts_1 (bb, work, true);
667 
668   free (rpo);
669   BITMAP_FREE (work);
670   FOR_ALL_BB_FN (bb, cfun)
671     BITMAP_FREE (bb->aux);
672 }
673 
674 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
675    sorting an array of indices by the properties of the object.  */
676 
677 static int
stack_var_cmp(const void * a,const void * b)678 stack_var_cmp (const void *a, const void *b)
679 {
680   size_t ia = *(const size_t *)a;
681   size_t ib = *(const size_t *)b;
682   unsigned int aligna = stack_vars[ia].alignb;
683   unsigned int alignb = stack_vars[ib].alignb;
684   poly_int64 sizea = stack_vars[ia].size;
685   poly_int64 sizeb = stack_vars[ib].size;
686   tree decla = stack_vars[ia].decl;
687   tree declb = stack_vars[ib].decl;
688   bool largea, largeb;
689   unsigned int uida, uidb;
690 
691   /* Primary compare on "large" alignment.  Large comes first.  */
692   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
693   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
694   if (largea != largeb)
695     return (int)largeb - (int)largea;
696 
697   /* Secondary compare on size, decreasing  */
698   int diff = compare_sizes_for_sort (sizeb, sizea);
699   if (diff != 0)
700     return diff;
701 
702   /* Tertiary compare on true alignment, decreasing.  */
703   if (aligna < alignb)
704     return -1;
705   if (aligna > alignb)
706     return 1;
707 
708   /* Final compare on ID for sort stability, increasing.
709      Two SSA names are compared by their version, SSA names come before
710      non-SSA names, and two normal decls are compared by their DECL_UID.  */
711   if (TREE_CODE (decla) == SSA_NAME)
712     {
713       if (TREE_CODE (declb) == SSA_NAME)
714 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
715       else
716 	return -1;
717     }
718   else if (TREE_CODE (declb) == SSA_NAME)
719     return 1;
720   else
721     uida = DECL_UID (decla), uidb = DECL_UID (declb);
722   if (uida < uidb)
723     return 1;
724   if (uida > uidb)
725     return -1;
726   return 0;
727 }
728 
729 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
730 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
731 
732 /* If the points-to solution *PI points to variables that are in a partition
733    together with other variables add all partition members to the pointed-to
734    variables bitmap.  */
735 
736 static void
add_partitioned_vars_to_ptset(struct pt_solution * pt,part_hashmap * decls_to_partitions,hash_set<bitmap> * visited,bitmap temp)737 add_partitioned_vars_to_ptset (struct pt_solution *pt,
738 			       part_hashmap *decls_to_partitions,
739 			       hash_set<bitmap> *visited, bitmap temp)
740 {
741   bitmap_iterator bi;
742   unsigned i;
743   bitmap *part;
744 
745   if (pt->anything
746       || pt->vars == NULL
747       /* The pointed-to vars bitmap is shared, it is enough to
748 	 visit it once.  */
749       || visited->add (pt->vars))
750     return;
751 
752   bitmap_clear (temp);
753 
754   /* By using a temporary bitmap to store all members of the partitions
755      we have to add we make sure to visit each of the partitions only
756      once.  */
757   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
758     if ((!temp
759 	 || !bitmap_bit_p (temp, i))
760 	&& (part = decls_to_partitions->get (i)))
761       bitmap_ior_into (temp, *part);
762   if (!bitmap_empty_p (temp))
763     bitmap_ior_into (pt->vars, temp);
764 }
765 
766 /* Update points-to sets based on partition info, so we can use them on RTL.
767    The bitmaps representing stack partitions will be saved until expand,
768    where partitioned decls used as bases in memory expressions will be
769    rewritten.  */
770 
771 static void
update_alias_info_with_stack_vars(void)772 update_alias_info_with_stack_vars (void)
773 {
774   part_hashmap *decls_to_partitions = NULL;
775   size_t i, j;
776   tree var = NULL_TREE;
777 
778   for (i = 0; i < stack_vars_num; i++)
779     {
780       bitmap part = NULL;
781       tree name;
782       struct ptr_info_def *pi;
783 
784       /* Not interested in partitions with single variable.  */
785       if (stack_vars[i].representative != i
786           || stack_vars[i].next == EOC)
787         continue;
788 
789       if (!decls_to_partitions)
790 	{
791 	  decls_to_partitions = new part_hashmap;
792 	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
793 	}
794 
795       /* Create an SSA_NAME that points to the partition for use
796          as base during alias-oracle queries on RTL for bases that
797 	 have been partitioned.  */
798       if (var == NULL_TREE)
799 	var = create_tmp_var (ptr_type_node);
800       name = make_ssa_name (var);
801 
802       /* Create bitmaps representing partitions.  They will be used for
803          points-to sets later, so use GGC alloc.  */
804       part = BITMAP_GGC_ALLOC ();
805       for (j = i; j != EOC; j = stack_vars[j].next)
806 	{
807 	  tree decl = stack_vars[j].decl;
808 	  unsigned int uid = DECL_PT_UID (decl);
809 	  bitmap_set_bit (part, uid);
810 	  decls_to_partitions->put (uid, part);
811 	  cfun->gimple_df->decls_to_pointers->put (decl, name);
812 	  if (TREE_ADDRESSABLE (decl))
813 	    TREE_ADDRESSABLE (name) = 1;
814 	}
815 
816       /* Make the SSA name point to all partition members.  */
817       pi = get_ptr_info (name);
818       pt_solution_set (&pi->pt, part, false);
819     }
820 
821   /* Make all points-to sets that contain one member of a partition
822      contain all members of the partition.  */
823   if (decls_to_partitions)
824     {
825       unsigned i;
826       tree name;
827       hash_set<bitmap> visited;
828       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
829 
830       FOR_EACH_SSA_NAME (i, name, cfun)
831 	{
832 	  struct ptr_info_def *pi;
833 
834 	  if (POINTER_TYPE_P (TREE_TYPE (name))
835 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
836 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
837 					   &visited, temp);
838 	}
839 
840       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
841 				     decls_to_partitions, &visited, temp);
842 
843       delete decls_to_partitions;
844       BITMAP_FREE (temp);
845     }
846 }
847 
848 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
849    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
850    Merge them into a single partition A.  */
851 
852 static void
union_stack_vars(size_t a,size_t b)853 union_stack_vars (size_t a, size_t b)
854 {
855   class stack_var *vb = &stack_vars[b];
856   bitmap_iterator bi;
857   unsigned u;
858 
859   gcc_assert (stack_vars[b].next == EOC);
860    /* Add B to A's partition.  */
861   stack_vars[b].next = stack_vars[a].next;
862   stack_vars[b].representative = a;
863   stack_vars[a].next = b;
864 
865   /* Make sure A is big enough to hold B.  */
866   stack_vars[a].size = upper_bound (stack_vars[a].size, stack_vars[b].size);
867 
868   /* Update the required alignment of partition A to account for B.  */
869   if (stack_vars[a].alignb < stack_vars[b].alignb)
870     stack_vars[a].alignb = stack_vars[b].alignb;
871 
872   /* Update the interference graph and merge the conflicts.  */
873   if (vb->conflicts)
874     {
875       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
876 	add_stack_var_conflict (a, stack_vars[u].representative);
877       BITMAP_FREE (vb->conflicts);
878     }
879 }
880 
881 /* A subroutine of expand_used_vars.  Binpack the variables into
882    partitions constrained by the interference graph.  The overall
883    algorithm used is as follows:
884 
885 	Sort the objects by size in descending order.
886 	For each object A {
887 	  S = size(A)
888 	  O = 0
889 	  loop {
890 	    Look for the largest non-conflicting object B with size <= S.
891 	    UNION (A, B)
892 	  }
893 	}
894 */
895 
896 static void
partition_stack_vars(void)897 partition_stack_vars (void)
898 {
899   size_t si, sj, n = stack_vars_num;
900 
901   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
902   for (si = 0; si < n; ++si)
903     stack_vars_sorted[si] = si;
904 
905   if (n == 1)
906     return;
907 
908   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
909 
910   for (si = 0; si < n; ++si)
911     {
912       size_t i = stack_vars_sorted[si];
913       unsigned int ialign = stack_vars[i].alignb;
914       poly_int64 isize = stack_vars[i].size;
915 
916       /* Ignore objects that aren't partition representatives. If we
917          see a var that is not a partition representative, it must
918          have been merged earlier.  */
919       if (stack_vars[i].representative != i)
920         continue;
921 
922       for (sj = si + 1; sj < n; ++sj)
923 	{
924 	  size_t j = stack_vars_sorted[sj];
925 	  unsigned int jalign = stack_vars[j].alignb;
926 	  poly_int64 jsize = stack_vars[j].size;
927 
928 	  /* Ignore objects that aren't partition representatives.  */
929 	  if (stack_vars[j].representative != j)
930 	    continue;
931 
932 	  /* Do not mix objects of "small" (supported) alignment
933 	     and "large" (unsupported) alignment.  */
934 	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
935 	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
936 	    break;
937 
938 	  /* For Address Sanitizer do not mix objects with different
939 	     sizes, as the shorter vars wouldn't be adequately protected.
940 	     Don't do that for "large" (unsupported) alignment objects,
941 	     those aren't protected anyway.  */
942 	  if (asan_sanitize_stack_p ()
943 	      && maybe_ne (isize, jsize)
944 	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
945 	    break;
946 
947 	  /* Ignore conflicting objects.  */
948 	  if (stack_var_conflict_p (i, j))
949 	    continue;
950 
951 	  /* UNION the objects, placing J at OFFSET.  */
952 	  union_stack_vars (i, j);
953 	}
954     }
955 
956   update_alias_info_with_stack_vars ();
957 }
958 
959 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
960 
961 static void
dump_stack_var_partition(void)962 dump_stack_var_partition (void)
963 {
964   size_t si, i, j, n = stack_vars_num;
965 
966   for (si = 0; si < n; ++si)
967     {
968       i = stack_vars_sorted[si];
969 
970       /* Skip variables that aren't partition representatives, for now.  */
971       if (stack_vars[i].representative != i)
972 	continue;
973 
974       fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
975       print_dec (stack_vars[i].size, dump_file);
976       fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
977 
978       for (j = i; j != EOC; j = stack_vars[j].next)
979 	{
980 	  fputc ('\t', dump_file);
981 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
982 	}
983       fputc ('\n', dump_file);
984     }
985 }
986 
987 /* Assign rtl to DECL at BASE + OFFSET.  */
988 
989 static void
expand_one_stack_var_at(tree decl,rtx base,unsigned base_align,poly_int64 offset)990 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
991 			 poly_int64 offset)
992 {
993   unsigned align;
994   rtx x;
995 
996   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
997   gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
998 
999   x = plus_constant (Pmode, base, offset);
1000   x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
1001 		   ? TYPE_MODE (TREE_TYPE (decl))
1002 		   : DECL_MODE (SSAVAR (decl)), x);
1003 
1004   if (TREE_CODE (decl) != SSA_NAME)
1005     {
1006       /* Set alignment we actually gave this decl if it isn't an SSA name.
1007          If it is we generate stack slots only accidentally so it isn't as
1008 	 important, we'll simply use the alignment that is already set.  */
1009       if (base == virtual_stack_vars_rtx)
1010 	offset -= frame_phase;
1011       align = known_alignment (offset);
1012       align *= BITS_PER_UNIT;
1013       if (align == 0 || align > base_align)
1014 	align = base_align;
1015 
1016       /* One would think that we could assert that we're not decreasing
1017 	 alignment here, but (at least) the i386 port does exactly this
1018 	 via the MINIMUM_ALIGNMENT hook.  */
1019 
1020       SET_DECL_ALIGN (decl, align);
1021       DECL_USER_ALIGN (decl) = 0;
1022     }
1023 
1024   set_rtl (decl, x);
1025 }
1026 
1027 class stack_vars_data
1028 {
1029 public:
1030   /* Vector of offset pairs, always end of some padding followed
1031      by start of the padding that needs Address Sanitizer protection.
1032      The vector is in reversed, highest offset pairs come first.  */
1033   auto_vec<HOST_WIDE_INT> asan_vec;
1034 
1035   /* Vector of partition representative decls in between the paddings.  */
1036   auto_vec<tree> asan_decl_vec;
1037 
1038   /* Base pseudo register for Address Sanitizer protected automatic vars.  */
1039   rtx asan_base;
1040 
1041   /* Alignment needed for the Address Sanitizer protected automatic vars.  */
1042   unsigned int asan_alignb;
1043 };
1044 
1045 /* A subroutine of expand_used_vars.  Give each partition representative
1046    a unique location within the stack frame.  Update each partition member
1047    with that location.  */
1048 
1049 static void
expand_stack_vars(bool (* pred)(size_t),class stack_vars_data * data)1050 expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
1051 {
1052   size_t si, i, j, n = stack_vars_num;
1053   poly_uint64 large_size = 0, large_alloc = 0;
1054   rtx large_base = NULL;
1055   unsigned large_align = 0;
1056   bool large_allocation_done = false;
1057   tree decl;
1058 
1059   /* Determine if there are any variables requiring "large" alignment.
1060      Since these are dynamically allocated, we only process these if
1061      no predicate involved.  */
1062   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1063   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1064     {
1065       /* Find the total size of these variables.  */
1066       for (si = 0; si < n; ++si)
1067 	{
1068 	  unsigned alignb;
1069 
1070 	  i = stack_vars_sorted[si];
1071 	  alignb = stack_vars[i].alignb;
1072 
1073 	  /* All "large" alignment decls come before all "small" alignment
1074 	     decls, but "large" alignment decls are not sorted based on
1075 	     their alignment.  Increase large_align to track the largest
1076 	     required alignment.  */
1077 	  if ((alignb * BITS_PER_UNIT) > large_align)
1078 	    large_align = alignb * BITS_PER_UNIT;
1079 
1080 	  /* Stop when we get to the first decl with "small" alignment.  */
1081 	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1082 	    break;
1083 
1084 	  /* Skip variables that aren't partition representatives.  */
1085 	  if (stack_vars[i].representative != i)
1086 	    continue;
1087 
1088 	  /* Skip variables that have already had rtl assigned.  See also
1089 	     add_stack_var where we perpetrate this pc_rtx hack.  */
1090 	  decl = stack_vars[i].decl;
1091 	  if (TREE_CODE (decl) == SSA_NAME
1092 	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1093 	      : DECL_RTL (decl) != pc_rtx)
1094 	    continue;
1095 
1096 	  large_size = aligned_upper_bound (large_size, alignb);
1097 	  large_size += stack_vars[i].size;
1098 	}
1099     }
1100 
1101   for (si = 0; si < n; ++si)
1102     {
1103       rtx base;
1104       unsigned base_align, alignb;
1105       poly_int64 offset;
1106 
1107       i = stack_vars_sorted[si];
1108 
1109       /* Skip variables that aren't partition representatives, for now.  */
1110       if (stack_vars[i].representative != i)
1111 	continue;
1112 
1113       /* Skip variables that have already had rtl assigned.  See also
1114 	 add_stack_var where we perpetrate this pc_rtx hack.  */
1115       decl = stack_vars[i].decl;
1116       if (TREE_CODE (decl) == SSA_NAME
1117 	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1118 	  : DECL_RTL (decl) != pc_rtx)
1119 	continue;
1120 
1121       /* Check the predicate to see whether this variable should be
1122 	 allocated in this pass.  */
1123       if (pred && !pred (i))
1124 	continue;
1125 
1126       alignb = stack_vars[i].alignb;
1127       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1128 	{
1129 	  base = virtual_stack_vars_rtx;
1130 	  /* ASAN description strings don't yet have a syntax for expressing
1131 	     polynomial offsets.  */
1132 	  HOST_WIDE_INT prev_offset;
1133 	  if (asan_sanitize_stack_p ()
1134 	      && pred
1135 	      && frame_offset.is_constant (&prev_offset)
1136 	      && stack_vars[i].size.is_constant ())
1137 	    {
1138 	      if (data->asan_vec.is_empty ())
1139 		{
1140 		  alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE);
1141 		  prev_offset = frame_offset.to_constant ();
1142 		}
1143 	      prev_offset = align_base (prev_offset,
1144 					ASAN_MIN_RED_ZONE_SIZE,
1145 					!FRAME_GROWS_DOWNWARD);
1146 	      tree repr_decl = NULL_TREE;
1147 	      unsigned HOST_WIDE_INT size
1148 		= asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
1149 	      if (data->asan_vec.is_empty ())
1150 		size = MAX (size, ASAN_RED_ZONE_SIZE);
1151 
1152 	      unsigned HOST_WIDE_INT alignment = MAX (alignb,
1153 						      ASAN_MIN_RED_ZONE_SIZE);
1154 	      offset = alloc_stack_frame_space (size, alignment);
1155 
1156 	      data->asan_vec.safe_push (prev_offset);
1157 	      /* Allocating a constant amount of space from a constant
1158 		 starting offset must give a constant result.  */
1159 	      data->asan_vec.safe_push ((offset + stack_vars[i].size)
1160 					.to_constant ());
1161 	      /* Find best representative of the partition.
1162 		 Prefer those with DECL_NAME, even better
1163 		 satisfying asan_protect_stack_decl predicate.  */
1164 	      for (j = i; j != EOC; j = stack_vars[j].next)
1165 		if (asan_protect_stack_decl (stack_vars[j].decl)
1166 		    && DECL_NAME (stack_vars[j].decl))
1167 		  {
1168 		    repr_decl = stack_vars[j].decl;
1169 		    break;
1170 		  }
1171 		else if (repr_decl == NULL_TREE
1172 			 && DECL_P (stack_vars[j].decl)
1173 			 && DECL_NAME (stack_vars[j].decl))
1174 		  repr_decl = stack_vars[j].decl;
1175 	      if (repr_decl == NULL_TREE)
1176 		repr_decl = stack_vars[i].decl;
1177 	      data->asan_decl_vec.safe_push (repr_decl);
1178 
1179 	      /* Make sure a representative is unpoison if another
1180 		 variable in the partition is handled by
1181 		 use-after-scope sanitization.  */
1182 	      if (asan_handled_variables != NULL
1183 		  && !asan_handled_variables->contains (repr_decl))
1184 		{
1185 		  for (j = i; j != EOC; j = stack_vars[j].next)
1186 		    if (asan_handled_variables->contains (stack_vars[j].decl))
1187 		      break;
1188 		  if (j != EOC)
1189 		    asan_handled_variables->add (repr_decl);
1190 		}
1191 
1192 	      data->asan_alignb = MAX (data->asan_alignb, alignb);
1193 	      if (data->asan_base == NULL)
1194 		data->asan_base = gen_reg_rtx (Pmode);
1195 	      base = data->asan_base;
1196 
1197 	      if (!STRICT_ALIGNMENT)
1198 		base_align = crtl->max_used_stack_slot_alignment;
1199 	      else
1200 		base_align = MAX (crtl->max_used_stack_slot_alignment,
1201 				  GET_MODE_ALIGNMENT (SImode)
1202 				  << ASAN_SHADOW_SHIFT);
1203 	    }
1204 	  else
1205 	    {
1206 	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1207 	      base_align = crtl->max_used_stack_slot_alignment;
1208 	    }
1209 	}
1210       else
1211 	{
1212 	  /* Large alignment is only processed in the last pass.  */
1213 	  if (pred)
1214 	    continue;
1215 
1216 	  /* If there were any variables requiring "large" alignment, allocate
1217 	     space.  */
1218 	  if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1219 	    {
1220 	      poly_int64 loffset;
1221 	      rtx large_allocsize;
1222 
1223 	      large_allocsize = gen_int_mode (large_size, Pmode);
1224 	      get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1225 	      loffset = alloc_stack_frame_space
1226 		(rtx_to_poly_int64 (large_allocsize),
1227 		 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1228 	      large_base = get_dynamic_stack_base (loffset, large_align);
1229 	      large_allocation_done = true;
1230 	    }
1231 	  gcc_assert (large_base != NULL);
1232 
1233 	  large_alloc = aligned_upper_bound (large_alloc, alignb);
1234 	  offset = large_alloc;
1235 	  large_alloc += stack_vars[i].size;
1236 
1237 	  base = large_base;
1238 	  base_align = large_align;
1239 	}
1240 
1241       /* Create rtl for each variable based on their location within the
1242 	 partition.  */
1243       for (j = i; j != EOC; j = stack_vars[j].next)
1244 	{
1245 	  expand_one_stack_var_at (stack_vars[j].decl,
1246 				   base, base_align,
1247 				   offset);
1248 	}
1249     }
1250 
1251   gcc_assert (known_eq (large_alloc, large_size));
1252 }
1253 
1254 /* Take into account all sizes of partitions and reset DECL_RTLs.  */
1255 static poly_uint64
account_stack_vars(void)1256 account_stack_vars (void)
1257 {
1258   size_t si, j, i, n = stack_vars_num;
1259   poly_uint64 size = 0;
1260 
1261   for (si = 0; si < n; ++si)
1262     {
1263       i = stack_vars_sorted[si];
1264 
1265       /* Skip variables that aren't partition representatives, for now.  */
1266       if (stack_vars[i].representative != i)
1267 	continue;
1268 
1269       size += stack_vars[i].size;
1270       for (j = i; j != EOC; j = stack_vars[j].next)
1271 	set_rtl (stack_vars[j].decl, NULL);
1272     }
1273   return size;
1274 }
1275 
1276 /* Record the RTL assignment X for the default def of PARM.  */
1277 
1278 extern void
set_parm_rtl(tree parm,rtx x)1279 set_parm_rtl (tree parm, rtx x)
1280 {
1281   gcc_assert (TREE_CODE (parm) == PARM_DECL
1282 	      || TREE_CODE (parm) == RESULT_DECL);
1283 
1284   if (x && !MEM_P (x))
1285     {
1286       unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1287 					      TYPE_MODE (TREE_TYPE (parm)),
1288 					      TYPE_ALIGN (TREE_TYPE (parm)));
1289 
1290       /* If the variable alignment is very large we'll dynamicaly
1291 	 allocate it, which means that in-frame portion is just a
1292 	 pointer.  ??? We've got a pseudo for sure here, do we
1293 	 actually dynamically allocate its spilling area if needed?
1294 	 ??? Isn't it a problem when Pmode alignment also exceeds
1295 	 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
1296       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1297 	align = GET_MODE_ALIGNMENT (Pmode);
1298 
1299       record_alignment_for_reg_var (align);
1300     }
1301 
1302   tree ssa = ssa_default_def (cfun, parm);
1303   if (!ssa)
1304     return set_rtl (parm, x);
1305 
1306   int part = var_to_partition (SA.map, ssa);
1307   gcc_assert (part != NO_PARTITION);
1308 
1309   bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1310   gcc_assert (changed);
1311 
1312   set_rtl (ssa, x);
1313   gcc_assert (DECL_RTL (parm) == x);
1314 }
1315 
1316 /* A subroutine of expand_one_var.  Called to immediately assign rtl
1317    to a variable to be allocated in the stack frame.  */
1318 
1319 static void
expand_one_stack_var_1(tree var)1320 expand_one_stack_var_1 (tree var)
1321 {
1322   poly_uint64 size;
1323   poly_int64 offset;
1324   unsigned byte_align;
1325 
1326   if (TREE_CODE (var) == SSA_NAME)
1327     {
1328       tree type = TREE_TYPE (var);
1329       size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1330       byte_align = TYPE_ALIGN_UNIT (type);
1331     }
1332   else
1333     {
1334       size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1335       byte_align = align_local_variable (var, true);
1336     }
1337 
1338   /* We handle highly aligned variables in expand_stack_vars.  */
1339   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1340 
1341   offset = alloc_stack_frame_space (size, byte_align);
1342 
1343   expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1344 			   crtl->max_used_stack_slot_alignment, offset);
1345 }
1346 
1347 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1348    already assigned some MEM.  */
1349 
1350 static void
expand_one_stack_var(tree var)1351 expand_one_stack_var (tree var)
1352 {
1353   if (TREE_CODE (var) == SSA_NAME)
1354     {
1355       int part = var_to_partition (SA.map, var);
1356       if (part != NO_PARTITION)
1357 	{
1358 	  rtx x = SA.partition_to_pseudo[part];
1359 	  gcc_assert (x);
1360 	  gcc_assert (MEM_P (x));
1361 	  return;
1362 	}
1363     }
1364 
1365   return expand_one_stack_var_1 (var);
1366 }
1367 
1368 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1369    that will reside in a hard register.  */
1370 
1371 static void
expand_one_hard_reg_var(tree var)1372 expand_one_hard_reg_var (tree var)
1373 {
1374   rest_of_decl_compilation (var, 0, 0);
1375 }
1376 
1377 /* Record the alignment requirements of some variable assigned to a
1378    pseudo.  */
1379 
1380 static void
record_alignment_for_reg_var(unsigned int align)1381 record_alignment_for_reg_var (unsigned int align)
1382 {
1383   if (SUPPORTS_STACK_ALIGNMENT
1384       && crtl->stack_alignment_estimated < align)
1385     {
1386       /* stack_alignment_estimated shouldn't change after stack
1387          realign decision made */
1388       gcc_assert (!crtl->stack_realign_processed);
1389       crtl->stack_alignment_estimated = align;
1390     }
1391 
1392   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1393      So here we only make sure stack_alignment_needed >= align.  */
1394   if (crtl->stack_alignment_needed < align)
1395     crtl->stack_alignment_needed = align;
1396   if (crtl->max_used_stack_slot_alignment < align)
1397     crtl->max_used_stack_slot_alignment = align;
1398 }
1399 
1400 /* Create RTL for an SSA partition.  */
1401 
1402 static void
expand_one_ssa_partition(tree var)1403 expand_one_ssa_partition (tree var)
1404 {
1405   int part = var_to_partition (SA.map, var);
1406   gcc_assert (part != NO_PARTITION);
1407 
1408   if (SA.partition_to_pseudo[part])
1409     return;
1410 
1411   unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1412 					  TYPE_MODE (TREE_TYPE (var)),
1413 					  TYPE_ALIGN (TREE_TYPE (var)));
1414 
1415   /* If the variable alignment is very large we'll dynamicaly allocate
1416      it, which means that in-frame portion is just a pointer.  */
1417   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1418     align = GET_MODE_ALIGNMENT (Pmode);
1419 
1420   record_alignment_for_reg_var (align);
1421 
1422   if (!use_register_for_decl (var))
1423     {
1424       if (defer_stack_allocation (var, true))
1425 	add_stack_var (var, true);
1426       else
1427 	expand_one_stack_var_1 (var);
1428       return;
1429     }
1430 
1431   machine_mode reg_mode = promote_ssa_mode (var, NULL);
1432   rtx x = gen_reg_rtx (reg_mode);
1433 
1434   set_rtl (var, x);
1435 
1436   /* For a promoted variable, X will not be used directly but wrapped in a
1437      SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1438      will assume that its upper bits can be inferred from its lower bits.
1439      Therefore, if X isn't initialized on every path from the entry, then
1440      we must do it manually in order to fulfill the above assumption.  */
1441   if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1442       && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1443     emit_move_insn (x, CONST0_RTX (reg_mode));
1444 }
1445 
1446 /* Record the association between the RTL generated for partition PART
1447    and the underlying variable of the SSA_NAME VAR.  */
1448 
1449 static void
adjust_one_expanded_partition_var(tree var)1450 adjust_one_expanded_partition_var (tree var)
1451 {
1452   if (!var)
1453     return;
1454 
1455   tree decl = SSA_NAME_VAR (var);
1456 
1457   int part = var_to_partition (SA.map, var);
1458   if (part == NO_PARTITION)
1459     return;
1460 
1461   rtx x = SA.partition_to_pseudo[part];
1462 
1463   gcc_assert (x);
1464 
1465   set_rtl (var, x);
1466 
1467   if (!REG_P (x))
1468     return;
1469 
1470   /* Note if the object is a user variable.  */
1471   if (decl && !DECL_ARTIFICIAL (decl))
1472     mark_user_reg (x);
1473 
1474   if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1475     mark_reg_pointer (x, get_pointer_alignment (var));
1476 }
1477 
1478 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1479    that will reside in a pseudo register.  */
1480 
1481 static void
expand_one_register_var(tree var)1482 expand_one_register_var (tree var)
1483 {
1484   if (TREE_CODE (var) == SSA_NAME)
1485     {
1486       int part = var_to_partition (SA.map, var);
1487       if (part != NO_PARTITION)
1488 	{
1489 	  rtx x = SA.partition_to_pseudo[part];
1490 	  gcc_assert (x);
1491 	  gcc_assert (REG_P (x));
1492 	  return;
1493 	}
1494       gcc_unreachable ();
1495     }
1496 
1497   tree decl = var;
1498   tree type = TREE_TYPE (decl);
1499   machine_mode reg_mode = promote_decl_mode (decl, NULL);
1500   rtx x = gen_reg_rtx (reg_mode);
1501 
1502   set_rtl (var, x);
1503 
1504   /* Note if the object is a user variable.  */
1505   if (!DECL_ARTIFICIAL (decl))
1506     mark_user_reg (x);
1507 
1508   if (POINTER_TYPE_P (type))
1509     mark_reg_pointer (x, get_pointer_alignment (var));
1510 }
1511 
1512 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1513    has some associated error, e.g. its type is error-mark.  We just need
1514    to pick something that won't crash the rest of the compiler.  */
1515 
1516 static void
expand_one_error_var(tree var)1517 expand_one_error_var (tree var)
1518 {
1519   machine_mode mode = DECL_MODE (var);
1520   rtx x;
1521 
1522   if (mode == BLKmode)
1523     x = gen_rtx_MEM (BLKmode, const0_rtx);
1524   else if (mode == VOIDmode)
1525     x = const0_rtx;
1526   else
1527     x = gen_reg_rtx (mode);
1528 
1529   SET_DECL_RTL (var, x);
1530 }
1531 
1532 /* A subroutine of expand_one_var.  VAR is a variable that will be
1533    allocated to the local stack frame.  Return true if we wish to
1534    add VAR to STACK_VARS so that it will be coalesced with other
1535    variables.  Return false to allocate VAR immediately.
1536 
1537    This function is used to reduce the number of variables considered
1538    for coalescing, which reduces the size of the quadratic problem.  */
1539 
1540 static bool
defer_stack_allocation(tree var,bool toplevel)1541 defer_stack_allocation (tree var, bool toplevel)
1542 {
1543   tree size_unit = TREE_CODE (var) == SSA_NAME
1544     ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1545     : DECL_SIZE_UNIT (var);
1546   poly_uint64 size;
1547 
1548   /* Whether the variable is small enough for immediate allocation not to be
1549      a problem with regard to the frame size.  */
1550   bool smallish
1551     = (poly_int_tree_p (size_unit, &size)
1552        && (estimated_poly_value (size)
1553 	   < param_min_size_for_stack_sharing));
1554 
1555   /* If stack protection is enabled, *all* stack variables must be deferred,
1556      so that we can re-order the strings to the top of the frame.
1557      Similarly for Address Sanitizer.  */
1558   if (flag_stack_protect || asan_sanitize_stack_p ())
1559     return true;
1560 
1561   unsigned int align = TREE_CODE (var) == SSA_NAME
1562     ? TYPE_ALIGN (TREE_TYPE (var))
1563     : DECL_ALIGN (var);
1564 
1565   /* We handle "large" alignment via dynamic allocation.  We want to handle
1566      this extra complication in only one place, so defer them.  */
1567   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1568     return true;
1569 
1570   bool ignored = TREE_CODE (var) == SSA_NAME
1571     ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1572     : DECL_IGNORED_P (var);
1573 
1574   /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1575      might be detached from their block and appear at toplevel when we reach
1576      here.  We want to coalesce them with variables from other blocks when
1577      the immediate contribution to the frame size would be noticeable.  */
1578   if (toplevel && optimize > 0 && ignored && !smallish)
1579     return true;
1580 
1581   /* Variables declared in the outermost scope automatically conflict
1582      with every other variable.  The only reason to want to defer them
1583      at all is that, after sorting, we can more efficiently pack
1584      small variables in the stack frame.  Continue to defer at -O2.  */
1585   if (toplevel && optimize < 2)
1586     return false;
1587 
1588   /* Without optimization, *most* variables are allocated from the
1589      stack, which makes the quadratic problem large exactly when we
1590      want compilation to proceed as quickly as possible.  On the
1591      other hand, we don't want the function's stack frame size to
1592      get completely out of hand.  So we avoid adding scalars and
1593      "small" aggregates to the list at all.  */
1594   if (optimize == 0 && smallish)
1595     return false;
1596 
1597   return true;
1598 }
1599 
1600 /* A subroutine of expand_used_vars.  Expand one variable according to
1601    its flavor.  Variables to be placed on the stack are not actually
1602    expanded yet, merely recorded.
1603    When REALLY_EXPAND is false, only add stack values to be allocated.
1604    Return stack usage this variable is supposed to take.
1605 */
1606 
1607 static poly_uint64
expand_one_var(tree var,bool toplevel,bool really_expand)1608 expand_one_var (tree var, bool toplevel, bool really_expand)
1609 {
1610   unsigned int align = BITS_PER_UNIT;
1611   tree origvar = var;
1612 
1613   var = SSAVAR (var);
1614 
1615   if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1616     {
1617       if (is_global_var (var))
1618 	return 0;
1619 
1620       /* Because we don't know if VAR will be in register or on stack,
1621 	 we conservatively assume it will be on stack even if VAR is
1622 	 eventually put into register after RA pass.  For non-automatic
1623 	 variables, which won't be on stack, we collect alignment of
1624 	 type and ignore user specified alignment.  Similarly for
1625 	 SSA_NAMEs for which use_register_for_decl returns true.  */
1626       if (TREE_STATIC (var)
1627 	  || DECL_EXTERNAL (var)
1628 	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1629 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1630 				   TYPE_MODE (TREE_TYPE (var)),
1631 				   TYPE_ALIGN (TREE_TYPE (var)));
1632       else if (DECL_HAS_VALUE_EXPR_P (var)
1633 	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1634 	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1635 	   or variables which were assigned a stack slot already by
1636 	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1637 	   changed from the offset chosen to it.  */
1638 	align = crtl->stack_alignment_estimated;
1639       else
1640 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1641 
1642       /* If the variable alignment is very large we'll dynamicaly allocate
1643 	 it, which means that in-frame portion is just a pointer.  */
1644       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1645 	align = GET_MODE_ALIGNMENT (Pmode);
1646     }
1647 
1648   record_alignment_for_reg_var (align);
1649 
1650   poly_uint64 size;
1651   if (TREE_CODE (origvar) == SSA_NAME)
1652     {
1653       gcc_assert (!VAR_P (var)
1654 		  || (!DECL_EXTERNAL (var)
1655 		      && !DECL_HAS_VALUE_EXPR_P (var)
1656 		      && !TREE_STATIC (var)
1657 		      && TREE_TYPE (var) != error_mark_node
1658 		      && !DECL_HARD_REGISTER (var)
1659 		      && really_expand));
1660     }
1661   if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1662     ;
1663   else if (DECL_EXTERNAL (var))
1664     ;
1665   else if (DECL_HAS_VALUE_EXPR_P (var))
1666     ;
1667   else if (TREE_STATIC (var))
1668     ;
1669   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1670     ;
1671   else if (TREE_TYPE (var) == error_mark_node)
1672     {
1673       if (really_expand)
1674         expand_one_error_var (var);
1675     }
1676   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1677     {
1678       if (really_expand)
1679 	{
1680 	  expand_one_hard_reg_var (var);
1681 	  if (!DECL_HARD_REGISTER (var))
1682 	    /* Invalid register specification.  */
1683 	    expand_one_error_var (var);
1684 	}
1685     }
1686   else if (use_register_for_decl (var))
1687     {
1688       if (really_expand)
1689         expand_one_register_var (origvar);
1690     }
1691   else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1692 	   || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1693     {
1694       /* Reject variables which cover more than half of the address-space.  */
1695       if (really_expand)
1696 	{
1697 	  if (DECL_NONLOCAL_FRAME (var))
1698 	    error_at (DECL_SOURCE_LOCATION (current_function_decl),
1699 		      "total size of local objects is too large");
1700 	  else
1701 	    error_at (DECL_SOURCE_LOCATION (var),
1702 		      "size of variable %q+D is too large", var);
1703 	  expand_one_error_var (var);
1704 	}
1705     }
1706   else if (defer_stack_allocation (var, toplevel))
1707     add_stack_var (origvar, really_expand);
1708   else
1709     {
1710       if (really_expand)
1711         {
1712           if (lookup_attribute ("naked",
1713                                 DECL_ATTRIBUTES (current_function_decl)))
1714 	    error ("cannot allocate stack for variable %q+D, naked function",
1715                    var);
1716 
1717           expand_one_stack_var (origvar);
1718         }
1719       return size;
1720     }
1721   return 0;
1722 }
1723 
1724 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1725    expanding variables.  Those variables that can be put into registers
1726    are allocated pseudos; those that can't are put on the stack.
1727 
1728    TOPLEVEL is true if this is the outermost BLOCK.  */
1729 
1730 static void
expand_used_vars_for_block(tree block,bool toplevel)1731 expand_used_vars_for_block (tree block, bool toplevel)
1732 {
1733   tree t;
1734 
1735   /* Expand all variables at this level.  */
1736   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1737     if (TREE_USED (t)
1738         && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1739 	    || !DECL_NONSHAREABLE (t)))
1740       expand_one_var (t, toplevel, true);
1741 
1742   /* Expand all variables at containing levels.  */
1743   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1744     expand_used_vars_for_block (t, false);
1745 }
1746 
1747 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1748    and clear TREE_USED on all local variables.  */
1749 
1750 static void
clear_tree_used(tree block)1751 clear_tree_used (tree block)
1752 {
1753   tree t;
1754 
1755   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1756     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1757     if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1758 	|| !DECL_NONSHAREABLE (t))
1759       TREE_USED (t) = 0;
1760 
1761   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1762     clear_tree_used (t);
1763 }
1764 
1765 enum {
1766   SPCT_FLAG_DEFAULT = 1,
1767   SPCT_FLAG_ALL = 2,
1768   SPCT_FLAG_STRONG = 3,
1769   SPCT_FLAG_EXPLICIT = 4
1770 };
1771 
1772 /* Examine TYPE and determine a bit mask of the following features.  */
1773 
1774 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1775 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1776 #define SPCT_HAS_ARRAY			4
1777 #define SPCT_HAS_AGGREGATE		8
1778 
1779 static unsigned int
stack_protect_classify_type(tree type)1780 stack_protect_classify_type (tree type)
1781 {
1782   unsigned int ret = 0;
1783   tree t;
1784 
1785   switch (TREE_CODE (type))
1786     {
1787     case ARRAY_TYPE:
1788       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1789       if (t == char_type_node
1790 	  || t == signed_char_type_node
1791 	  || t == unsigned_char_type_node)
1792 	{
1793 	  unsigned HOST_WIDE_INT max = param_ssp_buffer_size;
1794 	  unsigned HOST_WIDE_INT len;
1795 
1796 	  if (!TYPE_SIZE_UNIT (type)
1797 	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1798 	    len = max;
1799 	  else
1800 	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1801 
1802 	  if (len == 0)
1803 	    ret = SPCT_HAS_ARRAY;
1804 	  else if (len < max)
1805 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1806 	  else
1807 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1808 	}
1809       else
1810 	ret = SPCT_HAS_ARRAY;
1811       break;
1812 
1813     case UNION_TYPE:
1814     case QUAL_UNION_TYPE:
1815     case RECORD_TYPE:
1816       ret = SPCT_HAS_AGGREGATE;
1817       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1818 	if (TREE_CODE (t) == FIELD_DECL)
1819 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1820       break;
1821 
1822     default:
1823       break;
1824     }
1825 
1826   return ret;
1827 }
1828 
1829 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1830    part of the local stack frame.  Remember if we ever return nonzero for
1831    any variable in this function.  The return value is the phase number in
1832    which the variable should be allocated.  */
1833 
1834 static int
stack_protect_decl_phase(tree decl)1835 stack_protect_decl_phase (tree decl)
1836 {
1837   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1838   int ret = 0;
1839 
1840   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1841     has_short_buffer = true;
1842 
1843   if (flag_stack_protect == SPCT_FLAG_ALL
1844       || flag_stack_protect == SPCT_FLAG_STRONG
1845       || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1846 	  && lookup_attribute ("stack_protect",
1847 			       DECL_ATTRIBUTES (current_function_decl))))
1848     {
1849       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1850 	  && !(bits & SPCT_HAS_AGGREGATE))
1851 	ret = 1;
1852       else if (bits & SPCT_HAS_ARRAY)
1853 	ret = 2;
1854     }
1855   else
1856     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1857 
1858   if (ret)
1859     has_protected_decls = true;
1860 
1861   return ret;
1862 }
1863 
1864 /* Two helper routines that check for phase 1 and phase 2.  These are used
1865    as callbacks for expand_stack_vars.  */
1866 
1867 static bool
stack_protect_decl_phase_1(size_t i)1868 stack_protect_decl_phase_1 (size_t i)
1869 {
1870   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1871 }
1872 
1873 static bool
stack_protect_decl_phase_2(size_t i)1874 stack_protect_decl_phase_2 (size_t i)
1875 {
1876   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1877 }
1878 
1879 /* And helper function that checks for asan phase (with stack protector
1880    it is phase 3).  This is used as callback for expand_stack_vars.
1881    Returns true if any of the vars in the partition need to be protected.  */
1882 
1883 static bool
asan_decl_phase_3(size_t i)1884 asan_decl_phase_3 (size_t i)
1885 {
1886   while (i != EOC)
1887     {
1888       if (asan_protect_stack_decl (stack_vars[i].decl))
1889 	return true;
1890       i = stack_vars[i].next;
1891     }
1892   return false;
1893 }
1894 
1895 /* Ensure that variables in different stack protection phases conflict
1896    so that they are not merged and share the same stack slot.
1897    Return true if there are any address taken variables.  */
1898 
1899 static bool
add_stack_protection_conflicts(void)1900 add_stack_protection_conflicts (void)
1901 {
1902   size_t i, j, n = stack_vars_num;
1903   unsigned char *phase;
1904   bool ret = false;
1905 
1906   phase = XNEWVEC (unsigned char, n);
1907   for (i = 0; i < n; ++i)
1908     {
1909       phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1910       if (TREE_ADDRESSABLE (stack_vars[i].decl))
1911 	ret = true;
1912     }
1913 
1914   for (i = 0; i < n; ++i)
1915     {
1916       unsigned char ph_i = phase[i];
1917       for (j = i + 1; j < n; ++j)
1918 	if (ph_i != phase[j])
1919 	  add_stack_var_conflict (i, j);
1920     }
1921 
1922   XDELETEVEC (phase);
1923   return ret;
1924 }
1925 
1926 /* Create a decl for the guard at the top of the stack frame.  */
1927 
1928 static void
create_stack_guard(void)1929 create_stack_guard (void)
1930 {
1931   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1932 			   VAR_DECL, NULL, ptr_type_node);
1933   TREE_THIS_VOLATILE (guard) = 1;
1934   TREE_USED (guard) = 1;
1935   expand_one_stack_var (guard);
1936   crtl->stack_protect_guard = guard;
1937 }
1938 
1939 /* Prepare for expanding variables.  */
1940 static void
init_vars_expansion(void)1941 init_vars_expansion (void)
1942 {
1943   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1944   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1945 
1946   /* A map from decl to stack partition.  */
1947   decl_to_stack_part = new hash_map<tree, size_t>;
1948 
1949   /* Initialize local stack smashing state.  */
1950   has_protected_decls = false;
1951   has_short_buffer = false;
1952 }
1953 
1954 /* Free up stack variable graph data.  */
1955 static void
fini_vars_expansion(void)1956 fini_vars_expansion (void)
1957 {
1958   bitmap_obstack_release (&stack_var_bitmap_obstack);
1959   if (stack_vars)
1960     XDELETEVEC (stack_vars);
1961   if (stack_vars_sorted)
1962     XDELETEVEC (stack_vars_sorted);
1963   stack_vars = NULL;
1964   stack_vars_sorted = NULL;
1965   stack_vars_alloc = stack_vars_num = 0;
1966   delete decl_to_stack_part;
1967   decl_to_stack_part = NULL;
1968 }
1969 
1970 /* Make a fair guess for the size of the stack frame of the function
1971    in NODE.  This doesn't have to be exact, the result is only used in
1972    the inline heuristics.  So we don't want to run the full stack var
1973    packing algorithm (which is quadratic in the number of stack vars).
1974    Instead, we calculate the total size of all stack vars.  This turns
1975    out to be a pretty fair estimate -- packing of stack vars doesn't
1976    happen very often.  */
1977 
1978 HOST_WIDE_INT
estimated_stack_frame_size(struct cgraph_node * node)1979 estimated_stack_frame_size (struct cgraph_node *node)
1980 {
1981   poly_int64 size = 0;
1982   size_t i;
1983   tree var;
1984   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1985 
1986   push_cfun (fn);
1987 
1988   init_vars_expansion ();
1989 
1990   FOR_EACH_LOCAL_DECL (fn, i, var)
1991     if (auto_var_in_fn_p (var, fn->decl))
1992       size += expand_one_var (var, true, false);
1993 
1994   if (stack_vars_num > 0)
1995     {
1996       /* Fake sorting the stack vars for account_stack_vars ().  */
1997       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1998       for (i = 0; i < stack_vars_num; ++i)
1999 	stack_vars_sorted[i] = i;
2000       size += account_stack_vars ();
2001     }
2002 
2003   fini_vars_expansion ();
2004   pop_cfun ();
2005   return estimated_poly_value (size);
2006 }
2007 
2008 /* Check if the current function has calls that use a return slot.  */
2009 
2010 static bool
stack_protect_return_slot_p()2011 stack_protect_return_slot_p ()
2012 {
2013   basic_block bb;
2014 
2015   FOR_ALL_BB_FN (bb, cfun)
2016     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2017 	 !gsi_end_p (gsi); gsi_next (&gsi))
2018       {
2019 	gimple *stmt = gsi_stmt (gsi);
2020 	/* This assumes that calls to internal-only functions never
2021 	   use a return slot.  */
2022 	if (is_gimple_call (stmt)
2023 	    && !gimple_call_internal_p (stmt)
2024 	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2025 				  gimple_call_fndecl (stmt)))
2026 	  return true;
2027       }
2028   return false;
2029 }
2030 
2031 /* Expand all variables used in the function.  */
2032 
2033 static rtx_insn *
expand_used_vars(void)2034 expand_used_vars (void)
2035 {
2036   tree var, outer_block = DECL_INITIAL (current_function_decl);
2037   auto_vec<tree> maybe_local_decls;
2038   rtx_insn *var_end_seq = NULL;
2039   unsigned i;
2040   unsigned len;
2041   bool gen_stack_protect_signal = false;
2042 
2043   /* Compute the phase of the stack frame for this function.  */
2044   {
2045     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2046     int off = targetm.starting_frame_offset () % align;
2047     frame_phase = off ? align - off : 0;
2048   }
2049 
2050   /* Set TREE_USED on all variables in the local_decls.  */
2051   FOR_EACH_LOCAL_DECL (cfun, i, var)
2052     TREE_USED (var) = 1;
2053   /* Clear TREE_USED on all variables associated with a block scope.  */
2054   clear_tree_used (DECL_INITIAL (current_function_decl));
2055 
2056   init_vars_expansion ();
2057 
2058   if (targetm.use_pseudo_pic_reg ())
2059     pic_offset_table_rtx = gen_reg_rtx (Pmode);
2060 
2061   for (i = 0; i < SA.map->num_partitions; i++)
2062     {
2063       if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2064 	continue;
2065 
2066       tree var = partition_to_var (SA.map, i);
2067 
2068       gcc_assert (!virtual_operand_p (var));
2069 
2070       expand_one_ssa_partition (var);
2071     }
2072 
2073   if (flag_stack_protect == SPCT_FLAG_STRONG)
2074     gen_stack_protect_signal = stack_protect_return_slot_p ();
2075 
2076   /* At this point all variables on the local_decls with TREE_USED
2077      set are not associated with any block scope.  Lay them out.  */
2078 
2079   len = vec_safe_length (cfun->local_decls);
2080   FOR_EACH_LOCAL_DECL (cfun, i, var)
2081     {
2082       bool expand_now = false;
2083 
2084       /* Expanded above already.  */
2085       if (is_gimple_reg (var))
2086 	{
2087 	  TREE_USED (var) = 0;
2088 	  goto next;
2089 	}
2090       /* We didn't set a block for static or extern because it's hard
2091 	 to tell the difference between a global variable (re)declared
2092 	 in a local scope, and one that's really declared there to
2093 	 begin with.  And it doesn't really matter much, since we're
2094 	 not giving them stack space.  Expand them now.  */
2095       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2096 	expand_now = true;
2097 
2098       /* Expand variables not associated with any block now.  Those created by
2099 	 the optimizers could be live anywhere in the function.  Those that
2100 	 could possibly have been scoped originally and detached from their
2101 	 block will have their allocation deferred so we coalesce them with
2102 	 others when optimization is enabled.  */
2103       else if (TREE_USED (var))
2104 	expand_now = true;
2105 
2106       /* Finally, mark all variables on the list as used.  We'll use
2107 	 this in a moment when we expand those associated with scopes.  */
2108       TREE_USED (var) = 1;
2109 
2110       if (expand_now)
2111 	expand_one_var (var, true, true);
2112 
2113     next:
2114       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2115 	{
2116 	  rtx rtl = DECL_RTL_IF_SET (var);
2117 
2118 	  /* Keep artificial non-ignored vars in cfun->local_decls
2119 	     chain until instantiate_decls.  */
2120 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2121 	    add_local_decl (cfun, var);
2122 	  else if (rtl == NULL_RTX)
2123 	    /* If rtl isn't set yet, which can happen e.g. with
2124 	       -fstack-protector, retry before returning from this
2125 	       function.  */
2126 	    maybe_local_decls.safe_push (var);
2127 	}
2128     }
2129 
2130   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2131 
2132      +-----------------+-----------------+
2133      | ...processed... | ...duplicates...|
2134      +-----------------+-----------------+
2135                        ^
2136 		       +-- LEN points here.
2137 
2138      We just want the duplicates, as those are the artificial
2139      non-ignored vars that we want to keep until instantiate_decls.
2140      Move them down and truncate the array.  */
2141   if (!vec_safe_is_empty (cfun->local_decls))
2142     cfun->local_decls->block_remove (0, len);
2143 
2144   /* At this point, all variables within the block tree with TREE_USED
2145      set are actually used by the optimized function.  Lay them out.  */
2146   expand_used_vars_for_block (outer_block, true);
2147 
2148   if (stack_vars_num > 0)
2149     {
2150       bool has_addressable_vars = false;
2151 
2152       add_scope_conflicts ();
2153 
2154       /* If stack protection is enabled, we don't share space between
2155 	 vulnerable data and non-vulnerable data.  */
2156       if (flag_stack_protect != 0
2157 	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2158 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2159 		  && lookup_attribute ("stack_protect",
2160 				       DECL_ATTRIBUTES (current_function_decl)))))
2161 	has_addressable_vars = add_stack_protection_conflicts ();
2162 
2163       if (flag_stack_protect == SPCT_FLAG_STRONG && has_addressable_vars)
2164 	gen_stack_protect_signal = true;
2165 
2166       /* Now that we have collected all stack variables, and have computed a
2167 	 minimal interference graph, attempt to save some stack space.  */
2168       partition_stack_vars ();
2169       if (dump_file)
2170 	dump_stack_var_partition ();
2171     }
2172 
2173   switch (flag_stack_protect)
2174     {
2175     case SPCT_FLAG_ALL:
2176       create_stack_guard ();
2177       break;
2178 
2179     case SPCT_FLAG_STRONG:
2180       if (gen_stack_protect_signal
2181 	  || cfun->calls_alloca
2182 	  || has_protected_decls
2183 	  || lookup_attribute ("stack_protect",
2184 			       DECL_ATTRIBUTES (current_function_decl)))
2185 	create_stack_guard ();
2186       break;
2187 
2188     case SPCT_FLAG_DEFAULT:
2189       if (cfun->calls_alloca
2190 	  || has_protected_decls
2191 	  || lookup_attribute ("stack_protect",
2192 			       DECL_ATTRIBUTES (current_function_decl)))
2193 	create_stack_guard ();
2194       break;
2195 
2196     case SPCT_FLAG_EXPLICIT:
2197       if (lookup_attribute ("stack_protect",
2198 			    DECL_ATTRIBUTES (current_function_decl)))
2199 	create_stack_guard ();
2200       break;
2201 
2202     default:
2203       break;
2204     }
2205 
2206   /* Assign rtl to each variable based on these partitions.  */
2207   if (stack_vars_num > 0)
2208     {
2209       class stack_vars_data data;
2210 
2211       data.asan_base = NULL_RTX;
2212       data.asan_alignb = 0;
2213 
2214       /* Reorder decls to be protected by iterating over the variables
2215 	 array multiple times, and allocating out of each phase in turn.  */
2216       /* ??? We could probably integrate this into the qsort we did
2217 	 earlier, such that we naturally see these variables first,
2218 	 and thus naturally allocate things in the right order.  */
2219       if (has_protected_decls)
2220 	{
2221 	  /* Phase 1 contains only character arrays.  */
2222 	  expand_stack_vars (stack_protect_decl_phase_1, &data);
2223 
2224 	  /* Phase 2 contains other kinds of arrays.  */
2225 	  if (flag_stack_protect == SPCT_FLAG_ALL
2226 	      || flag_stack_protect == SPCT_FLAG_STRONG
2227 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2228 		  && lookup_attribute ("stack_protect",
2229 				       DECL_ATTRIBUTES (current_function_decl))))
2230 	    expand_stack_vars (stack_protect_decl_phase_2, &data);
2231 	}
2232 
2233       if (asan_sanitize_stack_p ())
2234 	/* Phase 3, any partitions that need asan protection
2235 	   in addition to phase 1 and 2.  */
2236 	expand_stack_vars (asan_decl_phase_3, &data);
2237 
2238       /* ASAN description strings don't yet have a syntax for expressing
2239 	 polynomial offsets.  */
2240       HOST_WIDE_INT prev_offset;
2241       if (!data.asan_vec.is_empty ()
2242 	  && frame_offset.is_constant (&prev_offset))
2243 	{
2244 	  HOST_WIDE_INT offset, sz, redzonesz;
2245 	  redzonesz = ASAN_RED_ZONE_SIZE;
2246 	  sz = data.asan_vec[0] - prev_offset;
2247 	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2248 	      && data.asan_alignb <= 4096
2249 	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2250 	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2251 			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2252 	  /* Allocating a constant amount of space from a constant
2253 	     starting offset must give a constant result.  */
2254 	  offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2255 		    .to_constant ());
2256 	  data.asan_vec.safe_push (prev_offset);
2257 	  data.asan_vec.safe_push (offset);
2258 	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
2259 	  if (STRICT_ALIGNMENT)
2260 	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2261 				      << ASAN_SHADOW_SHIFT)
2262 				     / BITS_PER_UNIT, 1);
2263 
2264 	  var_end_seq
2265 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
2266 					  data.asan_base,
2267 					  data.asan_alignb,
2268 					  data.asan_vec.address (),
2269 					  data.asan_decl_vec.address (),
2270 					  data.asan_vec.length ());
2271 	}
2272 
2273       expand_stack_vars (NULL, &data);
2274     }
2275 
2276   if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2277     var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2278 					      virtual_stack_vars_rtx,
2279 					      var_end_seq);
2280 
2281   fini_vars_expansion ();
2282 
2283   /* If there were any artificial non-ignored vars without rtl
2284      found earlier, see if deferred stack allocation hasn't assigned
2285      rtl to them.  */
2286   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2287     {
2288       rtx rtl = DECL_RTL_IF_SET (var);
2289 
2290       /* Keep artificial non-ignored vars in cfun->local_decls
2291 	 chain until instantiate_decls.  */
2292       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2293 	add_local_decl (cfun, var);
2294     }
2295 
2296   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
2297   if (STACK_ALIGNMENT_NEEDED)
2298     {
2299       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2300       if (FRAME_GROWS_DOWNWARD)
2301 	frame_offset = aligned_lower_bound (frame_offset, align);
2302       else
2303 	frame_offset = aligned_upper_bound (frame_offset, align);
2304     }
2305 
2306   return var_end_seq;
2307 }
2308 
2309 
2310 /* If we need to produce a detailed dump, print the tree representation
2311    for STMT to the dump file.  SINCE is the last RTX after which the RTL
2312    generated for STMT should have been appended.  */
2313 
2314 static void
maybe_dump_rtl_for_gimple_stmt(gimple * stmt,rtx_insn * since)2315 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2316 {
2317   if (dump_file && (dump_flags & TDF_DETAILS))
2318     {
2319       fprintf (dump_file, "\n;; ");
2320       print_gimple_stmt (dump_file, stmt, 0,
2321 			 TDF_SLIM | (dump_flags & TDF_LINENO));
2322       fprintf (dump_file, "\n");
2323 
2324       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2325     }
2326 }
2327 
2328 /* Maps the blocks that do not contain tree labels to rtx labels.  */
2329 
2330 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2331 
2332 /* Returns the label_rtx expression for a label starting basic block BB.  */
2333 
2334 static rtx_code_label *
label_rtx_for_bb(basic_block bb ATTRIBUTE_UNUSED)2335 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2336 {
2337   gimple_stmt_iterator gsi;
2338   tree lab;
2339 
2340   if (bb->flags & BB_RTL)
2341     return block_label (bb);
2342 
2343   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2344   if (elt)
2345     return *elt;
2346 
2347   /* Find the tree label if it is present.  */
2348 
2349   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2350     {
2351       glabel *lab_stmt;
2352 
2353       lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2354       if (!lab_stmt)
2355 	break;
2356 
2357       lab = gimple_label_label (lab_stmt);
2358       if (DECL_NONLOCAL (lab))
2359 	break;
2360 
2361       return jump_target_rtx (lab);
2362     }
2363 
2364   rtx_code_label *l = gen_label_rtx ();
2365   lab_rtx_for_bb->put (bb, l);
2366   return l;
2367 }
2368 
2369 
2370 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
2371    of a basic block where we just expanded the conditional at the end,
2372    possibly clean up the CFG and instruction sequence.  LAST is the
2373    last instruction before the just emitted jump sequence.  */
2374 
2375 static void
maybe_cleanup_end_of_block(edge e,rtx_insn * last)2376 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2377 {
2378   /* Special case: when jumpif decides that the condition is
2379      trivial it emits an unconditional jump (and the necessary
2380      barrier).  But we still have two edges, the fallthru one is
2381      wrong.  purge_dead_edges would clean this up later.  Unfortunately
2382      we have to insert insns (and split edges) before
2383      find_many_sub_basic_blocks and hence before purge_dead_edges.
2384      But splitting edges might create new blocks which depend on the
2385      fact that if there are two edges there's no barrier.  So the
2386      barrier would get lost and verify_flow_info would ICE.  Instead
2387      of auditing all edge splitters to care for the barrier (which
2388      normally isn't there in a cleaned CFG), fix it here.  */
2389   if (BARRIER_P (get_last_insn ()))
2390     {
2391       rtx_insn *insn;
2392       remove_edge (e);
2393       /* Now, we have a single successor block, if we have insns to
2394 	 insert on the remaining edge we potentially will insert
2395 	 it at the end of this block (if the dest block isn't feasible)
2396 	 in order to avoid splitting the edge.  This insertion will take
2397 	 place in front of the last jump.  But we might have emitted
2398 	 multiple jumps (conditional and one unconditional) to the
2399 	 same destination.  Inserting in front of the last one then
2400 	 is a problem.  See PR 40021.  We fix this by deleting all
2401 	 jumps except the last unconditional one.  */
2402       insn = PREV_INSN (get_last_insn ());
2403       /* Make sure we have an unconditional jump.  Otherwise we're
2404 	 confused.  */
2405       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2406       for (insn = PREV_INSN (insn); insn != last;)
2407 	{
2408 	  insn = PREV_INSN (insn);
2409 	  if (JUMP_P (NEXT_INSN (insn)))
2410 	    {
2411 	      if (!any_condjump_p (NEXT_INSN (insn)))
2412 		{
2413 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2414 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2415 		}
2416 	      delete_insn (NEXT_INSN (insn));
2417 	    }
2418 	}
2419     }
2420 }
2421 
2422 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
2423    Returns a new basic block if we've terminated the current basic
2424    block and created a new one.  */
2425 
2426 static basic_block
expand_gimple_cond(basic_block bb,gcond * stmt)2427 expand_gimple_cond (basic_block bb, gcond *stmt)
2428 {
2429   basic_block new_bb, dest;
2430   edge true_edge;
2431   edge false_edge;
2432   rtx_insn *last2, *last;
2433   enum tree_code code;
2434   tree op0, op1;
2435 
2436   code = gimple_cond_code (stmt);
2437   op0 = gimple_cond_lhs (stmt);
2438   op1 = gimple_cond_rhs (stmt);
2439   /* We're sometimes presented with such code:
2440        D.123_1 = x < y;
2441        if (D.123_1 != 0)
2442          ...
2443      This would expand to two comparisons which then later might
2444      be cleaned up by combine.  But some pattern matchers like if-conversion
2445      work better when there's only one compare, so make up for this
2446      here as special exception if TER would have made the same change.  */
2447   if (SA.values
2448       && TREE_CODE (op0) == SSA_NAME
2449       && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2450       && TREE_CODE (op1) == INTEGER_CST
2451       && ((gimple_cond_code (stmt) == NE_EXPR
2452 	   && integer_zerop (op1))
2453 	  || (gimple_cond_code (stmt) == EQ_EXPR
2454 	      && integer_onep (op1)))
2455       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2456     {
2457       gimple *second = SSA_NAME_DEF_STMT (op0);
2458       if (gimple_code (second) == GIMPLE_ASSIGN)
2459 	{
2460 	  enum tree_code code2 = gimple_assign_rhs_code (second);
2461 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
2462 	    {
2463 	      code = code2;
2464 	      op0 = gimple_assign_rhs1 (second);
2465 	      op1 = gimple_assign_rhs2 (second);
2466 	    }
2467 	  /* If jumps are cheap and the target does not support conditional
2468 	     compare, turn some more codes into jumpy sequences.  */
2469 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2470 		   && targetm.gen_ccmp_first == NULL)
2471 	    {
2472 	      if ((code2 == BIT_AND_EXPR
2473 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2474 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2475 		  || code2 == TRUTH_AND_EXPR)
2476 		{
2477 		  code = TRUTH_ANDIF_EXPR;
2478 		  op0 = gimple_assign_rhs1 (second);
2479 		  op1 = gimple_assign_rhs2 (second);
2480 		}
2481 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2482 		{
2483 		  code = TRUTH_ORIF_EXPR;
2484 		  op0 = gimple_assign_rhs1 (second);
2485 		  op1 = gimple_assign_rhs2 (second);
2486 		}
2487 	    }
2488 	}
2489     }
2490 
2491   /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
2492      into (x - C2) * C3 < C4.  */
2493   if ((code == EQ_EXPR || code == NE_EXPR)
2494       && TREE_CODE (op0) == SSA_NAME
2495       && TREE_CODE (op1) == INTEGER_CST)
2496     code = maybe_optimize_mod_cmp (code, &op0, &op1);
2497 
2498   last2 = last = get_last_insn ();
2499 
2500   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2501   set_curr_insn_location (gimple_location (stmt));
2502 
2503   /* These flags have no purpose in RTL land.  */
2504   true_edge->flags &= ~EDGE_TRUE_VALUE;
2505   false_edge->flags &= ~EDGE_FALSE_VALUE;
2506 
2507   /* We can either have a pure conditional jump with one fallthru edge or
2508      two-way jump that needs to be decomposed into two basic blocks.  */
2509   if (false_edge->dest == bb->next_bb)
2510     {
2511       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2512 		true_edge->probability);
2513       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2514       if (true_edge->goto_locus != UNKNOWN_LOCATION)
2515 	set_curr_insn_location (true_edge->goto_locus);
2516       false_edge->flags |= EDGE_FALLTHRU;
2517       maybe_cleanup_end_of_block (false_edge, last);
2518       return NULL;
2519     }
2520   if (true_edge->dest == bb->next_bb)
2521     {
2522       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2523 		   false_edge->probability);
2524       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2525       if (false_edge->goto_locus != UNKNOWN_LOCATION)
2526 	set_curr_insn_location (false_edge->goto_locus);
2527       true_edge->flags |= EDGE_FALLTHRU;
2528       maybe_cleanup_end_of_block (true_edge, last);
2529       return NULL;
2530     }
2531 
2532   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2533 	    true_edge->probability);
2534   last = get_last_insn ();
2535   if (false_edge->goto_locus != UNKNOWN_LOCATION)
2536     set_curr_insn_location (false_edge->goto_locus);
2537   emit_jump (label_rtx_for_bb (false_edge->dest));
2538 
2539   BB_END (bb) = last;
2540   if (BARRIER_P (BB_END (bb)))
2541     BB_END (bb) = PREV_INSN (BB_END (bb));
2542   update_bb_for_insn (bb);
2543 
2544   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2545   dest = false_edge->dest;
2546   redirect_edge_succ (false_edge, new_bb);
2547   false_edge->flags |= EDGE_FALLTHRU;
2548   new_bb->count = false_edge->count ();
2549   loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2550   add_bb_to_loop (new_bb, loop);
2551   if (loop->latch == bb
2552       && loop->header == dest)
2553     loop->latch = new_bb;
2554   make_single_succ_edge (new_bb, dest, 0);
2555   if (BARRIER_P (BB_END (new_bb)))
2556     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2557   update_bb_for_insn (new_bb);
2558 
2559   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2560 
2561   if (true_edge->goto_locus != UNKNOWN_LOCATION)
2562     {
2563       set_curr_insn_location (true_edge->goto_locus);
2564       true_edge->goto_locus = curr_insn_location ();
2565     }
2566 
2567   return new_bb;
2568 }
2569 
2570 /* Mark all calls that can have a transaction restart.  */
2571 
2572 static void
mark_transaction_restart_calls(gimple * stmt)2573 mark_transaction_restart_calls (gimple *stmt)
2574 {
2575   struct tm_restart_node dummy;
2576   tm_restart_node **slot;
2577 
2578   if (!cfun->gimple_df->tm_restart)
2579     return;
2580 
2581   dummy.stmt = stmt;
2582   slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2583   if (slot)
2584     {
2585       struct tm_restart_node *n = *slot;
2586       tree list = n->label_or_list;
2587       rtx_insn *insn;
2588 
2589       for (insn = next_real_insn (get_last_insn ());
2590 	   !CALL_P (insn);
2591 	   insn = next_real_insn (insn))
2592 	continue;
2593 
2594       if (TREE_CODE (list) == LABEL_DECL)
2595 	add_reg_note (insn, REG_TM, label_rtx (list));
2596       else
2597 	for (; list ; list = TREE_CHAIN (list))
2598 	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2599     }
2600 }
2601 
2602 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2603    statement STMT.  */
2604 
2605 static void
expand_call_stmt(gcall * stmt)2606 expand_call_stmt (gcall *stmt)
2607 {
2608   tree exp, decl, lhs;
2609   bool builtin_p;
2610   size_t i;
2611 
2612   if (gimple_call_internal_p (stmt))
2613     {
2614       expand_internal_call (stmt);
2615       return;
2616     }
2617 
2618   /* If this is a call to a built-in function and it has no effect other
2619      than setting the lhs, try to implement it using an internal function
2620      instead.  */
2621   decl = gimple_call_fndecl (stmt);
2622   if (gimple_call_lhs (stmt)
2623       && !gimple_has_side_effects (stmt)
2624       && (optimize || (decl && called_as_built_in (decl))))
2625     {
2626       internal_fn ifn = replacement_internal_fn (stmt);
2627       if (ifn != IFN_LAST)
2628 	{
2629 	  expand_internal_call (ifn, stmt);
2630 	  return;
2631 	}
2632     }
2633 
2634   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2635 
2636   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2637   builtin_p = decl && fndecl_built_in_p (decl);
2638 
2639   /* If this is not a builtin function, the function type through which the
2640      call is made may be different from the type of the function.  */
2641   if (!builtin_p)
2642     CALL_EXPR_FN (exp)
2643       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2644 		      CALL_EXPR_FN (exp));
2645 
2646   TREE_TYPE (exp) = gimple_call_return_type (stmt);
2647   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2648 
2649   for (i = 0; i < gimple_call_num_args (stmt); i++)
2650     {
2651       tree arg = gimple_call_arg (stmt, i);
2652       gimple *def;
2653       /* TER addresses into arguments of builtin functions so we have a
2654 	 chance to infer more correct alignment information.  See PR39954.  */
2655       if (builtin_p
2656 	  && TREE_CODE (arg) == SSA_NAME
2657 	  && (def = get_gimple_for_ssa_name (arg))
2658 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2659 	arg = gimple_assign_rhs1 (def);
2660       CALL_EXPR_ARG (exp, i) = arg;
2661     }
2662 
2663   if (gimple_has_side_effects (stmt))
2664     TREE_SIDE_EFFECTS (exp) = 1;
2665 
2666   if (gimple_call_nothrow_p (stmt))
2667     TREE_NOTHROW (exp) = 1;
2668 
2669   if (gimple_no_warning_p (stmt))
2670     TREE_NO_WARNING (exp) = 1;
2671 
2672   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2673   CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2674   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2675   if (decl
2676       && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
2677       && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2678     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2679   else
2680     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2681   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2682   CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2683   SET_EXPR_LOCATION (exp, gimple_location (stmt));
2684 
2685   /* Ensure RTL is created for debug args.  */
2686   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2687     {
2688       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2689       unsigned int ix;
2690       tree dtemp;
2691 
2692       if (debug_args)
2693 	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2694 	  {
2695 	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2696 	    expand_debug_expr (dtemp);
2697 	  }
2698     }
2699 
2700   rtx_insn *before_call = get_last_insn ();
2701   lhs = gimple_call_lhs (stmt);
2702   if (lhs)
2703     expand_assignment (lhs, exp, false);
2704   else
2705     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2706 
2707   /* If the gimple call is an indirect call and has 'nocf_check'
2708      attribute find a generated CALL insn to mark it as no
2709      control-flow verification is needed.  */
2710   if (gimple_call_nocf_check_p (stmt)
2711       && !gimple_call_fndecl (stmt))
2712     {
2713       rtx_insn *last = get_last_insn ();
2714       while (!CALL_P (last)
2715 	     && last != before_call)
2716 	last = PREV_INSN (last);
2717 
2718       if (last != before_call)
2719 	add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2720     }
2721 
2722   mark_transaction_restart_calls (stmt);
2723 }
2724 
2725 
2726 /* Generate RTL for an asm statement (explicit assembler code).
2727    STRING is a STRING_CST node containing the assembler code text,
2728    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
2729    insn is volatile; don't optimize it.  */
2730 
2731 static void
expand_asm_loc(tree string,int vol,location_t locus)2732 expand_asm_loc (tree string, int vol, location_t locus)
2733 {
2734   rtx body;
2735 
2736   body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2737 				ggc_strdup (TREE_STRING_POINTER (string)),
2738 				locus);
2739 
2740   MEM_VOLATILE_P (body) = vol;
2741 
2742   /* Non-empty basic ASM implicitly clobbers memory.  */
2743   if (TREE_STRING_LENGTH (string) != 0)
2744     {
2745       rtx asm_op, clob;
2746       unsigned i, nclobbers;
2747       auto_vec<rtx> input_rvec, output_rvec;
2748       auto_vec<const char *> constraints;
2749       auto_vec<rtx> clobber_rvec;
2750       HARD_REG_SET clobbered_regs;
2751       CLEAR_HARD_REG_SET (clobbered_regs);
2752 
2753       clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2754       clobber_rvec.safe_push (clob);
2755 
2756       if (targetm.md_asm_adjust)
2757 	targetm.md_asm_adjust (output_rvec, input_rvec,
2758 			       constraints, clobber_rvec,
2759 			       clobbered_regs);
2760 
2761       asm_op = body;
2762       nclobbers = clobber_rvec.length ();
2763       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2764 
2765       XVECEXP (body, 0, 0) = asm_op;
2766       for (i = 0; i < nclobbers; i++)
2767 	XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2768     }
2769 
2770   emit_insn (body);
2771 }
2772 
2773 /* Return the number of times character C occurs in string S.  */
2774 static int
n_occurrences(int c,const char * s)2775 n_occurrences (int c, const char *s)
2776 {
2777   int n = 0;
2778   while (*s)
2779     n += (*s++ == c);
2780   return n;
2781 }
2782 
2783 /* A subroutine of expand_asm_operands.  Check that all operands have
2784    the same number of alternatives.  Return true if so.  */
2785 
2786 static bool
check_operand_nalternatives(const vec<const char * > & constraints)2787 check_operand_nalternatives (const vec<const char *> &constraints)
2788 {
2789   unsigned len = constraints.length();
2790   if (len > 0)
2791     {
2792       int nalternatives = n_occurrences (',', constraints[0]);
2793 
2794       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2795 	{
2796 	  error ("too many alternatives in %<asm%>");
2797 	  return false;
2798 	}
2799 
2800       for (unsigned i = 1; i < len; ++i)
2801 	if (n_occurrences (',', constraints[i]) != nalternatives)
2802 	  {
2803 	    error ("operand constraints for %<asm%> differ "
2804 		   "in number of alternatives");
2805 	    return false;
2806 	  }
2807     }
2808   return true;
2809 }
2810 
2811 /* Check for overlap between registers marked in CLOBBERED_REGS and
2812    anything inappropriate in T.  Emit error and return the register
2813    variable definition for error, NULL_TREE for ok.  */
2814 
2815 static bool
tree_conflicts_with_clobbers_p(tree t,HARD_REG_SET * clobbered_regs)2816 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2817 {
2818   /* Conflicts between asm-declared register variables and the clobber
2819      list are not allowed.  */
2820   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2821 
2822   if (overlap)
2823     {
2824       error ("%<asm%> specifier for variable %qE conflicts with "
2825 	     "%<asm%> clobber list",
2826 	     DECL_NAME (overlap));
2827 
2828       /* Reset registerness to stop multiple errors emitted for a single
2829 	 variable.  */
2830       DECL_REGISTER (overlap) = 0;
2831       return true;
2832     }
2833 
2834   return false;
2835 }
2836 
2837 /* Check that the given REGNO spanning NREGS is a valid
2838    asm clobber operand.  Some HW registers cannot be
2839    saved/restored, hence they should not be clobbered by
2840    asm statements.  */
2841 static bool
asm_clobber_reg_is_valid(int regno,int nregs,const char * regname)2842 asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
2843 {
2844   bool is_valid = true;
2845   HARD_REG_SET regset;
2846 
2847   CLEAR_HARD_REG_SET (regset);
2848 
2849   add_range_to_hard_reg_set (&regset, regno, nregs);
2850 
2851   /* Clobbering the PIC register is an error.  */
2852   if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
2853       && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
2854     {
2855       /* ??? Diagnose during gimplification?  */
2856       error ("PIC register clobbered by %qs in %<asm%>", regname);
2857       is_valid = false;
2858     }
2859   else if (!in_hard_reg_set_p
2860 	   (accessible_reg_set, reg_raw_mode[regno], regno))
2861     {
2862       /* ??? Diagnose during gimplification?  */
2863       error ("the register %qs cannot be clobbered in %<asm%>"
2864 	     " for the current target", regname);
2865       is_valid = false;
2866     }
2867 
2868   /* Clobbering the stack pointer register is deprecated.  GCC expects
2869      the value of the stack pointer after an asm statement to be the same
2870      as it was before, so no asm can validly clobber the stack pointer in
2871      the usual sense.  Adding the stack pointer to the clobber list has
2872      traditionally had some undocumented and somewhat obscure side-effects.  */
2873   if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM))
2874     {
2875       crtl->sp_is_clobbered_by_asm = true;
2876       if (warning (OPT_Wdeprecated, "listing the stack pointer register"
2877 		   " %qs in a clobber list is deprecated", regname))
2878 	inform (input_location, "the value of the stack pointer after"
2879 		" an %<asm%> statement must be the same as it was before"
2880 		" the statement");
2881     }
2882 
2883   return is_valid;
2884 }
2885 
2886 /* Generate RTL for an asm statement with arguments.
2887    STRING is the instruction template.
2888    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2889    Each output or input has an expression in the TREE_VALUE and
2890    a tree list in TREE_PURPOSE which in turn contains a constraint
2891    name in TREE_VALUE (or NULL_TREE) and a constraint string
2892    in TREE_PURPOSE.
2893    CLOBBERS is a list of STRING_CST nodes each naming a hard register
2894    that is clobbered by this insn.
2895 
2896    LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2897    should be the fallthru basic block of the asm goto.
2898 
2899    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2900    Some elements of OUTPUTS may be replaced with trees representing temporary
2901    values.  The caller should copy those temporary values to the originally
2902    specified lvalues.
2903 
2904    VOL nonzero means the insn is volatile; don't optimize it.  */
2905 
2906 static void
expand_asm_stmt(gasm * stmt)2907 expand_asm_stmt (gasm *stmt)
2908 {
2909   class save_input_location
2910   {
2911     location_t old;
2912 
2913   public:
2914     explicit save_input_location(location_t where)
2915     {
2916       old = input_location;
2917       input_location = where;
2918     }
2919 
2920     ~save_input_location()
2921     {
2922       input_location = old;
2923     }
2924   };
2925 
2926   location_t locus = gimple_location (stmt);
2927 
2928   if (gimple_asm_input_p (stmt))
2929     {
2930       const char *s = gimple_asm_string (stmt);
2931       tree string = build_string (strlen (s), s);
2932       expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2933       return;
2934     }
2935 
2936   /* There are some legacy diagnostics in here, and also avoids a
2937      sixth parameger to targetm.md_asm_adjust.  */
2938   save_input_location s_i_l(locus);
2939 
2940   unsigned noutputs = gimple_asm_noutputs (stmt);
2941   unsigned ninputs = gimple_asm_ninputs (stmt);
2942   unsigned nlabels = gimple_asm_nlabels (stmt);
2943   unsigned i;
2944   bool error_seen = false;
2945 
2946   /* ??? Diagnose during gimplification?  */
2947   if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2948     {
2949       error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2950       return;
2951     }
2952 
2953   auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2954   auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2955   auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2956 
2957   /* Copy the gimple vectors into new vectors that we can manipulate.  */
2958 
2959   output_tvec.safe_grow (noutputs);
2960   input_tvec.safe_grow (ninputs);
2961   constraints.safe_grow (noutputs + ninputs);
2962 
2963   for (i = 0; i < noutputs; ++i)
2964     {
2965       tree t = gimple_asm_output_op (stmt, i);
2966       output_tvec[i] = TREE_VALUE (t);
2967       constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2968     }
2969   for (i = 0; i < ninputs; i++)
2970     {
2971       tree t = gimple_asm_input_op (stmt, i);
2972       input_tvec[i] = TREE_VALUE (t);
2973       constraints[i + noutputs]
2974 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2975     }
2976 
2977   /* ??? Diagnose during gimplification?  */
2978   if (! check_operand_nalternatives (constraints))
2979     return;
2980 
2981   /* Count the number of meaningful clobbered registers, ignoring what
2982      we would ignore later.  */
2983   auto_vec<rtx> clobber_rvec;
2984   HARD_REG_SET clobbered_regs;
2985   CLEAR_HARD_REG_SET (clobbered_regs);
2986 
2987   if (unsigned n = gimple_asm_nclobbers (stmt))
2988     {
2989       clobber_rvec.reserve (n);
2990       for (i = 0; i < n; i++)
2991 	{
2992 	  tree t = gimple_asm_clobber_op (stmt, i);
2993           const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2994 	  int nregs, j;
2995 
2996 	  j = decode_reg_name_and_count (regname, &nregs);
2997 	  if (j < 0)
2998 	    {
2999 	      if (j == -2)
3000 		{
3001 		  /* ??? Diagnose during gimplification?  */
3002 		  error ("unknown register name %qs in %<asm%>", regname);
3003 		  error_seen = true;
3004 		}
3005 	      else if (j == -4)
3006 		{
3007 		  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
3008 		  clobber_rvec.safe_push (x);
3009 		}
3010 	      else
3011 		{
3012 		  /* Otherwise we should have -1 == empty string
3013 		     or -3 == cc, which is not a register.  */
3014 		  gcc_assert (j == -1 || j == -3);
3015 		}
3016 	    }
3017 	  else
3018 	    for (int reg = j; reg < j + nregs; reg++)
3019 	      {
3020 		if (!asm_clobber_reg_is_valid (reg, nregs, regname))
3021 		  return;
3022 
3023 	        SET_HARD_REG_BIT (clobbered_regs, reg);
3024 	        rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
3025 		clobber_rvec.safe_push (x);
3026 	      }
3027 	}
3028     }
3029 
3030   /* First pass over inputs and outputs checks validity and sets
3031      mark_addressable if needed.  */
3032   /* ??? Diagnose during gimplification?  */
3033 
3034   for (i = 0; i < noutputs; ++i)
3035     {
3036       tree val = output_tvec[i];
3037       tree type = TREE_TYPE (val);
3038       const char *constraint;
3039       bool is_inout;
3040       bool allows_reg;
3041       bool allows_mem;
3042 
3043       /* Try to parse the output constraint.  If that fails, there's
3044 	 no point in going further.  */
3045       constraint = constraints[i];
3046       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3047 				    &allows_mem, &allows_reg, &is_inout))
3048 	return;
3049 
3050       /* If the output is a hard register, verify it doesn't conflict with
3051 	 any other operand's possible hard register use.  */
3052       if (DECL_P (val)
3053 	  && REG_P (DECL_RTL (val))
3054 	  && HARD_REGISTER_P (DECL_RTL (val)))
3055 	{
3056 	  unsigned j, output_hregno = REGNO (DECL_RTL (val));
3057 	  bool early_clobber_p = strchr (constraints[i], '&') != NULL;
3058 	  unsigned long match;
3059 
3060 	  /* Verify the other outputs do not use the same hard register.  */
3061 	  for (j = i + 1; j < noutputs; ++j)
3062 	    if (DECL_P (output_tvec[j])
3063 		&& REG_P (DECL_RTL (output_tvec[j]))
3064 		&& HARD_REGISTER_P (DECL_RTL (output_tvec[j]))
3065 		&& output_hregno == REGNO (DECL_RTL (output_tvec[j])))
3066 	      {
3067 		error ("invalid hard register usage between output operands");
3068 		error_seen = true;
3069 	      }
3070 
3071 	  /* Verify matching constraint operands use the same hard register
3072 	     and that the non-matching constraint operands do not use the same
3073 	     hard register if the output is an early clobber operand.  */
3074 	  for (j = 0; j < ninputs; ++j)
3075 	    if (DECL_P (input_tvec[j])
3076 		&& REG_P (DECL_RTL (input_tvec[j]))
3077 		&& HARD_REGISTER_P (DECL_RTL (input_tvec[j])))
3078 	      {
3079 		unsigned input_hregno = REGNO (DECL_RTL (input_tvec[j]));
3080 		switch (*constraints[j + noutputs])
3081 		  {
3082 		  case '0':  case '1':  case '2':  case '3':  case '4':
3083 		  case '5':  case '6':  case '7':  case '8':  case '9':
3084 		    match = strtoul (constraints[j + noutputs], NULL, 10);
3085 		    break;
3086 		  default:
3087 		    match = ULONG_MAX;
3088 		    break;
3089 		  }
3090 		if (i == match
3091 		    && output_hregno != input_hregno)
3092 		  {
3093 		    error ("invalid hard register usage between output "
3094 			   "operand and matching constraint operand");
3095 		    error_seen = true;
3096 		  }
3097 		else if (early_clobber_p
3098 			 && i != match
3099 			 && output_hregno == input_hregno)
3100 		  {
3101 		    error ("invalid hard register usage between "
3102 			   "earlyclobber operand and input operand");
3103 		    error_seen = true;
3104 		  }
3105 	      }
3106 	}
3107 
3108       if (! allows_reg
3109 	  && (allows_mem
3110 	      || is_inout
3111 	      || (DECL_P (val)
3112 		  && REG_P (DECL_RTL (val))
3113 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3114 	mark_addressable (val);
3115     }
3116 
3117   for (i = 0; i < ninputs; ++i)
3118     {
3119       bool allows_reg, allows_mem;
3120       const char *constraint;
3121 
3122       constraint = constraints[i + noutputs];
3123       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3124 				    constraints.address (),
3125 				    &allows_mem, &allows_reg))
3126 	return;
3127 
3128       if (! allows_reg && allows_mem)
3129 	mark_addressable (input_tvec[i]);
3130     }
3131 
3132   /* Second pass evaluates arguments.  */
3133 
3134   /* Make sure stack is consistent for asm goto.  */
3135   if (nlabels > 0)
3136     do_pending_stack_adjust ();
3137   int old_generating_concat_p = generating_concat_p;
3138 
3139   /* Vector of RTX's of evaluated output operands.  */
3140   auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3141   auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3142   rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3143 
3144   output_rvec.safe_grow (noutputs);
3145 
3146   for (i = 0; i < noutputs; ++i)
3147     {
3148       tree val = output_tvec[i];
3149       tree type = TREE_TYPE (val);
3150       bool is_inout, allows_reg, allows_mem, ok;
3151       rtx op;
3152 
3153       ok = parse_output_constraint (&constraints[i], i, ninputs,
3154 				    noutputs, &allows_mem, &allows_reg,
3155 				    &is_inout);
3156       gcc_assert (ok);
3157 
3158       /* If an output operand is not a decl or indirect ref and our constraint
3159 	 allows a register, make a temporary to act as an intermediate.
3160 	 Make the asm insn write into that, then we will copy it to
3161 	 the real output operand.  Likewise for promoted variables.  */
3162 
3163       generating_concat_p = 0;
3164 
3165       if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3166 	  || (DECL_P (val)
3167 	      && (allows_mem || REG_P (DECL_RTL (val)))
3168 	      && ! (REG_P (DECL_RTL (val))
3169 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3170 	  || ! allows_reg
3171 	  || is_inout
3172 	  || TREE_ADDRESSABLE (type)
3173 	  || (!tree_fits_poly_int64_p (TYPE_SIZE (type))
3174 	      && !known_size_p (max_int_size_in_bytes (type))))
3175 	{
3176 	  op = expand_expr (val, NULL_RTX, VOIDmode,
3177 			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3178 	  if (MEM_P (op))
3179 	    op = validize_mem (op);
3180 
3181 	  if (! allows_reg && !MEM_P (op))
3182 	    {
3183 	      error ("output number %d not directly addressable", i);
3184 	      error_seen = true;
3185 	    }
3186 	  if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3187 	      || GET_CODE (op) == CONCAT)
3188 	    {
3189 	      rtx old_op = op;
3190 	      op = gen_reg_rtx (GET_MODE (op));
3191 
3192 	      generating_concat_p = old_generating_concat_p;
3193 
3194 	      if (is_inout)
3195 		emit_move_insn (op, old_op);
3196 
3197 	      push_to_sequence2 (after_rtl_seq, after_rtl_end);
3198 	      emit_move_insn (old_op, op);
3199 	      after_rtl_seq = get_insns ();
3200 	      after_rtl_end = get_last_insn ();
3201 	      end_sequence ();
3202 	    }
3203 	}
3204       else
3205 	{
3206 	  op = assign_temp (type, 0, 1);
3207 	  op = validize_mem (op);
3208 	  if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3209 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3210 
3211 	  generating_concat_p = old_generating_concat_p;
3212 
3213 	  push_to_sequence2 (after_rtl_seq, after_rtl_end);
3214 	  expand_assignment (val, make_tree (type, op), false);
3215 	  after_rtl_seq = get_insns ();
3216 	  after_rtl_end = get_last_insn ();
3217 	  end_sequence ();
3218 	}
3219       output_rvec[i] = op;
3220 
3221       if (is_inout)
3222 	inout_opnum.safe_push (i);
3223     }
3224 
3225   const char *str = gimple_asm_string (stmt);
3226   if (error_seen)
3227     {
3228       ninputs = 0;
3229       noutputs = 0;
3230       inout_opnum.truncate (0);
3231       output_rvec.truncate (0);
3232       clobber_rvec.truncate (0);
3233       constraints.truncate (0);
3234       CLEAR_HARD_REG_SET (clobbered_regs);
3235       str = "";
3236     }
3237 
3238   auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3239   auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3240 
3241   input_rvec.safe_grow (ninputs);
3242   input_mode.safe_grow (ninputs);
3243 
3244   generating_concat_p = 0;
3245 
3246   for (i = 0; i < ninputs; ++i)
3247     {
3248       tree val = input_tvec[i];
3249       tree type = TREE_TYPE (val);
3250       bool allows_reg, allows_mem, ok;
3251       const char *constraint;
3252       rtx op;
3253 
3254       constraint = constraints[i + noutputs];
3255       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3256 				   constraints.address (),
3257 				   &allows_mem, &allows_reg);
3258       gcc_assert (ok);
3259 
3260       /* EXPAND_INITIALIZER will not generate code for valid initializer
3261 	 constants, but will still generate code for other types of operand.
3262 	 This is the behavior we want for constant constraints.  */
3263       op = expand_expr (val, NULL_RTX, VOIDmode,
3264 			allows_reg ? EXPAND_NORMAL
3265 			: allows_mem ? EXPAND_MEMORY
3266 			: EXPAND_INITIALIZER);
3267 
3268       /* Never pass a CONCAT to an ASM.  */
3269       if (GET_CODE (op) == CONCAT)
3270 	op = force_reg (GET_MODE (op), op);
3271       else if (MEM_P (op))
3272 	op = validize_mem (op);
3273 
3274       if (asm_operand_ok (op, constraint, NULL) <= 0)
3275 	{
3276 	  if (allows_reg && TYPE_MODE (type) != BLKmode)
3277 	    op = force_reg (TYPE_MODE (type), op);
3278 	  else if (!allows_mem)
3279 	    warning (0, "%<asm%> operand %d probably does not match "
3280 		     "constraints",
3281 		     i + noutputs);
3282 	  else if (MEM_P (op))
3283 	    {
3284 	      /* We won't recognize either volatile memory or memory
3285 		 with a queued address as available a memory_operand
3286 		 at this point.  Ignore it: clearly this *is* a memory.  */
3287 	    }
3288 	  else
3289 	    gcc_unreachable ();
3290 	}
3291       input_rvec[i] = op;
3292       input_mode[i] = TYPE_MODE (type);
3293     }
3294 
3295   /* For in-out operands, copy output rtx to input rtx.  */
3296   unsigned ninout = inout_opnum.length ();
3297   for (i = 0; i < ninout; i++)
3298     {
3299       int j = inout_opnum[i];
3300       rtx o = output_rvec[j];
3301 
3302       input_rvec.safe_push (o);
3303       input_mode.safe_push (GET_MODE (o));
3304 
3305       char buffer[16];
3306       sprintf (buffer, "%d", j);
3307       constraints.safe_push (ggc_strdup (buffer));
3308     }
3309   ninputs += ninout;
3310 
3311   /* Sometimes we wish to automatically clobber registers across an asm.
3312      Case in point is when the i386 backend moved from cc0 to a hard reg --
3313      maintaining source-level compatibility means automatically clobbering
3314      the flags register.  */
3315   rtx_insn *after_md_seq = NULL;
3316   if (targetm.md_asm_adjust)
3317     after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3318 					  constraints, clobber_rvec,
3319 					  clobbered_regs);
3320 
3321   /* Do not allow the hook to change the output and input count,
3322      lest it mess up the operand numbering.  */
3323   gcc_assert (output_rvec.length() == noutputs);
3324   gcc_assert (input_rvec.length() == ninputs);
3325   gcc_assert (constraints.length() == noutputs + ninputs);
3326 
3327   /* But it certainly can adjust the clobbers.  */
3328   unsigned nclobbers = clobber_rvec.length ();
3329 
3330   /* Third pass checks for easy conflicts.  */
3331   /* ??? Why are we doing this on trees instead of rtx.  */
3332 
3333   bool clobber_conflict_found = 0;
3334   for (i = 0; i < noutputs; ++i)
3335     if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3336 	clobber_conflict_found = 1;
3337   for (i = 0; i < ninputs - ninout; ++i)
3338     if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3339 	clobber_conflict_found = 1;
3340 
3341   /* Make vectors for the expression-rtx, constraint strings,
3342      and named operands.  */
3343 
3344   rtvec argvec = rtvec_alloc (ninputs);
3345   rtvec constraintvec = rtvec_alloc (ninputs);
3346   rtvec labelvec = rtvec_alloc (nlabels);
3347 
3348   rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3349 				    : GET_MODE (output_rvec[0])),
3350 				   ggc_strdup (str),
3351 				   "", 0, argvec, constraintvec,
3352 				   labelvec, locus);
3353   MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3354 
3355   for (i = 0; i < ninputs; ++i)
3356     {
3357       ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3358       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3359 	= gen_rtx_ASM_INPUT_loc (input_mode[i],
3360 				 constraints[i + noutputs],
3361 				 locus);
3362     }
3363 
3364   /* Copy labels to the vector.  */
3365   rtx_code_label *fallthru_label = NULL;
3366   if (nlabels > 0)
3367     {
3368       basic_block fallthru_bb = NULL;
3369       edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3370       if (fallthru)
3371 	fallthru_bb = fallthru->dest;
3372 
3373       for (i = 0; i < nlabels; ++i)
3374 	{
3375 	  tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3376 	  rtx_insn *r;
3377 	  /* If asm goto has any labels in the fallthru basic block, use
3378 	     a label that we emit immediately after the asm goto.  Expansion
3379 	     may insert further instructions into the same basic block after
3380 	     asm goto and if we don't do this, insertion of instructions on
3381 	     the fallthru edge might misbehave.  See PR58670.  */
3382 	  if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
3383 	    {
3384 	      if (fallthru_label == NULL_RTX)
3385 	        fallthru_label = gen_label_rtx ();
3386 	      r = fallthru_label;
3387 	    }
3388 	  else
3389 	    r = label_rtx (label);
3390 	  ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3391 	}
3392     }
3393 
3394   /* Now, for each output, construct an rtx
3395      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3396 			       ARGVEC CONSTRAINTS OPNAMES))
3397      If there is more than one, put them inside a PARALLEL.  */
3398 
3399   if (nlabels > 0 && nclobbers == 0)
3400     {
3401       gcc_assert (noutputs == 0);
3402       emit_jump_insn (body);
3403     }
3404   else if (noutputs == 0 && nclobbers == 0)
3405     {
3406       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
3407       emit_insn (body);
3408     }
3409   else if (noutputs == 1 && nclobbers == 0)
3410     {
3411       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3412       emit_insn (gen_rtx_SET (output_rvec[0], body));
3413     }
3414   else
3415     {
3416       rtx obody = body;
3417       int num = noutputs;
3418 
3419       if (num == 0)
3420 	num = 1;
3421 
3422       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3423 
3424       /* For each output operand, store a SET.  */
3425       for (i = 0; i < noutputs; ++i)
3426 	{
3427 	  rtx src, o = output_rvec[i];
3428 	  if (i == 0)
3429 	    {
3430 	      ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3431 	      src = obody;
3432 	    }
3433 	  else
3434 	    {
3435 	      src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3436 					  ASM_OPERANDS_TEMPLATE (obody),
3437 					  constraints[i], i, argvec,
3438 					  constraintvec, labelvec, locus);
3439 	      MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3440 	    }
3441 	  XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3442 	}
3443 
3444       /* If there are no outputs (but there are some clobbers)
3445 	 store the bare ASM_OPERANDS into the PARALLEL.  */
3446       if (i == 0)
3447 	XVECEXP (body, 0, i++) = obody;
3448 
3449       /* Store (clobber REG) for each clobbered register specified.  */
3450       for (unsigned j = 0; j < nclobbers; ++j)
3451 	{
3452 	  rtx clobbered_reg = clobber_rvec[j];
3453 
3454 	  /* Do sanity check for overlap between clobbers and respectively
3455 	     input and outputs that hasn't been handled.  Such overlap
3456 	     should have been detected and reported above.  */
3457 	  if (!clobber_conflict_found && REG_P (clobbered_reg))
3458 	    {
3459 	      /* We test the old body (obody) contents to avoid
3460 		 tripping over the under-construction body.  */
3461 	      for (unsigned k = 0; k < noutputs; ++k)
3462 		if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3463 		  internal_error ("%<asm%> clobber conflict with "
3464 				  "output operand");
3465 
3466 	      for (unsigned k = 0; k < ninputs - ninout; ++k)
3467 		if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3468 		  internal_error ("%<asm%> clobber conflict with "
3469 				  "input operand");
3470 	    }
3471 
3472 	  XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3473 	}
3474 
3475       if (nlabels > 0)
3476 	emit_jump_insn (body);
3477       else
3478 	emit_insn (body);
3479     }
3480 
3481   generating_concat_p = old_generating_concat_p;
3482 
3483   if (fallthru_label)
3484     emit_label (fallthru_label);
3485 
3486   if (after_md_seq)
3487     emit_insn (after_md_seq);
3488   if (after_rtl_seq)
3489     emit_insn (after_rtl_seq);
3490 
3491   free_temp_slots ();
3492   crtl->has_asm_statement = 1;
3493 }
3494 
3495 /* Emit code to jump to the address
3496    specified by the pointer expression EXP.  */
3497 
3498 static void
expand_computed_goto(tree exp)3499 expand_computed_goto (tree exp)
3500 {
3501   rtx x = expand_normal (exp);
3502 
3503   do_pending_stack_adjust ();
3504   emit_indirect_jump (x);
3505 }
3506 
3507 /* Generate RTL code for a `goto' statement with target label LABEL.
3508    LABEL should be a LABEL_DECL tree node that was or will later be
3509    defined with `expand_label'.  */
3510 
3511 static void
expand_goto(tree label)3512 expand_goto (tree label)
3513 {
3514   if (flag_checking)
3515     {
3516       /* Check for a nonlocal goto to a containing function.  Should have
3517 	 gotten translated to __builtin_nonlocal_goto.  */
3518       tree context = decl_function_context (label);
3519       gcc_assert (!context || context == current_function_decl);
3520     }
3521 
3522   emit_jump (jump_target_rtx (label));
3523 }
3524 
3525 /* Output a return with no value.  */
3526 
3527 static void
expand_null_return_1(void)3528 expand_null_return_1 (void)
3529 {
3530   clear_pending_stack_adjust ();
3531   do_pending_stack_adjust ();
3532   emit_jump (return_label);
3533 }
3534 
3535 /* Generate RTL to return from the current function, with no value.
3536    (That is, we do not do anything about returning any value.)  */
3537 
3538 void
expand_null_return(void)3539 expand_null_return (void)
3540 {
3541   /* If this function was declared to return a value, but we
3542      didn't, clobber the return registers so that they are not
3543      propagated live to the rest of the function.  */
3544   clobber_return_register ();
3545 
3546   expand_null_return_1 ();
3547 }
3548 
3549 /* Generate RTL to return from the current function, with value VAL.  */
3550 
3551 static void
expand_value_return(rtx val)3552 expand_value_return (rtx val)
3553 {
3554   /* Copy the value to the return location unless it's already there.  */
3555 
3556   tree decl = DECL_RESULT (current_function_decl);
3557   rtx return_reg = DECL_RTL (decl);
3558   if (return_reg != val)
3559     {
3560       tree funtype = TREE_TYPE (current_function_decl);
3561       tree type = TREE_TYPE (decl);
3562       int unsignedp = TYPE_UNSIGNED (type);
3563       machine_mode old_mode = DECL_MODE (decl);
3564       machine_mode mode;
3565       if (DECL_BY_REFERENCE (decl))
3566         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3567       else
3568         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3569 
3570       if (mode != old_mode)
3571 	val = convert_modes (mode, old_mode, val, unsignedp);
3572 
3573       if (GET_CODE (return_reg) == PARALLEL)
3574 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3575       else
3576 	emit_move_insn (return_reg, val);
3577     }
3578 
3579   expand_null_return_1 ();
3580 }
3581 
3582 /* Generate RTL to evaluate the expression RETVAL and return it
3583    from the current function.  */
3584 
3585 static void
expand_return(tree retval)3586 expand_return (tree retval)
3587 {
3588   rtx result_rtl;
3589   rtx val = 0;
3590   tree retval_rhs;
3591 
3592   /* If function wants no value, give it none.  */
3593   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3594     {
3595       expand_normal (retval);
3596       expand_null_return ();
3597       return;
3598     }
3599 
3600   if (retval == error_mark_node)
3601     {
3602       /* Treat this like a return of no value from a function that
3603 	 returns a value.  */
3604       expand_null_return ();
3605       return;
3606     }
3607   else if ((TREE_CODE (retval) == MODIFY_EXPR
3608 	    || TREE_CODE (retval) == INIT_EXPR)
3609 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3610     retval_rhs = TREE_OPERAND (retval, 1);
3611   else
3612     retval_rhs = retval;
3613 
3614   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3615 
3616   /* If we are returning the RESULT_DECL, then the value has already
3617      been stored into it, so we don't have to do anything special.  */
3618   if (TREE_CODE (retval_rhs) == RESULT_DECL)
3619     expand_value_return (result_rtl);
3620 
3621   /* If the result is an aggregate that is being returned in one (or more)
3622      registers, load the registers here.  */
3623 
3624   else if (retval_rhs != 0
3625 	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3626 	   && REG_P (result_rtl))
3627     {
3628       val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3629       if (val)
3630 	{
3631 	  /* Use the mode of the result value on the return register.  */
3632 	  PUT_MODE (result_rtl, GET_MODE (val));
3633 	  expand_value_return (val);
3634 	}
3635       else
3636 	expand_null_return ();
3637     }
3638   else if (retval_rhs != 0
3639 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3640 	   && (REG_P (result_rtl)
3641 	       || (GET_CODE (result_rtl) == PARALLEL)))
3642     {
3643       /* Compute the return value into a temporary (usually a pseudo reg).  */
3644       val
3645 	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3646       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3647       val = force_not_mem (val);
3648       expand_value_return (val);
3649     }
3650   else
3651     {
3652       /* No hard reg used; calculate value into hard return reg.  */
3653       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3654       expand_value_return (result_rtl);
3655     }
3656 }
3657 
3658 /* Expand a clobber of LHS.  If LHS is stored it in a multi-part
3659    register, tell the rtl optimizers that its value is no longer
3660    needed.  */
3661 
3662 static void
expand_clobber(tree lhs)3663 expand_clobber (tree lhs)
3664 {
3665   if (DECL_P (lhs))
3666     {
3667       rtx decl_rtl = DECL_RTL_IF_SET (lhs);
3668       if (decl_rtl && REG_P (decl_rtl))
3669 	{
3670 	  machine_mode decl_mode = GET_MODE (decl_rtl);
3671 	  if (maybe_gt (GET_MODE_SIZE (decl_mode),
3672 			REGMODE_NATURAL_SIZE (decl_mode)))
3673 	    emit_clobber (decl_rtl);
3674 	}
3675     }
3676 }
3677 
3678 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3679    STMT that doesn't require special handling for outgoing edges.  That
3680    is no tailcalls and no GIMPLE_COND.  */
3681 
3682 static void
expand_gimple_stmt_1(gimple * stmt)3683 expand_gimple_stmt_1 (gimple *stmt)
3684 {
3685   tree op0;
3686 
3687   set_curr_insn_location (gimple_location (stmt));
3688 
3689   switch (gimple_code (stmt))
3690     {
3691     case GIMPLE_GOTO:
3692       op0 = gimple_goto_dest (stmt);
3693       if (TREE_CODE (op0) == LABEL_DECL)
3694 	expand_goto (op0);
3695       else
3696 	expand_computed_goto (op0);
3697       break;
3698     case GIMPLE_LABEL:
3699       expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3700       break;
3701     case GIMPLE_NOP:
3702     case GIMPLE_PREDICT:
3703       break;
3704     case GIMPLE_SWITCH:
3705       {
3706 	gswitch *swtch = as_a <gswitch *> (stmt);
3707 	if (gimple_switch_num_labels (swtch) == 1)
3708 	  expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3709 	else
3710 	  expand_case (swtch);
3711       }
3712       break;
3713     case GIMPLE_ASM:
3714       expand_asm_stmt (as_a <gasm *> (stmt));
3715       break;
3716     case GIMPLE_CALL:
3717       expand_call_stmt (as_a <gcall *> (stmt));
3718       break;
3719 
3720     case GIMPLE_RETURN:
3721       {
3722 	op0 = gimple_return_retval (as_a <greturn *> (stmt));
3723 
3724 	/* If a return doesn't have a location, it very likely represents
3725 	   multiple user returns so we cannot let it inherit the location
3726 	   of the last statement of the previous basic block in RTL.  */
3727 	if (!gimple_has_location (stmt))
3728 	  set_curr_insn_location (cfun->function_end_locus);
3729 
3730 	if (op0 && op0 != error_mark_node)
3731 	  {
3732 	    tree result = DECL_RESULT (current_function_decl);
3733 
3734 	    /* If we are not returning the current function's RESULT_DECL,
3735 	       build an assignment to it.  */
3736 	    if (op0 != result)
3737 	      {
3738 		/* I believe that a function's RESULT_DECL is unique.  */
3739 		gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3740 
3741 		/* ??? We'd like to use simply expand_assignment here,
3742 		   but this fails if the value is of BLKmode but the return
3743 		   decl is a register.  expand_return has special handling
3744 		   for this combination, which eventually should move
3745 		   to common code.  See comments there.  Until then, let's
3746 		   build a modify expression :-/  */
3747 		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3748 			      result, op0);
3749 	      }
3750 	  }
3751 
3752 	if (!op0)
3753 	  expand_null_return ();
3754 	else
3755 	  expand_return (op0);
3756       }
3757       break;
3758 
3759     case GIMPLE_ASSIGN:
3760       {
3761 	gassign *assign_stmt = as_a <gassign *> (stmt);
3762 	tree lhs = gimple_assign_lhs (assign_stmt);
3763 
3764 	/* Tree expand used to fiddle with |= and &= of two bitfield
3765 	   COMPONENT_REFs here.  This can't happen with gimple, the LHS
3766 	   of binary assigns must be a gimple reg.  */
3767 
3768 	if (TREE_CODE (lhs) != SSA_NAME
3769 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
3770 	       == GIMPLE_SINGLE_RHS)
3771 	  {
3772 	    tree rhs = gimple_assign_rhs1 (assign_stmt);
3773 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3774 			== GIMPLE_SINGLE_RHS);
3775 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3776 		/* Do not put locations on possibly shared trees.  */
3777 		&& !is_gimple_min_invariant (rhs))
3778 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3779 	    if (TREE_CLOBBER_P (rhs))
3780 	      /* This is a clobber to mark the going out of scope for
3781 		 this LHS.  */
3782 	      expand_clobber (lhs);
3783 	    else
3784 	      expand_assignment (lhs, rhs,
3785 				 gimple_assign_nontemporal_move_p (
3786 				   assign_stmt));
3787 	  }
3788 	else
3789 	  {
3790 	    rtx target, temp;
3791 	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3792 	    struct separate_ops ops;
3793 	    bool promoted = false;
3794 
3795 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3796 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3797 	      promoted = true;
3798 
3799 	    ops.code = gimple_assign_rhs_code (assign_stmt);
3800 	    ops.type = TREE_TYPE (lhs);
3801 	    switch (get_gimple_rhs_class (ops.code))
3802 	      {
3803 		case GIMPLE_TERNARY_RHS:
3804 		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
3805 		  /* Fallthru */
3806 		case GIMPLE_BINARY_RHS:
3807 		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
3808 		  /* Fallthru */
3809 		case GIMPLE_UNARY_RHS:
3810 		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
3811 		  break;
3812 		default:
3813 		  gcc_unreachable ();
3814 	      }
3815 	    ops.location = gimple_location (stmt);
3816 
3817 	    /* If we want to use a nontemporal store, force the value to
3818 	       register first.  If we store into a promoted register,
3819 	       don't directly expand to target.  */
3820 	    temp = nontemporal || promoted ? NULL_RTX : target;
3821 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3822 				       EXPAND_NORMAL);
3823 
3824 	    if (temp == target)
3825 	      ;
3826 	    else if (promoted)
3827 	      {
3828 		int unsignedp = SUBREG_PROMOTED_SIGN (target);
3829 		/* If TEMP is a VOIDmode constant, use convert_modes to make
3830 		   sure that we properly convert it.  */
3831 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3832 		  {
3833 		    temp = convert_modes (GET_MODE (target),
3834 					  TYPE_MODE (ops.type),
3835 					  temp, unsignedp);
3836 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3837 					  GET_MODE (target), temp, unsignedp);
3838 		  }
3839 
3840 		convert_move (SUBREG_REG (target), temp, unsignedp);
3841 	      }
3842 	    else if (nontemporal && emit_storent_insn (target, temp))
3843 	      ;
3844 	    else
3845 	      {
3846 		temp = force_operand (temp, target);
3847 		if (temp != target)
3848 		  emit_move_insn (target, temp);
3849 	      }
3850 	  }
3851       }
3852       break;
3853 
3854     default:
3855       gcc_unreachable ();
3856     }
3857 }
3858 
3859 /* Expand one gimple statement STMT and return the last RTL instruction
3860    before any of the newly generated ones.
3861 
3862    In addition to generating the necessary RTL instructions this also
3863    sets REG_EH_REGION notes if necessary and sets the current source
3864    location for diagnostics.  */
3865 
3866 static rtx_insn *
expand_gimple_stmt(gimple * stmt)3867 expand_gimple_stmt (gimple *stmt)
3868 {
3869   location_t saved_location = input_location;
3870   rtx_insn *last = get_last_insn ();
3871   int lp_nr;
3872 
3873   gcc_assert (cfun);
3874 
3875   /* We need to save and restore the current source location so that errors
3876      discovered during expansion are emitted with the right location.  But
3877      it would be better if the diagnostic routines used the source location
3878      embedded in the tree nodes rather than globals.  */
3879   if (gimple_has_location (stmt))
3880     input_location = gimple_location (stmt);
3881 
3882   expand_gimple_stmt_1 (stmt);
3883 
3884   /* Free any temporaries used to evaluate this statement.  */
3885   free_temp_slots ();
3886 
3887   input_location = saved_location;
3888 
3889   /* Mark all insns that may trap.  */
3890   lp_nr = lookup_stmt_eh_lp (stmt);
3891   if (lp_nr)
3892     {
3893       rtx_insn *insn;
3894       for (insn = next_real_insn (last); insn;
3895 	   insn = next_real_insn (insn))
3896 	{
3897 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3898 	      /* If we want exceptions for non-call insns, any
3899 		 may_trap_p instruction may throw.  */
3900 	      && GET_CODE (PATTERN (insn)) != CLOBBER
3901 	      && GET_CODE (PATTERN (insn)) != USE
3902 	      && insn_could_throw_p (insn))
3903 	    make_reg_eh_region_note (insn, 0, lp_nr);
3904 	}
3905     }
3906 
3907   return last;
3908 }
3909 
3910 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
3911    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
3912    generated a tail call (something that might be denied by the ABI
3913    rules governing the call; see calls.c).
3914 
3915    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3916    can still reach the rest of BB.  The case here is __builtin_sqrt,
3917    where the NaN result goes through the external function (with a
3918    tailcall) and the normal result happens via a sqrt instruction.  */
3919 
3920 static basic_block
expand_gimple_tailcall(basic_block bb,gcall * stmt,bool * can_fallthru)3921 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3922 {
3923   rtx_insn *last2, *last;
3924   edge e;
3925   edge_iterator ei;
3926   profile_probability probability;
3927 
3928   last2 = last = expand_gimple_stmt (stmt);
3929 
3930   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3931     if (CALL_P (last) && SIBLING_CALL_P (last))
3932       goto found;
3933 
3934   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3935 
3936   *can_fallthru = true;
3937   return NULL;
3938 
3939  found:
3940   /* ??? Wouldn't it be better to just reset any pending stack adjust?
3941      Any instructions emitted here are about to be deleted.  */
3942   do_pending_stack_adjust ();
3943 
3944   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
3945   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
3946      EH or abnormal edges, we shouldn't have created a tail call in
3947      the first place.  So it seems to me we should just be removing
3948      all edges here, or redirecting the existing fallthru edge to
3949      the exit block.  */
3950 
3951   probability = profile_probability::never ();
3952 
3953   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3954     {
3955       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3956 	{
3957 	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3958 	    e->dest->count -= e->count ();
3959 	  probability += e->probability;
3960 	  remove_edge (e);
3961 	}
3962       else
3963 	ei_next (&ei);
3964     }
3965 
3966   /* This is somewhat ugly: the call_expr expander often emits instructions
3967      after the sibcall (to perform the function return).  These confuse the
3968      find_many_sub_basic_blocks code, so we need to get rid of these.  */
3969   last = NEXT_INSN (last);
3970   gcc_assert (BARRIER_P (last));
3971 
3972   *can_fallthru = false;
3973   while (NEXT_INSN (last))
3974     {
3975       /* For instance an sqrt builtin expander expands if with
3976 	 sibcall in the then and label for `else`.  */
3977       if (LABEL_P (NEXT_INSN (last)))
3978 	{
3979 	  *can_fallthru = true;
3980 	  break;
3981 	}
3982       delete_insn (NEXT_INSN (last));
3983     }
3984 
3985   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3986 		 | EDGE_SIBCALL);
3987   e->probability = probability;
3988   BB_END (bb) = last;
3989   update_bb_for_insn (bb);
3990 
3991   if (NEXT_INSN (last))
3992     {
3993       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3994 
3995       last = BB_END (bb);
3996       if (BARRIER_P (last))
3997 	BB_END (bb) = PREV_INSN (last);
3998     }
3999 
4000   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
4001 
4002   return bb;
4003 }
4004 
4005 /* Return the difference between the floor and the truncated result of
4006    a signed division by OP1 with remainder MOD.  */
4007 static rtx
floor_sdiv_adjust(machine_mode mode,rtx mod,rtx op1)4008 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4009 {
4010   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
4011   return gen_rtx_IF_THEN_ELSE
4012     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4013      gen_rtx_IF_THEN_ELSE
4014      (mode, gen_rtx_LT (BImode,
4015 			gen_rtx_DIV (mode, op1, mod),
4016 			const0_rtx),
4017       constm1_rtx, const0_rtx),
4018      const0_rtx);
4019 }
4020 
4021 /* Return the difference between the ceil and the truncated result of
4022    a signed division by OP1 with remainder MOD.  */
4023 static rtx
ceil_sdiv_adjust(machine_mode mode,rtx mod,rtx op1)4024 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4025 {
4026   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
4027   return gen_rtx_IF_THEN_ELSE
4028     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4029      gen_rtx_IF_THEN_ELSE
4030      (mode, gen_rtx_GT (BImode,
4031 			gen_rtx_DIV (mode, op1, mod),
4032 			const0_rtx),
4033       const1_rtx, const0_rtx),
4034      const0_rtx);
4035 }
4036 
4037 /* Return the difference between the ceil and the truncated result of
4038    an unsigned division by OP1 with remainder MOD.  */
4039 static rtx
ceil_udiv_adjust(machine_mode mode,rtx mod,rtx op1 ATTRIBUTE_UNUSED)4040 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
4041 {
4042   /* (mod != 0 ? 1 : 0) */
4043   return gen_rtx_IF_THEN_ELSE
4044     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4045      const1_rtx, const0_rtx);
4046 }
4047 
4048 /* Return the difference between the rounded and the truncated result
4049    of a signed division by OP1 with remainder MOD.  Halfway cases are
4050    rounded away from zero, rather than to the nearest even number.  */
4051 static rtx
round_sdiv_adjust(machine_mode mode,rtx mod,rtx op1)4052 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4053 {
4054   /* (abs (mod) >= abs (op1) - abs (mod)
4055       ? (op1 / mod > 0 ? 1 : -1)
4056       : 0) */
4057   return gen_rtx_IF_THEN_ELSE
4058     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
4059 		       gen_rtx_MINUS (mode,
4060 				      gen_rtx_ABS (mode, op1),
4061 				      gen_rtx_ABS (mode, mod))),
4062      gen_rtx_IF_THEN_ELSE
4063      (mode, gen_rtx_GT (BImode,
4064 			gen_rtx_DIV (mode, op1, mod),
4065 			const0_rtx),
4066       const1_rtx, constm1_rtx),
4067      const0_rtx);
4068 }
4069 
4070 /* Return the difference between the rounded and the truncated result
4071    of a unsigned division by OP1 with remainder MOD.  Halfway cases
4072    are rounded away from zero, rather than to the nearest even
4073    number.  */
4074 static rtx
round_udiv_adjust(machine_mode mode,rtx mod,rtx op1)4075 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
4076 {
4077   /* (mod >= op1 - mod ? 1 : 0) */
4078   return gen_rtx_IF_THEN_ELSE
4079     (mode, gen_rtx_GE (BImode, mod,
4080 		       gen_rtx_MINUS (mode, op1, mod)),
4081      const1_rtx, const0_rtx);
4082 }
4083 
4084 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
4085    any rtl.  */
4086 
4087 static rtx
convert_debug_memory_address(scalar_int_mode mode,rtx x,addr_space_t as)4088 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4089 			      addr_space_t as)
4090 {
4091 #ifndef POINTERS_EXTEND_UNSIGNED
4092   gcc_assert (mode == Pmode
4093 	      || mode == targetm.addr_space.address_mode (as));
4094   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4095 #else
4096   rtx temp;
4097 
4098   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4099 
4100   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4101     return x;
4102 
4103   /* X must have some form of address mode already.  */
4104   scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4105   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4106     x = lowpart_subreg (mode, x, xmode);
4107   else if (POINTERS_EXTEND_UNSIGNED > 0)
4108     x = gen_rtx_ZERO_EXTEND (mode, x);
4109   else if (!POINTERS_EXTEND_UNSIGNED)
4110     x = gen_rtx_SIGN_EXTEND (mode, x);
4111   else
4112     {
4113       switch (GET_CODE (x))
4114 	{
4115 	case SUBREG:
4116 	  if ((SUBREG_PROMOTED_VAR_P (x)
4117 	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4118 	       || (GET_CODE (SUBREG_REG (x)) == PLUS
4119 		   && REG_P (XEXP (SUBREG_REG (x), 0))
4120 		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4121 		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4122 	      && GET_MODE (SUBREG_REG (x)) == mode)
4123 	    return SUBREG_REG (x);
4124 	  break;
4125 	case LABEL_REF:
4126 	  temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4127 	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4128 	  return temp;
4129 	case SYMBOL_REF:
4130 	  temp = shallow_copy_rtx (x);
4131 	  PUT_MODE (temp, mode);
4132 	  return temp;
4133 	case CONST:
4134 	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4135 	  if (temp)
4136 	    temp = gen_rtx_CONST (mode, temp);
4137 	  return temp;
4138 	case PLUS:
4139 	case MINUS:
4140 	  if (CONST_INT_P (XEXP (x, 1)))
4141 	    {
4142 	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4143 	      if (temp)
4144 		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4145 	    }
4146 	  break;
4147 	default:
4148 	  break;
4149 	}
4150       /* Don't know how to express ptr_extend as operation in debug info.  */
4151       return NULL;
4152     }
4153 #endif /* POINTERS_EXTEND_UNSIGNED */
4154 
4155   return x;
4156 }
4157 
4158 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4159    by avoid_deep_ter_for_debug.  */
4160 
4161 static hash_map<tree, tree> *deep_ter_debug_map;
4162 
4163 /* Split too deep TER chains for debug stmts using debug temporaries.  */
4164 
4165 static void
avoid_deep_ter_for_debug(gimple * stmt,int depth)4166 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4167 {
4168   use_operand_p use_p;
4169   ssa_op_iter iter;
4170   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4171     {
4172       tree use = USE_FROM_PTR (use_p);
4173       if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4174 	continue;
4175       gimple *g = get_gimple_for_ssa_name (use);
4176       if (g == NULL)
4177 	continue;
4178       if (depth > 6 && !stmt_ends_bb_p (g))
4179 	{
4180 	  if (deep_ter_debug_map == NULL)
4181 	    deep_ter_debug_map = new hash_map<tree, tree>;
4182 
4183 	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4184 	  if (vexpr != NULL)
4185 	    continue;
4186 	  vexpr = make_node (DEBUG_EXPR_DECL);
4187 	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4188 	  DECL_ARTIFICIAL (vexpr) = 1;
4189 	  TREE_TYPE (vexpr) = TREE_TYPE (use);
4190 	  SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4191 	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
4192 	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4193 	  avoid_deep_ter_for_debug (def_temp, 0);
4194 	}
4195       else
4196 	avoid_deep_ter_for_debug (g, depth + 1);
4197     }
4198 }
4199 
4200 /* Return an RTX equivalent to the value of the parameter DECL.  */
4201 
4202 static rtx
expand_debug_parm_decl(tree decl)4203 expand_debug_parm_decl (tree decl)
4204 {
4205   rtx incoming = DECL_INCOMING_RTL (decl);
4206 
4207   if (incoming
4208       && GET_MODE (incoming) != BLKmode
4209       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4210 	  || (MEM_P (incoming)
4211 	      && REG_P (XEXP (incoming, 0))
4212 	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
4213     {
4214       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4215 
4216 #ifdef HAVE_window_save
4217       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4218 	 If the target machine has an explicit window save instruction, the
4219 	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
4220       if (REG_P (incoming)
4221 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4222 	incoming
4223 	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4224 				OUTGOING_REGNO (REGNO (incoming)), 0);
4225       else if (MEM_P (incoming))
4226 	{
4227 	  rtx reg = XEXP (incoming, 0);
4228 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4229 	    {
4230 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4231 	      incoming = replace_equiv_address_nv (incoming, reg);
4232 	    }
4233 	  else
4234 	    incoming = copy_rtx (incoming);
4235 	}
4236 #endif
4237 
4238       ENTRY_VALUE_EXP (rtl) = incoming;
4239       return rtl;
4240     }
4241 
4242   if (incoming
4243       && GET_MODE (incoming) != BLKmode
4244       && !TREE_ADDRESSABLE (decl)
4245       && MEM_P (incoming)
4246       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4247 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
4248 	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4249 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4250     return copy_rtx (incoming);
4251 
4252   return NULL_RTX;
4253 }
4254 
4255 /* Return an RTX equivalent to the value of the tree expression EXP.  */
4256 
4257 static rtx
expand_debug_expr(tree exp)4258 expand_debug_expr (tree exp)
4259 {
4260   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4261   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4262   machine_mode inner_mode = VOIDmode;
4263   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4264   addr_space_t as;
4265   scalar_int_mode op0_mode, op1_mode, addr_mode;
4266 
4267   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4268     {
4269     case tcc_expression:
4270       switch (TREE_CODE (exp))
4271 	{
4272 	case COND_EXPR:
4273 	case DOT_PROD_EXPR:
4274 	case SAD_EXPR:
4275 	case WIDEN_MULT_PLUS_EXPR:
4276 	case WIDEN_MULT_MINUS_EXPR:
4277 	  goto ternary;
4278 
4279 	case TRUTH_ANDIF_EXPR:
4280 	case TRUTH_ORIF_EXPR:
4281 	case TRUTH_AND_EXPR:
4282 	case TRUTH_OR_EXPR:
4283 	case TRUTH_XOR_EXPR:
4284 	  goto binary;
4285 
4286 	case TRUTH_NOT_EXPR:
4287 	  goto unary;
4288 
4289 	default:
4290 	  break;
4291 	}
4292       break;
4293 
4294     ternary:
4295       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4296       if (!op2)
4297 	return NULL_RTX;
4298       /* Fall through.  */
4299 
4300     binary:
4301     case tcc_binary:
4302       if (mode == BLKmode)
4303 	return NULL_RTX;
4304       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4305       if (!op1)
4306 	return NULL_RTX;
4307       switch (TREE_CODE (exp))
4308 	{
4309 	case LSHIFT_EXPR:
4310 	case RSHIFT_EXPR:
4311 	case LROTATE_EXPR:
4312 	case RROTATE_EXPR:
4313 	case WIDEN_LSHIFT_EXPR:
4314 	  /* Ensure second operand isn't wider than the first one.  */
4315 	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4316 	  if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4317 	      && (GET_MODE_UNIT_PRECISION (mode)
4318 		  < GET_MODE_PRECISION (op1_mode)))
4319 	    op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4320 	  break;
4321 	default:
4322 	  break;
4323 	}
4324       /* Fall through.  */
4325 
4326     unary:
4327     case tcc_unary:
4328       if (mode == BLKmode)
4329 	return NULL_RTX;
4330       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4331       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4332       if (!op0)
4333 	return NULL_RTX;
4334       break;
4335 
4336     case tcc_comparison:
4337       unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4338       goto binary;
4339 
4340     case tcc_type:
4341     case tcc_statement:
4342       gcc_unreachable ();
4343 
4344     case tcc_constant:
4345     case tcc_exceptional:
4346     case tcc_declaration:
4347     case tcc_reference:
4348     case tcc_vl_exp:
4349       break;
4350     }
4351 
4352   switch (TREE_CODE (exp))
4353     {
4354     case STRING_CST:
4355       if (!lookup_constant_def (exp))
4356 	{
4357 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
4358 	      != (size_t) TREE_STRING_LENGTH (exp))
4359 	    return NULL_RTX;
4360 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4361 	  op0 = gen_rtx_MEM (BLKmode, op0);
4362 	  set_mem_attributes (op0, exp, 0);
4363 	  return op0;
4364 	}
4365       /* Fall through.  */
4366 
4367     case INTEGER_CST:
4368     case REAL_CST:
4369     case FIXED_CST:
4370       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4371       return op0;
4372 
4373     case POLY_INT_CST:
4374       return immed_wide_int_const (poly_int_cst_value (exp), mode);
4375 
4376     case COMPLEX_CST:
4377       gcc_assert (COMPLEX_MODE_P (mode));
4378       op0 = expand_debug_expr (TREE_REALPART (exp));
4379       op1 = expand_debug_expr (TREE_IMAGPART (exp));
4380       return gen_rtx_CONCAT (mode, op0, op1);
4381 
4382     case DEBUG_EXPR_DECL:
4383       op0 = DECL_RTL_IF_SET (exp);
4384 
4385       if (op0)
4386 	{
4387 	  if (GET_MODE (op0) != mode)
4388 	    gcc_assert (VECTOR_TYPE_P (TREE_TYPE (exp)));
4389 	  else
4390 	    return op0;
4391 	}
4392 
4393       op0 = gen_rtx_DEBUG_EXPR (mode);
4394       DEBUG_EXPR_TREE_DECL (op0) = exp;
4395       SET_DECL_RTL (exp, op0);
4396 
4397       return op0;
4398 
4399     case VAR_DECL:
4400     case PARM_DECL:
4401     case FUNCTION_DECL:
4402     case LABEL_DECL:
4403     case CONST_DECL:
4404     case RESULT_DECL:
4405       op0 = DECL_RTL_IF_SET (exp);
4406 
4407       /* This decl was probably optimized away.  */
4408       if (!op0
4409 	  /* At least label RTXen are sometimes replaced by
4410 	     NOTE_INSN_DELETED_LABEL.  Any notes here are not
4411 	     handled by copy_rtx.  */
4412 	  || NOTE_P (op0))
4413 	{
4414 	  if (!VAR_P (exp)
4415 	      || DECL_EXTERNAL (exp)
4416 	      || !TREE_STATIC (exp)
4417 	      || !DECL_NAME (exp)
4418 	      || DECL_HARD_REGISTER (exp)
4419 	      || DECL_IN_CONSTANT_POOL (exp)
4420 	      || mode == VOIDmode)
4421 	    return NULL;
4422 
4423 	  op0 = make_decl_rtl_for_debug (exp);
4424 	  if (!MEM_P (op0)
4425 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4426 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4427 	    return NULL;
4428 	}
4429       else
4430 	op0 = copy_rtx (op0);
4431 
4432       if (GET_MODE (op0) == BLKmode
4433 	  /* If op0 is not BLKmode, but mode is, adjust_mode
4434 	     below would ICE.  While it is likely a FE bug,
4435 	     try to be robust here.  See PR43166.  */
4436 	  || mode == BLKmode
4437 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4438 	{
4439 	  gcc_assert (MEM_P (op0));
4440 	  op0 = adjust_address_nv (op0, mode, 0);
4441 	  return op0;
4442 	}
4443 
4444       /* Fall through.  */
4445 
4446     adjust_mode:
4447     case PAREN_EXPR:
4448     CASE_CONVERT:
4449       {
4450 	inner_mode = GET_MODE (op0);
4451 
4452 	if (mode == inner_mode)
4453 	  return op0;
4454 
4455 	if (inner_mode == VOIDmode)
4456 	  {
4457 	    if (TREE_CODE (exp) == SSA_NAME)
4458 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
4459 	    else
4460 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4461 	    if (mode == inner_mode)
4462 	      return op0;
4463 	  }
4464 
4465 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4466 	  {
4467 	    if (GET_MODE_UNIT_BITSIZE (mode)
4468 		== GET_MODE_UNIT_BITSIZE (inner_mode))
4469 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4470 	    else if (GET_MODE_UNIT_BITSIZE (mode)
4471 		     < GET_MODE_UNIT_BITSIZE (inner_mode))
4472 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4473 	    else
4474 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4475 	  }
4476 	else if (FLOAT_MODE_P (mode))
4477 	  {
4478 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
4479 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4480 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4481 	    else
4482 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4483 	  }
4484 	else if (FLOAT_MODE_P (inner_mode))
4485 	  {
4486 	    if (unsignedp)
4487 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4488 	    else
4489 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4490 	  }
4491 	else if (GET_MODE_UNIT_PRECISION (mode)
4492 		 == GET_MODE_UNIT_PRECISION (inner_mode))
4493 	  op0 = lowpart_subreg (mode, op0, inner_mode);
4494 	else if (GET_MODE_UNIT_PRECISION (mode)
4495 		 < GET_MODE_UNIT_PRECISION (inner_mode))
4496 	  op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4497 	else if (UNARY_CLASS_P (exp)
4498 		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4499 		 : unsignedp)
4500 	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4501 	else
4502 	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4503 
4504 	return op0;
4505       }
4506 
4507     case MEM_REF:
4508       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4509 	{
4510 	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4511 				     TREE_OPERAND (exp, 0),
4512 				     TREE_OPERAND (exp, 1));
4513 	  if (newexp)
4514 	    return expand_debug_expr (newexp);
4515 	}
4516       /* FALLTHROUGH */
4517     case INDIRECT_REF:
4518       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4519       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4520       if (!op0)
4521 	return NULL;
4522 
4523       if (TREE_CODE (exp) == MEM_REF)
4524 	{
4525 	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4526 	      || (GET_CODE (op0) == PLUS
4527 		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4528 	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
4529 	       Instead just use get_inner_reference.  */
4530 	    goto component_ref;
4531 
4532 	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4533 	  poly_int64 offset;
4534 	  if (!op1 || !poly_int_rtx_p (op1, &offset))
4535 	    return NULL;
4536 
4537 	  op0 = plus_constant (inner_mode, op0, offset);
4538 	}
4539 
4540       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4541 
4542       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4543 					  op0, as);
4544       if (op0 == NULL_RTX)
4545 	return NULL;
4546 
4547       op0 = gen_rtx_MEM (mode, op0);
4548       set_mem_attributes (op0, exp, 0);
4549       if (TREE_CODE (exp) == MEM_REF
4550 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4551 	set_mem_expr (op0, NULL_TREE);
4552       set_mem_addr_space (op0, as);
4553 
4554       return op0;
4555 
4556     case TARGET_MEM_REF:
4557       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4558 	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4559 	return NULL;
4560 
4561       op0 = expand_debug_expr
4562 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4563       if (!op0)
4564 	return NULL;
4565 
4566       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4567       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4568 					  op0, as);
4569       if (op0 == NULL_RTX)
4570 	return NULL;
4571 
4572       op0 = gen_rtx_MEM (mode, op0);
4573 
4574       set_mem_attributes (op0, exp, 0);
4575       set_mem_addr_space (op0, as);
4576 
4577       return op0;
4578 
4579     component_ref:
4580     case ARRAY_REF:
4581     case ARRAY_RANGE_REF:
4582     case COMPONENT_REF:
4583     case BIT_FIELD_REF:
4584     case REALPART_EXPR:
4585     case IMAGPART_EXPR:
4586     case VIEW_CONVERT_EXPR:
4587       {
4588 	machine_mode mode1;
4589 	poly_int64 bitsize, bitpos;
4590 	tree offset;
4591 	int reversep, volatilep = 0;
4592 	tree tem
4593 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4594 				 &unsignedp, &reversep, &volatilep);
4595 	rtx orig_op0;
4596 
4597 	if (known_eq (bitsize, 0))
4598 	  return NULL;
4599 
4600 	orig_op0 = op0 = expand_debug_expr (tem);
4601 
4602 	if (!op0)
4603 	  return NULL;
4604 
4605 	if (offset)
4606 	  {
4607 	    machine_mode addrmode, offmode;
4608 
4609 	    if (!MEM_P (op0))
4610 	      return NULL;
4611 
4612 	    op0 = XEXP (op0, 0);
4613 	    addrmode = GET_MODE (op0);
4614 	    if (addrmode == VOIDmode)
4615 	      addrmode = Pmode;
4616 
4617 	    op1 = expand_debug_expr (offset);
4618 	    if (!op1)
4619 	      return NULL;
4620 
4621 	    offmode = GET_MODE (op1);
4622 	    if (offmode == VOIDmode)
4623 	      offmode = TYPE_MODE (TREE_TYPE (offset));
4624 
4625 	    if (addrmode != offmode)
4626 	      op1 = lowpart_subreg (addrmode, op1, offmode);
4627 
4628 	    /* Don't use offset_address here, we don't need a
4629 	       recognizable address, and we don't want to generate
4630 	       code.  */
4631 	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4632 							  op0, op1));
4633 	  }
4634 
4635 	if (MEM_P (op0))
4636 	  {
4637 	    if (mode1 == VOIDmode)
4638 	      {
4639 		if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4640 		  return NULL;
4641 		/* Bitfield.  */
4642 		mode1 = smallest_int_mode_for_size (bitsize);
4643 	      }
4644 	    poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4645 	    if (maybe_ne (bytepos, 0))
4646 	      {
4647 		op0 = adjust_address_nv (op0, mode1, bytepos);
4648 		bitpos = num_trailing_bits (bitpos);
4649 	      }
4650 	    else if (known_eq (bitpos, 0)
4651 		     && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4652 	      op0 = adjust_address_nv (op0, mode, 0);
4653 	    else if (GET_MODE (op0) != mode1)
4654 	      op0 = adjust_address_nv (op0, mode1, 0);
4655 	    else
4656 	      op0 = copy_rtx (op0);
4657 	    if (op0 == orig_op0)
4658 	      op0 = shallow_copy_rtx (op0);
4659 	    if (TREE_CODE (tem) != SSA_NAME)
4660 	      set_mem_attributes (op0, exp, 0);
4661 	  }
4662 
4663 	if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4664 	  return op0;
4665 
4666 	if (maybe_lt (bitpos, 0))
4667           return NULL;
4668 
4669 	if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4670 	  return NULL;
4671 
4672 	poly_int64 bytepos;
4673 	if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4674 	    && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4675 	  {
4676 	    machine_mode opmode = GET_MODE (op0);
4677 
4678 	    if (opmode == VOIDmode)
4679 	      opmode = TYPE_MODE (TREE_TYPE (tem));
4680 
4681 	    /* This condition may hold if we're expanding the address
4682 	       right past the end of an array that turned out not to
4683 	       be addressable (i.e., the address was only computed in
4684 	       debug stmts).  The gen_subreg below would rightfully
4685 	       crash, and the address doesn't really exist, so just
4686 	       drop it.  */
4687 	    if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4688 	      return NULL;
4689 
4690 	    if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4691 	      return simplify_gen_subreg (mode, op0, opmode, bytepos);
4692 	  }
4693 
4694 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4695 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
4696 				     ? SIGN_EXTRACT
4697 				     : ZERO_EXTRACT, mode,
4698 				     GET_MODE (op0) != VOIDmode
4699 				     ? GET_MODE (op0)
4700 				     : TYPE_MODE (TREE_TYPE (tem)),
4701 				     op0, gen_int_mode (bitsize, word_mode),
4702 				     gen_int_mode (bitpos, word_mode));
4703       }
4704 
4705     case ABS_EXPR:
4706     case ABSU_EXPR:
4707       return simplify_gen_unary (ABS, mode, op0, mode);
4708 
4709     case NEGATE_EXPR:
4710       return simplify_gen_unary (NEG, mode, op0, mode);
4711 
4712     case BIT_NOT_EXPR:
4713       return simplify_gen_unary (NOT, mode, op0, mode);
4714 
4715     case FLOAT_EXPR:
4716       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4717 									 0)))
4718 				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4719 				 inner_mode);
4720 
4721     case FIX_TRUNC_EXPR:
4722       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4723 				 inner_mode);
4724 
4725     case POINTER_PLUS_EXPR:
4726       /* For the rare target where pointers are not the same size as
4727 	 size_t, we need to check for mis-matched modes and correct
4728 	 the addend.  */
4729       if (op0 && op1
4730 	  && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4731 	  && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4732 	  && op0_mode != op1_mode)
4733 	{
4734 	  if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4735 	      /* If OP0 is a partial mode, then we must truncate, even
4736 		 if it has the same bitsize as OP1 as GCC's
4737 		 representation of partial modes is opaque.  */
4738 	      || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4739 		  && (GET_MODE_BITSIZE (op0_mode)
4740 		      == GET_MODE_BITSIZE (op1_mode))))
4741 	    op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4742 	  else
4743 	    /* We always sign-extend, regardless of the signedness of
4744 	       the operand, because the operand is always unsigned
4745 	       here even if the original C expression is signed.  */
4746 	    op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4747 	}
4748       /* Fall through.  */
4749     case PLUS_EXPR:
4750       return simplify_gen_binary (PLUS, mode, op0, op1);
4751 
4752     case MINUS_EXPR:
4753     case POINTER_DIFF_EXPR:
4754       return simplify_gen_binary (MINUS, mode, op0, op1);
4755 
4756     case MULT_EXPR:
4757       return simplify_gen_binary (MULT, mode, op0, op1);
4758 
4759     case RDIV_EXPR:
4760     case TRUNC_DIV_EXPR:
4761     case EXACT_DIV_EXPR:
4762       if (unsignedp)
4763 	return simplify_gen_binary (UDIV, mode, op0, op1);
4764       else
4765 	return simplify_gen_binary (DIV, mode, op0, op1);
4766 
4767     case TRUNC_MOD_EXPR:
4768       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4769 
4770     case FLOOR_DIV_EXPR:
4771       if (unsignedp)
4772 	return simplify_gen_binary (UDIV, mode, op0, op1);
4773       else
4774 	{
4775 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4776 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4777 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4778 	  return simplify_gen_binary (PLUS, mode, div, adj);
4779 	}
4780 
4781     case FLOOR_MOD_EXPR:
4782       if (unsignedp)
4783 	return simplify_gen_binary (UMOD, mode, op0, op1);
4784       else
4785 	{
4786 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4787 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4788 	  adj = simplify_gen_unary (NEG, mode,
4789 				    simplify_gen_binary (MULT, mode, adj, op1),
4790 				    mode);
4791 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4792 	}
4793 
4794     case CEIL_DIV_EXPR:
4795       if (unsignedp)
4796 	{
4797 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4798 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4799 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4800 	  return simplify_gen_binary (PLUS, mode, div, adj);
4801 	}
4802       else
4803 	{
4804 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4805 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4806 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4807 	  return simplify_gen_binary (PLUS, mode, div, adj);
4808 	}
4809 
4810     case CEIL_MOD_EXPR:
4811       if (unsignedp)
4812 	{
4813 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4814 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4815 	  adj = simplify_gen_unary (NEG, mode,
4816 				    simplify_gen_binary (MULT, mode, adj, op1),
4817 				    mode);
4818 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4819 	}
4820       else
4821 	{
4822 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4823 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4824 	  adj = simplify_gen_unary (NEG, mode,
4825 				    simplify_gen_binary (MULT, mode, adj, op1),
4826 				    mode);
4827 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4828 	}
4829 
4830     case ROUND_DIV_EXPR:
4831       if (unsignedp)
4832 	{
4833 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4834 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4835 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4836 	  return simplify_gen_binary (PLUS, mode, div, adj);
4837 	}
4838       else
4839 	{
4840 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4841 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4842 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4843 	  return simplify_gen_binary (PLUS, mode, div, adj);
4844 	}
4845 
4846     case ROUND_MOD_EXPR:
4847       if (unsignedp)
4848 	{
4849 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4850 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4851 	  adj = simplify_gen_unary (NEG, mode,
4852 				    simplify_gen_binary (MULT, mode, adj, op1),
4853 				    mode);
4854 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4855 	}
4856       else
4857 	{
4858 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4859 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4860 	  adj = simplify_gen_unary (NEG, mode,
4861 				    simplify_gen_binary (MULT, mode, adj, op1),
4862 				    mode);
4863 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4864 	}
4865 
4866     case LSHIFT_EXPR:
4867       return simplify_gen_binary (ASHIFT, mode, op0, op1);
4868 
4869     case RSHIFT_EXPR:
4870       if (unsignedp)
4871 	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4872       else
4873 	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4874 
4875     case LROTATE_EXPR:
4876       return simplify_gen_binary (ROTATE, mode, op0, op1);
4877 
4878     case RROTATE_EXPR:
4879       return simplify_gen_binary (ROTATERT, mode, op0, op1);
4880 
4881     case MIN_EXPR:
4882       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4883 
4884     case MAX_EXPR:
4885       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4886 
4887     case BIT_AND_EXPR:
4888     case TRUTH_AND_EXPR:
4889       return simplify_gen_binary (AND, mode, op0, op1);
4890 
4891     case BIT_IOR_EXPR:
4892     case TRUTH_OR_EXPR:
4893       return simplify_gen_binary (IOR, mode, op0, op1);
4894 
4895     case BIT_XOR_EXPR:
4896     case TRUTH_XOR_EXPR:
4897       return simplify_gen_binary (XOR, mode, op0, op1);
4898 
4899     case TRUTH_ANDIF_EXPR:
4900       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4901 
4902     case TRUTH_ORIF_EXPR:
4903       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4904 
4905     case TRUTH_NOT_EXPR:
4906       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4907 
4908     case LT_EXPR:
4909       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4910 				      op0, op1);
4911 
4912     case LE_EXPR:
4913       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4914 				      op0, op1);
4915 
4916     case GT_EXPR:
4917       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4918 				      op0, op1);
4919 
4920     case GE_EXPR:
4921       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4922 				      op0, op1);
4923 
4924     case EQ_EXPR:
4925       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4926 
4927     case NE_EXPR:
4928       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4929 
4930     case UNORDERED_EXPR:
4931       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4932 
4933     case ORDERED_EXPR:
4934       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4935 
4936     case UNLT_EXPR:
4937       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4938 
4939     case UNLE_EXPR:
4940       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4941 
4942     case UNGT_EXPR:
4943       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4944 
4945     case UNGE_EXPR:
4946       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4947 
4948     case UNEQ_EXPR:
4949       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4950 
4951     case LTGT_EXPR:
4952       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4953 
4954     case COND_EXPR:
4955       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4956 
4957     case COMPLEX_EXPR:
4958       gcc_assert (COMPLEX_MODE_P (mode));
4959       if (GET_MODE (op0) == VOIDmode)
4960 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4961       if (GET_MODE (op1) == VOIDmode)
4962 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4963       return gen_rtx_CONCAT (mode, op0, op1);
4964 
4965     case CONJ_EXPR:
4966       if (GET_CODE (op0) == CONCAT)
4967 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4968 			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4969 						   XEXP (op0, 1),
4970 						   GET_MODE_INNER (mode)));
4971       else
4972 	{
4973 	  scalar_mode imode = GET_MODE_INNER (mode);
4974 	  rtx re, im;
4975 
4976 	  if (MEM_P (op0))
4977 	    {
4978 	      re = adjust_address_nv (op0, imode, 0);
4979 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4980 	    }
4981 	  else
4982 	    {
4983 	      scalar_int_mode ifmode;
4984 	      scalar_int_mode ihmode;
4985 	      rtx halfsize;
4986 	      if (!int_mode_for_mode (mode).exists (&ifmode)
4987 		  || !int_mode_for_mode (imode).exists (&ihmode))
4988 		return NULL;
4989 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4990 	      re = op0;
4991 	      if (mode != ifmode)
4992 		re = gen_rtx_SUBREG (ifmode, re, 0);
4993 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4994 	      if (imode != ihmode)
4995 		re = gen_rtx_SUBREG (imode, re, 0);
4996 	      im = copy_rtx (op0);
4997 	      if (mode != ifmode)
4998 		im = gen_rtx_SUBREG (ifmode, im, 0);
4999 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
5000 	      if (imode != ihmode)
5001 		im = gen_rtx_SUBREG (imode, im, 0);
5002 	    }
5003 	  im = gen_rtx_NEG (imode, im);
5004 	  return gen_rtx_CONCAT (mode, re, im);
5005 	}
5006 
5007     case ADDR_EXPR:
5008       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
5009       if (!op0 || !MEM_P (op0))
5010 	{
5011 	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
5012 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
5013 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
5014 	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
5015 		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
5016 	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
5017 
5018 	  if (handled_component_p (TREE_OPERAND (exp, 0)))
5019 	    {
5020 	      poly_int64 bitoffset, bitsize, maxsize, byteoffset;
5021 	      bool reverse;
5022 	      tree decl
5023 		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
5024 					   &bitsize, &maxsize, &reverse);
5025 	      if ((VAR_P (decl)
5026 		   || TREE_CODE (decl) == PARM_DECL
5027 		   || TREE_CODE (decl) == RESULT_DECL)
5028 		  && (!TREE_ADDRESSABLE (decl)
5029 		      || target_for_debug_bind (decl))
5030 		  && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
5031 		  && known_gt (bitsize, 0)
5032 		  && known_eq (bitsize, maxsize))
5033 		{
5034 		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
5035 		  return plus_constant (mode, base, byteoffset);
5036 		}
5037 	    }
5038 
5039 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
5040 	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5041 		 == ADDR_EXPR)
5042 	    {
5043 	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5044 						     0));
5045 	      if (op0 != NULL
5046 		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
5047 		      || (GET_CODE (op0) == PLUS
5048 			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
5049 			  && CONST_INT_P (XEXP (op0, 1)))))
5050 		{
5051 		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5052 							 1));
5053 		  poly_int64 offset;
5054 		  if (!op1 || !poly_int_rtx_p (op1, &offset))
5055 		    return NULL;
5056 
5057 		  return plus_constant (mode, op0, offset);
5058 		}
5059 	    }
5060 
5061 	  return NULL;
5062 	}
5063 
5064       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
5065       addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5066       op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
5067 
5068       return op0;
5069 
5070     case VECTOR_CST:
5071       {
5072 	unsigned HOST_WIDE_INT i, nelts;
5073 
5074 	if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
5075 	  return NULL;
5076 
5077 	op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5078 
5079 	for (i = 0; i < nelts; ++i)
5080 	  {
5081 	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
5082 	    if (!op1)
5083 	      return NULL;
5084 	    XVECEXP (op0, 0, i) = op1;
5085 	  }
5086 
5087 	return op0;
5088       }
5089 
5090     case CONSTRUCTOR:
5091       if (TREE_CLOBBER_P (exp))
5092 	return NULL;
5093       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5094 	{
5095 	  unsigned i;
5096 	  unsigned HOST_WIDE_INT nelts;
5097 	  tree val;
5098 
5099 	  if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5100 	    goto flag_unsupported;
5101 
5102 	  op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5103 
5104 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5105 	    {
5106 	      op1 = expand_debug_expr (val);
5107 	      if (!op1)
5108 		return NULL;
5109 	      XVECEXP (op0, 0, i) = op1;
5110 	    }
5111 
5112 	  if (i < nelts)
5113 	    {
5114 	      op1 = expand_debug_expr
5115 		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5116 
5117 	      if (!op1)
5118 		return NULL;
5119 
5120 	      for (; i < nelts; i++)
5121 		XVECEXP (op0, 0, i) = op1;
5122 	    }
5123 
5124 	  return op0;
5125 	}
5126       else
5127 	goto flag_unsupported;
5128 
5129     case CALL_EXPR:
5130       /* ??? Maybe handle some builtins?  */
5131       return NULL;
5132 
5133     case SSA_NAME:
5134       {
5135 	gimple *g = get_gimple_for_ssa_name (exp);
5136 	if (g)
5137 	  {
5138 	    tree t = NULL_TREE;
5139 	    if (deep_ter_debug_map)
5140 	      {
5141 		tree *slot = deep_ter_debug_map->get (exp);
5142 		if (slot)
5143 		  t = *slot;
5144 	      }
5145 	    if (t == NULL_TREE)
5146 	      t = gimple_assign_rhs_to_tree (g);
5147 	    op0 = expand_debug_expr (t);
5148 	    if (!op0)
5149 	      return NULL;
5150 	  }
5151 	else
5152 	  {
5153 	    /* If this is a reference to an incoming value of
5154 	       parameter that is never used in the code or where the
5155 	       incoming value is never used in the code, use
5156 	       PARM_DECL's DECL_RTL if set.  */
5157 	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
5158 		&& SSA_NAME_VAR (exp)
5159 		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5160 		&& has_zero_uses (exp))
5161 	      {
5162 		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5163 		if (op0)
5164 		  goto adjust_mode;
5165 		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5166 		if (op0)
5167 		  goto adjust_mode;
5168 	      }
5169 
5170 	    int part = var_to_partition (SA.map, exp);
5171 
5172 	    if (part == NO_PARTITION)
5173 	      return NULL;
5174 
5175 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5176 
5177 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
5178 	  }
5179 	goto adjust_mode;
5180       }
5181 
5182     case ERROR_MARK:
5183       return NULL;
5184 
5185     /* Vector stuff.  For most of the codes we don't have rtl codes.  */
5186     case REALIGN_LOAD_EXPR:
5187     case VEC_COND_EXPR:
5188     case VEC_PACK_FIX_TRUNC_EXPR:
5189     case VEC_PACK_FLOAT_EXPR:
5190     case VEC_PACK_SAT_EXPR:
5191     case VEC_PACK_TRUNC_EXPR:
5192     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
5193     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
5194     case VEC_UNPACK_FLOAT_HI_EXPR:
5195     case VEC_UNPACK_FLOAT_LO_EXPR:
5196     case VEC_UNPACK_HI_EXPR:
5197     case VEC_UNPACK_LO_EXPR:
5198     case VEC_WIDEN_MULT_HI_EXPR:
5199     case VEC_WIDEN_MULT_LO_EXPR:
5200     case VEC_WIDEN_MULT_EVEN_EXPR:
5201     case VEC_WIDEN_MULT_ODD_EXPR:
5202     case VEC_WIDEN_LSHIFT_HI_EXPR:
5203     case VEC_WIDEN_LSHIFT_LO_EXPR:
5204     case VEC_PERM_EXPR:
5205     case VEC_DUPLICATE_EXPR:
5206     case VEC_SERIES_EXPR:
5207     case SAD_EXPR:
5208       return NULL;
5209 
5210     /* Misc codes.  */
5211     case ADDR_SPACE_CONVERT_EXPR:
5212     case FIXED_CONVERT_EXPR:
5213     case OBJ_TYPE_REF:
5214     case WITH_SIZE_EXPR:
5215     case BIT_INSERT_EXPR:
5216       return NULL;
5217 
5218     case DOT_PROD_EXPR:
5219       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5220 	  && SCALAR_INT_MODE_P (mode))
5221 	{
5222 	  op0
5223 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5224 									  0)))
5225 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5226 				  inner_mode);
5227 	  op1
5228 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5229 									  1)))
5230 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5231 				  inner_mode);
5232 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5233 	  return simplify_gen_binary (PLUS, mode, op0, op2);
5234 	}
5235       return NULL;
5236 
5237     case WIDEN_MULT_EXPR:
5238     case WIDEN_MULT_PLUS_EXPR:
5239     case WIDEN_MULT_MINUS_EXPR:
5240       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5241 	  && SCALAR_INT_MODE_P (mode))
5242 	{
5243 	  inner_mode = GET_MODE (op0);
5244 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5245 	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5246 	  else
5247 	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5248 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5249 	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5250 	  else
5251 	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5252 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5253 	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5254 	    return op0;
5255 	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5256 	    return simplify_gen_binary (PLUS, mode, op0, op2);
5257 	  else
5258 	    return simplify_gen_binary (MINUS, mode, op2, op0);
5259 	}
5260       return NULL;
5261 
5262     case MULT_HIGHPART_EXPR:
5263       /* ??? Similar to the above.  */
5264       return NULL;
5265 
5266     case WIDEN_SUM_EXPR:
5267     case WIDEN_LSHIFT_EXPR:
5268       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5269 	  && SCALAR_INT_MODE_P (mode))
5270 	{
5271 	  op0
5272 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5273 									  0)))
5274 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5275 				  inner_mode);
5276 	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5277 				      ? ASHIFT : PLUS, mode, op0, op1);
5278 	}
5279       return NULL;
5280 
5281     default:
5282     flag_unsupported:
5283       if (flag_checking)
5284 	{
5285 	  debug_tree (exp);
5286 	  gcc_unreachable ();
5287 	}
5288       return NULL;
5289     }
5290 }
5291 
5292 /* Return an RTX equivalent to the source bind value of the tree expression
5293    EXP.  */
5294 
5295 static rtx
expand_debug_source_expr(tree exp)5296 expand_debug_source_expr (tree exp)
5297 {
5298   rtx op0 = NULL_RTX;
5299   machine_mode mode = VOIDmode, inner_mode;
5300 
5301   switch (TREE_CODE (exp))
5302     {
5303     case VAR_DECL:
5304       if (DECL_ABSTRACT_ORIGIN (exp))
5305 	return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
5306       break;
5307     case PARM_DECL:
5308       {
5309 	mode = DECL_MODE (exp);
5310 	op0 = expand_debug_parm_decl (exp);
5311 	if (op0)
5312 	   break;
5313 	/* See if this isn't an argument that has been completely
5314 	   optimized out.  */
5315 	if (!DECL_RTL_SET_P (exp)
5316 	    && !DECL_INCOMING_RTL (exp)
5317 	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
5318 	  {
5319 	    tree aexp = DECL_ORIGIN (exp);
5320 	    if (DECL_CONTEXT (aexp)
5321 		== DECL_ABSTRACT_ORIGIN (current_function_decl))
5322 	      {
5323 		vec<tree, va_gc> **debug_args;
5324 		unsigned int ix;
5325 		tree ddecl;
5326 		debug_args = decl_debug_args_lookup (current_function_decl);
5327 		if (debug_args != NULL)
5328 		  {
5329 		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5330 			 ix += 2)
5331 		      if (ddecl == aexp)
5332 			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5333 		  }
5334 	      }
5335 	  }
5336 	break;
5337       }
5338     default:
5339       break;
5340     }
5341 
5342   if (op0 == NULL_RTX)
5343     return NULL_RTX;
5344 
5345   inner_mode = GET_MODE (op0);
5346   if (mode == inner_mode)
5347     return op0;
5348 
5349   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5350     {
5351       if (GET_MODE_UNIT_BITSIZE (mode)
5352 	  == GET_MODE_UNIT_BITSIZE (inner_mode))
5353 	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5354       else if (GET_MODE_UNIT_BITSIZE (mode)
5355 	       < GET_MODE_UNIT_BITSIZE (inner_mode))
5356 	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5357       else
5358 	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5359     }
5360   else if (FLOAT_MODE_P (mode))
5361     gcc_unreachable ();
5362   else if (FLOAT_MODE_P (inner_mode))
5363     {
5364       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5365 	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5366       else
5367 	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5368     }
5369   else if (GET_MODE_UNIT_PRECISION (mode)
5370 	   == GET_MODE_UNIT_PRECISION (inner_mode))
5371     op0 = lowpart_subreg (mode, op0, inner_mode);
5372   else if (GET_MODE_UNIT_PRECISION (mode)
5373 	   < GET_MODE_UNIT_PRECISION (inner_mode))
5374     op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5375   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5376     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5377   else
5378     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5379 
5380   return op0;
5381 }
5382 
5383 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
5384    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5385    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
5386 
5387 static void
avoid_complex_debug_insns(rtx_insn * insn,rtx * exp_p,int depth)5388 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5389 {
5390   rtx exp = *exp_p;
5391 
5392   if (exp == NULL_RTX)
5393     return;
5394 
5395   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5396     return;
5397 
5398   if (depth == 4)
5399     {
5400       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
5401       rtx dval = make_debug_expr_from_rtl (exp);
5402 
5403       /* Emit a debug bind insn before INSN.  */
5404       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5405 				       DEBUG_EXPR_TREE_DECL (dval), exp,
5406 				       VAR_INIT_STATUS_INITIALIZED);
5407 
5408       emit_debug_insn_before (bind, insn);
5409       *exp_p = dval;
5410       return;
5411     }
5412 
5413   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5414   int i, j;
5415   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5416     switch (*format_ptr++)
5417       {
5418       case 'e':
5419 	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5420 	break;
5421 
5422       case 'E':
5423       case 'V':
5424 	for (j = 0; j < XVECLEN (exp, i); j++)
5425 	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5426 	break;
5427 
5428       default:
5429 	break;
5430       }
5431 }
5432 
5433 /* Expand the _LOCs in debug insns.  We run this after expanding all
5434    regular insns, so that any variables referenced in the function
5435    will have their DECL_RTLs set.  */
5436 
5437 static void
expand_debug_locations(void)5438 expand_debug_locations (void)
5439 {
5440   rtx_insn *insn;
5441   rtx_insn *last = get_last_insn ();
5442   int save_strict_alias = flag_strict_aliasing;
5443 
5444   /* New alias sets while setting up memory attributes cause
5445      -fcompare-debug failures, even though it doesn't bring about any
5446      codegen changes.  */
5447   flag_strict_aliasing = 0;
5448 
5449   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5450     if (DEBUG_BIND_INSN_P (insn))
5451       {
5452 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5453 	rtx val;
5454 	rtx_insn *prev_insn, *insn2;
5455 	machine_mode mode;
5456 
5457 	if (value == NULL_TREE)
5458 	  val = NULL_RTX;
5459 	else
5460 	  {
5461 	    if (INSN_VAR_LOCATION_STATUS (insn)
5462 		== VAR_INIT_STATUS_UNINITIALIZED)
5463 	      val = expand_debug_source_expr (value);
5464 	    /* The avoid_deep_ter_for_debug function inserts
5465 	       debug bind stmts after SSA_NAME definition, with the
5466 	       SSA_NAME as the whole bind location.  Disable temporarily
5467 	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5468 	       being defined in this DEBUG_INSN.  */
5469 	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5470 	      {
5471 		tree *slot = deep_ter_debug_map->get (value);
5472 		if (slot)
5473 		  {
5474 		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
5475 		      *slot = NULL_TREE;
5476 		    else
5477 		      slot = NULL;
5478 		  }
5479 		val = expand_debug_expr (value);
5480 		if (slot)
5481 		  *slot = INSN_VAR_LOCATION_DECL (insn);
5482 	      }
5483 	    else
5484 	      val = expand_debug_expr (value);
5485 	    gcc_assert (last == get_last_insn ());
5486 	  }
5487 
5488 	if (!val)
5489 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
5490 	else
5491 	  {
5492 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
5493 
5494 	    gcc_assert (mode == GET_MODE (val)
5495 			|| (GET_MODE (val) == VOIDmode
5496 			    && (CONST_SCALAR_INT_P (val)
5497 				|| GET_CODE (val) == CONST_FIXED
5498 				|| GET_CODE (val) == LABEL_REF)));
5499 	  }
5500 
5501 	INSN_VAR_LOCATION_LOC (insn) = val;
5502 	prev_insn = PREV_INSN (insn);
5503 	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5504 	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5505       }
5506 
5507   flag_strict_aliasing = save_strict_alias;
5508 }
5509 
5510 /* Performs swapping operands of commutative operations to expand
5511    the expensive one first.  */
5512 
5513 static void
reorder_operands(basic_block bb)5514 reorder_operands (basic_block bb)
5515 {
5516   unsigned int *lattice;  /* Hold cost of each statement.  */
5517   unsigned int i = 0, n = 0;
5518   gimple_stmt_iterator gsi;
5519   gimple_seq stmts;
5520   gimple *stmt;
5521   bool swap;
5522   tree op0, op1;
5523   ssa_op_iter iter;
5524   use_operand_p use_p;
5525   gimple *def0, *def1;
5526 
5527   /* Compute cost of each statement using estimate_num_insns.  */
5528   stmts = bb_seq (bb);
5529   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5530     {
5531       stmt = gsi_stmt (gsi);
5532       if (!is_gimple_debug (stmt))
5533         gimple_set_uid (stmt, n++);
5534     }
5535   lattice = XNEWVEC (unsigned int, n);
5536   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5537     {
5538       unsigned cost;
5539       stmt = gsi_stmt (gsi);
5540       if (is_gimple_debug (stmt))
5541 	continue;
5542       cost = estimate_num_insns (stmt, &eni_size_weights);
5543       lattice[i] = cost;
5544       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5545 	{
5546 	  tree use = USE_FROM_PTR (use_p);
5547 	  gimple *def_stmt;
5548 	  if (TREE_CODE (use) != SSA_NAME)
5549 	    continue;
5550 	  def_stmt = get_gimple_for_ssa_name (use);
5551 	  if (!def_stmt)
5552 	    continue;
5553 	  lattice[i] += lattice[gimple_uid (def_stmt)];
5554 	}
5555       i++;
5556       if (!is_gimple_assign (stmt)
5557 	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5558 	continue;
5559       op0 = gimple_op (stmt, 1);
5560       op1 = gimple_op (stmt, 2);
5561       if (TREE_CODE (op0) != SSA_NAME
5562 	  || TREE_CODE (op1) != SSA_NAME)
5563 	continue;
5564       /* Swap operands if the second one is more expensive.  */
5565       def0 = get_gimple_for_ssa_name (op0);
5566       def1 = get_gimple_for_ssa_name (op1);
5567       if (!def1)
5568 	continue;
5569       swap = false;
5570       if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5571 	swap = true;
5572       if (swap)
5573 	{
5574 	  if (dump_file && (dump_flags & TDF_DETAILS))
5575 	    {
5576 	      fprintf (dump_file, "Swap operands in stmt:\n");
5577 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5578 	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5579 		       def0 ? lattice[gimple_uid (def0)] : 0,
5580 		       lattice[gimple_uid (def1)]);
5581 	    }
5582 	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5583 			     gimple_assign_rhs2_ptr (stmt));
5584 	}
5585     }
5586   XDELETE (lattice);
5587 }
5588 
5589 /* Expand basic block BB from GIMPLE trees to RTL.  */
5590 
5591 static basic_block
expand_gimple_basic_block(basic_block bb,bool disable_tail_calls)5592 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5593 {
5594   gimple_stmt_iterator gsi;
5595   gimple_seq stmts;
5596   gimple *stmt = NULL;
5597   rtx_note *note = NULL;
5598   rtx_insn *last;
5599   edge e;
5600   edge_iterator ei;
5601 
5602   if (dump_file)
5603     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5604 	     bb->index);
5605 
5606   /* Note that since we are now transitioning from GIMPLE to RTL, we
5607      cannot use the gsi_*_bb() routines because they expect the basic
5608      block to be in GIMPLE, instead of RTL.  Therefore, we need to
5609      access the BB sequence directly.  */
5610   if (optimize)
5611     reorder_operands (bb);
5612   stmts = bb_seq (bb);
5613   bb->il.gimple.seq = NULL;
5614   bb->il.gimple.phi_nodes = NULL;
5615   rtl_profile_for_bb (bb);
5616   init_rtl_bb_info (bb);
5617   bb->flags |= BB_RTL;
5618 
5619   /* Remove the RETURN_EXPR if we may fall though to the exit
5620      instead.  */
5621   gsi = gsi_last (stmts);
5622   if (!gsi_end_p (gsi)
5623       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5624     {
5625       greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5626 
5627       gcc_assert (single_succ_p (bb));
5628       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5629 
5630       if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5631 	  && !gimple_return_retval (ret_stmt))
5632 	{
5633 	  gsi_remove (&gsi, false);
5634 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5635 	}
5636     }
5637 
5638   gsi = gsi_start (stmts);
5639   if (!gsi_end_p (gsi))
5640     {
5641       stmt = gsi_stmt (gsi);
5642       if (gimple_code (stmt) != GIMPLE_LABEL)
5643 	stmt = NULL;
5644     }
5645 
5646   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5647 
5648   if (stmt || elt)
5649     {
5650       gcc_checking_assert (!note);
5651       last = get_last_insn ();
5652 
5653       if (stmt)
5654 	{
5655 	  expand_gimple_stmt (stmt);
5656 	  gsi_next (&gsi);
5657 	}
5658 
5659       if (elt)
5660 	emit_label (*elt);
5661 
5662       BB_HEAD (bb) = NEXT_INSN (last);
5663       if (NOTE_P (BB_HEAD (bb)))
5664 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5665       gcc_assert (LABEL_P (BB_HEAD (bb)));
5666       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5667 
5668       maybe_dump_rtl_for_gimple_stmt (stmt, last);
5669     }
5670   else
5671     BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5672 
5673   if (note)
5674     NOTE_BASIC_BLOCK (note) = bb;
5675 
5676   for (; !gsi_end_p (gsi); gsi_next (&gsi))
5677     {
5678       basic_block new_bb;
5679 
5680       stmt = gsi_stmt (gsi);
5681 
5682       /* If this statement is a non-debug one, and we generate debug
5683 	 insns, then this one might be the last real use of a TERed
5684 	 SSA_NAME, but where there are still some debug uses further
5685 	 down.  Expanding the current SSA name in such further debug
5686 	 uses by their RHS might lead to wrong debug info, as coalescing
5687 	 might make the operands of such RHS be placed into the same
5688 	 pseudo as something else.  Like so:
5689 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
5690 	   use(a_1);
5691 	   a_2 = ...
5692            #DEBUG ... => a_1
5693 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5694 	 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
5695 	 the write to a_2 would actually have clobbered the place which
5696 	 formerly held a_0.
5697 
5698 	 So, instead of that, we recognize the situation, and generate
5699 	 debug temporaries at the last real use of TERed SSA names:
5700 	   a_1 = a_0 + 1;
5701            #DEBUG #D1 => a_1
5702 	   use(a_1);
5703 	   a_2 = ...
5704            #DEBUG ... => #D1
5705 	 */
5706       if (MAY_HAVE_DEBUG_BIND_INSNS
5707 	  && SA.values
5708 	  && !is_gimple_debug (stmt))
5709 	{
5710 	  ssa_op_iter iter;
5711 	  tree op;
5712 	  gimple *def;
5713 
5714 	  location_t sloc = curr_insn_location ();
5715 
5716 	  /* Look for SSA names that have their last use here (TERed
5717 	     names always have only one real use).  */
5718 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5719 	    if ((def = get_gimple_for_ssa_name (op)))
5720 	      {
5721 		imm_use_iterator imm_iter;
5722 		use_operand_p use_p;
5723 		bool have_debug_uses = false;
5724 
5725 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5726 		  {
5727 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
5728 		      {
5729 			have_debug_uses = true;
5730 			break;
5731 		      }
5732 		  }
5733 
5734 		if (have_debug_uses)
5735 		  {
5736 		    /* OP is a TERed SSA name, with DEF its defining
5737 		       statement, and where OP is used in further debug
5738 		       instructions.  Generate a debug temporary, and
5739 		       replace all uses of OP in debug insns with that
5740 		       temporary.  */
5741 		    gimple *debugstmt;
5742 		    tree value = gimple_assign_rhs_to_tree (def);
5743 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
5744 		    rtx val;
5745 		    machine_mode mode;
5746 
5747 		    set_curr_insn_location (gimple_location (def));
5748 
5749 		    DECL_ARTIFICIAL (vexpr) = 1;
5750 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
5751 		    if (DECL_P (value))
5752 		      mode = DECL_MODE (value);
5753 		    else
5754 		      mode = TYPE_MODE (TREE_TYPE (value));
5755 		    SET_DECL_MODE (vexpr, mode);
5756 
5757 		    val = gen_rtx_VAR_LOCATION
5758 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5759 
5760 		    emit_debug_insn (val);
5761 
5762 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5763 		      {
5764 			if (!gimple_debug_bind_p (debugstmt))
5765 			  continue;
5766 
5767 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5768 			  SET_USE (use_p, vexpr);
5769 
5770 			update_stmt (debugstmt);
5771 		      }
5772 		  }
5773 	      }
5774 	  set_curr_insn_location (sloc);
5775 	}
5776 
5777       currently_expanding_gimple_stmt = stmt;
5778 
5779       /* Expand this statement, then evaluate the resulting RTL and
5780 	 fixup the CFG accordingly.  */
5781       if (gimple_code (stmt) == GIMPLE_COND)
5782 	{
5783 	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5784 	  if (new_bb)
5785 	    return new_bb;
5786 	}
5787       else if (is_gimple_debug (stmt))
5788 	{
5789 	  location_t sloc = curr_insn_location ();
5790 	  gimple_stmt_iterator nsi = gsi;
5791 
5792 	  for (;;)
5793 	    {
5794 	      tree var;
5795 	      tree value = NULL_TREE;
5796 	      rtx val = NULL_RTX;
5797 	      machine_mode mode;
5798 
5799 	      if (!gimple_debug_nonbind_marker_p (stmt))
5800 		{
5801 		  if (gimple_debug_bind_p (stmt))
5802 		    {
5803 		      var = gimple_debug_bind_get_var (stmt);
5804 
5805 		      if (TREE_CODE (var) != DEBUG_EXPR_DECL
5806 			  && TREE_CODE (var) != LABEL_DECL
5807 			  && !target_for_debug_bind (var))
5808 			goto delink_debug_stmt;
5809 
5810 		      if (DECL_P (var) && !VECTOR_TYPE_P (TREE_TYPE (var)))
5811 			mode = DECL_MODE (var);
5812 		      else
5813 			mode = TYPE_MODE (TREE_TYPE (var));
5814 
5815 		      if (gimple_debug_bind_has_value_p (stmt))
5816 			value = gimple_debug_bind_get_value (stmt);
5817 
5818 		      val = gen_rtx_VAR_LOCATION
5819 			(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5820 		    }
5821 		  else if (gimple_debug_source_bind_p (stmt))
5822 		    {
5823 		      var = gimple_debug_source_bind_get_var (stmt);
5824 
5825 		      value = gimple_debug_source_bind_get_value (stmt);
5826 
5827 		      if (!VECTOR_TYPE_P (TREE_TYPE (var)))
5828 			mode = DECL_MODE (var);
5829 		      else
5830 			mode = TYPE_MODE (TREE_TYPE (var));
5831 
5832 		      val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5833 						  VAR_INIT_STATUS_UNINITIALIZED);
5834 		    }
5835 		  else
5836 		    gcc_unreachable ();
5837 		}
5838 	      /* If this function was first compiled with markers
5839 		 enabled, but they're now disable (e.g. LTO), drop
5840 		 them on the floor.  */
5841 	      else if (gimple_debug_nonbind_marker_p (stmt)
5842 		       && !MAY_HAVE_DEBUG_MARKER_INSNS)
5843 		goto delink_debug_stmt;
5844 	      else if (gimple_debug_begin_stmt_p (stmt))
5845 		val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5846 	      else if (gimple_debug_inline_entry_p (stmt))
5847 		{
5848 		  tree block = gimple_block (stmt);
5849 
5850 		  if (block)
5851 		    val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5852 		  else
5853 		    goto delink_debug_stmt;
5854 		}
5855 	      else
5856 		gcc_unreachable ();
5857 
5858 	      last = get_last_insn ();
5859 
5860 	      set_curr_insn_location (gimple_location (stmt));
5861 
5862 	      emit_debug_insn (val);
5863 
5864 	      if (dump_file && (dump_flags & TDF_DETAILS))
5865 		{
5866 		  /* We can't dump the insn with a TREE where an RTX
5867 		     is expected.  */
5868 		  if (GET_CODE (val) == VAR_LOCATION)
5869 		    {
5870 		      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5871 		      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5872 		    }
5873 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
5874 		  if (GET_CODE (val) == VAR_LOCATION)
5875 		    PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5876 		}
5877 
5878 	    delink_debug_stmt:
5879 	      /* In order not to generate too many debug temporaries,
5880 	         we delink all uses of debug statements we already expanded.
5881 		 Therefore debug statements between definition and real
5882 		 use of TERed SSA names will continue to use the SSA name,
5883 		 and not be replaced with debug temps.  */
5884 	      delink_stmt_imm_use (stmt);
5885 
5886 	      gsi = nsi;
5887 	      gsi_next (&nsi);
5888 	      if (gsi_end_p (nsi))
5889 		break;
5890 	      stmt = gsi_stmt (nsi);
5891 	      if (!is_gimple_debug (stmt))
5892 		break;
5893 	    }
5894 
5895 	  set_curr_insn_location (sloc);
5896 	}
5897       else
5898 	{
5899 	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
5900 	  if (call_stmt
5901 	      && gimple_call_tail_p (call_stmt)
5902 	      && disable_tail_calls)
5903 	    gimple_call_set_tail (call_stmt, false);
5904 
5905 	  if (call_stmt && gimple_call_tail_p (call_stmt))
5906 	    {
5907 	      bool can_fallthru;
5908 	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5909 	      if (new_bb)
5910 		{
5911 		  if (can_fallthru)
5912 		    bb = new_bb;
5913 		  else
5914 		    return new_bb;
5915 		}
5916 	    }
5917 	  else
5918 	    {
5919 	      def_operand_p def_p;
5920 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5921 
5922 	      if (def_p != NULL)
5923 		{
5924 		  /* Ignore this stmt if it is in the list of
5925 		     replaceable expressions.  */
5926 		  if (SA.values
5927 		      && bitmap_bit_p (SA.values,
5928 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5929 		    continue;
5930 		}
5931 	      last = expand_gimple_stmt (stmt);
5932 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5933 	    }
5934 	}
5935     }
5936 
5937   currently_expanding_gimple_stmt = NULL;
5938 
5939   /* Expand implicit goto and convert goto_locus.  */
5940   FOR_EACH_EDGE (e, ei, bb->succs)
5941     {
5942       if (e->goto_locus != UNKNOWN_LOCATION)
5943 	set_curr_insn_location (e->goto_locus);
5944       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5945 	{
5946 	  emit_jump (label_rtx_for_bb (e->dest));
5947 	  e->flags &= ~EDGE_FALLTHRU;
5948 	}
5949     }
5950 
5951   /* Expanded RTL can create a jump in the last instruction of block.
5952      This later might be assumed to be a jump to successor and break edge insertion.
5953      We need to insert dummy move to prevent this. PR41440. */
5954   if (single_succ_p (bb)
5955       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5956       && (last = get_last_insn ())
5957       && (JUMP_P (last)
5958 	  || (DEBUG_INSN_P (last)
5959 	      && JUMP_P (prev_nondebug_insn (last)))))
5960     {
5961       rtx dummy = gen_reg_rtx (SImode);
5962       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5963     }
5964 
5965   do_pending_stack_adjust ();
5966 
5967   /* Find the block tail.  The last insn in the block is the insn
5968      before a barrier and/or table jump insn.  */
5969   last = get_last_insn ();
5970   if (BARRIER_P (last))
5971     last = PREV_INSN (last);
5972   if (JUMP_TABLE_DATA_P (last))
5973     last = PREV_INSN (PREV_INSN (last));
5974   if (BARRIER_P (last))
5975     last = PREV_INSN (last);
5976   BB_END (bb) = last;
5977 
5978   update_bb_for_insn (bb);
5979 
5980   return bb;
5981 }
5982 
5983 
5984 /* Create a basic block for initialization code.  */
5985 
5986 static basic_block
construct_init_block(void)5987 construct_init_block (void)
5988 {
5989   basic_block init_block, first_block;
5990   edge e = NULL;
5991   int flags;
5992 
5993   /* Multiple entry points not supported yet.  */
5994   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5995   init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5996   init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5997   ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5998   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5999 
6000   e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
6001 
6002   /* When entry edge points to first basic block, we don't need jump,
6003      otherwise we have to jump into proper target.  */
6004   if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
6005     {
6006       tree label = gimple_block_label (e->dest);
6007 
6008       emit_jump (jump_target_rtx (label));
6009       flags = 0;
6010     }
6011   else
6012     flags = EDGE_FALLTHRU;
6013 
6014   init_block = create_basic_block (NEXT_INSN (get_insns ()),
6015 				   get_last_insn (),
6016 				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
6017   init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6018   add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6019   if (e)
6020     {
6021       first_block = e->dest;
6022       redirect_edge_succ (e, init_block);
6023       make_single_succ_edge (init_block, first_block, flags);
6024     }
6025   else
6026     make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6027 			   EDGE_FALLTHRU);
6028 
6029   update_bb_for_insn (init_block);
6030   return init_block;
6031 }
6032 
6033 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
6034    found in the block tree.  */
6035 
6036 static void
set_block_levels(tree block,int level)6037 set_block_levels (tree block, int level)
6038 {
6039   while (block)
6040     {
6041       BLOCK_NUMBER (block) = level;
6042       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
6043       block = BLOCK_CHAIN (block);
6044     }
6045 }
6046 
6047 /* Create a block containing landing pads and similar stuff.  */
6048 
6049 static void
construct_exit_block(void)6050 construct_exit_block (void)
6051 {
6052   rtx_insn *head = get_last_insn ();
6053   rtx_insn *end;
6054   basic_block exit_block;
6055   edge e, e2;
6056   unsigned ix;
6057   edge_iterator ei;
6058   basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
6059   rtx_insn *orig_end = BB_END (prev_bb);
6060 
6061   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6062 
6063   /* Make sure the locus is set to the end of the function, so that
6064      epilogue line numbers and warnings are set properly.  */
6065   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
6066     input_location = cfun->function_end_locus;
6067 
6068   /* Generate rtl for function exit.  */
6069   expand_function_end ();
6070 
6071   end = get_last_insn ();
6072   if (head == end)
6073     return;
6074   /* While emitting the function end we could move end of the last basic
6075      block.  */
6076   BB_END (prev_bb) = orig_end;
6077   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
6078     head = NEXT_INSN (head);
6079   /* But make sure exit_block starts with RETURN_LABEL, otherwise the
6080      bb count counting will be confused.  Any instructions before that
6081      label are emitted for the case where PREV_BB falls through into the
6082      exit block, so append those instructions to prev_bb in that case.  */
6083   if (NEXT_INSN (head) != return_label)
6084     {
6085       while (NEXT_INSN (head) != return_label)
6086 	{
6087 	  if (!NOTE_P (NEXT_INSN (head)))
6088 	    BB_END (prev_bb) = NEXT_INSN (head);
6089 	  head = NEXT_INSN (head);
6090 	}
6091     }
6092   exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
6093   exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
6094   add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6095 
6096   ix = 0;
6097   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
6098     {
6099       e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6100       if (!(e->flags & EDGE_ABNORMAL))
6101 	redirect_edge_succ (e, exit_block);
6102       else
6103 	ix++;
6104     }
6105 
6106   e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6107 			     EDGE_FALLTHRU);
6108   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6109     if (e2 != e)
6110       {
6111 	exit_block->count -= e2->count ();
6112       }
6113   update_bb_for_insn (exit_block);
6114 }
6115 
6116 /* Helper function for discover_nonconstant_array_refs.
6117    Look for ARRAY_REF nodes with non-constant indexes and mark them
6118    addressable.  */
6119 
6120 static tree
discover_nonconstant_array_refs_r(tree * tp,int * walk_subtrees,void * data ATTRIBUTE_UNUSED)6121 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6122 				   void *data ATTRIBUTE_UNUSED)
6123 {
6124   tree t = *tp;
6125 
6126   if (IS_TYPE_OR_DECL_P (t))
6127     *walk_subtrees = 0;
6128   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6129     {
6130       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6131 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6132 	      && (!TREE_OPERAND (t, 2)
6133 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6134 	     || (TREE_CODE (t) == COMPONENT_REF
6135 		 && (!TREE_OPERAND (t,2)
6136 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6137 	     || TREE_CODE (t) == BIT_FIELD_REF
6138 	     || TREE_CODE (t) == REALPART_EXPR
6139 	     || TREE_CODE (t) == IMAGPART_EXPR
6140 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
6141 	     || CONVERT_EXPR_P (t))
6142 	t = TREE_OPERAND (t, 0);
6143 
6144       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6145 	{
6146 	  t = get_base_address (t);
6147 	  if (t && DECL_P (t)
6148               && DECL_MODE (t) != BLKmode)
6149 	    TREE_ADDRESSABLE (t) = 1;
6150 	}
6151 
6152       *walk_subtrees = 0;
6153     }
6154   /* References of size POLY_INT_CST to a fixed-size object must go
6155      through memory.  It's more efficient to force that here than
6156      to create temporary slots on the fly.  */
6157   else if ((TREE_CODE (t) == MEM_REF || TREE_CODE (t) == TARGET_MEM_REF)
6158 	   && TYPE_SIZE (TREE_TYPE (t))
6159 	   && POLY_INT_CST_P (TYPE_SIZE (TREE_TYPE (t))))
6160     {
6161       tree base = get_base_address (t);
6162       if (base
6163 	  && DECL_P (base)
6164 	  && DECL_MODE (base) != BLKmode
6165 	  && GET_MODE_SIZE (DECL_MODE (base)).is_constant ())
6166 	TREE_ADDRESSABLE (base) = 1;
6167       *walk_subtrees = 0;
6168     }
6169 
6170   return NULL_TREE;
6171 }
6172 
6173 /* RTL expansion is not able to compile array references with variable
6174    offsets for arrays stored in single register.  Discover such
6175    expressions and mark variables as addressable to avoid this
6176    scenario.  */
6177 
6178 static void
discover_nonconstant_array_refs(void)6179 discover_nonconstant_array_refs (void)
6180 {
6181   basic_block bb;
6182   gimple_stmt_iterator gsi;
6183 
6184   FOR_EACH_BB_FN (bb, cfun)
6185     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6186       {
6187 	gimple *stmt = gsi_stmt (gsi);
6188 	if (!is_gimple_debug (stmt))
6189 	  {
6190 	    walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6191 	    gcall *call = dyn_cast <gcall *> (stmt);
6192 	    if (call && gimple_call_internal_p (call))
6193 	      switch (gimple_call_internal_fn (call))
6194 		{
6195 		case IFN_LOAD_LANES:
6196 		  /* The source must be a MEM.  */
6197 		  mark_addressable (gimple_call_arg (call, 0));
6198 		  break;
6199 		case IFN_STORE_LANES:
6200 		  /* The destination must be a MEM.  */
6201 		  mark_addressable (gimple_call_lhs (call));
6202 		  break;
6203 		default:
6204 		  break;
6205 		}
6206 	  }
6207       }
6208 }
6209 
6210 /* This function sets crtl->args.internal_arg_pointer to a virtual
6211    register if DRAP is needed.  Local register allocator will replace
6212    virtual_incoming_args_rtx with the virtual register.  */
6213 
6214 static void
expand_stack_alignment(void)6215 expand_stack_alignment (void)
6216 {
6217   rtx drap_rtx;
6218   unsigned int preferred_stack_boundary;
6219 
6220   if (! SUPPORTS_STACK_ALIGNMENT)
6221     return;
6222 
6223   if (cfun->calls_alloca
6224       || cfun->has_nonlocal_label
6225       || crtl->has_nonlocal_goto)
6226     crtl->need_drap = true;
6227 
6228   /* Call update_stack_boundary here again to update incoming stack
6229      boundary.  It may set incoming stack alignment to a different
6230      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
6231      use the minimum incoming stack alignment to check if it is OK
6232      to perform sibcall optimization since sibcall optimization will
6233      only align the outgoing stack to incoming stack boundary.  */
6234   if (targetm.calls.update_stack_boundary)
6235     targetm.calls.update_stack_boundary ();
6236 
6237   /* The incoming stack frame has to be aligned at least at
6238      parm_stack_boundary.  */
6239   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6240 
6241   /* Update crtl->stack_alignment_estimated and use it later to align
6242      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
6243      exceptions since callgraph doesn't collect incoming stack alignment
6244      in this case.  */
6245   if (cfun->can_throw_non_call_exceptions
6246       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6247     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6248   else
6249     preferred_stack_boundary = crtl->preferred_stack_boundary;
6250   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6251     crtl->stack_alignment_estimated = preferred_stack_boundary;
6252   if (preferred_stack_boundary > crtl->stack_alignment_needed)
6253     crtl->stack_alignment_needed = preferred_stack_boundary;
6254 
6255   gcc_assert (crtl->stack_alignment_needed
6256 	      <= crtl->stack_alignment_estimated);
6257 
6258   crtl->stack_realign_needed
6259     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6260   crtl->stack_realign_tried = crtl->stack_realign_needed;
6261 
6262   crtl->stack_realign_processed = true;
6263 
6264   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6265      alignment.  */
6266   gcc_assert (targetm.calls.get_drap_rtx != NULL);
6267   drap_rtx = targetm.calls.get_drap_rtx ();
6268 
6269   /* stack_realign_drap and drap_rtx must match.  */
6270   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6271 
6272   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
6273   if (drap_rtx != NULL)
6274     {
6275       crtl->args.internal_arg_pointer = drap_rtx;
6276 
6277       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6278          needed. */
6279       fixup_tail_calls ();
6280     }
6281 }
6282 
6283 
6284 static void
expand_main_function(void)6285 expand_main_function (void)
6286 {
6287 #if (defined(INVOKE__main)				\
6288      || (!defined(HAS_INIT_SECTION)			\
6289 	 && !defined(INIT_SECTION_ASM_OP)		\
6290 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6291   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6292 #endif
6293 }
6294 
6295 
6296 /* Expand code to initialize the stack_protect_guard.  This is invoked at
6297    the beginning of a function to be protected.  */
6298 
6299 static void
stack_protect_prologue(void)6300 stack_protect_prologue (void)
6301 {
6302   tree guard_decl = targetm.stack_protect_guard ();
6303   rtx x, y;
6304 
6305   crtl->stack_protect_guard_decl = guard_decl;
6306   x = expand_normal (crtl->stack_protect_guard);
6307 
6308   if (targetm.have_stack_protect_combined_set () && guard_decl)
6309     {
6310       gcc_assert (DECL_P (guard_decl));
6311       y = DECL_RTL (guard_decl);
6312 
6313       /* Allow the target to compute address of Y and copy it to X without
6314 	 leaking Y into a register.  This combined address + copy pattern
6315 	 allows the target to prevent spilling of any intermediate results by
6316 	 splitting it after register allocator.  */
6317       if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
6318 	{
6319 	  emit_insn (insn);
6320 	  return;
6321 	}
6322     }
6323 
6324   if (guard_decl)
6325     y = expand_normal (guard_decl);
6326   else
6327     y = const0_rtx;
6328 
6329   /* Allow the target to copy from Y to X without leaking Y into a
6330      register.  */
6331   if (targetm.have_stack_protect_set ())
6332     if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6333       {
6334 	emit_insn (insn);
6335 	return;
6336       }
6337 
6338   /* Otherwise do a straight move.  */
6339   emit_move_insn (x, y);
6340 }
6341 
6342 /* Translate the intermediate representation contained in the CFG
6343    from GIMPLE trees to RTL.
6344 
6345    We do conversion per basic block and preserve/update the tree CFG.
6346    This implies we have to do some magic as the CFG can simultaneously
6347    consist of basic blocks containing RTL and GIMPLE trees.  This can
6348    confuse the CFG hooks, so be careful to not manipulate CFG during
6349    the expansion.  */
6350 
6351 namespace {
6352 
6353 const pass_data pass_data_expand =
6354 {
6355   RTL_PASS, /* type */
6356   "expand", /* name */
6357   OPTGROUP_NONE, /* optinfo_flags */
6358   TV_EXPAND, /* tv_id */
6359   ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6360     | PROP_gimple_lcx
6361     | PROP_gimple_lvec
6362     | PROP_gimple_lva), /* properties_required */
6363   PROP_rtl, /* properties_provided */
6364   ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6365   0, /* todo_flags_start */
6366   0, /* todo_flags_finish */
6367 };
6368 
6369 class pass_expand : public rtl_opt_pass
6370 {
6371 public:
pass_expand(gcc::context * ctxt)6372   pass_expand (gcc::context *ctxt)
6373     : rtl_opt_pass (pass_data_expand, ctxt)
6374   {}
6375 
6376   /* opt_pass methods: */
6377   virtual unsigned int execute (function *);
6378 
6379 }; // class pass_expand
6380 
6381 unsigned int
execute(function * fun)6382 pass_expand::execute (function *fun)
6383 {
6384   basic_block bb, init_block;
6385   edge_iterator ei;
6386   edge e;
6387   rtx_insn *var_seq, *var_ret_seq;
6388   unsigned i;
6389 
6390   timevar_push (TV_OUT_OF_SSA);
6391   rewrite_out_of_ssa (&SA);
6392   timevar_pop (TV_OUT_OF_SSA);
6393   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6394 
6395   if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6396     {
6397       gimple_stmt_iterator gsi;
6398       FOR_EACH_BB_FN (bb, cfun)
6399 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6400 	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
6401 	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6402     }
6403 
6404   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
6405   discover_nonconstant_array_refs ();
6406 
6407   /* Make sure all values used by the optimization passes have sane
6408      defaults.  */
6409   reg_renumber = 0;
6410 
6411   /* Some backends want to know that we are expanding to RTL.  */
6412   currently_expanding_to_rtl = 1;
6413   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
6414   free_dominance_info (CDI_DOMINATORS);
6415 
6416   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6417 
6418   insn_locations_init ();
6419   if (!DECL_IS_BUILTIN (current_function_decl))
6420     {
6421       /* Eventually, all FEs should explicitly set function_start_locus.  */
6422       if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6423 	set_curr_insn_location
6424 	  (DECL_SOURCE_LOCATION (current_function_decl));
6425       else
6426 	set_curr_insn_location (fun->function_start_locus);
6427     }
6428   else
6429     set_curr_insn_location (UNKNOWN_LOCATION);
6430   prologue_location = curr_insn_location ();
6431 
6432 #ifdef INSN_SCHEDULING
6433   init_sched_attrs ();
6434 #endif
6435 
6436   /* Make sure first insn is a note even if we don't want linenums.
6437      This makes sure the first insn will never be deleted.
6438      Also, final expects a note to appear there.  */
6439   emit_note (NOTE_INSN_DELETED);
6440 
6441   targetm.expand_to_rtl_hook ();
6442   crtl->init_stack_alignment ();
6443   fun->cfg->max_jumptable_ents = 0;
6444 
6445   /* Resovle the function section.  Some targets, like ARM EABI rely on knowledge
6446      of the function section at exapnsion time to predict distance of calls.  */
6447   resolve_unique_section (current_function_decl, 0, flag_function_sections);
6448 
6449   /* Expand the variables recorded during gimple lowering.  */
6450   timevar_push (TV_VAR_EXPAND);
6451   start_sequence ();
6452 
6453   var_ret_seq = expand_used_vars ();
6454 
6455   var_seq = get_insns ();
6456   end_sequence ();
6457   timevar_pop (TV_VAR_EXPAND);
6458 
6459   /* Honor stack protection warnings.  */
6460   if (warn_stack_protect)
6461     {
6462       if (fun->calls_alloca)
6463 	warning (OPT_Wstack_protector,
6464 		 "stack protector not protecting local variables: "
6465 		 "variable length buffer");
6466       if (has_short_buffer && !crtl->stack_protect_guard)
6467 	warning (OPT_Wstack_protector,
6468 		 "stack protector not protecting function: "
6469 		 "all local arrays are less than %d bytes long",
6470 		 (int) param_ssp_buffer_size);
6471     }
6472 
6473   /* Set up parameters and prepare for return, for the function.  */
6474   expand_function_start (current_function_decl);
6475 
6476   /* If we emitted any instructions for setting up the variables,
6477      emit them before the FUNCTION_START note.  */
6478   if (var_seq)
6479     {
6480       emit_insn_before (var_seq, parm_birth_insn);
6481 
6482       /* In expand_function_end we'll insert the alloca save/restore
6483 	 before parm_birth_insn.  We've just insertted an alloca call.
6484 	 Adjust the pointer to match.  */
6485       parm_birth_insn = var_seq;
6486     }
6487 
6488   /* Now propagate the RTL assignment of each partition to the
6489      underlying var of each SSA_NAME.  */
6490   tree name;
6491 
6492   FOR_EACH_SSA_NAME (i, name, cfun)
6493     {
6494       /* We might have generated new SSA names in
6495 	 update_alias_info_with_stack_vars.  They will have a NULL
6496 	 defining statements, and won't be part of the partitioning,
6497 	 so ignore those.  */
6498       if (!SSA_NAME_DEF_STMT (name))
6499 	continue;
6500 
6501       adjust_one_expanded_partition_var (name);
6502     }
6503 
6504   /* Clean up RTL of variables that straddle across multiple
6505      partitions, and check that the rtl of any PARM_DECLs that are not
6506      cleaned up is that of their default defs.  */
6507   FOR_EACH_SSA_NAME (i, name, cfun)
6508     {
6509       int part;
6510 
6511       /* We might have generated new SSA names in
6512 	 update_alias_info_with_stack_vars.  They will have a NULL
6513 	 defining statements, and won't be part of the partitioning,
6514 	 so ignore those.  */
6515       if (!SSA_NAME_DEF_STMT (name))
6516 	continue;
6517       part = var_to_partition (SA.map, name);
6518       if (part == NO_PARTITION)
6519 	continue;
6520 
6521       /* If this decl was marked as living in multiple places, reset
6522 	 this now to NULL.  */
6523       tree var = SSA_NAME_VAR (name);
6524       if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6525 	SET_DECL_RTL (var, NULL);
6526       /* Check that the pseudos chosen by assign_parms are those of
6527 	 the corresponding default defs.  */
6528       else if (SSA_NAME_IS_DEFAULT_DEF (name)
6529 	       && (TREE_CODE (var) == PARM_DECL
6530 		   || TREE_CODE (var) == RESULT_DECL))
6531 	{
6532 	  rtx in = DECL_RTL_IF_SET (var);
6533 	  gcc_assert (in);
6534 	  rtx out = SA.partition_to_pseudo[part];
6535 	  gcc_assert (in == out);
6536 
6537 	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6538 	     those expected by debug backends for each parm and for
6539 	     the result.  This is particularly important for stabs,
6540 	     whose register elimination from parm's DECL_RTL may cause
6541 	     -fcompare-debug differences as SET_DECL_RTL changes reg's
6542 	     attrs.  So, make sure the RTL already has the parm as the
6543 	     EXPR, so that it won't change.  */
6544 	  SET_DECL_RTL (var, NULL_RTX);
6545 	  if (MEM_P (in))
6546 	    set_mem_attributes (in, var, true);
6547 	  SET_DECL_RTL (var, in);
6548 	}
6549     }
6550 
6551   /* If this function is `main', emit a call to `__main'
6552      to run global initializers, etc.  */
6553   if (DECL_NAME (current_function_decl)
6554       && MAIN_NAME_P (DECL_NAME (current_function_decl))
6555       && DECL_FILE_SCOPE_P (current_function_decl))
6556     expand_main_function ();
6557 
6558   /* Initialize the stack_protect_guard field.  This must happen after the
6559      call to __main (if any) so that the external decl is initialized.  */
6560   if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6561     stack_protect_prologue ();
6562 
6563   expand_phi_nodes (&SA);
6564 
6565   /* Release any stale SSA redirection data.  */
6566   redirect_edge_var_map_empty ();
6567 
6568   /* Register rtl specific functions for cfg.  */
6569   rtl_register_cfg_hooks ();
6570 
6571   init_block = construct_init_block ();
6572 
6573   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
6574      remaining edges later.  */
6575   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6576     e->flags &= ~EDGE_EXECUTABLE;
6577 
6578   /* If the function has too many markers, drop them while expanding.  */
6579   if (cfun->debug_marker_count
6580       >= param_max_debug_marker_count)
6581     cfun->debug_nonbind_markers = false;
6582 
6583   lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6584   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6585 		  next_bb)
6586     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6587 
6588   if (MAY_HAVE_DEBUG_BIND_INSNS)
6589     expand_debug_locations ();
6590 
6591   if (deep_ter_debug_map)
6592     {
6593       delete deep_ter_debug_map;
6594       deep_ter_debug_map = NULL;
6595     }
6596 
6597   /* Free stuff we no longer need after GIMPLE optimizations.  */
6598   free_dominance_info (CDI_DOMINATORS);
6599   free_dominance_info (CDI_POST_DOMINATORS);
6600   delete_tree_cfg_annotations (fun);
6601 
6602   timevar_push (TV_OUT_OF_SSA);
6603   finish_out_of_ssa (&SA);
6604   timevar_pop (TV_OUT_OF_SSA);
6605 
6606   timevar_push (TV_POST_EXPAND);
6607   /* We are no longer in SSA form.  */
6608   fun->gimple_df->in_ssa_p = false;
6609   loops_state_clear (LOOP_CLOSED_SSA);
6610 
6611   /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6612      conservatively to true until they are all profile aware.  */
6613   delete lab_rtx_for_bb;
6614   free_histograms (fun);
6615 
6616   construct_exit_block ();
6617   insn_locations_finalize ();
6618 
6619   if (var_ret_seq)
6620     {
6621       rtx_insn *after = return_label;
6622       rtx_insn *next = NEXT_INSN (after);
6623       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6624 	after = next;
6625       emit_insn_after (var_ret_seq, after);
6626     }
6627 
6628   /* Zap the tree EH table.  */
6629   set_eh_throw_stmt_table (fun, NULL);
6630 
6631   /* We need JUMP_LABEL be set in order to redirect jumps, and hence
6632      split edges which edge insertions might do.  */
6633   rebuild_jump_labels (get_insns ());
6634 
6635   /* If we have a single successor to the entry block, put the pending insns
6636      after parm birth, but before NOTE_INSNS_FUNCTION_BEG.  */
6637   if (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6638     {
6639       edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
6640       if (e->insns.r)
6641 	{
6642 	  rtx_insn *insns = e->insns.r;
6643 	  e->insns.r = NULL;
6644 	  rebuild_jump_labels_chain (insns);
6645 	  if (NOTE_P (parm_birth_insn)
6646 	      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6647 	    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6648 	  else
6649 	    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6650 	}
6651     }
6652 
6653   /* Otherwise, as well as for other edges, take the usual way.  */
6654   commit_edge_insertions ();
6655 
6656   /* We're done expanding trees to RTL.  */
6657   currently_expanding_to_rtl = 0;
6658 
6659   flush_mark_addressable_queue ();
6660 
6661   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6662 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6663     {
6664       edge e;
6665       edge_iterator ei;
6666       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6667 	{
6668 	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
6669 	  e->flags &= ~EDGE_EXECUTABLE;
6670 
6671 	  /* At the moment not all abnormal edges match the RTL
6672 	     representation.  It is safe to remove them here as
6673 	     find_many_sub_basic_blocks will rediscover them.
6674 	     In the future we should get this fixed properly.  */
6675 	  if ((e->flags & EDGE_ABNORMAL)
6676 	      && !(e->flags & EDGE_SIBCALL))
6677 	    remove_edge (e);
6678 	  else
6679 	    ei_next (&ei);
6680 	}
6681     }
6682 
6683   auto_sbitmap blocks (last_basic_block_for_fn (fun));
6684   bitmap_ones (blocks);
6685   find_many_sub_basic_blocks (blocks);
6686   purge_all_dead_edges ();
6687 
6688   /* After initial rtl generation, call back to finish generating
6689      exception support code.  We need to do this before cleaning up
6690      the CFG as the code does not expect dead landing pads.  */
6691   if (fun->eh->region_tree != NULL)
6692     finish_eh_generation ();
6693 
6694   /* Call expand_stack_alignment after finishing all
6695      updates to crtl->preferred_stack_boundary.  */
6696   expand_stack_alignment ();
6697 
6698   /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6699      function.  */
6700   if (crtl->tail_call_emit)
6701     fixup_tail_calls ();
6702 
6703   /* BB subdivision may have created basic blocks that are only reachable
6704      from unlikely bbs but not marked as such in the profile.  */
6705   if (optimize)
6706     propagate_unlikely_bbs_forward ();
6707 
6708   /* Remove unreachable blocks, otherwise we cannot compute dominators
6709      which are needed for loop state verification.  As a side-effect
6710      this also compacts blocks.
6711      ???  We cannot remove trivially dead insns here as for example
6712      the DRAP reg on i?86 is not magically live at this point.
6713      gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
6714   cleanup_cfg (CLEANUP_NO_INSN_DEL);
6715 
6716   checking_verify_flow_info ();
6717 
6718   /* Initialize pseudos allocated for hard registers.  */
6719   emit_initial_value_sets ();
6720 
6721   /* And finally unshare all RTL.  */
6722   unshare_all_rtl ();
6723 
6724   /* There's no need to defer outputting this function any more; we
6725      know we want to output it.  */
6726   DECL_DEFER_OUTPUT (current_function_decl) = 0;
6727 
6728   /* Now that we're done expanding trees to RTL, we shouldn't have any
6729      more CONCATs anywhere.  */
6730   generating_concat_p = 0;
6731 
6732   if (dump_file)
6733     {
6734       fprintf (dump_file,
6735 	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6736       /* And the pass manager will dump RTL for us.  */
6737     }
6738 
6739   /* If we're emitting a nested function, make sure its parent gets
6740      emitted as well.  Doing otherwise confuses debug info.  */
6741     {
6742       tree parent;
6743       for (parent = DECL_CONTEXT (current_function_decl);
6744 	   parent != NULL_TREE;
6745 	   parent = get_containing_scope (parent))
6746 	if (TREE_CODE (parent) == FUNCTION_DECL)
6747 	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6748     }
6749 
6750   TREE_ASM_WRITTEN (current_function_decl) = 1;
6751 
6752   /* After expanding, the return labels are no longer needed. */
6753   return_label = NULL;
6754   naked_return_label = NULL;
6755 
6756   /* After expanding, the tm_restart map is no longer needed.  */
6757   if (fun->gimple_df->tm_restart)
6758     fun->gimple_df->tm_restart = NULL;
6759 
6760   /* Tag the blocks with a depth number so that change_scope can find
6761      the common parent easily.  */
6762   set_block_levels (DECL_INITIAL (fun->decl), 0);
6763   default_rtl_profile ();
6764 
6765   /* For -dx discard loops now, otherwise IL verify in clean_state will
6766      ICE.  */
6767   if (rtl_dump_and_exit)
6768     {
6769       cfun->curr_properties &= ~PROP_loops;
6770       loop_optimizer_finalize ();
6771     }
6772 
6773   timevar_pop (TV_POST_EXPAND);
6774 
6775   return 0;
6776 }
6777 
6778 } // anon namespace
6779 
6780 rtl_opt_pass *
make_pass_expand(gcc::context * ctxt)6781 make_pass_expand (gcc::context *ctxt)
6782 {
6783   return new pass_expand (ctxt);
6784 }
6785