1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "function.h"
29 #include "expr.h"
30 #include "langhooks.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "except.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "toplev.h"
38 #include "debug.h"
39 #include "params.h"
40 #include "tree-inline.h"
41 #include "value-prof.h"
42 #include "target.h"
43 #include "ssaexpand.h"
44 #include "bitmap.h"
45 #include "sbitmap.h"
46 #include "cfgloop.h"
47 #include "regs.h" /* For reg_renumber.  */
48 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
49 #include "asan.h"
50 
51 /* This variable holds information helping the rewriting of SSA trees
52    into RTL.  */
53 struct ssaexpand SA;
54 
55 /* This variable holds the currently expanded gimple statement for purposes
56    of communicating the profile info to the builtin expanders.  */
57 gimple currently_expanding_gimple_stmt;
58 
59 static rtx expand_debug_expr (tree);
60 
61 /* Return an expression tree corresponding to the RHS of GIMPLE
62    statement STMT.  */
63 
64 tree
65 gimple_assign_rhs_to_tree (gimple stmt)
66 {
67   tree t;
68   enum gimple_rhs_class grhs_class;
69 
70   grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
71 
72   if (grhs_class == GIMPLE_TERNARY_RHS)
73     t = build3 (gimple_assign_rhs_code (stmt),
74 		TREE_TYPE (gimple_assign_lhs (stmt)),
75 		gimple_assign_rhs1 (stmt),
76 		gimple_assign_rhs2 (stmt),
77 		gimple_assign_rhs3 (stmt));
78   else if (grhs_class == GIMPLE_BINARY_RHS)
79     t = build2 (gimple_assign_rhs_code (stmt),
80 		TREE_TYPE (gimple_assign_lhs (stmt)),
81 		gimple_assign_rhs1 (stmt),
82 		gimple_assign_rhs2 (stmt));
83   else if (grhs_class == GIMPLE_UNARY_RHS)
84     t = build1 (gimple_assign_rhs_code (stmt),
85 		TREE_TYPE (gimple_assign_lhs (stmt)),
86 		gimple_assign_rhs1 (stmt));
87   else if (grhs_class == GIMPLE_SINGLE_RHS)
88     {
89       t = gimple_assign_rhs1 (stmt);
90       /* Avoid modifying this tree in place below.  */
91       if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
92 	   && gimple_location (stmt) != EXPR_LOCATION (t))
93 	  || (gimple_block (stmt)
94 	      && currently_expanding_to_rtl
95 	      && EXPR_P (t)))
96 	t = copy_node (t);
97     }
98   else
99     gcc_unreachable ();
100 
101   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
102     SET_EXPR_LOCATION (t, gimple_location (stmt));
103 
104   return t;
105 }
106 
107 
108 #ifndef STACK_ALIGNMENT_NEEDED
109 #define STACK_ALIGNMENT_NEEDED 1
110 #endif
111 
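/* The underlying variable of X: for an SSA_NAME its SSA_NAME_VAR,
   otherwise X itself.  */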
112 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
113 
114 /* Associate declaration T with storage space X.  If T is not an
115    SSA name, this is exactly SET_DECL_RTL; otherwise associate the
116    partition of T with X.  */
117 static inline void
118 set_rtl (tree t, rtx x)
119 {
120   if (TREE_CODE (t) == SSA_NAME)
121     {
122       SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
123       if (x && !MEM_P (x))
124 	set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
125       /* For the benefit of debug information at -O0 (where vartracking
126          doesn't run) record the place also in the base DECL if it's
127 	 a normal variable (not a parameter).  */
128       if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
129 	{
130 	  tree var = SSA_NAME_VAR (t);
131 	  /* If we don't yet have something recorded, just record it now.  */
132 	  if (!DECL_RTL_SET_P (var))
133 	    SET_DECL_RTL (var, x);
134 	  /* If we have it set already to "multiple places" don't
135 	     change this.  */
136 	  else if (DECL_RTL (var) == pc_rtx)
137 	    ;
138 	  /* If we have something recorded and it's not the same place
139 	     as we want to record now, we have multiple partitions for the
140 	     same base variable, with different places.  We can't just
141 	     randomly choose one, hence we have to say that we don't know.
142 	     This only happens with optimization, and there var-tracking
143 	     will figure out the right thing.  */
144 	  else if (DECL_RTL (var) != x)
145 	    SET_DECL_RTL (var, pc_rtx);
146 	}
147     }
148   else
149     SET_DECL_RTL (t, x);
150 }
151 
152 /* This structure holds data relevant to one variable that will be
153    placed in a stack slot.  */
154 struct stack_var
155 {
156   /* The Variable.  */
157   tree decl;
158 
159   /* Initially, the size of the variable.  Later, the size of the partition,
160      if this variable becomes its partition's representative.  */
161   HOST_WIDE_INT size;
162 
163   /* The *byte* alignment required for this variable.  Or, as with
164      the size, the alignment for this partition.  */
165   unsigned int alignb;
166 
167   /* The partition representative.  */
168   size_t representative;
169 
170   /* The next stack variable in the partition, or EOC.  */
171   size_t next;
172 
173   /* The indices of conflicting stack variables.  */
174   bitmap conflicts;
175 };
176 
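/* Marker for the end of a partition's NEXT chain.  */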
177 #define EOC  ((size_t)-1)
178 
179 /* We have an array of such objects while deciding allocation.  */
180 static struct stack_var *stack_vars;
181 static size_t stack_vars_alloc;
182 static size_t stack_vars_num;
183 static struct pointer_map_t *decl_to_stack_part;
184 
185 /* Conflict bitmaps go on this obstack.  This allows us to destroy
186    all of them in one big sweep.  */
187 static bitmap_obstack stack_var_bitmap_obstack;
188 
189 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
190    is non-increasing within each alignment class.  */
191 static size_t *stack_vars_sorted;
192 
193 /* The phase of the stack frame.  This is the known misalignment of
194    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
195    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
196 static int frame_phase;
197 
198 /* Used during expand_used_vars to remember if we saw any decls for
199    which we'd like to enable stack smashing protection.  */
200 static bool has_protected_decls;
201 
202 /* Used during expand_used_vars.  Remember if we saw a character buffer
203    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
204 static bool has_short_buffer;
205 
206 /* Compute the byte alignment to use for DECL.  Ignore alignment
207    we can't satisfy given the expected alignment of the stack boundary.  */
208 
209 static unsigned int
210 align_local_variable (tree decl)
211 {
212   unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
213   DECL_ALIGN (decl) = align;
214   return align / BITS_PER_UNIT;
215 }
216 
217 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
218    Return the frame offset.  */
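/* Illustrative only: assuming FRAME_GROWS_DOWNWARD, frame_phase == 0,
   frame_offset == -16, SIZE == 12 and ALIGN == 8, the code below computes
   new_frame_offset = -16 - 12 = -28, rounds it down to -32 with the mask,
   and returns -32 as the new slot's offset.  */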
219 
220 static HOST_WIDE_INT
221 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
222 {
223   HOST_WIDE_INT offset, new_frame_offset;
224 
225   new_frame_offset = frame_offset;
226   if (FRAME_GROWS_DOWNWARD)
227     {
228       new_frame_offset -= size + frame_phase;
229       new_frame_offset &= -align;
230       new_frame_offset += frame_phase;
231       offset = new_frame_offset;
232     }
233   else
234     {
235       new_frame_offset -= frame_phase;
236       new_frame_offset += align - 1;
237       new_frame_offset &= -align;
238       new_frame_offset += frame_phase;
239       offset = new_frame_offset;
240       new_frame_offset += size;
241     }
242   frame_offset = new_frame_offset;
243 
244   if (frame_offset_overflow (frame_offset, cfun->decl))
245     frame_offset = offset = 0;
246 
247   return offset;
248 }
249 
250 /* Accumulate DECL into STACK_VARS.  */
251 
252 static void
253 add_stack_var (tree decl)
254 {
255   struct stack_var *v;
256 
257   if (stack_vars_num >= stack_vars_alloc)
258     {
259       if (stack_vars_alloc)
260 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
261       else
262 	stack_vars_alloc = 32;
263       stack_vars
264 	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
265     }
266   if (!decl_to_stack_part)
267     decl_to_stack_part = pointer_map_create ();
268 
269   v = &stack_vars[stack_vars_num];
270   * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
271 
272   v->decl = decl;
273   v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
274   /* Ensure that all variables have size, so that &a != &b for any two
275      variables that are simultaneously live.  */
276   if (v->size == 0)
277     v->size = 1;
278   v->alignb = align_local_variable (SSAVAR (decl));
279   /* An alignment of zero can mightily confuse us later.  */
280   gcc_assert (v->alignb != 0);
281 
282   /* All variables are initially in their own partition.  */
283   v->representative = stack_vars_num;
284   v->next = EOC;
285 
286   /* All variables initially conflict with no other.  */
287   v->conflicts = NULL;
288 
289   /* Ensure that this decl doesn't get put onto the list twice.  */
290   set_rtl (decl, pc_rtx);
291 
292   stack_vars_num++;
293 }
294 
295 /* Make the decls associated with stack-var indices X and Y conflict.  */
296 
297 static void
298 add_stack_var_conflict (size_t x, size_t y)
299 {
300   struct stack_var *a = &stack_vars[x];
301   struct stack_var *b = &stack_vars[y];
302   if (!a->conflicts)
303     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
304   if (!b->conflicts)
305     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
306   bitmap_set_bit (a->conflicts, y);
307   bitmap_set_bit (b->conflicts, x);
308 }
309 
310 /* Check whether the decls associated with stack-var indices X and Y conflict.  */
311 
312 static bool
313 stack_var_conflict_p (size_t x, size_t y)
314 {
315   struct stack_var *a = &stack_vars[x];
316   struct stack_var *b = &stack_vars[y];
317   if (x == y)
318     return false;
319   /* Partitions containing an SSA name result from gimple registers
320      with things like unsupported modes.  They are top-level and
321      hence conflict with everything else.  */
322   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
323     return true;
324 
325   if (!a->conflicts || !b->conflicts)
326     return false;
327   return bitmap_bit_p (a->conflicts, y);
328 }
329 
330 /* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched
331    by add_stack_var, enter its partition number into bitmap DATA.  */
332 
333 static bool
334 visit_op (gimple, tree op, tree, void *data)
335 {
336   bitmap active = (bitmap)data;
337   op = get_base_address (op);
338   if (op
339       && DECL_P (op)
340       && DECL_RTL_IF_SET (op) == pc_rtx)
341     {
342       size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
343       if (v)
344 	bitmap_set_bit (active, *v);
345     }
346   return false;
347 }
348 
349 /* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched
350    by add_stack_var, record conflicts between it and all currently active
351    other partitions from bitmap DATA.  */
352 
353 static bool
354 visit_conflict (gimple, tree op, tree, void *data)
355 {
356   bitmap active = (bitmap)data;
357   op = get_base_address (op);
358   if (op
359       && DECL_P (op)
360       && DECL_RTL_IF_SET (op) == pc_rtx)
361     {
362       size_t *v =
363 	(size_t *) pointer_map_contains (decl_to_stack_part, op);
364       if (v && bitmap_set_bit (active, *v))
365 	{
366 	  size_t num = *v;
367 	  bitmap_iterator bi;
368 	  unsigned i;
369 	  gcc_assert (num < stack_vars_num);
370 	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
371 	    add_stack_var_conflict (num, i);
372 	}
373     }
374   return false;
375 }
376 
377 /* Helper routine for add_scope_conflicts, calculating the active partitions
378    at the end of BB, leaving the result in WORK.  We're called to generate
379    conflicts when FOR_CONFLICT is true, otherwise we're just tracking
380    liveness.  */
381 
382 static void
383 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
384 {
385   edge e;
386   edge_iterator ei;
387   gimple_stmt_iterator gsi;
388   walk_stmt_load_store_addr_fn visit;
389 
390   bitmap_clear (work);
391   FOR_EACH_EDGE (e, ei, bb->preds)
392     bitmap_ior_into (work, (bitmap)e->src->aux);
393 
394   visit = visit_op;
395 
396   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
397     {
398       gimple stmt = gsi_stmt (gsi);
399       walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
400     }
401   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
402     {
403       gimple stmt = gsi_stmt (gsi);
404 
405       if (gimple_clobber_p (stmt))
406 	{
407 	  tree lhs = gimple_assign_lhs (stmt);
408 	  size_t *v;
409 	  /* Nested function lowering might introduce LHSs
410 	     that are COMPONENT_REFs.  */
411 	  if (TREE_CODE (lhs) != VAR_DECL)
412 	    continue;
413 	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
414 	      && (v = (size_t *)
415 		  pointer_map_contains (decl_to_stack_part, lhs)))
416 	    bitmap_clear_bit (work, *v);
417 	}
418       else if (!is_gimple_debug (stmt))
419 	{
420 	  if (for_conflict
421 	      && visit == visit_op)
422 	    {
423 	      /* If this is the first real instruction in this BB we need
424 	         to add conflicts for everything live at this point now.
425 		 Unlike classical liveness for named objects we can't
426 		 rely on seeing a def/use of the names we're interested in.
427 		 There might merely be indirect loads/stores.  We'd not add any
428 		 conflicts for such partitions.  */
429 	      bitmap_iterator bi;
430 	      unsigned i;
431 	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
432 		{
433 		  struct stack_var *a = &stack_vars[i];
434 		  if (!a->conflicts)
435 		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
436 		  bitmap_ior_into (a->conflicts, work);
437 		}
438 	      visit = visit_conflict;
439 	    }
440 	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
441 	}
442     }
443 }
444 
445 /* Generate stack partition conflicts between all partitions that are
446    simultaneously live.  */
447 
448 static void
449 add_scope_conflicts (void)
450 {
451   basic_block bb;
452   bool changed;
453   bitmap work = BITMAP_ALLOC (NULL);
454   int *rpo;
455   int n_bbs;
456 
457   /* We approximate the live range of a stack variable by taking the first
458      mention of its name as starting point(s), and by the end-of-scope
459      death clobber added by gimplify as ending point(s) of the range.
460      This overapproximates when we have, for instance, moved an address-taken
461      operation upward without also moving a dereference of it upward.
462      But it is conservatively correct, as a variable can never hold values
463      before its name is mentioned at least once.
464 
465      We then do a mostly classical bitmap liveness algorithm.  */
466 
467   FOR_ALL_BB (bb)
468     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
469 
470   rpo = XNEWVEC (int, last_basic_block);
471   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
472 
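  /* Iterate the liveness computation over the blocks in reverse post-order
     until no block's active set changes any more.  */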
473   changed = true;
474   while (changed)
475     {
476       int i;
477       changed = false;
478       for (i = 0; i < n_bbs; i++)
479 	{
480 	  bitmap active;
481 	  bb = BASIC_BLOCK (rpo[i]);
482 	  active = (bitmap)bb->aux;
483 	  add_scope_conflicts_1 (bb, work, false);
484 	  if (bitmap_ior_into (active, work))
485 	    changed = true;
486 	}
487     }
488 
489   FOR_EACH_BB (bb)
490     add_scope_conflicts_1 (bb, work, true);
491 
492   free (rpo);
493   BITMAP_FREE (work);
494   FOR_ALL_BB (bb)
495     BITMAP_FREE (bb->aux);
496 }
497 
498 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
499    sorting an array of indices by the properties of the object.  */
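/* The resulting order puts "large"-aligned objects first and otherwise
   sorts by decreasing size; the remaining keys only break ties
   deterministically.  */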
500 
501 static int
502 stack_var_cmp (const void *a, const void *b)
503 {
504   size_t ia = *(const size_t *)a;
505   size_t ib = *(const size_t *)b;
506   unsigned int aligna = stack_vars[ia].alignb;
507   unsigned int alignb = stack_vars[ib].alignb;
508   HOST_WIDE_INT sizea = stack_vars[ia].size;
509   HOST_WIDE_INT sizeb = stack_vars[ib].size;
510   tree decla = stack_vars[ia].decl;
511   tree declb = stack_vars[ib].decl;
512   bool largea, largeb;
513   unsigned int uida, uidb;
514 
515   /* Primary compare on "large" alignment.  Large comes first.  */
516   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
517   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
518   if (largea != largeb)
519     return (int)largeb - (int)largea;
520 
521   /* Secondary compare on size, decreasing.  */
522   if (sizea > sizeb)
523     return -1;
524   if (sizea < sizeb)
525     return 1;
526 
527   /* Tertiary compare on true alignment, decreasing.  */
528   if (aligna < alignb)
529     return -1;
530   if (aligna > alignb)
531     return 1;
532 
533   /* Final compare on ID for sort stability, increasing.
534      Two SSA names are compared by their version, SSA names come before
535      non-SSA names, and two normal decls are compared by their DECL_UID.  */
536   if (TREE_CODE (decla) == SSA_NAME)
537     {
538       if (TREE_CODE (declb) == SSA_NAME)
539 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
540       else
541 	return -1;
542     }
543   else if (TREE_CODE (declb) == SSA_NAME)
544     return 1;
545   else
546     uida = DECL_UID (decla), uidb = DECL_UID (declb);
547   if (uida < uidb)
548     return 1;
549   if (uida > uidb)
550     return -1;
551   return 0;
552 }
553 
554 
555 /* If the points-to solution *PT points to variables that are in a partition
556    together with other variables add all partition members to the pointed-to
557    variables bitmap.  */
558 
559 static void
560 add_partitioned_vars_to_ptset (struct pt_solution *pt,
561 			       struct pointer_map_t *decls_to_partitions,
562 			       struct pointer_set_t *visited, bitmap temp)
563 {
564   bitmap_iterator bi;
565   unsigned i;
566   bitmap *part;
567 
568   if (pt->anything
569       || pt->vars == NULL
570       /* The pointed-to vars bitmap is shared, it is enough to
571 	 visit it once.  */
572 	  || pointer_set_insert (visited, pt->vars))
573     return;
574 
575   bitmap_clear (temp);
576 
577   /* By using a temporary bitmap to store all members of the partitions
578      we have to add we make sure to visit each of the partitions only
579      once.  */
580   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
581     if ((!temp
582 	 || !bitmap_bit_p (temp, i))
583 	&& (part = (bitmap *) pointer_map_contains (decls_to_partitions,
584 						    (void *)(size_t) i)))
585       bitmap_ior_into (temp, *part);
586   if (!bitmap_empty_p (temp))
587     bitmap_ior_into (pt->vars, temp);
588 }
589 
590 /* Update points-to sets based on partition info, so we can use them on RTL.
591    The bitmaps representing stack partitions will be saved until expand,
592    where partitioned decls used as bases in memory expressions will be
593    rewritten.  */
594 
595 static void
596 update_alias_info_with_stack_vars (void)
597 {
598   struct pointer_map_t *decls_to_partitions = NULL;
599   size_t i, j;
600   tree var = NULL_TREE;
601 
602   for (i = 0; i < stack_vars_num; i++)
603     {
604       bitmap part = NULL;
605       tree name;
606       struct ptr_info_def *pi;
607 
608       /* Not interested in partitions with a single variable.  */
609       if (stack_vars[i].representative != i
610           || stack_vars[i].next == EOC)
611         continue;
612 
613       if (!decls_to_partitions)
614 	{
615 	  decls_to_partitions = pointer_map_create ();
616 	  cfun->gimple_df->decls_to_pointers = pointer_map_create ();
617 	}
618 
619       /* Create an SSA_NAME that points to the partition for use
620          as base during alias-oracle queries on RTL for bases that
621 	 have been partitioned.  */
622       if (var == NULL_TREE)
623 	var = create_tmp_var (ptr_type_node, NULL);
624       name = make_ssa_name (var, NULL);
625 
626       /* Create bitmaps representing partitions.  They will be used for
627          points-to sets later, so use GGC alloc.  */
628       part = BITMAP_GGC_ALLOC ();
629       for (j = i; j != EOC; j = stack_vars[j].next)
630 	{
631 	  tree decl = stack_vars[j].decl;
632 	  unsigned int uid = DECL_PT_UID (decl);
633 	  bitmap_set_bit (part, uid);
634 	  *((bitmap *) pointer_map_insert (decls_to_partitions,
635 					   (void *)(size_t) uid)) = part;
636 	  *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
637 					 decl)) = name;
638 	  if (TREE_ADDRESSABLE (decl))
639 	    TREE_ADDRESSABLE (name) = 1;
640 	}
641 
642       /* Make the SSA name point to all partition members.  */
643       pi = get_ptr_info (name);
644       pt_solution_set (&pi->pt, part, false);
645     }
646 
647   /* Make all points-to sets that contain one member of a partition
648      contain all members of the partition.  */
649   if (decls_to_partitions)
650     {
651       unsigned i;
652       struct pointer_set_t *visited = pointer_set_create ();
653       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
654 
655       for (i = 1; i < num_ssa_names; i++)
656 	{
657 	  tree name = ssa_name (i);
658 	  struct ptr_info_def *pi;
659 
660 	  if (name
661 	      && POINTER_TYPE_P (TREE_TYPE (name))
662 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
663 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
664 					   visited, temp);
665 	}
666 
667       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
668 				     decls_to_partitions, visited, temp);
669 
670       pointer_set_destroy (visited);
671       pointer_map_destroy (decls_to_partitions);
672       BITMAP_FREE (temp);
673     }
674 }
675 
676 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
677    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
678    Merge them into a single partition A.  */
679 
680 static void
681 union_stack_vars (size_t a, size_t b)
682 {
683   struct stack_var *vb = &stack_vars[b];
684   bitmap_iterator bi;
685   unsigned u;
686 
687   gcc_assert (stack_vars[b].next == EOC);
688   /* Add B to A's partition.  */
689   stack_vars[b].next = stack_vars[a].next;
690   stack_vars[b].representative = a;
691   stack_vars[a].next = b;
692 
693   /* Update the required alignment of partition A to account for B.  */
694   if (stack_vars[a].alignb < stack_vars[b].alignb)
695     stack_vars[a].alignb = stack_vars[b].alignb;
696 
697   /* Update the interference graph and merge the conflicts.  */
698   if (vb->conflicts)
699     {
700       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
701 	add_stack_var_conflict (a, stack_vars[u].representative);
702       BITMAP_FREE (vb->conflicts);
703     }
704 }
705 
706 /* A subroutine of expand_used_vars.  Binpack the variables into
707    partitions constrained by the interference graph.  The overall
708    algorithm used is as follows:
709 
710 	Sort the objects by size in descending order.
711 	For each object A {
712 	  S = size(A)
713 	  O = 0
714 	  loop {
715 	    Look for the largest non-conflicting object B with size <= S.
716 	    UNION (A, B)
717 	  }
718 	}
719 */
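/* In the implementation below, the size-sorted order means the inner scan
   simply walks the remaining (no larger) objects and merges every
   compatible, non-conflicting one into A's partition.  */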
720 
721 static void
722 partition_stack_vars (void)
723 {
724   size_t si, sj, n = stack_vars_num;
725 
726   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
727   for (si = 0; si < n; ++si)
728     stack_vars_sorted[si] = si;
729 
730   if (n == 1)
731     return;
732 
733   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
734 
735   for (si = 0; si < n; ++si)
736     {
737       size_t i = stack_vars_sorted[si];
738       unsigned int ialign = stack_vars[i].alignb;
739       HOST_WIDE_INT isize = stack_vars[i].size;
740 
741       /* Ignore objects that aren't partition representatives. If we
742          see a var that is not a partition representative, it must
743          have been merged earlier.  */
744       if (stack_vars[i].representative != i)
745         continue;
746 
747       for (sj = si + 1; sj < n; ++sj)
748 	{
749 	  size_t j = stack_vars_sorted[sj];
750 	  unsigned int jalign = stack_vars[j].alignb;
751 	  HOST_WIDE_INT jsize = stack_vars[j].size;
752 
753 	  /* Ignore objects that aren't partition representatives.  */
754 	  if (stack_vars[j].representative != j)
755 	    continue;
756 
757 	  /* Do not mix objects of "small" (supported) alignment
758 	     and "large" (unsupported) alignment.  */
759 	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
760 	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
761 	    break;
762 
763 	  /* For Address Sanitizer do not mix objects with different
764 	     sizes, as the shorter vars wouldn't be adequately protected.
765 	     Don't do that for "large" (unsupported) alignment objects,
766 	     those aren't protected anyway.  */
767 	  if (flag_asan && isize != jsize
768 	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
769 	    break;
770 
771 	  /* Ignore conflicting objects.  */
772 	  if (stack_var_conflict_p (i, j))
773 	    continue;
774 
775 	  /* UNION the objects; J joins I's partition.  */
776 	  union_stack_vars (i, j);
777 	}
778     }
779 
780   update_alias_info_with_stack_vars ();
781 }
782 
783 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
784 
785 static void
786 dump_stack_var_partition (void)
787 {
788   size_t si, i, j, n = stack_vars_num;
789 
790   for (si = 0; si < n; ++si)
791     {
792       i = stack_vars_sorted[si];
793 
794       /* Skip variables that aren't partition representatives, for now.  */
795       if (stack_vars[i].representative != i)
796 	continue;
797 
798       fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
799 	       " align %u\n", (unsigned long) i, stack_vars[i].size,
800 	       stack_vars[i].alignb);
801 
802       for (j = i; j != EOC; j = stack_vars[j].next)
803 	{
804 	  fputc ('\t', dump_file);
805 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
806 	}
807       fputc ('\n', dump_file);
808     }
809 }
810 
811 /* Assign rtl to DECL at BASE + OFFSET.  */
812 
813 static void
814 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
815 			 HOST_WIDE_INT offset)
816 {
817   unsigned align;
818   rtx x;
819 
820   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
821   gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
822 
823   x = plus_constant (Pmode, base, offset);
824   x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
825 
826   if (TREE_CODE (decl) != SSA_NAME)
827     {
828       /* Set the alignment we actually gave this decl if it isn't an SSA name.
829          If it is, we generate stack slots only accidentally, so it isn't as
830 	 important; we'll simply use the alignment that is already set.  */
831       if (base == virtual_stack_vars_rtx)
832 	offset -= frame_phase;
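      /* The lowest set bit of the offset is the largest power-of-two
	 alignment the slot is known to have relative to the frame base.  */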
833       align = offset & -offset;
834       align *= BITS_PER_UNIT;
835       if (align == 0 || align > base_align)
836 	align = base_align;
837 
838       /* One would think that we could assert that we're not decreasing
839 	 alignment here, but (at least) the i386 port does exactly this
840 	 via the MINIMUM_ALIGNMENT hook.  */
841 
842       DECL_ALIGN (decl) = align;
843       DECL_USER_ALIGN (decl) = 0;
844     }
845 
846   set_mem_attributes (x, SSAVAR (decl), true);
847   set_rtl (decl, x);
848 }
849 
850 struct stack_vars_data
851 {
852   /* Vector of offset pairs: always the end of some padding followed
853      by the start of the padding that needs Address Sanitizer protection.
854      The vector is in reverse order; the highest offset pairs come first.  */
855   vec<HOST_WIDE_INT> asan_vec;
856 
857   /* Vector of partition representative decls in between the paddings.  */
858   vec<tree> asan_decl_vec;
859 };
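/* The two vectors above are filled by expand_stack_vars and handed to
   asan_emit_stack_protection at the end of expand_used_vars.  */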
860 
861 /* A subroutine of expand_used_vars.  Give each partition representative
862    a unique location within the stack frame.  Update each partition member
863    with that location.  */
864 
865 static void
866 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
867 {
868   size_t si, i, j, n = stack_vars_num;
869   HOST_WIDE_INT large_size = 0, large_alloc = 0;
870   rtx large_base = NULL;
871   unsigned large_align = 0;
872   tree decl;
873 
874   /* Determine if there are any variables requiring "large" alignment.
875      Since these are dynamically allocated, we only process them if
876      no predicate is involved.  */
877   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
878   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
879     {
880       /* Find the total size of these variables.  */
881       for (si = 0; si < n; ++si)
882 	{
883 	  unsigned alignb;
884 
885 	  i = stack_vars_sorted[si];
886 	  alignb = stack_vars[i].alignb;
887 
888 	  /* Stop when we get to the first decl with "small" alignment.  */
889 	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
890 	    break;
891 
892 	  /* Skip variables that aren't partition representatives.  */
893 	  if (stack_vars[i].representative != i)
894 	    continue;
895 
896 	  /* Skip variables that have already had rtl assigned.  See also
897 	     add_stack_var where we perpetrate this pc_rtx hack.  */
898 	  decl = stack_vars[i].decl;
899 	  if ((TREE_CODE (decl) == SSA_NAME
900 	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
901 	      : DECL_RTL (decl)) != pc_rtx)
902 	    continue;
903 
904 	  large_size += alignb - 1;
905 	  large_size &= -(HOST_WIDE_INT)alignb;
906 	  large_size += stack_vars[i].size;
907 	}
908 
909       /* If there were any, allocate space.  */
910       if (large_size > 0)
911 	large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
912 						   large_align, true);
913     }
914 
915   for (si = 0; si < n; ++si)
916     {
917       rtx base;
918       unsigned base_align, alignb;
919       HOST_WIDE_INT offset;
920 
921       i = stack_vars_sorted[si];
922 
923       /* Skip variables that aren't partition representatives, for now.  */
924       if (stack_vars[i].representative != i)
925 	continue;
926 
927       /* Skip variables that have already had rtl assigned.  See also
928 	 add_stack_var where we perpetrate this pc_rtx hack.  */
929       decl = stack_vars[i].decl;
930       if ((TREE_CODE (decl) == SSA_NAME
931 	   ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
932 	   : DECL_RTL (decl)) != pc_rtx)
933 	continue;
934 
935       /* Check the predicate to see whether this variable should be
936 	 allocated in this pass.  */
937       if (pred && !pred (i))
938 	continue;
939 
940       alignb = stack_vars[i].alignb;
941       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
942 	{
943 	  if (flag_asan && pred)
944 	    {
945 	      HOST_WIDE_INT prev_offset = frame_offset;
946 	      tree repr_decl = NULL_TREE;
947 
948 	      offset
949 		= alloc_stack_frame_space (stack_vars[i].size
950 					   + ASAN_RED_ZONE_SIZE,
951 					   MAX (alignb, ASAN_RED_ZONE_SIZE));
952 	      data->asan_vec.safe_push (prev_offset);
953 	      data->asan_vec.safe_push (offset + stack_vars[i].size);
954 	      /* Find the best representative of the partition.
955 		 Prefer decls with DECL_NAME, better still those also
956 		 satisfying the asan_protect_stack_decl predicate.  */
957 	      for (j = i; j != EOC; j = stack_vars[j].next)
958 		if (asan_protect_stack_decl (stack_vars[j].decl)
959 		    && DECL_NAME (stack_vars[j].decl))
960 		  {
961 		    repr_decl = stack_vars[j].decl;
962 		    break;
963 		  }
964 		else if (repr_decl == NULL_TREE
965 			 && DECL_P (stack_vars[j].decl)
966 			 && DECL_NAME (stack_vars[j].decl))
967 		  repr_decl = stack_vars[j].decl;
968 	      if (repr_decl == NULL_TREE)
969 		repr_decl = stack_vars[i].decl;
970 	      data->asan_decl_vec.safe_push (repr_decl);
971 	    }
972 	  else
973 	    offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
974 	  base = virtual_stack_vars_rtx;
975 	  base_align = crtl->max_used_stack_slot_alignment;
976 	}
977       else
978 	{
979 	  /* Large alignment is only processed in the last pass.  */
980 	  if (pred)
981 	    continue;
982 	  gcc_assert (large_base != NULL);
983 
984 	  large_alloc += alignb - 1;
985 	  large_alloc &= -(HOST_WIDE_INT)alignb;
986 	  offset = large_alloc;
987 	  large_alloc += stack_vars[i].size;
988 
989 	  base = large_base;
990 	  base_align = large_align;
991 	}
992 
993       /* Create rtl for each variable based on its location within the
994 	 partition.  */
995       for (j = i; j != EOC; j = stack_vars[j].next)
996 	{
997 	  expand_one_stack_var_at (stack_vars[j].decl,
998 				   base, base_align,
999 				   offset);
1000 	}
1001     }
1002 
1003   gcc_assert (large_alloc == large_size);
1004 }
1005 
1006 /* Sum the sizes of all partitions and reset the DECL_RTLs.  */
1007 static HOST_WIDE_INT
1008 account_stack_vars (void)
1009 {
1010   size_t si, j, i, n = stack_vars_num;
1011   HOST_WIDE_INT size = 0;
1012 
1013   for (si = 0; si < n; ++si)
1014     {
1015       i = stack_vars_sorted[si];
1016 
1017       /* Skip variables that aren't partition representatives, for now.  */
1018       if (stack_vars[i].representative != i)
1019 	continue;
1020 
1021       size += stack_vars[i].size;
1022       for (j = i; j != EOC; j = stack_vars[j].next)
1023 	set_rtl (stack_vars[j].decl, NULL);
1024     }
1025   return size;
1026 }
1027 
1028 /* A subroutine of expand_one_var.  Called to immediately assign rtl
1029    to a variable to be allocated in the stack frame.  */
1030 
1031 static void
1032 expand_one_stack_var (tree var)
1033 {
1034   HOST_WIDE_INT size, offset;
1035   unsigned byte_align;
1036 
1037   size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1038   byte_align = align_local_variable (SSAVAR (var));
1039 
1040   /* We handle highly aligned variables in expand_stack_vars.  */
1041   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1042 
1043   offset = alloc_stack_frame_space (size, byte_align);
1044 
1045   expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1046 			   crtl->max_used_stack_slot_alignment, offset);
1047 }
1048 
1049 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1050    that will reside in a hard register.  */
1051 
1052 static void
1053 expand_one_hard_reg_var (tree var)
1054 {
1055   rest_of_decl_compilation (var, 0, 0);
1056 }
1057 
1058 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1059    that will reside in a pseudo register.  */
1060 
1061 static void
1062 expand_one_register_var (tree var)
1063 {
1064   tree decl = SSAVAR (var);
1065   tree type = TREE_TYPE (decl);
1066   enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1067   rtx x = gen_reg_rtx (reg_mode);
1068 
1069   set_rtl (var, x);
1070 
1071   /* Note if the object is a user variable.  */
1072   if (!DECL_ARTIFICIAL (decl))
1073     mark_user_reg (x);
1074 
1075   if (POINTER_TYPE_P (type))
1076     mark_reg_pointer (x, get_pointer_alignment (var));
1077 }
1078 
1079 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1080    has some associated error, e.g. its type is error-mark.  We just need
1081    to pick something that won't crash the rest of the compiler.  */
1082 
1083 static void
1084 expand_one_error_var (tree var)
1085 {
1086   enum machine_mode mode = DECL_MODE (var);
1087   rtx x;
1088 
1089   if (mode == BLKmode)
1090     x = gen_rtx_MEM (BLKmode, const0_rtx);
1091   else if (mode == VOIDmode)
1092     x = const0_rtx;
1093   else
1094     x = gen_reg_rtx (mode);
1095 
1096   SET_DECL_RTL (var, x);
1097 }
1098 
1099 /* A subroutine of expand_one_var.  VAR is a variable that will be
1100    allocated to the local stack frame.  Return true if we wish to
1101    add VAR to STACK_VARS so that it will be coalesced with other
1102    variables.  Return false to allocate VAR immediately.
1103 
1104    This function is used to reduce the number of variables considered
1105    for coalescing, which reduces the size of the quadratic problem.  */
1106 
1107 static bool
1108 defer_stack_allocation (tree var, bool toplevel)
1109 {
1110   /* If stack protection is enabled, *all* stack variables must be deferred,
1111      so that we can re-order the strings to the top of the frame.
1112      Similarly for Address Sanitizer.  */
1113   if (flag_stack_protect || flag_asan)
1114     return true;
1115 
1116   /* We handle "large" alignment via dynamic allocation.  We want to handle
1117      this extra complication in only one place, so defer them.  */
1118   if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1119     return true;
1120 
1121   /* Variables in the outermost scope automatically conflict with
1122      every other variable.  The only reason to want to defer them
1123      at all is that, after sorting, we can more efficiently pack
1124      small variables in the stack frame.  Continue to defer at -O2.  */
1125   if (toplevel && optimize < 2)
1126     return false;
1127 
1128   /* Without optimization, *most* variables are allocated from the
1129      stack, which makes the quadratic problem large exactly when we
1130      want compilation to proceed as quickly as possible.  On the
1131      other hand, we don't want the function's stack frame size to
1132      get completely out of hand.  So we avoid adding scalars and
1133      "small" aggregates to the list at all.  */
1134   if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1135     return false;
1136 
1137   return true;
1138 }
1139 
1140 /* A subroutine of expand_used_vars.  Expand one variable according to
1141    its flavor.  Variables to be placed on the stack are not actually
1142    expanded yet, merely recorded.
1143    When REALLY_EXPAND is false, only record stack variables to be allocated.
1144    Return the amount of stack space this variable is supposed to take.
1145 */
1146 
1147 static HOST_WIDE_INT
1148 expand_one_var (tree var, bool toplevel, bool really_expand)
1149 {
1150   unsigned int align = BITS_PER_UNIT;
1151   tree origvar = var;
1152 
1153   var = SSAVAR (var);
1154 
1155   if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1156     {
1157       /* Because we don't know whether VAR will end up in a register or on
1158 	 the stack, we conservatively assume it will be on the stack even if
1159 	 VAR is eventually put into a register after the RA pass.  For
1160 	 non-automatic variables, which won't be on the stack, we collect the
1161 	 alignment of the type and ignore user-specified alignment.  */
1162       if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1163 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1164 				   TYPE_MODE (TREE_TYPE (var)),
1165 				   TYPE_ALIGN (TREE_TYPE (var)));
1166       else if (DECL_HAS_VALUE_EXPR_P (var)
1167 	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1168 	/* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set,
1169 	   or variables which were already assigned a stack slot by
1170 	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1171 	   changed to reflect the offset chosen for it.  */
1172 	align = crtl->stack_alignment_estimated;
1173       else
1174 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1175 
1176       /* If the variable alignment is very large we'll dynamicaly allocate
1177 	 it, which means that in-frame portion is just a pointer.  */
1178       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1179 	align = POINTER_SIZE;
1180     }
1181 
1182   if (SUPPORTS_STACK_ALIGNMENT
1183       && crtl->stack_alignment_estimated < align)
1184     {
1185       /* stack_alignment_estimated shouldn't change after the stack
1186          realign decision has been made.  */
1187       gcc_assert (!crtl->stack_realign_processed);
1188       crtl->stack_alignment_estimated = align;
1189     }
1190 
1191   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1192      So here we only make sure stack_alignment_needed >= align.  */
1193   if (crtl->stack_alignment_needed < align)
1194     crtl->stack_alignment_needed = align;
1195   if (crtl->max_used_stack_slot_alignment < align)
1196     crtl->max_used_stack_slot_alignment = align;
1197 
1198   if (TREE_CODE (origvar) == SSA_NAME)
1199     {
1200       gcc_assert (TREE_CODE (var) != VAR_DECL
1201 		  || (!DECL_EXTERNAL (var)
1202 		      && !DECL_HAS_VALUE_EXPR_P (var)
1203 		      && !TREE_STATIC (var)
1204 		      && TREE_TYPE (var) != error_mark_node
1205 		      && !DECL_HARD_REGISTER (var)
1206 		      && really_expand));
1207     }
1208   if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1209     ;
1210   else if (DECL_EXTERNAL (var))
1211     ;
1212   else if (DECL_HAS_VALUE_EXPR_P (var))
1213     ;
1214   else if (TREE_STATIC (var))
1215     ;
1216   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1217     ;
1218   else if (TREE_TYPE (var) == error_mark_node)
1219     {
1220       if (really_expand)
1221         expand_one_error_var (var);
1222     }
1223   else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1224     {
1225       if (really_expand)
1226         expand_one_hard_reg_var (var);
1227     }
1228   else if (use_register_for_decl (var))
1229     {
1230       if (really_expand)
1231         expand_one_register_var (origvar);
1232     }
1233   else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1234     {
1235       /* Reject variables which cover more than half of the address-space.  */
1236       if (really_expand)
1237 	{
1238 	  error ("size of variable %q+D is too large", var);
1239 	  expand_one_error_var (var);
1240 	}
1241     }
1242   else if (defer_stack_allocation (var, toplevel))
1243     add_stack_var (origvar);
1244   else
1245     {
1246       if (really_expand)
1247         expand_one_stack_var (origvar);
1248       return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1249     }
1250   return 0;
1251 }
1252 
1253 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1254    expanding variables.  Those variables that can be put into registers
1255    are allocated pseudos; those that can't are put on the stack.
1256 
1257    TOPLEVEL is true if this is the outermost BLOCK.  */
1258 
1259 static void
1260 expand_used_vars_for_block (tree block, bool toplevel)
1261 {
1262   tree t;
1263 
1264   /* Expand all variables at this level.  */
1265   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1266     if (TREE_USED (t)
1267         && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1268 	    || !DECL_NONSHAREABLE (t)))
1269       expand_one_var (t, toplevel, true);
1270 
1271   /* Expand all variables at contained levels.  */
1272   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1273     expand_used_vars_for_block (t, false);
1274 }
1275 
1276 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1277    and clear TREE_USED on all local variables.  */
1278 
1279 static void
1280 clear_tree_used (tree block)
1281 {
1282   tree t;
1283 
1284   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1285     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1286     if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1287 	|| !DECL_NONSHAREABLE (t))
1288       TREE_USED (t) = 0;
1289 
1290   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1291     clear_tree_used (t);
1292 }
1293 
1294 /* Examine TYPE and determine a bit mask of the following features.  */
1295 
1296 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1297 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1298 #define SPCT_HAS_ARRAY			4
1299 #define SPCT_HAS_AGGREGATE		8
1300 
1301 static unsigned int
1302 stack_protect_classify_type (tree type)
1303 {
1304   unsigned int ret = 0;
1305   tree t;
1306 
1307   switch (TREE_CODE (type))
1308     {
1309     case ARRAY_TYPE:
1310       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1311       if (t == char_type_node
1312 	  || t == signed_char_type_node
1313 	  || t == unsigned_char_type_node)
1314 	{
1315 	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1316 	  unsigned HOST_WIDE_INT len;
1317 
1318 	  if (!TYPE_SIZE_UNIT (type)
1319 	      || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1320 	    len = max;
1321 	  else
1322 	    len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1323 
1324 	  if (len == 0)
1325 	    ret = SPCT_HAS_ARRAY;
1326 	  else if (len < max)
1327 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1328 	  else
1329 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1330 	}
1331       else
1332 	ret = SPCT_HAS_ARRAY;
1333       break;
1334 
1335     case UNION_TYPE:
1336     case QUAL_UNION_TYPE:
1337     case RECORD_TYPE:
1338       ret = SPCT_HAS_AGGREGATE;
1339       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1340 	if (TREE_CODE (t) == FIELD_DECL)
1341 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1342       break;
1343 
1344     default:
1345       break;
1346     }
1347 
1348   return ret;
1349 }
1350 
1351 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1352    part of the local stack frame.  Remember if we ever return nonzero for
1353    any variable in this function.  The return value is the phase number in
1354    which the variable should be allocated.  */
1355 
1356 static int
1357 stack_protect_decl_phase (tree decl)
1358 {
1359   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1360   int ret = 0;
1361 
1362   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1363     has_short_buffer = true;
1364 
1365   if (flag_stack_protect == 2)
1366     {
1367       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1368 	  && !(bits & SPCT_HAS_AGGREGATE))
1369 	ret = 1;
1370       else if (bits & SPCT_HAS_ARRAY)
1371 	ret = 2;
1372     }
1373   else
1374     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1375 
1376   if (ret)
1377     has_protected_decls = true;
1378 
1379   return ret;
1380 }
1381 
1382 /* Two helper routines that check for phase 1 and phase 2.  These are used
1383    as callbacks for expand_stack_vars.  */
1384 
1385 static bool
1386 stack_protect_decl_phase_1 (size_t i)
1387 {
1388   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1389 }
1390 
1391 static bool
1392 stack_protect_decl_phase_2 (size_t i)
1393 {
1394   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1395 }
1396 
1397 /* And a helper function that checks for the asan phase (with the stack
1398    protector it is phase 3).  This is used as a callback for expand_stack_vars.
1399    Returns true if any of the vars in the partition need to be protected.  */
1400 
1401 static bool
1402 asan_decl_phase_3 (size_t i)
1403 {
1404   while (i != EOC)
1405     {
1406       if (asan_protect_stack_decl (stack_vars[i].decl))
1407 	return true;
1408       i = stack_vars[i].next;
1409     }
1410   return false;
1411 }
1412 
1413 /* Ensure that variables in different stack protection phases conflict
1414    so that they are not merged and share the same stack slot.  */
1415 
1416 static void
1417 add_stack_protection_conflicts (void)
1418 {
1419   size_t i, j, n = stack_vars_num;
1420   unsigned char *phase;
1421 
1422   phase = XNEWVEC (unsigned char, n);
1423   for (i = 0; i < n; ++i)
1424     phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1425 
1426   for (i = 0; i < n; ++i)
1427     {
1428       unsigned char ph_i = phase[i];
1429       for (j = i + 1; j < n; ++j)
1430 	if (ph_i != phase[j])
1431 	  add_stack_var_conflict (i, j);
1432     }
1433 
1434   XDELETEVEC (phase);
1435 }
1436 
1437 /* Create a decl for the guard at the top of the stack frame.  */
1438 
1439 static void
1440 create_stack_guard (void)
1441 {
1442   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1443 			   VAR_DECL, NULL, ptr_type_node);
1444   TREE_THIS_VOLATILE (guard) = 1;
1445   TREE_USED (guard) = 1;
1446   expand_one_stack_var (guard);
1447   crtl->stack_protect_guard = guard;
1448 }
1449 
1450 /* Prepare for expanding variables.  */
1451 static void
1452 init_vars_expansion (void)
1453 {
1454   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1455   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1456 
1457   /* A map from decl to stack partition.  */
1458   decl_to_stack_part = pointer_map_create ();
1459 
1460   /* Initialize local stack smashing state.  */
1461   has_protected_decls = false;
1462   has_short_buffer = false;
1463 }
1464 
1465 /* Free up stack variable graph data.  */
1466 static void
1467 fini_vars_expansion (void)
1468 {
1469   bitmap_obstack_release (&stack_var_bitmap_obstack);
1470   if (stack_vars)
1471     XDELETEVEC (stack_vars);
1472   if (stack_vars_sorted)
1473     XDELETEVEC (stack_vars_sorted);
1474   stack_vars = NULL;
1475   stack_vars_sorted = NULL;
1476   stack_vars_alloc = stack_vars_num = 0;
1477   pointer_map_destroy (decl_to_stack_part);
1478   decl_to_stack_part = NULL;
1479 }
1480 
1481 /* Make a fair guess for the size of the stack frame of the function
1482    in NODE.  This doesn't have to be exact, the result is only used in
1483    the inline heuristics.  So we don't want to run the full stack var
1484    packing algorithm (which is quadratic in the number of stack vars).
1485    Instead, we calculate the total size of all stack vars.  This turns
1486    out to be a pretty fair estimate -- packing of stack vars doesn't
1487    happen very often.  */
1488 
1489 HOST_WIDE_INT
1490 estimated_stack_frame_size (struct cgraph_node *node)
1491 {
1492   HOST_WIDE_INT size = 0;
1493   size_t i;
1494   tree var;
1495   struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
1496 
1497   push_cfun (fn);
1498 
1499   init_vars_expansion ();
1500 
1501   FOR_EACH_LOCAL_DECL (fn, i, var)
1502     if (auto_var_in_fn_p (var, fn->decl))
1503       size += expand_one_var (var, true, false);
1504 
1505   if (stack_vars_num > 0)
1506     {
1507       /* Fake sorting the stack vars for account_stack_vars ().  */
1508       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1509       for (i = 0; i < stack_vars_num; ++i)
1510 	stack_vars_sorted[i] = i;
1511       size += account_stack_vars ();
1512     }
1513 
1514   fini_vars_expansion ();
1515   pop_cfun ();
1516   return size;
1517 }
1518 
1519 /* Expand all variables used in the function.  */
1520 
1521 static rtx
1522 expand_used_vars (void)
1523 {
1524   tree var, outer_block = DECL_INITIAL (current_function_decl);
1525   vec<tree> maybe_local_decls = vNULL;
1526   rtx var_end_seq = NULL_RTX;
1527   struct pointer_map_t *ssa_name_decls;
1528   unsigned i;
1529   unsigned len;
1530 
1531   /* Compute the phase of the stack frame for this function.  */
1532   {
1533     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1534     int off = STARTING_FRAME_OFFSET % align;
1535     frame_phase = off ? align - off : 0;
1536   }
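  /* For instance, with a 16-byte preferred boundary and a
     STARTING_FRAME_OFFSET of 4, OFF is 4 and the phase computed above is 12.  */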
1537 
1538   /* Set TREE_USED on all variables in the local_decls.  */
1539   FOR_EACH_LOCAL_DECL (cfun, i, var)
1540     TREE_USED (var) = 1;
1541   /* Clear TREE_USED on all variables associated with a block scope.  */
1542   clear_tree_used (DECL_INITIAL (current_function_decl));
1543 
1544   init_vars_expansion ();
1545 
1546   ssa_name_decls = pointer_map_create ();
1547   for (i = 0; i < SA.map->num_partitions; i++)
1548     {
1549       tree var = partition_to_var (SA.map, i);
1550 
1551       gcc_assert (!virtual_operand_p (var));
1552 
1553       /* Assign decls to each SSA name partition, share decls for partitions
1554          we could have coalesced (those with the same type).  */
1555       if (SSA_NAME_VAR (var) == NULL_TREE)
1556 	{
1557 	  void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
1558 	  if (!*slot)
1559 	    *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
1560 	  replace_ssa_name_symbol (var, (tree) *slot);
1561 	}
1562 
1563       if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1564 	expand_one_var (var, true, true);
1565       else
1566 	{
1567 	  /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
1568 	     contain the default def (representing the parm or result itself)
1569 	     we don't do anything here.  But those which don't contain the
1570 	     default def (representing a temporary based on the parm/result)
1571 	     we need to allocate space just like for normal VAR_DECLs.  */
1572 	  if (!bitmap_bit_p (SA.partition_has_default_def, i))
1573 	    {
1574 	      expand_one_var (var, true, true);
1575 	      gcc_assert (SA.partition_to_pseudo[i]);
1576 	    }
1577 	}
1578     }
1579   pointer_map_destroy (ssa_name_decls);
1580 
1581   /* At this point all variables on the local_decls with TREE_USED
1582      set are not associated with any block scope.  Lay them out.  */
1583 
1584   len = vec_safe_length (cfun->local_decls);
1585   FOR_EACH_LOCAL_DECL (cfun, i, var)
1586     {
1587       bool expand_now = false;
1588 
1589       /* Expanded above already.  */
1590       if (is_gimple_reg (var))
1591 	{
1592 	  TREE_USED (var) = 0;
1593 	  goto next;
1594 	}
1595       /* We didn't set a block for static or extern because it's hard
1596 	 to tell the difference between a global variable (re)declared
1597 	 in a local scope, and one that's really declared there to
1598 	 begin with.  And it doesn't really matter much, since we're
1599 	 not giving them stack space.  Expand them now.  */
1600       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1601 	expand_now = true;
1602 
1603       /* If the variable is not associated with any block, then it
1604 	 was created by the optimizers, and could be live anywhere
1605 	 in the function.  */
1606       else if (TREE_USED (var))
1607 	expand_now = true;
1608 
1609       /* Finally, mark all variables on the list as used.  We'll use
1610 	 this in a moment when we expand those associated with scopes.  */
1611       TREE_USED (var) = 1;
1612 
1613       if (expand_now)
1614 	expand_one_var (var, true, true);
1615 
1616     next:
1617       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1618 	{
1619 	  rtx rtl = DECL_RTL_IF_SET (var);
1620 
1621 	  /* Keep artificial non-ignored vars in cfun->local_decls
1622 	     chain until instantiate_decls.  */
1623 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1624 	    add_local_decl (cfun, var);
1625 	  else if (rtl == NULL_RTX)
1626 	    /* If rtl isn't set yet, which can happen e.g. with
1627 	       -fstack-protector, retry before returning from this
1628 	       function.  */
1629 	    maybe_local_decls.safe_push (var);
1630 	}
1631     }
1632 
1633   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1634 
1635      +-----------------+-----------------+
1636      | ...processed... | ...duplicates...|
1637      +-----------------+-----------------+
1638                        ^
1639 		       +-- LEN points here.
1640 
1641      We just want the duplicates, as those are the artificial
1642      non-ignored vars that we want to keep until instantiate_decls.
1643      Move them down and truncate the array.  */
1644   if (!vec_safe_is_empty (cfun->local_decls))
1645     cfun->local_decls->block_remove (0, len);
1646 
1647   /* At this point, all variables within the block tree with TREE_USED
1648      set are actually used by the optimized function.  Lay them out.  */
1649   expand_used_vars_for_block (outer_block, true);
1650 
1651   if (stack_vars_num > 0)
1652     {
1653       add_scope_conflicts ();
1654 
1655       /* If stack protection is enabled, we don't share space between
1656 	 vulnerable data and non-vulnerable data.  */
1657       if (flag_stack_protect)
1658 	add_stack_protection_conflicts ();
1659 
1660       /* Now that we have collected all stack variables, and have computed a
1661 	 minimal interference graph, attempt to save some stack space.  */
1662       partition_stack_vars ();
1663       if (dump_file)
1664 	dump_stack_var_partition ();
1665     }
1666 
1667   /* There are several conditions under which we should create a
1668      stack guard: protect-all, alloca used, protected decls present.  */
1669   if (flag_stack_protect == 2
1670       || (flag_stack_protect
1671 	  && (cfun->calls_alloca || has_protected_decls)))
1672     create_stack_guard ();
1673 
1674   /* Assign rtl to each variable based on these partitions.  */
1675   if (stack_vars_num > 0)
1676     {
1677       struct stack_vars_data data;
1678 
1679       data.asan_vec = vNULL;
1680       data.asan_decl_vec = vNULL;
1681 
1682       /* Reorder decls to be protected by iterating over the variables
1683 	 array multiple times, and allocating out of each phase in turn.  */
1684       /* ??? We could probably integrate this into the qsort we did
1685 	 earlier, such that we naturally see these variables first,
1686 	 and thus naturally allocate things in the right order.  */
1687       if (has_protected_decls)
1688 	{
1689 	  /* Phase 1 contains only character arrays.  */
1690 	  expand_stack_vars (stack_protect_decl_phase_1, &data);
1691 
1692 	  /* Phase 2 contains other kinds of arrays.  */
1693 	  if (flag_stack_protect == 2)
1694 	    expand_stack_vars (stack_protect_decl_phase_2, &data);
1695 	}
1696 
1697       if (flag_asan)
1698 	/* Phase 3, any partitions that need asan protection
1699 	   in addition to phase 1 and 2.  */
1700 	expand_stack_vars (asan_decl_phase_3, &data);
1701 
1702       if (!data.asan_vec.is_empty ())
1703 	{
1704 	  HOST_WIDE_INT prev_offset = frame_offset;
1705 	  HOST_WIDE_INT offset
1706 	    = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
1707 				       ASAN_RED_ZONE_SIZE);
1708 	  data.asan_vec.safe_push (prev_offset);
1709 	  data.asan_vec.safe_push (offset);
1710 
1711 	  var_end_seq
1712 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
1713 					  data.asan_vec.address (),
1714 					  data.asan_decl_vec.address (),
1715 					  data.asan_vec.length ());
1716 	}
1717 
1718       expand_stack_vars (NULL, &data);
1719 
1720       data.asan_vec.release ();
1721       data.asan_decl_vec.release ();
1722     }
1723 
1724   fini_vars_expansion ();
1725 
1726   /* If there were any artificial non-ignored vars without rtl
1727      found earlier, see if deferred stack allocation hasn't assigned
1728      rtl to them.  */
1729   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
1730     {
1731       rtx rtl = DECL_RTL_IF_SET (var);
1732 
1733       /* Keep artificial non-ignored vars in cfun->local_decls
1734 	 chain until instantiate_decls.  */
1735       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1736 	add_local_decl (cfun, var);
1737     }
1738   maybe_local_decls.release ();
1739 
1740   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
1741   if (STACK_ALIGNMENT_NEEDED)
1742     {
1743       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1744       if (!FRAME_GROWS_DOWNWARD)
1745 	frame_offset += align - 1;
1746       frame_offset &= -align;
1747     }
1748 
1749   return var_end_seq;
1750 }
1751 
1752 
1753 /* If we need to produce a detailed dump, print the tree representation
1754    for STMT to the dump file.  SINCE is the last RTX after which the RTL
1755    generated for STMT should have been appended.  */
1756 
1757 static void
1758 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1759 {
1760   if (dump_file && (dump_flags & TDF_DETAILS))
1761     {
1762       fprintf (dump_file, "\n;; ");
1763       print_gimple_stmt (dump_file, stmt, 0,
1764 			 TDF_SLIM | (dump_flags & TDF_LINENO));
1765       fprintf (dump_file, "\n");
1766 
1767       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1768     }
1769 }
1770 
1771 /* Maps the blocks that do not contain tree labels to rtx labels.  */
1772 
1773 static struct pointer_map_t *lab_rtx_for_bb;
1774 
1775 /* Returns the label_rtx expression for a label starting basic block BB.  */
1776 
1777 static rtx
1778 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1779 {
1780   gimple_stmt_iterator gsi;
1781   tree lab;
1782   gimple lab_stmt;
1783   void **elt;
1784 
1785   if (bb->flags & BB_RTL)
1786     return block_label (bb);
1787 
1788   elt = pointer_map_contains (lab_rtx_for_bb, bb);
1789   if (elt)
1790     return (rtx) *elt;
1791 
1792   /* Find the tree label if it is present.  */
1793 
1794   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1795     {
1796       lab_stmt = gsi_stmt (gsi);
1797       if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1798 	break;
1799 
1800       lab = gimple_label_label (lab_stmt);
1801       if (DECL_NONLOCAL (lab))
1802 	break;
1803 
1804       return label_rtx (lab);
1805     }
1806 
1807   elt = pointer_map_insert (lab_rtx_for_bb, bb);
1808   *elt = gen_label_rtx ();
1809   return (rtx) *elt;
1810 }
1811 
1812 
1813 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
1814    of a basic block where we just expanded the conditional at the end,
1815    possibly clean up the CFG and instruction sequence.  LAST is the
1816    last instruction before the just emitted jump sequence.  */
1817 
1818 static void
1819 maybe_cleanup_end_of_block (edge e, rtx last)
1820 {
1821   /* Special case: when jumpif decides that the condition is
1822      trivial it emits an unconditional jump (and the necessary
1823      barrier).  But we still have two edges, and the fallthru one is
1824      wrong.  purge_dead_edges would clean this up later.  Unfortunately
1825      we have to insert insns (and split edges) before
1826      find_many_sub_basic_blocks and hence before purge_dead_edges.
1827      But splitting edges might create new blocks which depend on the
1828      fact that if there are two edges there's no barrier.  So the
1829      barrier would get lost and verify_flow_info would ICE.  Instead
1830      of auditing all edge splitters to care for the barrier (which
1831      normally isn't there in a cleaned CFG), fix it here.  */
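  /* For example, if the GIMPLE_COND condition folded to a constant,
     jumpif emits just an unconditional jump plus a barrier, while the
     CFG still carries the now-dead fallthru edge that we remove
     below.  */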
1832   if (BARRIER_P (get_last_insn ()))
1833     {
1834       rtx insn;
1835       remove_edge (e);
1836       /* Now we have a single successor block; if we have insns to
1837 	 insert on the remaining edge, we will potentially insert
1838 	 them at the end of this block (if the dest block isn't feasible)
1839 	 in order to avoid splitting the edge.  This insertion will take
1840 	 place in front of the last jump.  But we might have emitted
1841 	 multiple jumps (conditional and one unconditional) to the
1842 	 same destination.  Inserting in front of the last one then
1843 	 is a problem.  See PR 40021.  We fix this by deleting all
1844 	 jumps except the last unconditional one.  */
1845       insn = PREV_INSN (get_last_insn ());
1846       /* Make sure we have an unconditional jump.  Otherwise we're
1847 	 confused.  */
1848       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1849       for (insn = PREV_INSN (insn); insn != last;)
1850 	{
1851 	  insn = PREV_INSN (insn);
1852 	  if (JUMP_P (NEXT_INSN (insn)))
1853 	    {
1854 	      if (!any_condjump_p (NEXT_INSN (insn)))
1855 		{
1856 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1857 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1858 		}
1859 	      delete_insn (NEXT_INSN (insn));
1860 	    }
1861 	}
1862     }
1863 }
1864 
1865 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
1866    Returns a new basic block if we've terminated the current basic
1867    block and created a new one.  */
1868 
1869 static basic_block
1870 expand_gimple_cond (basic_block bb, gimple stmt)
1871 {
1872   basic_block new_bb, dest;
1873   edge new_edge;
1874   edge true_edge;
1875   edge false_edge;
1876   rtx last2, last;
1877   enum tree_code code;
1878   tree op0, op1;
1879 
1880   code = gimple_cond_code (stmt);
1881   op0 = gimple_cond_lhs (stmt);
1882   op1 = gimple_cond_rhs (stmt);
1883   /* We're sometimes presented with such code:
1884        D.123_1 = x < y;
1885        if (D.123_1 != 0)
1886          ...
1887      This would expand to two comparisons which then later might
1888      be cleaned up by combine.  But some pattern matchers like if-conversion
1889      work better when there's only one compare, so make up for this
1890      here as special exception if TER would have made the same change.  */
1891   if (gimple_cond_single_var_p (stmt)
1892       && SA.values
1893       && TREE_CODE (op0) == SSA_NAME
1894       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1895     {
1896       gimple second = SSA_NAME_DEF_STMT (op0);
1897       if (gimple_code (second) == GIMPLE_ASSIGN)
1898 	{
1899 	  enum tree_code code2 = gimple_assign_rhs_code (second);
1900 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
1901 	    {
1902 	      code = code2;
1903 	      op0 = gimple_assign_rhs1 (second);
1904 	      op1 = gimple_assign_rhs2 (second);
1905 	    }
1906 	  /* If jumps are cheap turn some more codes into
1907 	     jumpy sequences.  */
1908 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1909 	    {
1910 	      if ((code2 == BIT_AND_EXPR
1911 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1912 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1913 		  || code2 == TRUTH_AND_EXPR)
1914 		{
1915 		  code = TRUTH_ANDIF_EXPR;
1916 		  op0 = gimple_assign_rhs1 (second);
1917 		  op1 = gimple_assign_rhs2 (second);
1918 		}
1919 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1920 		{
1921 		  code = TRUTH_ORIF_EXPR;
1922 		  op0 = gimple_assign_rhs1 (second);
1923 		  op1 = gimple_assign_rhs2 (second);
1924 		}
1925 	    }
1926 	}
1927     }
1928 
1929   last2 = last = get_last_insn ();
1930 
1931   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1932   set_curr_insn_location (gimple_location (stmt));
1933 
1934   /* These flags have no purpose in RTL land.  */
1935   true_edge->flags &= ~EDGE_TRUE_VALUE;
1936   false_edge->flags &= ~EDGE_FALSE_VALUE;
1937 
1938   /* We can either have a pure conditional jump with one fallthru edge or
1939      two-way jump that needs to be decomposed into two basic blocks.  */
1940   if (false_edge->dest == bb->next_bb)
1941     {
1942       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1943 		true_edge->probability);
1944       maybe_dump_rtl_for_gimple_stmt (stmt, last);
1945       if (true_edge->goto_locus != UNKNOWN_LOCATION)
1946 	set_curr_insn_location (true_edge->goto_locus);
1947       false_edge->flags |= EDGE_FALLTHRU;
1948       maybe_cleanup_end_of_block (false_edge, last);
1949       return NULL;
1950     }
1951   if (true_edge->dest == bb->next_bb)
1952     {
1953       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1954 		   false_edge->probability);
1955       maybe_dump_rtl_for_gimple_stmt (stmt, last);
1956       if (false_edge->goto_locus != UNKNOWN_LOCATION)
1957 	set_curr_insn_location (false_edge->goto_locus);
1958       true_edge->flags |= EDGE_FALLTHRU;
1959       maybe_cleanup_end_of_block (true_edge, last);
1960       return NULL;
1961     }
1962 
1963   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1964 	    true_edge->probability);
1965   last = get_last_insn ();
1966   if (false_edge->goto_locus != UNKNOWN_LOCATION)
1967     set_curr_insn_location (false_edge->goto_locus);
1968   emit_jump (label_rtx_for_bb (false_edge->dest));
1969 
1970   BB_END (bb) = last;
1971   if (BARRIER_P (BB_END (bb)))
1972     BB_END (bb) = PREV_INSN (BB_END (bb));
1973   update_bb_for_insn (bb);
1974 
1975   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1976   dest = false_edge->dest;
1977   redirect_edge_succ (false_edge, new_bb);
1978   false_edge->flags |= EDGE_FALLTHRU;
1979   new_bb->count = false_edge->count;
1980   new_bb->frequency = EDGE_FREQUENCY (false_edge);
1981   if (current_loops && bb->loop_father)
1982     add_bb_to_loop (new_bb, bb->loop_father);
1983   new_edge = make_edge (new_bb, dest, 0);
1984   new_edge->probability = REG_BR_PROB_BASE;
1985   new_edge->count = new_bb->count;
1986   if (BARRIER_P (BB_END (new_bb)))
1987     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1988   update_bb_for_insn (new_bb);
1989 
1990   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1991 
1992   if (true_edge->goto_locus != UNKNOWN_LOCATION)
1993     {
1994       set_curr_insn_location (true_edge->goto_locus);
1995       true_edge->goto_locus = curr_insn_location ();
1996     }
1997 
1998   return new_bb;
1999 }
2000 
2001 /* Mark all calls that can have a transaction restart.  */
2002 
2003 static void
2004 mark_transaction_restart_calls (gimple stmt)
2005 {
2006   struct tm_restart_node dummy;
2007   void **slot;
2008 
2009   if (!cfun->gimple_df->tm_restart)
2010     return;
2011 
2012   dummy.stmt = stmt;
2013   slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2014   if (slot)
2015     {
2016       struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2017       tree list = n->label_or_list;
2018       rtx insn;
2019 
2020       for (insn = next_real_insn (get_last_insn ());
2021 	   !CALL_P (insn);
2022 	   insn = next_real_insn (insn))
2023 	continue;
2024 
2025       if (TREE_CODE (list) == LABEL_DECL)
2026 	add_reg_note (insn, REG_TM, label_rtx (list));
2027       else
2028 	for (; list ; list = TREE_CHAIN (list))
2029 	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2030     }
2031 }
2032 
2033 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2034    statement STMT.  */
2035 
2036 static void
2037 expand_call_stmt (gimple stmt)
2038 {
2039   tree exp, decl, lhs;
2040   bool builtin_p;
2041   size_t i;
2042 
2043   if (gimple_call_internal_p (stmt))
2044     {
2045       expand_internal_call (stmt);
2046       return;
2047     }
2048 
2049   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2050 
2051   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2052   decl = gimple_call_fndecl (stmt);
2053   builtin_p = decl && DECL_BUILT_IN (decl);
2054 
2055   /* If this is not a builtin function, the function type through which the
2056      call is made may be different from the type of the function.  */
2057   if (!builtin_p)
2058     CALL_EXPR_FN (exp)
2059       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2060 		      CALL_EXPR_FN (exp));
2061 
2062   TREE_TYPE (exp) = gimple_call_return_type (stmt);
2063   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2064 
2065   for (i = 0; i < gimple_call_num_args (stmt); i++)
2066     {
2067       tree arg = gimple_call_arg (stmt, i);
2068       gimple def;
2069       /* TER addresses into arguments of builtin functions so we have a
2070 	 chance to infer more correct alignment information.  See PR39954.  */
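      /* For example (the names below are purely illustrative), for an
	 argument whose SSA name is defined by _1 = &s.f, substituting
	 the ADDR_EXPR &s.f directly lets the expander see the alignment
	 of the underlying object.  */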
2071       if (builtin_p
2072 	  && TREE_CODE (arg) == SSA_NAME
2073 	  && (def = get_gimple_for_ssa_name (arg))
2074 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2075 	arg = gimple_assign_rhs1 (def);
2076       CALL_EXPR_ARG (exp, i) = arg;
2077     }
2078 
2079   if (gimple_has_side_effects (stmt))
2080     TREE_SIDE_EFFECTS (exp) = 1;
2081 
2082   if (gimple_call_nothrow_p (stmt))
2083     TREE_NOTHROW (exp) = 1;
2084 
2085   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2086   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2087   if (decl
2088       && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2089       && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2090 	  || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2091     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2092   else
2093     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2094   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2095   SET_EXPR_LOCATION (exp, gimple_location (stmt));
2096 
2097   /* Ensure RTL is created for debug args.  */
2098   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2099     {
2100       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2101       unsigned int ix;
2102       tree dtemp;
2103 
2104       if (debug_args)
2105 	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2106 	  {
2107 	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2108 	    expand_debug_expr (dtemp);
2109 	  }
2110     }
2111 
2112   lhs = gimple_call_lhs (stmt);
2113   if (lhs)
2114     expand_assignment (lhs, exp, false);
2115   else
2116     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2117 
2118   mark_transaction_restart_calls (stmt);
2119 }
2120 
2121 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2122    STMT that doesn't require special handling for outgoing edges.  That
2123    is, no tailcalls and no GIMPLE_COND.  */
2124 
2125 static void
2126 expand_gimple_stmt_1 (gimple stmt)
2127 {
2128   tree op0;
2129 
2130   set_curr_insn_location (gimple_location (stmt));
2131 
2132   switch (gimple_code (stmt))
2133     {
2134     case GIMPLE_GOTO:
2135       op0 = gimple_goto_dest (stmt);
2136       if (TREE_CODE (op0) == LABEL_DECL)
2137 	expand_goto (op0);
2138       else
2139 	expand_computed_goto (op0);
2140       break;
2141     case GIMPLE_LABEL:
2142       expand_label (gimple_label_label (stmt));
2143       break;
2144     case GIMPLE_NOP:
2145     case GIMPLE_PREDICT:
2146       break;
2147     case GIMPLE_SWITCH:
2148       expand_case (stmt);
2149       break;
2150     case GIMPLE_ASM:
2151       expand_asm_stmt (stmt);
2152       break;
2153     case GIMPLE_CALL:
2154       expand_call_stmt (stmt);
2155       break;
2156 
2157     case GIMPLE_RETURN:
2158       op0 = gimple_return_retval (stmt);
2159 
2160       if (op0 && op0 != error_mark_node)
2161 	{
2162 	  tree result = DECL_RESULT (current_function_decl);
2163 
2164 	  /* If we are not returning the current function's RESULT_DECL,
2165 	     build an assignment to it.  */
2166 	  if (op0 != result)
2167 	    {
2168 	      /* I believe that a function's RESULT_DECL is unique.  */
2169 	      gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2170 
2171 	      /* ??? We'd like to use simply expand_assignment here,
2172 	         but this fails if the value is of BLKmode but the return
2173 		 decl is a register.  expand_return has special handling
2174 		 for this combination, which eventually should move
2175 		 to common code.  See comments there.  Until then, let's
2176 		 build a modify expression :-/  */
2177 	      op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2178 			    result, op0);
2179 	    }
2180 	}
2181       if (!op0)
2182 	expand_null_return ();
2183       else
2184 	expand_return (op0);
2185       break;
2186 
2187     case GIMPLE_ASSIGN:
2188       {
2189 	tree lhs = gimple_assign_lhs (stmt);
2190 
2191 	/* Tree expand used to fiddle with |= and &= of two bitfield
2192 	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
2193 	   of binary assigns must be a gimple reg.  */
2194 
2195 	if (TREE_CODE (lhs) != SSA_NAME
2196 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
2197 	       == GIMPLE_SINGLE_RHS)
2198 	  {
2199 	    tree rhs = gimple_assign_rhs1 (stmt);
2200 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2201 			== GIMPLE_SINGLE_RHS);
2202 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2203 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2204 	    if (TREE_CLOBBER_P (rhs))
2205 	      /* This is a clobber to mark the going out of scope for
2206 		 this LHS.  */
2207 	      ;
2208 	    else
2209 	      expand_assignment (lhs, rhs,
2210 				 gimple_assign_nontemporal_move_p (stmt));
2211 	  }
2212 	else
2213 	  {
2214 	    rtx target, temp;
2215 	    bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2216 	    struct separate_ops ops;
2217 	    bool promoted = false;
2218 
2219 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2220 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2221 	      promoted = true;
2222 
2223 	    ops.code = gimple_assign_rhs_code (stmt);
2224 	    ops.type = TREE_TYPE (lhs);
2225 	    switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2226 	      {
2227 		case GIMPLE_TERNARY_RHS:
2228 		  ops.op2 = gimple_assign_rhs3 (stmt);
2229 		  /* Fallthru */
2230 		case GIMPLE_BINARY_RHS:
2231 		  ops.op1 = gimple_assign_rhs2 (stmt);
2232 		  /* Fallthru */
2233 		case GIMPLE_UNARY_RHS:
2234 		  ops.op0 = gimple_assign_rhs1 (stmt);
2235 		  break;
2236 		default:
2237 		  gcc_unreachable ();
2238 	      }
2239 	    ops.location = gimple_location (stmt);
2240 
2241 	    /* If we want to use a nontemporal store, force the value
2242 	       into a register first.  If we store into a promoted register,
2243 	       don't directly expand to target.  */
2244 	    temp = nontemporal || promoted ? NULL_RTX : target;
2245 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2246 				       EXPAND_NORMAL);
2247 
2248 	    if (temp == target)
2249 	      ;
2250 	    else if (promoted)
2251 	      {
2252 		int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2253 		/* If TEMP is a VOIDmode constant, use convert_modes to make
2254 		   sure that we properly convert it.  */
2255 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2256 		  {
2257 		    temp = convert_modes (GET_MODE (target),
2258 					  TYPE_MODE (ops.type),
2259 					  temp, unsignedp);
2260 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2261 					  GET_MODE (target), temp, unsignedp);
2262 		  }
2263 
2264 		convert_move (SUBREG_REG (target), temp, unsignedp);
2265 	      }
2266 	    else if (nontemporal && emit_storent_insn (target, temp))
2267 	      ;
2268 	    else
2269 	      {
2270 		temp = force_operand (temp, target);
2271 		if (temp != target)
2272 		  emit_move_insn (target, temp);
2273 	      }
2274 	  }
2275       }
2276       break;
2277 
2278     default:
2279       gcc_unreachable ();
2280     }
2281 }
2282 
2283 /* Expand one gimple statement STMT and return the last RTL instruction
2284    before any of the newly generated ones.
2285 
2286    In addition to generating the necessary RTL instructions this also
2287    sets REG_EH_REGION notes if necessary and sets the current source
2288    location for diagnostics.  */
2289 
2290 static rtx
2291 expand_gimple_stmt (gimple stmt)
2292 {
2293   location_t saved_location = input_location;
2294   rtx last = get_last_insn ();
2295   int lp_nr;
2296 
2297   gcc_assert (cfun);
2298 
2299   /* We need to save and restore the current source location so that errors
2300      discovered during expansion are emitted with the right location.  But
2301      it would be better if the diagnostic routines used the source location
2302      embedded in the tree nodes rather than globals.  */
2303   if (gimple_has_location (stmt))
2304     input_location = gimple_location (stmt);
2305 
2306   expand_gimple_stmt_1 (stmt);
2307 
2308   /* Free any temporaries used to evaluate this statement.  */
2309   free_temp_slots ();
2310 
2311   input_location = saved_location;
2312 
2313   /* Mark all insns that may trap.  */
2314   lp_nr = lookup_stmt_eh_lp (stmt);
2315   if (lp_nr)
2316     {
2317       rtx insn;
2318       for (insn = next_real_insn (last); insn;
2319 	   insn = next_real_insn (insn))
2320 	{
2321 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2322 	      /* If we want exceptions for non-call insns, any
2323 		 may_trap_p instruction may throw.  */
2324 	      && GET_CODE (PATTERN (insn)) != CLOBBER
2325 	      && GET_CODE (PATTERN (insn)) != USE
2326 	      && insn_could_throw_p (insn))
2327 	    make_reg_eh_region_note (insn, 0, lp_nr);
2328 	}
2329     }
2330 
2331   return last;
2332 }
2333 
2334 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
2335    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
2336    generated a tail call (something that might be denied by the ABI
2337    rules governing the call; see calls.c).
2338 
2339    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2340    can still reach the rest of BB.  The case here is __builtin_sqrt,
2341    where the NaN result goes through the external function (with a
2342    tailcall) and the normal result happens via a sqrt instruction.  */
2343 
2344 static basic_block
2345 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2346 {
2347   rtx last2, last;
2348   edge e;
2349   edge_iterator ei;
2350   int probability;
2351   gcov_type count;
2352 
2353   last2 = last = expand_gimple_stmt (stmt);
2354 
2355   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2356     if (CALL_P (last) && SIBLING_CALL_P (last))
2357       goto found;
2358 
2359   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2360 
2361   *can_fallthru = true;
2362   return NULL;
2363 
2364  found:
2365   /* ??? Wouldn't it be better to just reset any pending stack adjust?
2366      Any instructions emitted here are about to be deleted.  */
2367   do_pending_stack_adjust ();
2368 
2369   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
2370   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
2371      EH or abnormal edges, we shouldn't have created a tail call in
2372      the first place.  So it seems to me we should just be removing
2373      all edges here, or redirecting the existing fallthru edge to
2374      the exit block.  */
2375 
2376   probability = 0;
2377   count = 0;
2378 
2379   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2380     {
2381       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2382 	{
2383 	  if (e->dest != EXIT_BLOCK_PTR)
2384 	    {
2385 	      e->dest->count -= e->count;
2386 	      e->dest->frequency -= EDGE_FREQUENCY (e);
2387 	      if (e->dest->count < 0)
2388 		e->dest->count = 0;
2389 	      if (e->dest->frequency < 0)
2390 		e->dest->frequency = 0;
2391 	    }
2392 	  count += e->count;
2393 	  probability += e->probability;
2394 	  remove_edge (e);
2395 	}
2396       else
2397 	ei_next (&ei);
2398     }
2399 
2400   /* This is somewhat ugly: the call_expr expander often emits instructions
2401      after the sibcall (to perform the function return).  These confuse the
2402      find_many_sub_basic_blocks code, so we need to get rid of them.  */
2403   last = NEXT_INSN (last);
2404   gcc_assert (BARRIER_P (last));
2405 
2406   *can_fallthru = false;
2407   while (NEXT_INSN (last))
2408     {
2409       /* For instance, an sqrt builtin expander expands an if with a
2410 	 sibcall in the then branch and a label for the else branch.  */
2411       if (LABEL_P (NEXT_INSN (last)))
2412 	{
2413 	  *can_fallthru = true;
2414 	  break;
2415 	}
2416       delete_insn (NEXT_INSN (last));
2417     }
2418 
2419   e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2420   e->probability += probability;
2421   e->count += count;
2422   BB_END (bb) = last;
2423   update_bb_for_insn (bb);
2424 
2425   if (NEXT_INSN (last))
2426     {
2427       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2428 
2429       last = BB_END (bb);
2430       if (BARRIER_P (last))
2431 	BB_END (bb) = PREV_INSN (last);
2432     }
2433 
2434   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2435 
2436   return bb;
2437 }
2438 
2439 /* Return the difference between the floor and the truncated result of
2440    a signed division by OP1 with remainder MOD.  */
2441 static rtx
2442 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2443 {
2444   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
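  /* For example, -7 / 3 truncates to -2 with MOD == -1; since MOD != 0
     and OP1 / MOD == -3 < 0, the adjustment is -1 and the floor result
     is -3.  */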
2445   return gen_rtx_IF_THEN_ELSE
2446     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2447      gen_rtx_IF_THEN_ELSE
2448      (mode, gen_rtx_LT (BImode,
2449 			gen_rtx_DIV (mode, op1, mod),
2450 			const0_rtx),
2451       constm1_rtx, const0_rtx),
2452      const0_rtx);
2453 }
2454 
2455 /* Return the difference between the ceil and the truncated result of
2456    a signed division by OP1 with remainder MOD.  */
2457 static rtx
2458 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2459 {
2460   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
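  /* For example, 7 / 3 truncates to 2 with MOD == 1; since OP1 / MOD > 0,
     the adjustment is 1, giving the ceiling 3.  For -7 / 3 the remainder
     is -1, OP1 / MOD < 0, and no adjustment is needed.  */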
2461   return gen_rtx_IF_THEN_ELSE
2462     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2463      gen_rtx_IF_THEN_ELSE
2464      (mode, gen_rtx_GT (BImode,
2465 			gen_rtx_DIV (mode, op1, mod),
2466 			const0_rtx),
2467       const1_rtx, const0_rtx),
2468      const0_rtx);
2469 }
2470 
2471 /* Return the difference between the ceil and the truncated result of
2472    an unsigned division by OP1 with remainder MOD.  */
2473 static rtx
2474 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2475 {
2476   /* (mod != 0 ? 1 : 0) */
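  /* For example, 7 / 3 truncates to 2 with MOD == 1; any nonzero
     remainder bumps the quotient by one, giving the ceiling 3.  */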
2477   return gen_rtx_IF_THEN_ELSE
2478     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2479      const1_rtx, const0_rtx);
2480 }
2481 
2482 /* Return the difference between the rounded and the truncated result
2483    of a signed division by OP1 with remainder MOD.  Halfway cases are
2484    rounded away from zero, rather than to the nearest even number.  */
2485 static rtx
2486 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2487 {
2488   /* (abs (mod) >= abs (op1) - abs (mod)
2489       ? (op1 / mod > 0 ? 1 : -1)
2490       : 0) */
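  /* For example, -6 / 4 truncates to -1 with MOD == -2; abs (MOD) == 2
     equals abs (OP1) - abs (MOD), and OP1 / MOD < 0, so the adjustment
     is -1 and the halfway value -1.5 rounds away from zero to -2.  */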
2491   return gen_rtx_IF_THEN_ELSE
2492     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2493 		       gen_rtx_MINUS (mode,
2494 				      gen_rtx_ABS (mode, op1),
2495 				      gen_rtx_ABS (mode, mod))),
2496      gen_rtx_IF_THEN_ELSE
2497      (mode, gen_rtx_GT (BImode,
2498 			gen_rtx_DIV (mode, op1, mod),
2499 			const0_rtx),
2500       const1_rtx, constm1_rtx),
2501      const0_rtx);
2502 }
2503 
2504 /* Return the difference between the rounded and the truncated result
2505    of an unsigned division by OP1 with remainder MOD.  Halfway cases
2506    are rounded away from zero, rather than to the nearest even
2507    number.  */
2508 static rtx
2509 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2510 {
2511   /* (mod >= op1 - mod ? 1 : 0) */
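  /* For example, 6 / 4 truncates to 1 with MOD == 2; since
     MOD >= OP1 - MOD, the adjustment is 1 and the halfway value 1.5
     rounds away from zero to 2.  */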
2512   return gen_rtx_IF_THEN_ELSE
2513     (mode, gen_rtx_GE (BImode, mod,
2514 		       gen_rtx_MINUS (mode, op1, mod)),
2515      const1_rtx, const0_rtx);
2516 }
2517 
2518 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2519    any rtl.  */
2520 
2521 static rtx
2522 convert_debug_memory_address (enum machine_mode mode, rtx x,
2523 			      addr_space_t as)
2524 {
2525   enum machine_mode xmode = GET_MODE (x);
2526 
2527 #ifndef POINTERS_EXTEND_UNSIGNED
2528   gcc_assert (mode == Pmode
2529 	      || mode == targetm.addr_space.address_mode (as));
2530   gcc_assert (xmode == mode || xmode == VOIDmode);
2531 #else
2532   rtx temp;
2533 
2534   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2535 
2536   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2537     return x;
2538 
2539   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2540     x = simplify_gen_subreg (mode, x, xmode,
2541 			     subreg_lowpart_offset
2542 			     (mode, xmode));
2543   else if (POINTERS_EXTEND_UNSIGNED > 0)
2544     x = gen_rtx_ZERO_EXTEND (mode, x);
2545   else if (!POINTERS_EXTEND_UNSIGNED)
2546     x = gen_rtx_SIGN_EXTEND (mode, x);
2547   else
2548     {
2549       switch (GET_CODE (x))
2550 	{
2551 	case SUBREG:
2552 	  if ((SUBREG_PROMOTED_VAR_P (x)
2553 	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2554 	       || (GET_CODE (SUBREG_REG (x)) == PLUS
2555 		   && REG_P (XEXP (SUBREG_REG (x), 0))
2556 		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2557 		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2558 	      && GET_MODE (SUBREG_REG (x)) == mode)
2559 	    return SUBREG_REG (x);
2560 	  break;
2561 	case LABEL_REF:
2562 	  temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2563 	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2564 	  return temp;
2565 	case SYMBOL_REF:
2566 	  temp = shallow_copy_rtx (x);
2567 	  PUT_MODE (temp, mode);
2568 	  return temp;
2569 	case CONST:
2570 	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2571 	  if (temp)
2572 	    temp = gen_rtx_CONST (mode, temp);
2573 	  return temp;
2574 	case PLUS:
2575 	case MINUS:
2576 	  if (CONST_INT_P (XEXP (x, 1)))
2577 	    {
2578 	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2579 	      if (temp)
2580 		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2581 	    }
2582 	  break;
2583 	default:
2584 	  break;
2585 	}
2586       /* Don't know how to express ptr_extend as operation in debug info.  */
2587       return NULL;
2588     }
2589 #endif /* POINTERS_EXTEND_UNSIGNED */
2590 
2591   return x;
2592 }
2593 
2594 /* Return an RTX equivalent to the value of the parameter DECL.  */
2595 
2596 static rtx
2597 expand_debug_parm_decl (tree decl)
2598 {
2599   rtx incoming = DECL_INCOMING_RTL (decl);
2600 
2601   if (incoming
2602       && GET_MODE (incoming) != BLKmode
2603       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2604 	  || (MEM_P (incoming)
2605 	      && REG_P (XEXP (incoming, 0))
2606 	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
2607     {
2608       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2609 
2610 #ifdef HAVE_window_save
2611       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2612 	 If the target machine has an explicit window save instruction, the
2613 	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
2614       if (REG_P (incoming)
2615 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2616 	incoming
2617 	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2618 				OUTGOING_REGNO (REGNO (incoming)), 0);
2619       else if (MEM_P (incoming))
2620 	{
2621 	  rtx reg = XEXP (incoming, 0);
2622 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2623 	    {
2624 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2625 	      incoming = replace_equiv_address_nv (incoming, reg);
2626 	    }
2627 	  else
2628 	    incoming = copy_rtx (incoming);
2629 	}
2630 #endif
2631 
2632       ENTRY_VALUE_EXP (rtl) = incoming;
2633       return rtl;
2634     }
2635 
2636   if (incoming
2637       && GET_MODE (incoming) != BLKmode
2638       && !TREE_ADDRESSABLE (decl)
2639       && MEM_P (incoming)
2640       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2641 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
2642 	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2643 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2644     return copy_rtx (incoming);
2645 
2646   return NULL_RTX;
2647 }
2648 
2649 /* Return an RTX equivalent to the value of the tree expression EXP.  */
2650 
2651 static rtx
2652 expand_debug_expr (tree exp)
2653 {
2654   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2655   enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2656   enum machine_mode inner_mode = VOIDmode;
2657   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2658   addr_space_t as;
2659 
2660   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2661     {
2662     case tcc_expression:
2663       switch (TREE_CODE (exp))
2664 	{
2665 	case COND_EXPR:
2666 	case DOT_PROD_EXPR:
2667 	case WIDEN_MULT_PLUS_EXPR:
2668 	case WIDEN_MULT_MINUS_EXPR:
2669 	case FMA_EXPR:
2670 	  goto ternary;
2671 
2672 	case TRUTH_ANDIF_EXPR:
2673 	case TRUTH_ORIF_EXPR:
2674 	case TRUTH_AND_EXPR:
2675 	case TRUTH_OR_EXPR:
2676 	case TRUTH_XOR_EXPR:
2677 	  goto binary;
2678 
2679 	case TRUTH_NOT_EXPR:
2680 	  goto unary;
2681 
2682 	default:
2683 	  break;
2684 	}
2685       break;
2686 
2687     ternary:
2688       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2689       if (!op2)
2690 	return NULL_RTX;
2691       /* Fall through.  */
2692 
2693     binary:
2694     case tcc_binary:
2695     case tcc_comparison:
2696       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2697       if (!op1)
2698 	return NULL_RTX;
2699       /* Fall through.  */
2700 
2701     unary:
2702     case tcc_unary:
2703       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2704       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2705       if (!op0)
2706 	return NULL_RTX;
2707       break;
2708 
2709     case tcc_type:
2710     case tcc_statement:
2711       gcc_unreachable ();
2712 
2713     case tcc_constant:
2714     case tcc_exceptional:
2715     case tcc_declaration:
2716     case tcc_reference:
2717     case tcc_vl_exp:
2718       break;
2719     }
2720 
2721   switch (TREE_CODE (exp))
2722     {
2723     case STRING_CST:
2724       if (!lookup_constant_def (exp))
2725 	{
2726 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
2727 	      != (size_t) TREE_STRING_LENGTH (exp))
2728 	    return NULL_RTX;
2729 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2730 	  op0 = gen_rtx_MEM (BLKmode, op0);
2731 	  set_mem_attributes (op0, exp, 0);
2732 	  return op0;
2733 	}
2734       /* Fall through...  */
2735 
2736     case INTEGER_CST:
2737     case REAL_CST:
2738     case FIXED_CST:
2739       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2740       return op0;
2741 
2742     case COMPLEX_CST:
2743       gcc_assert (COMPLEX_MODE_P (mode));
2744       op0 = expand_debug_expr (TREE_REALPART (exp));
2745       op1 = expand_debug_expr (TREE_IMAGPART (exp));
2746       return gen_rtx_CONCAT (mode, op0, op1);
2747 
2748     case DEBUG_EXPR_DECL:
2749       op0 = DECL_RTL_IF_SET (exp);
2750 
2751       if (op0)
2752 	return op0;
2753 
2754       op0 = gen_rtx_DEBUG_EXPR (mode);
2755       DEBUG_EXPR_TREE_DECL (op0) = exp;
2756       SET_DECL_RTL (exp, op0);
2757 
2758       return op0;
2759 
2760     case VAR_DECL:
2761     case PARM_DECL:
2762     case FUNCTION_DECL:
2763     case LABEL_DECL:
2764     case CONST_DECL:
2765     case RESULT_DECL:
2766       op0 = DECL_RTL_IF_SET (exp);
2767 
2768       /* This decl was probably optimized away.  */
2769       if (!op0)
2770 	{
2771 	  if (TREE_CODE (exp) != VAR_DECL
2772 	      || DECL_EXTERNAL (exp)
2773 	      || !TREE_STATIC (exp)
2774 	      || !DECL_NAME (exp)
2775 	      || DECL_HARD_REGISTER (exp)
2776 	      || DECL_IN_CONSTANT_POOL (exp)
2777 	      || mode == VOIDmode)
2778 	    return NULL;
2779 
2780 	  op0 = make_decl_rtl_for_debug (exp);
2781 	  if (!MEM_P (op0)
2782 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2783 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2784 	    return NULL;
2785 	}
2786       else
2787 	op0 = copy_rtx (op0);
2788 
2789       if (GET_MODE (op0) == BLKmode
2790 	  /* If op0 is not BLKmode, but MODE is, adjust_mode
2791 	     below would ICE.  While it is likely a FE bug,
2792 	     try to be robust here.  See PR43166.  */
2793 	  || mode == BLKmode
2794 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2795 	{
2796 	  gcc_assert (MEM_P (op0));
2797 	  op0 = adjust_address_nv (op0, mode, 0);
2798 	  return op0;
2799 	}
2800 
2801       /* Fall through.  */
2802 
2803     adjust_mode:
2804     case PAREN_EXPR:
2805     case NOP_EXPR:
2806     case CONVERT_EXPR:
2807       {
2808 	inner_mode = GET_MODE (op0);
2809 
2810 	if (mode == inner_mode)
2811 	  return op0;
2812 
2813 	if (inner_mode == VOIDmode)
2814 	  {
2815 	    if (TREE_CODE (exp) == SSA_NAME)
2816 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
2817 	    else
2818 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2819 	    if (mode == inner_mode)
2820 	      return op0;
2821 	  }
2822 
2823 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2824 	  {
2825 	    if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2826 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2827 	    else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2828 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2829 	    else
2830 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2831 	  }
2832 	else if (FLOAT_MODE_P (mode))
2833 	  {
2834 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
2835 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2836 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2837 	    else
2838 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2839 	  }
2840 	else if (FLOAT_MODE_P (inner_mode))
2841 	  {
2842 	    if (unsignedp)
2843 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2844 	    else
2845 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2846 	  }
2847 	else if (CONSTANT_P (op0)
2848 		 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2849 	  op0 = simplify_gen_subreg (mode, op0, inner_mode,
2850 				     subreg_lowpart_offset (mode,
2851 							    inner_mode));
2852 	else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2853 		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2854 		 : unsignedp)
2855 	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2856 	else
2857 	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2858 
2859 	return op0;
2860       }
2861 
2862     case MEM_REF:
2863       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2864 	{
2865 	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2866 				     TREE_OPERAND (exp, 0),
2867 				     TREE_OPERAND (exp, 1));
2868 	  if (newexp)
2869 	    return expand_debug_expr (newexp);
2870 	}
2871       /* FALLTHROUGH */
2872     case INDIRECT_REF:
2873       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2874       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2875       if (!op0)
2876 	return NULL;
2877 
2878       if (TREE_CODE (exp) == MEM_REF)
2879 	{
2880 	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2881 	      || (GET_CODE (op0) == PLUS
2882 		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2883 	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
2884 	       Instead just use get_inner_reference.  */
2885 	    goto component_ref;
2886 
2887 	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2888 	  if (!op1 || !CONST_INT_P (op1))
2889 	    return NULL;
2890 
2891 	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
2892 	}
2893 
2894       if (POINTER_TYPE_P (TREE_TYPE (exp)))
2895 	as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2896       else
2897 	as = ADDR_SPACE_GENERIC;
2898 
2899       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2900 					  op0, as);
2901       if (op0 == NULL_RTX)
2902 	return NULL;
2903 
2904       op0 = gen_rtx_MEM (mode, op0);
2905       set_mem_attributes (op0, exp, 0);
2906       if (TREE_CODE (exp) == MEM_REF
2907 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2908 	set_mem_expr (op0, NULL_TREE);
2909       set_mem_addr_space (op0, as);
2910 
2911       return op0;
2912 
2913     case TARGET_MEM_REF:
2914       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2915 	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2916 	return NULL;
2917 
2918       op0 = expand_debug_expr
2919 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2920       if (!op0)
2921 	return NULL;
2922 
2923       if (POINTER_TYPE_P (TREE_TYPE (exp)))
2924 	as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2925       else
2926 	as = ADDR_SPACE_GENERIC;
2927 
2928       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2929 					  op0, as);
2930       if (op0 == NULL_RTX)
2931 	return NULL;
2932 
2933       op0 = gen_rtx_MEM (mode, op0);
2934 
2935       set_mem_attributes (op0, exp, 0);
2936       set_mem_addr_space (op0, as);
2937 
2938       return op0;
2939 
2940     component_ref:
2941     case ARRAY_REF:
2942     case ARRAY_RANGE_REF:
2943     case COMPONENT_REF:
2944     case BIT_FIELD_REF:
2945     case REALPART_EXPR:
2946     case IMAGPART_EXPR:
2947     case VIEW_CONVERT_EXPR:
2948       {
2949 	enum machine_mode mode1;
2950 	HOST_WIDE_INT bitsize, bitpos;
2951 	tree offset;
2952 	int volatilep = 0;
2953 	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2954 					&mode1, &unsignedp, &volatilep, false);
2955 	rtx orig_op0;
2956 
2957 	if (bitsize == 0)
2958 	  return NULL;
2959 
2960 	orig_op0 = op0 = expand_debug_expr (tem);
2961 
2962 	if (!op0)
2963 	  return NULL;
2964 
2965 	if (offset)
2966 	  {
2967 	    enum machine_mode addrmode, offmode;
2968 
2969 	    if (!MEM_P (op0))
2970 	      return NULL;
2971 
2972 	    op0 = XEXP (op0, 0);
2973 	    addrmode = GET_MODE (op0);
2974 	    if (addrmode == VOIDmode)
2975 	      addrmode = Pmode;
2976 
2977 	    op1 = expand_debug_expr (offset);
2978 	    if (!op1)
2979 	      return NULL;
2980 
2981 	    offmode = GET_MODE (op1);
2982 	    if (offmode == VOIDmode)
2983 	      offmode = TYPE_MODE (TREE_TYPE (offset));
2984 
2985 	    if (addrmode != offmode)
2986 	      op1 = simplify_gen_subreg (addrmode, op1, offmode,
2987 					 subreg_lowpart_offset (addrmode,
2988 								offmode));
2989 
2990 	    /* Don't use offset_address here; we don't need a
2991 	       recognizable address, and we don't want to generate
2992 	       code.  */
2993 	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2994 							  op0, op1));
2995 	  }
2996 
2997 	if (MEM_P (op0))
2998 	  {
2999 	    if (mode1 == VOIDmode)
3000 	      /* Bitfield.  */
3001 	      mode1 = smallest_mode_for_size (bitsize, MODE_INT);
3002 	    if (bitpos >= BITS_PER_UNIT)
3003 	      {
3004 		op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
3005 		bitpos %= BITS_PER_UNIT;
3006 	      }
3007 	    else if (bitpos < 0)
3008 	      {
3009 		HOST_WIDE_INT units
3010 		  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
3011 		op0 = adjust_address_nv (op0, mode1, units);
3012 		bitpos += units * BITS_PER_UNIT;
3013 	      }
3014 	    else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
3015 	      op0 = adjust_address_nv (op0, mode, 0);
3016 	    else if (GET_MODE (op0) != mode1)
3017 	      op0 = adjust_address_nv (op0, mode1, 0);
3018 	    else
3019 	      op0 = copy_rtx (op0);
3020 	    if (op0 == orig_op0)
3021 	      op0 = shallow_copy_rtx (op0);
3022 	    set_mem_attributes (op0, exp, 0);
3023 	  }
3024 
3025 	if (bitpos == 0 && mode == GET_MODE (op0))
3026 	  return op0;
3027 
3028         if (bitpos < 0)
3029           return NULL;
3030 
3031 	if (GET_MODE (op0) == BLKmode)
3032 	  return NULL;
3033 
3034 	if ((bitpos % BITS_PER_UNIT) == 0
3035 	    && bitsize == GET_MODE_BITSIZE (mode1))
3036 	  {
3037 	    enum machine_mode opmode = GET_MODE (op0);
3038 
3039 	    if (opmode == VOIDmode)
3040 	      opmode = TYPE_MODE (TREE_TYPE (tem));
3041 
3042 	    /* This condition may hold if we're expanding the address
3043 	       right past the end of an array that turned out not to
3044 	       be addressable (i.e., the address was only computed in
3045 	       debug stmts).  The gen_subreg below would rightfully
3046 	       crash, and the address doesn't really exist, so just
3047 	       drop it.  */
3048 	    if (bitpos >= GET_MODE_BITSIZE (opmode))
3049 	      return NULL;
3050 
3051 	    if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3052 	      return simplify_gen_subreg (mode, op0, opmode,
3053 					  bitpos / BITS_PER_UNIT);
3054 	  }
3055 
3056 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3057 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
3058 				     ? SIGN_EXTRACT
3059 				     : ZERO_EXTRACT, mode,
3060 				     GET_MODE (op0) != VOIDmode
3061 				     ? GET_MODE (op0)
3062 				     : TYPE_MODE (TREE_TYPE (tem)),
3063 				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
3064       }
3065 
3066     case ABS_EXPR:
3067       return simplify_gen_unary (ABS, mode, op0, mode);
3068 
3069     case NEGATE_EXPR:
3070       return simplify_gen_unary (NEG, mode, op0, mode);
3071 
3072     case BIT_NOT_EXPR:
3073       return simplify_gen_unary (NOT, mode, op0, mode);
3074 
3075     case FLOAT_EXPR:
3076       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3077 									 0)))
3078 				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3079 				 inner_mode);
3080 
3081     case FIX_TRUNC_EXPR:
3082       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3083 				 inner_mode);
3084 
3085     case POINTER_PLUS_EXPR:
3086       /* For the rare target where pointers are not the same size as
3087 	 size_t, we need to check for mis-matched modes and correct
3088 	 the addend.  */
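      /* E.g., on a hypothetical target with DImode pointers and an
	 SImode sizetype, OP1 arrives in SImode and is sign-extended to
	 DImode below before the PLUS is formed.  */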
3089       if (op0 && op1
3090 	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3091 	  && GET_MODE (op0) != GET_MODE (op1))
3092 	{
3093 	  if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3094 	    op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3095 				      GET_MODE (op1));
3096 	  else
3097 	    /* We always sign-extend, regardless of the signedness of
3098 	       the operand, because the operand is always unsigned
3099 	       here even if the original C expression is signed.  */
3100 	    op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3101 				      GET_MODE (op1));
3102 	}
3103       /* Fall through.  */
3104     case PLUS_EXPR:
3105       return simplify_gen_binary (PLUS, mode, op0, op1);
3106 
3107     case MINUS_EXPR:
3108       return simplify_gen_binary (MINUS, mode, op0, op1);
3109 
3110     case MULT_EXPR:
3111       return simplify_gen_binary (MULT, mode, op0, op1);
3112 
3113     case RDIV_EXPR:
3114     case TRUNC_DIV_EXPR:
3115     case EXACT_DIV_EXPR:
3116       if (unsignedp)
3117 	return simplify_gen_binary (UDIV, mode, op0, op1);
3118       else
3119 	return simplify_gen_binary (DIV, mode, op0, op1);
3120 
3121     case TRUNC_MOD_EXPR:
3122       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3123 
3124     case FLOOR_DIV_EXPR:
3125       if (unsignedp)
3126 	return simplify_gen_binary (UDIV, mode, op0, op1);
3127       else
3128 	{
3129 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3130 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3131 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
3132 	  return simplify_gen_binary (PLUS, mode, div, adj);
3133 	}
3134 
3135     case FLOOR_MOD_EXPR:
3136       if (unsignedp)
3137 	return simplify_gen_binary (UMOD, mode, op0, op1);
3138       else
3139 	{
3140 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3141 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
3142 	  adj = simplify_gen_unary (NEG, mode,
3143 				    simplify_gen_binary (MULT, mode, adj, op1),
3144 				    mode);
3145 	  return simplify_gen_binary (PLUS, mode, mod, adj);
3146 	}
3147 
3148     case CEIL_DIV_EXPR:
3149       if (unsignedp)
3150 	{
3151 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3152 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3153 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
3154 	  return simplify_gen_binary (PLUS, mode, div, adj);
3155 	}
3156       else
3157 	{
3158 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3159 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3160 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3161 	  return simplify_gen_binary (PLUS, mode, div, adj);
3162 	}
3163 
3164     case CEIL_MOD_EXPR:
3165       if (unsignedp)
3166 	{
3167 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3168 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
3169 	  adj = simplify_gen_unary (NEG, mode,
3170 				    simplify_gen_binary (MULT, mode, adj, op1),
3171 				    mode);
3172 	  return simplify_gen_binary (PLUS, mode, mod, adj);
3173 	}
3174       else
3175 	{
3176 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3177 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3178 	  adj = simplify_gen_unary (NEG, mode,
3179 				    simplify_gen_binary (MULT, mode, adj, op1),
3180 				    mode);
3181 	  return simplify_gen_binary (PLUS, mode, mod, adj);
3182 	}
3183 
3184     case ROUND_DIV_EXPR:
3185       if (unsignedp)
3186 	{
3187 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3188 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3189 	  rtx adj = round_udiv_adjust (mode, mod, op1);
3190 	  return simplify_gen_binary (PLUS, mode, div, adj);
3191 	}
3192       else
3193 	{
3194 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3195 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3196 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
3197 	  return simplify_gen_binary (PLUS, mode, div, adj);
3198 	}
3199 
3200     case ROUND_MOD_EXPR:
3201       if (unsignedp)
3202 	{
3203 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3204 	  rtx adj = round_udiv_adjust (mode, mod, op1);
3205 	  adj = simplify_gen_unary (NEG, mode,
3206 				    simplify_gen_binary (MULT, mode, adj, op1),
3207 				    mode);
3208 	  return simplify_gen_binary (PLUS, mode, mod, adj);
3209 	}
3210       else
3211 	{
3212 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3213 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
3214 	  adj = simplify_gen_unary (NEG, mode,
3215 				    simplify_gen_binary (MULT, mode, adj, op1),
3216 				    mode);
3217 	  return simplify_gen_binary (PLUS, mode, mod, adj);
3218 	}
3219 
3220     case LSHIFT_EXPR:
3221       return simplify_gen_binary (ASHIFT, mode, op0, op1);
3222 
3223     case RSHIFT_EXPR:
3224       if (unsignedp)
3225 	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3226       else
3227 	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3228 
3229     case LROTATE_EXPR:
3230       return simplify_gen_binary (ROTATE, mode, op0, op1);
3231 
3232     case RROTATE_EXPR:
3233       return simplify_gen_binary (ROTATERT, mode, op0, op1);
3234 
3235     case MIN_EXPR:
3236       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3237 
3238     case MAX_EXPR:
3239       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3240 
3241     case BIT_AND_EXPR:
3242     case TRUTH_AND_EXPR:
3243       return simplify_gen_binary (AND, mode, op0, op1);
3244 
3245     case BIT_IOR_EXPR:
3246     case TRUTH_OR_EXPR:
3247       return simplify_gen_binary (IOR, mode, op0, op1);
3248 
3249     case BIT_XOR_EXPR:
3250     case TRUTH_XOR_EXPR:
3251       return simplify_gen_binary (XOR, mode, op0, op1);
3252 
3253     case TRUTH_ANDIF_EXPR:
3254       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3255 
3256     case TRUTH_ORIF_EXPR:
3257       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3258 
3259     case TRUTH_NOT_EXPR:
3260       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3261 
3262     case LT_EXPR:
3263       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3264 				      op0, op1);
3265 
3266     case LE_EXPR:
3267       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3268 				      op0, op1);
3269 
3270     case GT_EXPR:
3271       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3272 				      op0, op1);
3273 
3274     case GE_EXPR:
3275       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3276 				      op0, op1);
3277 
3278     case EQ_EXPR:
3279       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3280 
3281     case NE_EXPR:
3282       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3283 
3284     case UNORDERED_EXPR:
3285       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3286 
3287     case ORDERED_EXPR:
3288       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3289 
3290     case UNLT_EXPR:
3291       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3292 
3293     case UNLE_EXPR:
3294       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3295 
3296     case UNGT_EXPR:
3297       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3298 
3299     case UNGE_EXPR:
3300       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3301 
3302     case UNEQ_EXPR:
3303       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3304 
3305     case LTGT_EXPR:
3306       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3307 
3308     case COND_EXPR:
3309       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3310 
3311     case COMPLEX_EXPR:
3312       gcc_assert (COMPLEX_MODE_P (mode));
3313       if (GET_MODE (op0) == VOIDmode)
3314 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3315       if (GET_MODE (op1) == VOIDmode)
3316 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3317       return gen_rtx_CONCAT (mode, op0, op1);
3318 
3319     case CONJ_EXPR:
3320       if (GET_CODE (op0) == CONCAT)
3321 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3322 			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3323 						   XEXP (op0, 1),
3324 						   GET_MODE_INNER (mode)));
3325       else
3326 	{
3327 	  enum machine_mode imode = GET_MODE_INNER (mode);
3328 	  rtx re, im;
3329 
3330 	  if (MEM_P (op0))
3331 	    {
3332 	      re = adjust_address_nv (op0, imode, 0);
3333 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3334 	    }
3335 	  else
3336 	    {
3337 	      enum machine_mode ifmode = int_mode_for_mode (mode);
3338 	      enum machine_mode ihmode = int_mode_for_mode (imode);
3339 	      rtx halfsize;
3340 	      if (ifmode == BLKmode || ihmode == BLKmode)
3341 		return NULL;
3342 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3343 	      re = op0;
3344 	      if (mode != ifmode)
3345 		re = gen_rtx_SUBREG (ifmode, re, 0);
3346 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3347 	      if (imode != ihmode)
3348 		re = gen_rtx_SUBREG (imode, re, 0);
3349 	      im = copy_rtx (op0);
3350 	      if (mode != ifmode)
3351 		im = gen_rtx_SUBREG (ifmode, im, 0);
3352 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3353 	      if (imode != ihmode)
3354 		im = gen_rtx_SUBREG (imode, im, 0);
3355 	    }
3356 	  im = gen_rtx_NEG (imode, im);
3357 	  return gen_rtx_CONCAT (mode, re, im);
3358 	}
3359 
3360     case ADDR_EXPR:
3361       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3362       if (!op0 || !MEM_P (op0))
3363 	{
3364 	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3365 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3366 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3367 	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3368 		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3369 	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3370 
3371 	  if (handled_component_p (TREE_OPERAND (exp, 0)))
3372 	    {
3373 	      HOST_WIDE_INT bitoffset, bitsize, maxsize;
3374 	      tree decl
3375 		= get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3376 					   &bitoffset, &bitsize, &maxsize);
3377 	      if ((TREE_CODE (decl) == VAR_DECL
3378 		   || TREE_CODE (decl) == PARM_DECL
3379 		   || TREE_CODE (decl) == RESULT_DECL)
3380 		  && (!TREE_ADDRESSABLE (decl)
3381 		      || target_for_debug_bind (decl))
3382 		  && (bitoffset % BITS_PER_UNIT) == 0
3383 		  && bitsize > 0
3384 		  && bitsize == maxsize)
3385 		{
3386 		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3387 		  return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3388 		}
3389 	    }
3390 
3391 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
3392 	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
3393 		 == ADDR_EXPR)
3394 	    {
3395 	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
3396 						     0));
3397 	      if (op0 != NULL
3398 		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3399 		      || (GET_CODE (op0) == PLUS
3400 			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
3401 			  && CONST_INT_P (XEXP (op0, 1)))))
3402 		{
3403 		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
3404 							 1));
3405 		  if (!op1 || !CONST_INT_P (op1))
3406 		    return NULL;
3407 
3408 		  return plus_constant (mode, op0, INTVAL (op1));
3409 		}
3410 	    }
3411 
3412 	  return NULL;
3413 	}
3414 
3415       as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3416       op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3417 
3418       return op0;
3419 
3420     case VECTOR_CST:
3421       {
3422 	unsigned i;
3423 
3424 	op0 = gen_rtx_CONCATN
3425 	  (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3426 
3427 	for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3428 	  {
3429 	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3430 	    if (!op1)
3431 	      return NULL;
3432 	    XVECEXP (op0, 0, i) = op1;
3433 	  }
3434 
3435 	return op0;
3436       }
3437 
3438     case CONSTRUCTOR:
3439       if (TREE_CLOBBER_P (exp))
3440 	return NULL;
3441       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3442 	{
3443 	  unsigned i;
3444 	  tree val;
3445 
3446 	  op0 = gen_rtx_CONCATN
3447 	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3448 
3449 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3450 	    {
3451 	      op1 = expand_debug_expr (val);
3452 	      if (!op1)
3453 		return NULL;
3454 	      XVECEXP (op0, 0, i) = op1;
3455 	    }
3456 
3457 	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3458 	    {
3459 	      op1 = expand_debug_expr
3460 		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3461 
3462 	      if (!op1)
3463 		return NULL;
3464 
3465 	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3466 		XVECEXP (op0, 0, i) = op1;
3467 	    }
3468 
3469 	  return op0;
3470 	}
3471       else
3472 	goto flag_unsupported;
3473 
3474     case CALL_EXPR:
3475       /* ??? Maybe handle some builtins?  */
3476       return NULL;
3477 
3478     case SSA_NAME:
3479       {
3480 	gimple g = get_gimple_for_ssa_name (exp);
3481 	if (g)
3482 	  {
3483 	    op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3484 	    if (!op0)
3485 	      return NULL;
3486 	  }
3487 	else
3488 	  {
3489 	    int part = var_to_partition (SA.map, exp);
3490 
3491 	    if (part == NO_PARTITION)
3492 	      {
3493 		/* If this is a reference to the incoming value of a
3494 		   parameter that is never used in the code, or whose
3495 		   incoming value is never used in the code, use the
3496 		   PARM_DECL's DECL_RTL if set.  */
3497 		if (SSA_NAME_IS_DEFAULT_DEF (exp)
3498 		    && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3499 		  {
3500 		    op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3501 		    if (op0)
3502 		      goto adjust_mode;
3503 		    op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3504 		    if (op0)
3505 		      goto adjust_mode;
3506 		  }
3507 		return NULL;
3508 	      }
3509 
3510 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3511 
3512 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
3513 	  }
3514 	goto adjust_mode;
3515       }
3516 
3517     case ERROR_MARK:
3518       return NULL;
3519 
3520     /* Vector stuff.  For most of the codes we don't have rtl codes.  */
3521     case REALIGN_LOAD_EXPR:
3522     case REDUC_MAX_EXPR:
3523     case REDUC_MIN_EXPR:
3524     case REDUC_PLUS_EXPR:
3525     case VEC_COND_EXPR:
3526     case VEC_LSHIFT_EXPR:
3527     case VEC_PACK_FIX_TRUNC_EXPR:
3528     case VEC_PACK_SAT_EXPR:
3529     case VEC_PACK_TRUNC_EXPR:
3530     case VEC_RSHIFT_EXPR:
3531     case VEC_UNPACK_FLOAT_HI_EXPR:
3532     case VEC_UNPACK_FLOAT_LO_EXPR:
3533     case VEC_UNPACK_HI_EXPR:
3534     case VEC_UNPACK_LO_EXPR:
3535     case VEC_WIDEN_MULT_HI_EXPR:
3536     case VEC_WIDEN_MULT_LO_EXPR:
3537     case VEC_WIDEN_MULT_EVEN_EXPR:
3538     case VEC_WIDEN_MULT_ODD_EXPR:
3539     case VEC_WIDEN_LSHIFT_HI_EXPR:
3540     case VEC_WIDEN_LSHIFT_LO_EXPR:
3541     case VEC_PERM_EXPR:
3542       return NULL;
3543 
3544     /* Misc codes.  */
3545     case ADDR_SPACE_CONVERT_EXPR:
3546     case FIXED_CONVERT_EXPR:
3547     case OBJ_TYPE_REF:
3548     case WITH_SIZE_EXPR:
3549       return NULL;
3550 
3551     case DOT_PROD_EXPR:
3552       if (SCALAR_INT_MODE_P (GET_MODE (op0))
3553 	  && SCALAR_INT_MODE_P (mode))
3554 	{
3555 	  op0
3556 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3557 									  0)))
3558 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3559 				  inner_mode);
3560 	  op1
3561 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3562 									  1)))
3563 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3564 				  inner_mode);
3565 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
3566 	  return simplify_gen_binary (PLUS, mode, op0, op2);
3567 	}
3568       return NULL;
3569 
3570     case WIDEN_MULT_EXPR:
3571     case WIDEN_MULT_PLUS_EXPR:
3572     case WIDEN_MULT_MINUS_EXPR:
3573       if (SCALAR_INT_MODE_P (GET_MODE (op0))
3574 	  && SCALAR_INT_MODE_P (mode))
3575 	{
3576 	  inner_mode = GET_MODE (op0);
3577 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3578 	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3579 	  else
3580 	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3581 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3582 	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3583 	  else
3584 	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3585 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
3586 	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3587 	    return op0;
3588 	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3589 	    return simplify_gen_binary (PLUS, mode, op0, op2);
3590 	  else
3591 	    return simplify_gen_binary (MINUS, mode, op2, op0);
3592 	}
3593       return NULL;
3594 
3595     case MULT_HIGHPART_EXPR:
3596       /* ??? Similar to the above.  */
3597       return NULL;
3598 
3599     case WIDEN_SUM_EXPR:
3600     case WIDEN_LSHIFT_EXPR:
3601       if (SCALAR_INT_MODE_P (GET_MODE (op0))
3602 	  && SCALAR_INT_MODE_P (mode))
3603 	{
3604 	  op0
3605 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3606 									  0)))
3607 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3608 				  inner_mode);
3609 	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3610 				      ? ASHIFT : PLUS, mode, op0, op1);
3611 	}
3612       return NULL;
3613 
3614     case FMA_EXPR:
3615       return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3616 
3617     default:
3618     flag_unsupported:
3619 #ifdef ENABLE_CHECKING
3620       debug_tree (exp);
3621       gcc_unreachable ();
3622 #else
3623       return NULL;
3624 #endif
3625     }
3626 }
3627 
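/* Illustrative sketch only, not part of GCC; the helper name below is
   hypothetical.  It spells out the contract that callers of
   expand_debug_expr are expected to follow: a NULL return means the value
   could not be expanded for debug purposes, and the caller should fall
   back to an "unknown location" rtx rather than use a partial expansion
   (compare expand_debug_locations further down).  */

static rtx
expand_debug_expr_or_unknown (tree value)
{
  rtx loc = value ? expand_debug_expr (value) : NULL_RTX;

  return loc ? loc : gen_rtx_UNKNOWN_VAR_LOC ();
}
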
3628 /* Return an RTX equivalent to the source bind value of the tree expression
3629    EXP.  */
3630 
3631 static rtx
3632 expand_debug_source_expr (tree exp)
3633 {
3634   rtx op0 = NULL_RTX;
3635   enum machine_mode mode = VOIDmode, inner_mode;
3636 
3637   switch (TREE_CODE (exp))
3638     {
3639     case PARM_DECL:
3640       {
3641 	mode = DECL_MODE (exp);
3642 	op0 = expand_debug_parm_decl (exp);
3643 	if (op0)
3644 	   break;
3645 	/* See if this is an argument that has been completely
3646 	   optimized out.  */
3647 	if (!DECL_RTL_SET_P (exp)
3648 	    && !DECL_INCOMING_RTL (exp)
3649 	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
3650 	  {
3651 	    tree aexp = DECL_ORIGIN (exp);
3652 	    if (DECL_CONTEXT (aexp)
3653 		== DECL_ABSTRACT_ORIGIN (current_function_decl))
3654 	      {
3655 		vec<tree, va_gc> **debug_args;
3656 		unsigned int ix;
3657 		tree ddecl;
3658 		debug_args = decl_debug_args_lookup (current_function_decl);
3659 		if (debug_args != NULL)
3660 		  {
3661 		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
3662 			 ix += 2)
3663 		      if (ddecl == aexp)
3664 			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3665 		  }
3666 	      }
3667 	  }
3668 	break;
3669       }
3670     default:
3671       break;
3672     }
3673 
3674   if (op0 == NULL_RTX)
3675     return NULL_RTX;
3676 
3677   inner_mode = GET_MODE (op0);
3678   if (mode == inner_mode)
3679     return op0;
3680 
3681   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3682     {
3683       if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3684 	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3685       else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3686 	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3687       else
3688 	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3689     }
3690   else if (FLOAT_MODE_P (mode))
3691     gcc_unreachable ();
3692   else if (FLOAT_MODE_P (inner_mode))
3693     {
3694       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3695 	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3696       else
3697 	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3698     }
3699   else if (CONSTANT_P (op0)
3700 	   || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3701     op0 = simplify_gen_subreg (mode, op0, inner_mode,
3702 			       subreg_lowpart_offset (mode, inner_mode));
3703   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3704     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3705   else
3706     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3707 
3708   return op0;
3709 }
3710 
3711 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
3712    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
3713    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
3714 
3715 static void
3716 avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
3717 {
3718   rtx exp = *exp_p;
3719 
3720   if (exp == NULL_RTX)
3721     return;
3722 
3723   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
3724     return;
3725 
3726   if (depth == 4)
3727     {
3728       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
3729       rtx dval = make_debug_expr_from_rtl (exp);
3730 
3731       /* Emit a debug bind insn before INSN.  */
3732       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
3733 				       DEBUG_EXPR_TREE_DECL (dval), exp,
3734 				       VAR_INIT_STATUS_INITIALIZED);
3735 
3736       emit_debug_insn_before (bind, insn);
3737       *exp_p = dval;
3738       return;
3739     }
3740 
3741   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
3742   int i, j;
3743   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
3744     switch (*format_ptr++)
3745       {
3746       case 'e':
3747 	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
3748 	break;
3749 
3750       case 'E':
3751       case 'V':
3752 	for (j = 0; j < XVECLEN (exp, i); j++)
3753 	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
3754 	break;
3755 
3756       default:
3757 	break;
3758       }
3759 }
3760 
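/* Illustrative sketch only (the helper below is hypothetical and unused):
   it recomputes the rtl nesting depth that avoid_complex_debug_insns above
   bounds, counting one level per 'e'/'E'/'V' operand and stopping, like the
   walk above, at non-MEM objects and CLOBBERs.  */

static int
rtx_nesting_depth (rtx x)
{
  int i, j, depth = 0;
  const char *fmt;

  if (x == NULL_RTX
      || (OBJECT_P (x) && !MEM_P (x))
      || GET_CODE (x) == CLOBBER)
    return 0;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
    if (fmt[i] == 'e')
      depth = MAX (depth, 1 + rtx_nesting_depth (XEXP (x, i)));
    else if (fmt[i] == 'E' || fmt[i] == 'V')
      for (j = 0; j < XVECLEN (x, i); j++)
	depth = MAX (depth, 1 + rtx_nesting_depth (XVECEXP (x, i, j)));

  return depth;
}
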
3761 /* Expand the _LOCs in debug insns.  We run this after expanding all
3762    regular insns, so that any variables referenced in the function
3763    will have their DECL_RTLs set.  */
3764 
3765 static void
3766 expand_debug_locations (void)
3767 {
3768   rtx insn;
3769   rtx last = get_last_insn ();
3770   int save_strict_alias = flag_strict_aliasing;
3771 
3772   /* New alias sets created while setting up memory attributes cause
3773      -fcompare-debug failures, even though they don't bring about any
3774      codegen changes.  */
3775   flag_strict_aliasing = 0;
3776 
3777   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3778     if (DEBUG_INSN_P (insn))
3779       {
3780 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3781 	rtx val, prev_insn, insn2;
3782 	enum machine_mode mode;
3783 
3784 	if (value == NULL_TREE)
3785 	  val = NULL_RTX;
3786 	else
3787 	  {
3788 	    if (INSN_VAR_LOCATION_STATUS (insn)
3789 		== VAR_INIT_STATUS_UNINITIALIZED)
3790 	      val = expand_debug_source_expr (value);
3791 	    else
3792 	      val = expand_debug_expr (value);
3793 	    gcc_assert (last == get_last_insn ());
3794 	  }
3795 
3796 	if (!val)
3797 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
3798 	else
3799 	  {
3800 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
3801 
3802 	    gcc_assert (mode == GET_MODE (val)
3803 			|| (GET_MODE (val) == VOIDmode
3804 			    && (CONST_SCALAR_INT_P (val)
3805 				|| GET_CODE (val) == CONST_FIXED
3806 				|| GET_CODE (val) == LABEL_REF)));
3807 	  }
3808 
3809 	INSN_VAR_LOCATION_LOC (insn) = val;
3810 	prev_insn = PREV_INSN (insn);
3811 	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
3812 	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
3813       }
3814 
3815   flag_strict_aliasing = save_strict_alias;
3816 }
3817 
3818 /* Expand basic block BB from GIMPLE trees to RTL.  */
3819 
3820 static basic_block
3821 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
3822 {
3823   gimple_stmt_iterator gsi;
3824   gimple_seq stmts;
3825   gimple stmt = NULL;
3826   rtx note, last;
3827   edge e;
3828   edge_iterator ei;
3829   void **elt;
3830 
3831   if (dump_file)
3832     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3833 	     bb->index);
3834 
3835   /* Note that since we are now transitioning from GIMPLE to RTL, we
3836      cannot use the gsi_*_bb() routines because they expect the basic
3837      block to be in GIMPLE, instead of RTL.  Therefore, we need to
3838      access the BB sequence directly.  */
3839   stmts = bb_seq (bb);
3840   bb->il.gimple.seq = NULL;
3841   bb->il.gimple.phi_nodes = NULL;
3842   rtl_profile_for_bb (bb);
3843   init_rtl_bb_info (bb);
3844   bb->flags |= BB_RTL;
3845 
3846   /* Remove the RETURN_EXPR if we may fall through to the exit
3847      instead.  */
3848   gsi = gsi_last (stmts);
3849   if (!gsi_end_p (gsi)
3850       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3851     {
3852       gimple ret_stmt = gsi_stmt (gsi);
3853 
3854       gcc_assert (single_succ_p (bb));
3855       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3856 
3857       if (bb->next_bb == EXIT_BLOCK_PTR
3858 	  && !gimple_return_retval (ret_stmt))
3859 	{
3860 	  gsi_remove (&gsi, false);
3861 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3862 	}
3863     }
3864 
3865   gsi = gsi_start (stmts);
3866   if (!gsi_end_p (gsi))
3867     {
3868       stmt = gsi_stmt (gsi);
3869       if (gimple_code (stmt) != GIMPLE_LABEL)
3870 	stmt = NULL;
3871     }
3872 
3873   elt = pointer_map_contains (lab_rtx_for_bb, bb);
3874 
3875   if (stmt || elt)
3876     {
3877       last = get_last_insn ();
3878 
3879       if (stmt)
3880 	{
3881 	  expand_gimple_stmt (stmt);
3882 	  gsi_next (&gsi);
3883 	}
3884 
3885       if (elt)
3886 	emit_label ((rtx) *elt);
3887 
3888       /* Java emits line number notes at the top of labels.
3889 	 ??? Make this go away once line number notes are obsoleted.  */
3890       BB_HEAD (bb) = NEXT_INSN (last);
3891       if (NOTE_P (BB_HEAD (bb)))
3892 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3893       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3894 
3895       maybe_dump_rtl_for_gimple_stmt (stmt, last);
3896     }
3897   else
3898     note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3899 
3900   NOTE_BASIC_BLOCK (note) = bb;
3901 
3902   for (; !gsi_end_p (gsi); gsi_next (&gsi))
3903     {
3904       basic_block new_bb;
3905 
3906       stmt = gsi_stmt (gsi);
3907 
3908       /* If this statement is a non-debug one, and we generate debug
3909 	 insns, then this one might be the last real use of a TERed
3910 	 SSA_NAME, while there are still some debug uses further
3911 	 down.  Expanding the current SSA name in those later debug
3912 	 uses by its RHS might lead to wrong debug info, as coalescing
3913 	 might place the operands of that RHS into the same
3914 	 pseudo as something else.  Like so:
3915 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
3916 	   use(a_1);
3917 	   a_2 = ...
3918            #DEBUG ... => a_1
3919 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3920 	 If we now were to expand a_1 by its RHS (a_0 + 1) in the debug use,
3921 	 the write to a_2 would actually have clobbered the place which
3922 	 formerly held a_0.
3923 
3924 	 So, instead of that, we recognize the situation, and generate
3925 	 debug temporaries at the last real use of TERed SSA names:
3926 	   a_1 = a_0 + 1;
3927            #DEBUG #D1 => a_1
3928 	   use(a_1);
3929 	   a_2 = ...
3930            #DEBUG ... => #D1
3931 	 */
3932       if (MAY_HAVE_DEBUG_INSNS
3933 	  && SA.values
3934 	  && !is_gimple_debug (stmt))
3935 	{
3936 	  ssa_op_iter iter;
3937 	  tree op;
3938 	  gimple def;
3939 
3940 	  location_t sloc = curr_insn_location ();
3941 
3942 	  /* Look for SSA names that have their last use here (TERed
3943 	     names always have only one real use).  */
3944 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3945 	    if ((def = get_gimple_for_ssa_name (op)))
3946 	      {
3947 		imm_use_iterator imm_iter;
3948 		use_operand_p use_p;
3949 		bool have_debug_uses = false;
3950 
3951 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3952 		  {
3953 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
3954 		      {
3955 			have_debug_uses = true;
3956 			break;
3957 		      }
3958 		  }
3959 
3960 		if (have_debug_uses)
3961 		  {
3962 		    /* OP is a TERed SSA name, with DEF its defining
3963 		       statement, and OP is used in further debug
3964 		       instructions.  Generate a debug temporary, and
3965 		       replace all uses of OP in debug insns with that
3966 		       temporary.  */
3967 		    gimple debugstmt;
3968 		    tree value = gimple_assign_rhs_to_tree (def);
3969 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
3970 		    rtx val;
3971 		    enum machine_mode mode;
3972 
3973 		    set_curr_insn_location (gimple_location (def));
3974 
3975 		    DECL_ARTIFICIAL (vexpr) = 1;
3976 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
3977 		    if (DECL_P (value))
3978 		      mode = DECL_MODE (value);
3979 		    else
3980 		      mode = TYPE_MODE (TREE_TYPE (value));
3981 		    DECL_MODE (vexpr) = mode;
3982 
3983 		    val = gen_rtx_VAR_LOCATION
3984 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3985 
3986 		    emit_debug_insn (val);
3987 
3988 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3989 		      {
3990 			if (!gimple_debug_bind_p (debugstmt))
3991 			  continue;
3992 
3993 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3994 			  SET_USE (use_p, vexpr);
3995 
3996 			update_stmt (debugstmt);
3997 		      }
3998 		  }
3999 	      }
4000 	  set_curr_insn_location (sloc);
4001 	}
4002 
4003       currently_expanding_gimple_stmt = stmt;
4004 
4005       /* Expand this statement, then evaluate the resulting RTL and
4006 	 fixup the CFG accordingly.  */
4007       if (gimple_code (stmt) == GIMPLE_COND)
4008 	{
4009 	  new_bb = expand_gimple_cond (bb, stmt);
4010 	  if (new_bb)
4011 	    return new_bb;
4012 	}
4013       else if (gimple_debug_bind_p (stmt))
4014 	{
4015 	  location_t sloc = curr_insn_location ();
4016 	  gimple_stmt_iterator nsi = gsi;
4017 
4018 	  for (;;)
4019 	    {
4020 	      tree var = gimple_debug_bind_get_var (stmt);
4021 	      tree value;
4022 	      rtx val;
4023 	      enum machine_mode mode;
4024 
4025 	      if (TREE_CODE (var) != DEBUG_EXPR_DECL
4026 		  && TREE_CODE (var) != LABEL_DECL
4027 		  && !target_for_debug_bind (var))
4028 		goto delink_debug_stmt;
4029 
4030 	      if (gimple_debug_bind_has_value_p (stmt))
4031 		value = gimple_debug_bind_get_value (stmt);
4032 	      else
4033 		value = NULL_TREE;
4034 
4035 	      last = get_last_insn ();
4036 
4037 	      set_curr_insn_location (gimple_location (stmt));
4038 
4039 	      if (DECL_P (var))
4040 		mode = DECL_MODE (var);
4041 	      else
4042 		mode = TYPE_MODE (TREE_TYPE (var));
4043 
4044 	      val = gen_rtx_VAR_LOCATION
4045 		(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4046 
4047 	      emit_debug_insn (val);
4048 
4049 	      if (dump_file && (dump_flags & TDF_DETAILS))
4050 		{
4051 		  /* We can't dump the insn with a TREE where an RTX
4052 		     is expected.  */
4053 		  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4054 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
4055 		  PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4056 		}
4057 
4058 	    delink_debug_stmt:
4059 	      /* In order not to generate too many debug temporaries,
4060 	         we delink all uses of debug statements we already expanded.
4061 		 Therefore debug statements between definition and real
4062 		 use of TERed SSA names will continue to use the SSA name,
4063 		 and not be replaced with debug temps.  */
4064 	      delink_stmt_imm_use (stmt);
4065 
4066 	      gsi = nsi;
4067 	      gsi_next (&nsi);
4068 	      if (gsi_end_p (nsi))
4069 		break;
4070 	      stmt = gsi_stmt (nsi);
4071 	      if (!gimple_debug_bind_p (stmt))
4072 		break;
4073 	    }
4074 
4075 	  set_curr_insn_location (sloc);
4076 	}
4077       else if (gimple_debug_source_bind_p (stmt))
4078 	{
4079 	  location_t sloc = curr_insn_location ();
4080 	  tree var = gimple_debug_source_bind_get_var (stmt);
4081 	  tree value = gimple_debug_source_bind_get_value (stmt);
4082 	  rtx val;
4083 	  enum machine_mode mode;
4084 
4085 	  last = get_last_insn ();
4086 
4087 	  set_curr_insn_location (gimple_location (stmt));
4088 
4089 	  mode = DECL_MODE (var);
4090 
4091 	  val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
4092 				      VAR_INIT_STATUS_UNINITIALIZED);
4093 
4094 	  emit_debug_insn (val);
4095 
4096 	  if (dump_file && (dump_flags & TDF_DETAILS))
4097 	    {
4098 	      /* We can't dump the insn with a TREE where an RTX
4099 		 is expected.  */
4100 	      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4101 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
4102 	      PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4103 	    }
4104 
4105 	  set_curr_insn_location (sloc);
4106 	}
4107       else
4108 	{
4109 	  if (is_gimple_call (stmt)
4110 	      && gimple_call_tail_p (stmt)
4111 	      && disable_tail_calls)
4112 	    gimple_call_set_tail (stmt, false);
4113 
4114 	  if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4115 	    {
4116 	      bool can_fallthru;
4117 	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4118 	      if (new_bb)
4119 		{
4120 		  if (can_fallthru)
4121 		    bb = new_bb;
4122 		  else
4123 		    return new_bb;
4124 		}
4125 	    }
4126 	  else
4127 	    {
4128 	      def_operand_p def_p;
4129 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4130 
4131 	      if (def_p != NULL)
4132 		{
4133 		  /* Ignore this stmt if it is in the list of
4134 		     replaceable expressions.  */
4135 		  if (SA.values
4136 		      && bitmap_bit_p (SA.values,
4137 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4138 		    continue;
4139 		}
4140 	      last = expand_gimple_stmt (stmt);
4141 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
4142 	    }
4143 	}
4144     }
4145 
4146   currently_expanding_gimple_stmt = NULL;
4147 
4148   /* Expand implicit goto and convert goto_locus.  */
4149   FOR_EACH_EDGE (e, ei, bb->succs)
4150     {
4151       if (e->goto_locus != UNKNOWN_LOCATION)
4152 	set_curr_insn_location (e->goto_locus);
4153       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4154 	{
4155 	  emit_jump (label_rtx_for_bb (e->dest));
4156 	  e->flags &= ~EDGE_FALLTHRU;
4157 	}
4158     }
4159 
4160   /* Expanded RTL can create a jump in the last instruction of the block.
4161      This might later be assumed to be a jump to the successor and break edge
4162      insertion.  We need to insert a dummy move to prevent this.  PR41440. */
4163   if (single_succ_p (bb)
4164       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4165       && (last = get_last_insn ())
4166       && JUMP_P (last))
4167     {
4168       rtx dummy = gen_reg_rtx (SImode);
4169       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4170     }
4171 
4172   do_pending_stack_adjust ();
4173 
4174   /* Find the block tail.  The last insn in the block is the insn
4175      before a barrier and/or table jump insn.  */
4176   last = get_last_insn ();
4177   if (BARRIER_P (last))
4178     last = PREV_INSN (last);
4179   if (JUMP_TABLE_DATA_P (last))
4180     last = PREV_INSN (PREV_INSN (last));
4181   BB_END (bb) = last;
4182 
4183   update_bb_for_insn (bb);
4184 
4185   return bb;
4186 }
4187 
4188 
4189 /* Create a basic block for initialization code.  */
4190 
4191 static basic_block
4192 construct_init_block (void)
4193 {
4194   basic_block init_block, first_block;
4195   edge e = NULL;
4196   int flags;
4197 
4198   /* Multiple entry points not supported yet.  */
4199   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4200   init_rtl_bb_info (ENTRY_BLOCK_PTR);
4201   init_rtl_bb_info (EXIT_BLOCK_PTR);
4202   ENTRY_BLOCK_PTR->flags |= BB_RTL;
4203   EXIT_BLOCK_PTR->flags |= BB_RTL;
4204 
4205   e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4206 
4207   /* When the entry edge points to the first basic block, we don't need a
4208      jump; otherwise we have to jump to the proper target.  */
4209   if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4210     {
4211       tree label = gimple_block_label (e->dest);
4212 
4213       emit_jump (label_rtx (label));
4214       flags = 0;
4215     }
4216   else
4217     flags = EDGE_FALLTHRU;
4218 
4219   init_block = create_basic_block (NEXT_INSN (get_insns ()),
4220 				   get_last_insn (),
4221 				   ENTRY_BLOCK_PTR);
4222   init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4223   init_block->count = ENTRY_BLOCK_PTR->count;
4224   if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4225     add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4226   if (e)
4227     {
4228       first_block = e->dest;
4229       redirect_edge_succ (e, init_block);
4230       e = make_edge (init_block, first_block, flags);
4231     }
4232   else
4233     e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4234   e->probability = REG_BR_PROB_BASE;
4235   e->count = ENTRY_BLOCK_PTR->count;
4236 
4237   update_bb_for_insn (init_block);
4238   return init_block;
4239 }
4240 
4241 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4242    found in the block tree.  */
4243 
4244 static void
4245 set_block_levels (tree block, int level)
4246 {
4247   while (block)
4248     {
4249       BLOCK_NUMBER (block) = level;
4250       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4251       block = BLOCK_CHAIN (block);
4252     }
4253 }
4254 
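/* Illustrative sketch only; the helper below is hypothetical and assumes
   the BLOCK_SUPERCONTEXT links of the lexical block tree are in place.
   After set_block_levels has numbered DECL_INITIAL (cfun->decl) with level
   0, BLOCK_NUMBER of a block should agree with the count of enclosing
   BLOCKs that this walk recomputes.  */

static int
block_depth_via_supercontext (tree block)
{
  int depth = 0;

  while (BLOCK_SUPERCONTEXT (block)
	 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == BLOCK)
    {
      block = BLOCK_SUPERCONTEXT (block);
      depth++;
    }

  return depth;
}
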
4255 /* Create a block containing landing pads and similar stuff.  */
4256 
4257 static void
4258 construct_exit_block (void)
4259 {
4260   rtx head = get_last_insn ();
4261   rtx end;
4262   basic_block exit_block;
4263   edge e, e2;
4264   unsigned ix;
4265   edge_iterator ei;
4266   rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4267 
4268   rtl_profile_for_bb (EXIT_BLOCK_PTR);
4269 
4270   /* Make sure the locus is set to the end of the function, so that
4271      epilogue line numbers and warnings are set properly.  */
4272   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
4273     input_location = cfun->function_end_locus;
4274 
4275   /* Generate rtl for function exit.  */
4276   expand_function_end ();
4277 
4278   end = get_last_insn ();
4279   if (head == end)
4280     return;
4281   /* While emitting the function end we could have moved the end of the
4282      last basic block.  */
4283   BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4284   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4285     head = NEXT_INSN (head);
4286   exit_block = create_basic_block (NEXT_INSN (head), end,
4287 				   EXIT_BLOCK_PTR->prev_bb);
4288   exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4289   exit_block->count = EXIT_BLOCK_PTR->count;
4290   if (current_loops && EXIT_BLOCK_PTR->loop_father)
4291     add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4292 
4293   ix = 0;
4294   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4295     {
4296       e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4297       if (!(e->flags & EDGE_ABNORMAL))
4298 	redirect_edge_succ (e, exit_block);
4299       else
4300 	ix++;
4301     }
4302 
4303   e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4304   e->probability = REG_BR_PROB_BASE;
4305   e->count = EXIT_BLOCK_PTR->count;
4306   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4307     if (e2 != e)
4308       {
4309 	e->count -= e2->count;
4310 	exit_block->count -= e2->count;
4311 	exit_block->frequency -= EDGE_FREQUENCY (e2);
4312       }
4313   if (e->count < 0)
4314     e->count = 0;
4315   if (exit_block->count < 0)
4316     exit_block->count = 0;
4317   if (exit_block->frequency < 0)
4318     exit_block->frequency = 0;
4319   update_bb_for_insn (exit_block);
4320 }
4321 
4322 /* Helper function for discover_nonconstant_array_refs.
4323    Look for ARRAY_REF nodes with non-constant indexes and mark them
4324    addressable.  */
4325 
4326 static tree
4327 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4328 				   void *data ATTRIBUTE_UNUSED)
4329 {
4330   tree t = *tp;
4331 
4332   if (IS_TYPE_OR_DECL_P (t))
4333     *walk_subtrees = 0;
4334   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4335     {
4336       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4337 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4338 	      && (!TREE_OPERAND (t, 2)
4339 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4340 	     || (TREE_CODE (t) == COMPONENT_REF
4341 		 && (!TREE_OPERAND (t,2)
4342 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4343 	     || TREE_CODE (t) == BIT_FIELD_REF
4344 	     || TREE_CODE (t) == REALPART_EXPR
4345 	     || TREE_CODE (t) == IMAGPART_EXPR
4346 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
4347 	     || CONVERT_EXPR_P (t))
4348 	t = TREE_OPERAND (t, 0);
4349 
4350       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4351 	{
4352 	  t = get_base_address (t);
4353 	  if (t && DECL_P (t)
4354               && DECL_MODE (t) != BLKmode)
4355 	    TREE_ADDRESSABLE (t) = 1;
4356 	}
4357 
4358       *walk_subtrees = 0;
4359     }
4360 
4361   return NULL_TREE;
4362 }
4363 
4364 /* RTL expansion is not able to compile array references with variable
4365    offsets for arrays stored in a single register.  Discover such
4366    expressions and mark variables as addressable to avoid this
4367    scenario.  */
4368 
4369 static void
4370 discover_nonconstant_array_refs (void)
4371 {
4372   basic_block bb;
4373   gimple_stmt_iterator gsi;
4374 
4375   FOR_EACH_BB (bb)
4376     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4377       {
4378 	gimple stmt = gsi_stmt (gsi);
4379 	if (!is_gimple_debug (stmt))
4380 	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4381       }
4382 }
4383 
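/* Illustrative example only, not part of GCC: a function of the shape the
   pass above looks for.  The non-constant index `i' keeps the walker from
   stripping the ARRAY_REF, so `a' would typically be marked
   TREE_ADDRESSABLE (provided `a' does not already have BLKmode) and thus
   not be rewritten into a single register.  */

static int
nonconstant_array_ref_example (int i)
{
  int a[2] = { 1, 2 };

  return a[i];	/* ARRAY_REF with a non-constant index.  */
}
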
4384 /* This function sets crtl->args.internal_arg_pointer to a virtual
4385    register if DRAP is needed.  Local register allocator will replace
4386    virtual_incoming_args_rtx with the virtual register.  */
4387 
4388 static void
4389 expand_stack_alignment (void)
4390 {
4391   rtx drap_rtx;
4392   unsigned int preferred_stack_boundary;
4393 
4394   if (! SUPPORTS_STACK_ALIGNMENT)
4395     return;
4396 
4397   if (cfun->calls_alloca
4398       || cfun->has_nonlocal_label
4399       || crtl->has_nonlocal_goto)
4400     crtl->need_drap = true;
4401 
4402   /* Call update_stack_boundary here again to update incoming stack
4403      boundary.  It may set incoming stack alignment to a different
4404      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
4405      use the minimum incoming stack alignment to check if it is OK
4406      to perform sibcall optimization since sibcall optimization will
4407      only align the outgoing stack to incoming stack boundary.  */
4408   if (targetm.calls.update_stack_boundary)
4409     targetm.calls.update_stack_boundary ();
4410 
4411   /* The incoming stack frame has to be aligned at least at
4412      parm_stack_boundary.  */
4413   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4414 
4415   /* Update crtl->stack_alignment_estimated and use it later to align
4416      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
4417      exceptions since callgraph doesn't collect incoming stack alignment
4418      in this case.  */
4419   if (cfun->can_throw_non_call_exceptions
4420       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4421     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4422   else
4423     preferred_stack_boundary = crtl->preferred_stack_boundary;
4424   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4425     crtl->stack_alignment_estimated = preferred_stack_boundary;
4426   if (preferred_stack_boundary > crtl->stack_alignment_needed)
4427     crtl->stack_alignment_needed = preferred_stack_boundary;
4428 
4429   gcc_assert (crtl->stack_alignment_needed
4430 	      <= crtl->stack_alignment_estimated);
4431 
4432   crtl->stack_realign_needed
4433     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4434   crtl->stack_realign_tried = crtl->stack_realign_needed;
4435 
4436   crtl->stack_realign_processed = true;
4437 
4438   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4439      alignment.  */
4440   gcc_assert (targetm.calls.get_drap_rtx != NULL);
4441   drap_rtx = targetm.calls.get_drap_rtx ();
4442 
4443   /* stack_realign_drap and drap_rtx must match.  */
4444   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4445 
4446   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
4447   if (NULL != drap_rtx)
4448     {
4449       crtl->args.internal_arg_pointer = drap_rtx;
4450 
4451       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4452          needed. */
4453       fixup_tail_calls ();
4454     }
4455 }
4456 
4457 /* Translate the intermediate representation contained in the CFG
4458    from GIMPLE trees to RTL.
4459 
4460    We do conversion per basic block and preserve/update the tree CFG.
4461    This implies we have to do some magic as the CFG can simultaneously
4462    consist of basic blocks containing RTL and GIMPLE trees.  This can
4463    confuse the CFG hooks, so be careful not to manipulate the CFG during
4464    the expansion.  */
4465 
4466 static unsigned int
4467 gimple_expand_cfg (void)
4468 {
4469   basic_block bb, init_block;
4470   sbitmap blocks;
4471   edge_iterator ei;
4472   edge e;
4473   rtx var_seq, var_ret_seq;
4474   unsigned i;
4475 
4476   timevar_push (TV_OUT_OF_SSA);
4477   rewrite_out_of_ssa (&SA);
4478   timevar_pop (TV_OUT_OF_SSA);
4479   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
4480 
4481   /* Make sure all values used by the optimization passes have sane
4482      defaults.  */
4483   reg_renumber = 0;
4484 
4485   /* Some backends want to know that we are expanding to RTL.  */
4486   currently_expanding_to_rtl = 1;
4487   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
4488   free_dominance_info (CDI_DOMINATORS);
4489 
4490   rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4491 
4492   insn_locations_init ();
4493   if (!DECL_IS_BUILTIN (current_function_decl))
4494     {
4495       /* Eventually, all FEs should explicitly set function_start_locus.  */
4496       if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
4497        set_curr_insn_location
4498          (DECL_SOURCE_LOCATION (current_function_decl));
4499       else
4500        set_curr_insn_location (cfun->function_start_locus);
4501     }
4502   else
4503     set_curr_insn_location (UNKNOWN_LOCATION);
4504   prologue_location = curr_insn_location ();
4505 
4506 #ifdef INSN_SCHEDULING
4507   init_sched_attrs ();
4508 #endif
4509 
4510   /* Make sure first insn is a note even if we don't want linenums.
4511      This makes sure the first insn will never be deleted.
4512      Also, final expects a note to appear there.  */
4513   emit_note (NOTE_INSN_DELETED);
4514 
4515   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
4516   discover_nonconstant_array_refs ();
4517 
4518   targetm.expand_to_rtl_hook ();
4519   crtl->stack_alignment_needed = STACK_BOUNDARY;
4520   crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4521   crtl->stack_alignment_estimated = 0;
4522   crtl->preferred_stack_boundary = STACK_BOUNDARY;
4523   cfun->cfg->max_jumptable_ents = 0;
4524 
4525   /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
4526      of the function section at expansion time to predict the distance of calls.  */
4527   resolve_unique_section (current_function_decl, 0, flag_function_sections);
4528 
4529   /* Expand the variables recorded during gimple lowering.  */
4530   timevar_push (TV_VAR_EXPAND);
4531   start_sequence ();
4532 
4533   var_ret_seq = expand_used_vars ();
4534 
4535   var_seq = get_insns ();
4536   end_sequence ();
4537   timevar_pop (TV_VAR_EXPAND);
4538 
4539   /* Honor stack protection warnings.  */
4540   if (warn_stack_protect)
4541     {
4542       if (cfun->calls_alloca)
4543 	warning (OPT_Wstack_protector,
4544 		 "stack protector not protecting local variables: "
4545                  "variable length buffer");
4546       if (has_short_buffer && !crtl->stack_protect_guard)
4547 	warning (OPT_Wstack_protector,
4548 		 "stack protector not protecting function: "
4549                  "all local arrays are less than %d bytes long",
4550 		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4551     }
4552 
4553   /* Set up parameters and prepare for return, for the function.  */
4554   expand_function_start (current_function_decl);
4555 
4556   /* If we emitted any instructions for setting up the variables,
4557      emit them before the FUNCTION_START note.  */
4558   if (var_seq)
4559     {
4560       emit_insn_before (var_seq, parm_birth_insn);
4561 
4562       /* In expand_function_end we'll insert the alloca save/restore
4563 	 before parm_birth_insn.  We've just inserted an alloca call.
4564 	 Adjust the pointer to match.  */
4565       parm_birth_insn = var_seq;
4566     }
4567 
4568   /* Now that we also have the parameter RTXs, copy them over to our
4569      partitions.  */
4570   for (i = 0; i < SA.map->num_partitions; i++)
4571     {
4572       tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4573 
4574       if (TREE_CODE (var) != VAR_DECL
4575 	  && !SA.partition_to_pseudo[i])
4576 	SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4577       gcc_assert (SA.partition_to_pseudo[i]);
4578 
4579       /* If this decl was marked as living in multiple places, reset
4580          this now to NULL.  */
4581       if (DECL_RTL_IF_SET (var) == pc_rtx)
4582 	SET_DECL_RTL (var, NULL);
4583 
4584       /* Some RTL parts really want to look at DECL_RTL(x) when x
4585          was a decl marked in REG_ATTR or MEM_ATTR.  We could use
4586 	 SET_DECL_RTL here to make this available, but that would mean
4587 	 selecting one of the potentially many RTLs for one DECL.  Instead
4588 	 of doing that we simply reset the MEM_EXPR of the RTL in question,
4589 	 then nobody can get at it and hence nobody can call DECL_RTL on it.  */
4590       if (!DECL_RTL_SET_P (var))
4591 	{
4592 	  if (MEM_P (SA.partition_to_pseudo[i]))
4593 	    set_mem_expr (SA.partition_to_pseudo[i], NULL);
4594 	}
4595     }
4596 
4597   /* If we have a class containing differently aligned pointers
4598      we need to merge those into the corresponding RTL pointer
4599      alignment.  */
4600   for (i = 1; i < num_ssa_names; i++)
4601     {
4602       tree name = ssa_name (i);
4603       int part;
4604       rtx r;
4605 
4606       if (!name
4607 	  /* We might have generated new SSA names in
4608 	     update_alias_info_with_stack_vars.  They will have NULL
4609 	     defining statements, and won't be part of the partitioning,
4610 	     so ignore those.  */
4611 	  || !SSA_NAME_DEF_STMT (name))
4612 	continue;
4613       part = var_to_partition (SA.map, name);
4614       if (part == NO_PARTITION)
4615 	continue;
4616 
4617       /* Adjust all partition members to get the underlying decl of
4618 	 the representative which we might have created in expand_one_var.  */
4619       if (SSA_NAME_VAR (name) == NULL_TREE)
4620 	{
4621 	  tree leader = partition_to_var (SA.map, part);
4622 	  gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
4623 	  replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
4624 	}
4625       if (!POINTER_TYPE_P (TREE_TYPE (name)))
4626 	continue;
4627 
4628       r = SA.partition_to_pseudo[part];
4629       if (REG_P (r))
4630 	mark_reg_pointer (r, get_pointer_alignment (name));
4631     }
4632 
4633   /* If this function is `main', emit a call to `__main'
4634      to run global initializers, etc.  */
4635   if (DECL_NAME (current_function_decl)
4636       && MAIN_NAME_P (DECL_NAME (current_function_decl))
4637       && DECL_FILE_SCOPE_P (current_function_decl))
4638     expand_main_function ();
4639 
4640   /* Initialize the stack_protect_guard field.  This must happen after the
4641      call to __main (if any) so that the external decl is initialized.  */
4642   if (crtl->stack_protect_guard)
4643     stack_protect_prologue ();
4644 
4645   expand_phi_nodes (&SA);
4646 
4647   /* Register rtl specific functions for cfg.  */
4648   rtl_register_cfg_hooks ();
4649 
4650   init_block = construct_init_block ();
4651 
4652   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleared from the
4653      remaining edges later.  */
4654   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4655     e->flags &= ~EDGE_EXECUTABLE;
4656 
4657   lab_rtx_for_bb = pointer_map_create ();
4658   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4659     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
4660 
4661   if (MAY_HAVE_DEBUG_INSNS)
4662     expand_debug_locations ();
4663 
4664   /* Free stuff we no longer need after GIMPLE optimizations.  */
4665   free_dominance_info (CDI_DOMINATORS);
4666   free_dominance_info (CDI_POST_DOMINATORS);
4667   delete_tree_cfg_annotations ();
4668 
4669   timevar_push (TV_OUT_OF_SSA);
4670   finish_out_of_ssa (&SA);
4671   timevar_pop (TV_OUT_OF_SSA);
4672 
4673   timevar_push (TV_POST_EXPAND);
4674   /* We are no longer in SSA form.  */
4675   cfun->gimple_df->in_ssa_p = false;
4676   if (current_loops)
4677     loops_state_clear (LOOP_CLOSED_SSA);
4678 
4679   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4680      conservatively to true until they are all profile aware.  */
4681   pointer_map_destroy (lab_rtx_for_bb);
4682   free_histograms ();
4683 
4684   construct_exit_block ();
4685   insn_locations_finalize ();
4686 
4687   if (var_ret_seq)
4688     {
4689       rtx after = return_label;
4690       rtx next = NEXT_INSN (after);
4691       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
4692 	after = next;
4693       emit_insn_after (var_ret_seq, after);
4694     }
4695 
4696   /* Zap the tree EH table.  */
4697   set_eh_throw_stmt_table (cfun, NULL);
4698 
4699   /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4700      to split edges, which edge insertions might do.  */
4701   rebuild_jump_labels (get_insns ());
4702 
4703   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4704     {
4705       edge e;
4706       edge_iterator ei;
4707       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4708 	{
4709 	  if (e->insns.r)
4710 	    {
4711 	      rebuild_jump_labels_chain (e->insns.r);
4712 	      /* Put insns after parm birth, but before
4713 		 NOTE_INSN_FUNCTION_BEG.  */
4714 	      if (e->src == ENTRY_BLOCK_PTR
4715 		  && single_succ_p (ENTRY_BLOCK_PTR))
4716 		{
4717 		  rtx insns = e->insns.r;
4718 		  e->insns.r = NULL_RTX;
4719 		  if (NOTE_P (parm_birth_insn)
4720 		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
4721 		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
4722 		  else
4723 		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4724 		}
4725 	      else
4726 		commit_one_edge_insertion (e);
4727 	    }
4728 	  else
4729 	    ei_next (&ei);
4730 	}
4731     }
4732 
4733   /* We're done expanding trees to RTL.  */
4734   currently_expanding_to_rtl = 0;
4735 
4736   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4737     {
4738       edge e;
4739       edge_iterator ei;
4740       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4741 	{
4742 	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
4743 	  e->flags &= ~EDGE_EXECUTABLE;
4744 
4745 	  /* At the moment not all abnormal edges match the RTL
4746 	     representation.  It is safe to remove them here as
4747 	     find_many_sub_basic_blocks will rediscover them.
4748 	     In the future we should get this fixed properly.  */
4749 	  if ((e->flags & EDGE_ABNORMAL)
4750 	      && !(e->flags & EDGE_SIBCALL))
4751 	    remove_edge (e);
4752 	  else
4753 	    ei_next (&ei);
4754 	}
4755     }
4756 
4757   blocks = sbitmap_alloc (last_basic_block);
4758   bitmap_ones (blocks);
4759   find_many_sub_basic_blocks (blocks);
4760   sbitmap_free (blocks);
4761   purge_all_dead_edges ();
4762 
4763   expand_stack_alignment ();
4764 
4765   /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4766      function.  */
4767   if (crtl->tail_call_emit)
4768     fixup_tail_calls ();
4769 
4770   /* After initial rtl generation, call back to finish generating
4771      exception support code.  We need to do this before cleaning up
4772      the CFG as the code does not expect dead landing pads.  */
4773   if (cfun->eh->region_tree != NULL)
4774     finish_eh_generation ();
4775 
4776   /* Remove unreachable blocks, otherwise we cannot compute dominators
4777      which are needed for loop state verification.  As a side-effect
4778      this also compacts blocks.
4779      ???  We cannot remove trivially dead insns here as for example
4780      the DRAP reg on i?86 is not magically live at this point.
4781      gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
4782   cleanup_cfg (CLEANUP_NO_INSN_DEL);
4783 
4784 #ifdef ENABLE_CHECKING
4785   verify_flow_info ();
4786 #endif
4787 
4788   /* Initialize pseudos allocated for hard registers.  */
4789   emit_initial_value_sets ();
4790 
4791   /* And finally unshare all RTL.  */
4792   unshare_all_rtl ();
4793 
4794   /* There's no need to defer outputting this function any more; we
4795      know we want to output it.  */
4796   DECL_DEFER_OUTPUT (current_function_decl) = 0;
4797 
4798   /* Now that we're done expanding trees to RTL, we shouldn't have any
4799      more CONCATs anywhere.  */
4800   generating_concat_p = 0;
4801 
4802   if (dump_file)
4803     {
4804       fprintf (dump_file,
4805 	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4806       /* And the pass manager will dump RTL for us.  */
4807     }
4808 
4809   /* If we're emitting a nested function, make sure its parent gets
4810      emitted as well.  Doing otherwise confuses debug info.  */
4811   {
4812     tree parent;
4813     for (parent = DECL_CONTEXT (current_function_decl);
4814 	 parent != NULL_TREE;
4815 	 parent = get_containing_scope (parent))
4816       if (TREE_CODE (parent) == FUNCTION_DECL)
4817 	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4818   }
4819 
4820   /* We are now committed to emitting code for this function.  Do any
4821      preparation, such as emitting abstract debug info for the inline function
4822      before it gets mangled by optimization.  */
4823   if (cgraph_function_possibly_inlined_p (current_function_decl))
4824     (*debug_hooks->outlining_inline_function) (current_function_decl);
4825 
4826   TREE_ASM_WRITTEN (current_function_decl) = 1;
4827 
4828   /* After expanding, the return labels are no longer needed. */
4829   return_label = NULL;
4830   naked_return_label = NULL;
4831 
4832   /* After expanding, the tm_restart map is no longer needed.  */
4833   if (cfun->gimple_df->tm_restart)
4834     {
4835       htab_delete (cfun->gimple_df->tm_restart);
4836       cfun->gimple_df->tm_restart = NULL;
4837     }
4838 
4839   /* Tag the blocks with a depth number so that change_scope can find
4840      the common parent easily.  */
4841   set_block_levels (DECL_INITIAL (cfun->decl), 0);
4842   default_rtl_profile ();
4843 
4844   timevar_pop (TV_POST_EXPAND);
4845 
4846   return 0;
4847 }
4848 
4849 struct rtl_opt_pass pass_expand =
4850 {
4851  {
4852   RTL_PASS,
4853   "expand",				/* name */
4854   OPTGROUP_NONE,                        /* optinfo_flags */
4855   NULL,                                 /* gate */
4856   gimple_expand_cfg,			/* execute */
4857   NULL,                                 /* sub */
4858   NULL,                                 /* next */
4859   0,                                    /* static_pass_number */
4860   TV_EXPAND,				/* tv_id */
4861   PROP_ssa | PROP_gimple_leh | PROP_cfg
4862     | PROP_gimple_lcx,			/* properties_required */
4863   PROP_rtl,                             /* properties_provided */
4864   PROP_ssa | PROP_trees,		/* properties_destroyed */
4865   TODO_verify_ssa | TODO_verify_flow
4866     | TODO_verify_stmts,		/* todo_flags_start */
4867   TODO_ggc_collect			/* todo_flags_finish */
4868  }
4869 };
4870