xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cfgexpand.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
3    Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "toplev.h"
40 #include "debug.h"
41 #include "params.h"
42 #include "tree-inline.h"
43 #include "value-prof.h"
44 #include "target.h"
45 #include "ssaexpand.h"
46 
47 
48 /* This variable holds information helping the rewriting of SSA trees
49    into RTL.  */
50 struct ssaexpand SA;
51 
52 /* This variable holds the currently expanded gimple statement for purposes
53    of communicating the profile info to the builtin expanders.  */
54 gimple currently_expanding_gimple_stmt;
55 
56 /* Return an expression tree corresponding to the RHS of GIMPLE
57    statement STMT.  */
58 
59 tree
60 gimple_assign_rhs_to_tree (gimple stmt)
61 {
62   tree t;
63   enum gimple_rhs_class grhs_class;
64 
65   grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
66 
67   if (grhs_class == GIMPLE_BINARY_RHS)
68     t = build2 (gimple_assign_rhs_code (stmt),
69 		TREE_TYPE (gimple_assign_lhs (stmt)),
70 		gimple_assign_rhs1 (stmt),
71 		gimple_assign_rhs2 (stmt));
72   else if (grhs_class == GIMPLE_UNARY_RHS)
73     t = build1 (gimple_assign_rhs_code (stmt),
74 		TREE_TYPE (gimple_assign_lhs (stmt)),
75 		gimple_assign_rhs1 (stmt));
76   else if (grhs_class == GIMPLE_SINGLE_RHS)
77     {
78       t = gimple_assign_rhs1 (stmt);
79       /* Avoid modifying this tree in place below.  */
80       if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
81 	   && gimple_location (stmt) != EXPR_LOCATION (t))
82 	  || (gimple_block (stmt)
83 	      && currently_expanding_to_rtl
84 	      && EXPR_P (t)
85 	      && gimple_block (stmt) != TREE_BLOCK (t)))
86 	t = copy_node (t);
87     }
88   else
89     gcc_unreachable ();
90 
91   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
92     SET_EXPR_LOCATION (t, gimple_location (stmt));
93   if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
94     TREE_BLOCK (t) = gimple_block (stmt);
95 
96   return t;
97 }
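/* Illustrative example (not part of the code above): for a GIMPLE
   assignment with a binary RHS such as

       a_1 = b_2 + c_3;

   the function above rebuilds the tree  PLUS_EXPR <TREE_TYPE (a_1), b_2, c_3>
   and then copies the statement's location and block onto it, so the
   expanders below see an expression that looks as if it came straight
   from the front end.  */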
98 
99 
100 #ifndef STACK_ALIGNMENT_NEEDED
101 #define STACK_ALIGNMENT_NEEDED 1
102 #endif
103 
104 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
105 
106 /* Associate declaration T with storage space X.  If T is not an
107    SSA name, this is exactly SET_DECL_RTL; otherwise make the
108    partition of T associated with X.  */
109 static inline void
110 set_rtl (tree t, rtx x)
111 {
112   if (TREE_CODE (t) == SSA_NAME)
113     {
114       SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
115       if (x && !MEM_P (x))
116 	set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
117       /* For the benefit of debug information at -O0 (where vartracking
118          doesn't run) record the place also in the base DECL if it's
119 	 a normal variable (not a parameter).  */
120       if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
121 	{
122 	  tree var = SSA_NAME_VAR (t);
123 	  /* If we don't yet have something recorded, just record it now.  */
124 	  if (!DECL_RTL_SET_P (var))
125 	    SET_DECL_RTL (var, x);
126 	  /* If we have it set already to "multiple places" don't
127 	     change this.  */
128 	  else if (DECL_RTL (var) == pc_rtx)
129 	    ;
130 	  /* If we have something recorded and it's not the same place
131 	     as we want to record now, we have multiple partitions for the
132 	     same base variable, with different places.  We can't just
133 	     randomly choose one, hence we have to say that we don't know.
134 	     This only happens with optimization, and there var-tracking
135 	     will figure out the right thing.  */
136 	  else if (DECL_RTL (var) != x)
137 	    SET_DECL_RTL (var, pc_rtx);
138 	}
139     }
140   else
141     SET_DECL_RTL (t, x);
142 }
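/* For instance (hypothetical scenario): if two SSA partitions of the same
   user variable "i" are given different pseudos, the first set_rtl call
   records that pseudo in DECL_RTL (i) for the -O0 debug case, and the
   second call, seeing a different location, downgrades DECL_RTL (i) to
   pc_rtx ("multiple places"), leaving it to var-tracking to recover the
   per-range locations when optimizing.  */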
143 
144 /* This structure holds data relevant to one variable that will be
145    placed in a stack slot.  */
146 struct stack_var
147 {
148   /* The variable.  */
149   tree decl;
150 
151   /* The offset of the variable.  During partitioning, this is the
152      offset relative to the partition.  After partitioning, this
153      is relative to the stack frame.  */
154   HOST_WIDE_INT offset;
155 
156   /* Initially, the size of the variable.  Later, the size of the partition,
157      if this variable becomes its partition's representative.  */
158   HOST_WIDE_INT size;
159 
160   /* The *byte* alignment required for this variable.  Or, as with the
161      size, the alignment for this partition.  */
162   unsigned int alignb;
163 
164   /* The partition representative.  */
165   size_t representative;
166 
167   /* The next stack variable in the partition, or EOC.  */
168   size_t next;
169 
170   /* The numbers of conflicting stack variables.  */
171   bitmap conflicts;
172 };
173 
174 #define EOC  ((size_t)-1)
175 
176 /* We have an array of such objects while deciding allocation.  */
177 static struct stack_var *stack_vars;
178 static size_t stack_vars_alloc;
179 static size_t stack_vars_num;
180 
181 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
182    is non-decreasing.  */
183 static size_t *stack_vars_sorted;
184 
185 /* The phase of the stack frame.  This is the known misalignment of
186    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
187    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
188 static int frame_phase;
189 
190 /* Used during expand_used_vars to remember if we saw any decls for
191    which we'd like to enable stack smashing protection.  */
192 static bool has_protected_decls;
193 
194 /* Used during expand_used_vars.  Remember if we saw a character buffer
195    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
196 static bool has_short_buffer;
197 
198 /* Discover the byte alignment to use for DECL.  Ignore any alignment
199    we can't satisfy given the expected alignment of the stack boundary.  */
200 
201 static unsigned int
202 get_decl_align_unit (tree decl)
203 {
204   unsigned int align;
205 
206   align = LOCAL_DECL_ALIGNMENT (decl);
207 
208   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
209     align = MAX_SUPPORTED_STACK_ALIGNMENT;
210 
211   if (SUPPORTS_STACK_ALIGNMENT)
212     {
213       if (crtl->stack_alignment_estimated < align)
214 	{
215 	  gcc_assert(!crtl->stack_realign_processed);
216           crtl->stack_alignment_estimated = align;
217 	}
218     }
219 
220   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
221      So here we only make sure stack_alignment_needed >= align.  */
222   if (crtl->stack_alignment_needed < align)
223     crtl->stack_alignment_needed = align;
224   if (crtl->max_used_stack_slot_alignment < align)
225     crtl->max_used_stack_slot_alignment = align;
226 
227   return align / BITS_PER_UNIT;
228 }
229 
230 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
231    Return the frame offset.  */
232 
233 static HOST_WIDE_INT
234 alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
235 {
236   HOST_WIDE_INT offset, new_frame_offset;
237 
238   new_frame_offset = frame_offset;
239   if (FRAME_GROWS_DOWNWARD)
240     {
241       new_frame_offset -= size + frame_phase;
242       new_frame_offset &= -align;
243       new_frame_offset += frame_phase;
244       offset = new_frame_offset;
245     }
246   else
247     {
248       new_frame_offset -= frame_phase;
249       new_frame_offset += align - 1;
250       new_frame_offset &= -align;
251       new_frame_offset += frame_phase;
252       offset = new_frame_offset;
253       new_frame_offset += size;
254     }
255   frame_offset = new_frame_offset;
256 
257   if (frame_offset_overflow (frame_offset, cfun->decl))
258     frame_offset = offset = 0;
259 
260   return offset;
261 }
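/* A minimal standalone sketch (illustrative only, not part of the pass,
   assuming a downward growing frame and a power-of-two ALIGN) of the
   rounding arithmetic used above: subtract the size and the phase, round
   down to a multiple of ALIGN with the mask -ALIGN, then add the phase
   back.  */
#if 0
static HOST_WIDE_INT
example_alloc_downward (HOST_WIDE_INT frame, HOST_WIDE_INT size,
			HOST_WIDE_INT align, HOST_WIDE_INT phase)
{
  HOST_WIDE_INT off = frame;
  off -= size + phase;
  off &= -align;		/* Round down to a multiple of ALIGN.  */
  off += phase;
  /* E.g. frame 0, size 12, align 8, phase 4:
     0 - 16 = -16, already a multiple of 8, plus 4 gives -12,
     and (-12 + 4) % 8 == 0, matching the frame_phase invariant.  */
  return off;
}
#endif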
262 
263 /* Accumulate DECL into STACK_VARS.  */
264 
265 static void
266 add_stack_var (tree decl)
267 {
268   if (stack_vars_num >= stack_vars_alloc)
269     {
270       if (stack_vars_alloc)
271 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
272       else
273 	stack_vars_alloc = 32;
274       stack_vars
275 	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
276     }
277   stack_vars[stack_vars_num].decl = decl;
278   stack_vars[stack_vars_num].offset = 0;
279   stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
280   stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));
281 
282   /* All variables are initially in their own partition.  */
283   stack_vars[stack_vars_num].representative = stack_vars_num;
284   stack_vars[stack_vars_num].next = EOC;
285 
286   /* All variables initially conflict with no other.  */
287   stack_vars[stack_vars_num].conflicts = NULL;
288 
289   /* Ensure that this decl doesn't get put onto the list twice.  */
290   set_rtl (decl, pc_rtx);
291 
292   stack_vars_num++;
293 }
294 
295 /* Make the decls associated with luids X and Y conflict.  */
296 
297 static void
298 add_stack_var_conflict (size_t x, size_t y)
299 {
300   struct stack_var *a = &stack_vars[x];
301   struct stack_var *b = &stack_vars[y];
302   if (!a->conflicts)
303     a->conflicts = BITMAP_ALLOC (NULL);
304   if (!b->conflicts)
305     b->conflicts = BITMAP_ALLOC (NULL);
306   bitmap_set_bit (a->conflicts, y);
307   bitmap_set_bit (b->conflicts, x);
308 }
309 
310 /* Check whether the decls associated with luids X and Y conflict.  */
311 
312 static bool
313 stack_var_conflict_p (size_t x, size_t y)
314 {
315   struct stack_var *a = &stack_vars[x];
316   struct stack_var *b = &stack_vars[y];
317   if (!a->conflicts || !b->conflicts)
318     return false;
319   return bitmap_bit_p (a->conflicts, y);
320 }
321 
322 /* Returns true if TYPE is or contains a union type.  */
323 
324 static bool
325 aggregate_contains_union_type (tree type)
326 {
327   tree field;
328 
329   if (TREE_CODE (type) == UNION_TYPE
330       || TREE_CODE (type) == QUAL_UNION_TYPE)
331     return true;
332   if (TREE_CODE (type) == ARRAY_TYPE)
333     return aggregate_contains_union_type (TREE_TYPE (type));
334   if (TREE_CODE (type) != RECORD_TYPE)
335     return false;
336 
337   for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
338     if (TREE_CODE (field) == FIELD_DECL)
339       if (aggregate_contains_union_type (TREE_TYPE (field)))
340 	return true;
341 
342   return false;
343 }
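/* For example (illustrative): "union { short s; int i; }" is a union
   itself, "struct { int a; union { short s; int i; } u; }" contains one
   through a field, and an array of such structs contains one through its
   element type; a plain "struct { int a; int b; }" does not.  */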
344 
345 /* A subroutine of expand_used_vars.  If two variables X and Y have alias
346    sets that do not conflict, then do add a conflict for these variables
347    in the interference graph.  We also need to make sure to add conflicts
348    for union-containing structures.  Else RTL alias analysis comes along
349    and due to type based aliasing rules decides that for two overlapping
350    union temporaries { short s; int i; } accesses to the same mem through
351    different types may not alias and happily reorders stores across
352    life-time boundaries of the temporaries (See PR25654).
353    We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */
354 
355 static void
356 add_alias_set_conflicts (void)
357 {
358   size_t i, j, n = stack_vars_num;
359 
360   for (i = 0; i < n; ++i)
361     {
362       tree type_i = TREE_TYPE (stack_vars[i].decl);
363       bool aggr_i = AGGREGATE_TYPE_P (type_i);
364       bool contains_union;
365 
366       contains_union = aggregate_contains_union_type (type_i);
367       for (j = 0; j < i; ++j)
368 	{
369 	  tree type_j = TREE_TYPE (stack_vars[j].decl);
370 	  bool aggr_j = AGGREGATE_TYPE_P (type_j);
371 	  if (aggr_i != aggr_j
372 	      /* Either the objects conflict by means of type based
373 		 aliasing rules, or we need to add a conflict.  */
374 	      || !objects_must_conflict_p (type_i, type_j)
375 	      /* In case the types do not conflict ensure that access
376 		 to elements will conflict.  In case of unions we have
377 		 to be careful as type based aliasing rules may say
378 		 access to the same memory does not conflict.  So play
379 		 safe and add a conflict in this case.  */
380 	      || contains_union)
381 	    add_stack_var_conflict (i, j);
382 	}
383     }
384 }
385 
386 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
387    sorting an array of indices by the size and type of the object.  */
388 
389 static int
390 stack_var_size_cmp (const void *a, const void *b)
391 {
392   HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
393   HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
394   tree decla, declb;
395   unsigned int uida, uidb;
396 
397   if (sa < sb)
398     return -1;
399   if (sa > sb)
400     return 1;
401   decla = stack_vars[*(const size_t *)a].decl;
402   declb = stack_vars[*(const size_t *)b].decl;
403   /* For stack variables of the same size use the ids of the decls
404      to make the sort stable.  Two SSA names are compared by their
405      version, SSA names come before non-SSA names, and two normal
406      decls are compared by their DECL_UID.  */
407   if (TREE_CODE (decla) == SSA_NAME)
408     {
409       if (TREE_CODE (declb) == SSA_NAME)
410 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
411       else
412 	return -1;
413     }
414   else if (TREE_CODE (declb) == SSA_NAME)
415     return 1;
416   else
417     uida = DECL_UID (decla), uidb = DECL_UID (declb);
418   if (uida < uidb)
419     return -1;
420   if (uida > uidb)
421     return 1;
422   return 0;
423 }
424 
425 
426 /* If the points-to solution *PT points to variables that are in a partition
427    together with other variables add all partition members to the pointed-to
428    variables bitmap.  */
429 
430 static void
431 add_partitioned_vars_to_ptset (struct pt_solution *pt,
432 			       struct pointer_map_t *decls_to_partitions,
433 			       struct pointer_set_t *visited, bitmap temp)
434 {
435   bitmap_iterator bi;
436   unsigned i;
437   bitmap *part;
438 
439   if (pt->anything
440       || pt->vars == NULL
441       /* The pointed-to vars bitmap is shared; it is enough to
442 	 visit it once.  */
443       || pointer_set_insert(visited, pt->vars))
444     return;
445 
446   bitmap_clear (temp);
447 
448   /* By using a temporary bitmap to store all members of the partitions
449      we have to add, we make sure to visit each of the partitions only
450      once.  */
451   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
452     if ((!temp
453 	 || !bitmap_bit_p (temp, i))
454 	&& (part = (bitmap *) pointer_map_contains (decls_to_partitions,
455 						    (void *)(size_t) i)))
456       bitmap_ior_into (temp, *part);
457   if (!bitmap_empty_p (temp))
458     bitmap_ior_into (pt->vars, temp);
459 }
460 
461 /* Update points-to sets based on partition info, so we can use them on RTL.
462    The bitmaps representing stack partitions will be saved until expand,
463    where partitioned decls used as bases in memory expressions will be
464    rewritten.  */
465 
466 static void
467 update_alias_info_with_stack_vars (void)
468 {
469   struct pointer_map_t *decls_to_partitions = NULL;
470   size_t i, j;
471   tree var = NULL_TREE;
472 
473   for (i = 0; i < stack_vars_num; i++)
474     {
475       bitmap part = NULL;
476       tree name;
477       struct ptr_info_def *pi;
478 
479       /* Not interested in partitions with a single variable.  */
480       if (stack_vars[i].representative != i
481           || stack_vars[i].next == EOC)
482         continue;
483 
484       if (!decls_to_partitions)
485 	{
486 	  decls_to_partitions = pointer_map_create ();
487 	  cfun->gimple_df->decls_to_pointers = pointer_map_create ();
488 	}
489 
490       /* Create an SSA_NAME that points to the partition for use
491          as base during alias-oracle queries on RTL for bases that
492 	 have been partitioned.  */
493       if (var == NULL_TREE)
494 	var = create_tmp_var (ptr_type_node, NULL);
495       name = make_ssa_name (var, NULL);
496 
497       /* Create bitmaps representing partitions.  They will be used for
498          points-to sets later, so use GGC alloc.  */
499       part = BITMAP_GGC_ALLOC ();
500       for (j = i; j != EOC; j = stack_vars[j].next)
501 	{
502 	  tree decl = stack_vars[j].decl;
503 	  unsigned int uid = DECL_UID (decl);
504 	  /* We should never end up partitioning SSA names (though they
505 	     may end up on the stack).  Neither should we allocate stack
506 	     space to something that is unused and thus unreferenced.  */
507 	  gcc_assert (DECL_P (decl)
508 		      && referenced_var_lookup (uid));
509 	  bitmap_set_bit (part, uid);
510 	  *((bitmap *) pointer_map_insert (decls_to_partitions,
511 					   (void *)(size_t) uid)) = part;
512 	  *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
513 					 decl)) = name;
514 	}
515 
516       /* Make the SSA name point to all partition members.  */
517       pi = get_ptr_info (name);
518       pt_solution_set (&pi->pt, part);
519     }
520 
521   /* Make all points-to sets that contain one member of a partition
522      contain all members of the partition.  */
523   if (decls_to_partitions)
524     {
525       unsigned i;
526       struct pointer_set_t *visited = pointer_set_create ();
527       bitmap temp = BITMAP_ALLOC (NULL);
528 
529       for (i = 1; i < num_ssa_names; i++)
530 	{
531 	  tree name = ssa_name (i);
532 	  struct ptr_info_def *pi;
533 
534 	  if (name
535 	      && POINTER_TYPE_P (TREE_TYPE (name))
536 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
537 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
538 					   visited, temp);
539 	}
540 
541       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
542 				     decls_to_partitions, visited, temp);
543       add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
544 				     decls_to_partitions, visited, temp);
545 
546       pointer_set_destroy (visited);
547       pointer_map_destroy (decls_to_partitions);
548       BITMAP_FREE (temp);
549     }
550 }
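/* Concretely (hypothetical example): if locals "a" and "b" end up in the
   same stack partition, a fresh pointer SSA name is created whose
   points-to set is {a, b}, decls_to_pointers maps both decls to it, and
   every other points-to set that mentioned just "a" is widened to include
   "b" as well, so the RTL alias oracle never concludes that an access
   based on "a" cannot touch "b".  */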
551 
552 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
553    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
554    Merge them into a single partition A.
555 
556    At the same time, add OFFSET to all variables in partition B.  At the end
557    of the partitioning process we'll have a nice block easy to lay out within
558    the stack frame.  */
559 
560 static void
561 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
562 {
563   size_t i, last;
564   struct stack_var *vb = &stack_vars[b];
565   bitmap_iterator bi;
566   unsigned u;
567 
568   /* Update each element of partition B with the given offset,
569      and merge them into partition A.  */
570   for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
571     {
572       stack_vars[i].offset += offset;
573       stack_vars[i].representative = a;
574     }
575   stack_vars[last].next = stack_vars[a].next;
576   stack_vars[a].next = b;
577 
578   /* Update the required alignment of partition A to account for B.  */
579   if (stack_vars[a].alignb < stack_vars[b].alignb)
580     stack_vars[a].alignb = stack_vars[b].alignb;
581 
582   /* Update the interference graph and merge the conflicts.  */
583   if (vb->conflicts)
584     {
585       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
586 	add_stack_var_conflict (a, stack_vars[u].representative);
587       BITMAP_FREE (vb->conflicts);
588     }
589 }
590 
591 /* A subroutine of expand_used_vars.  Binpack the variables into
592    partitions constrained by the interference graph.  The overall
593    algorithm used is as follows:
594 
595 	Sort the objects by size.
596 	For each object A {
597 	  S = size(A)
598 	  O = 0
599 	  loop {
600 	    Look for the largest non-conflicting object B with size <= S.
601 	    UNION (A, B)
602 	    offset(B) = O
603 	    O += size(B)
604 	    S -= size(B)
605 	  }
606 	}
607 */
608 
609 static void
610 partition_stack_vars (void)
611 {
612   size_t si, sj, n = stack_vars_num;
613 
614   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
615   for (si = 0; si < n; ++si)
616     stack_vars_sorted[si] = si;
617 
618   if (n == 1)
619     return;
620 
621   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
622 
623   for (si = 0; si < n; ++si)
624     {
625       size_t i = stack_vars_sorted[si];
626       HOST_WIDE_INT isize = stack_vars[i].size;
627       HOST_WIDE_INT offset = 0;
628 
629       for (sj = si; sj-- > 0; )
630 	{
631 	  size_t j = stack_vars_sorted[sj];
632 	  HOST_WIDE_INT jsize = stack_vars[j].size;
633 	  unsigned int jalign = stack_vars[j].alignb;
634 
635 	  /* Ignore objects that aren't partition representatives.  */
636 	  if (stack_vars[j].representative != j)
637 	    continue;
638 
639 	  /* Ignore objects too large for the remaining space.  */
640 	  if (isize < jsize)
641 	    continue;
642 
643 	  /* Ignore conflicting objects.  */
644 	  if (stack_var_conflict_p (i, j))
645 	    continue;
646 
647 	  /* Refine the remaining space check to include alignment.  */
648 	  if (offset & (jalign - 1))
649 	    {
650 	      HOST_WIDE_INT toff = offset;
651 	      toff += jalign - 1;
652 	      toff &= -(HOST_WIDE_INT)jalign;
653 	      if (isize - (toff - offset) < jsize)
654 		continue;
655 
656 	      isize -= toff - offset;
657 	      offset = toff;
658 	    }
659 
660 	  /* UNION the objects, placing J at OFFSET.  */
661 	  union_stack_vars (i, j, offset);
662 
663 	  isize -= jsize;
664 	  if (isize == 0)
665 	    break;
666 	}
667     }
668 
669   if (optimize)
670     update_alias_info_with_stack_vars ();
671 }
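/* Worked example (with made-up variables): suppose the stack vars are
   A (32 bytes), B (16 bytes), C and D (8 bytes each), and only A and B
   conflict.  After sorting, the 8-byte objects are processed first: one
   absorbs the other at offset 0 (non-conflicting objects may overlap).
   B then absorbs that partition, again at offset 0, while A is kept out
   of B's partition by the conflict and remains alone.  The frame needs
   two blocks of 16 and 32 bytes instead of four slots totalling 64.  */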
672 
673 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
674 
675 static void
676 dump_stack_var_partition (void)
677 {
678   size_t si, i, j, n = stack_vars_num;
679 
680   for (si = 0; si < n; ++si)
681     {
682       i = stack_vars_sorted[si];
683 
684       /* Skip variables that aren't partition representatives, for now.  */
685       if (stack_vars[i].representative != i)
686 	continue;
687 
688       fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
689 	       " align %u\n", (unsigned long) i, stack_vars[i].size,
690 	       stack_vars[i].alignb);
691 
692       for (j = i; j != EOC; j = stack_vars[j].next)
693 	{
694 	  fputc ('\t', dump_file);
695 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
696 	  fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
697 		   stack_vars[j].offset);
698 	}
699     }
700 }
701 
702 /* Assign rtl to DECL at frame offset OFFSET.  */
703 
704 static void
705 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
706 {
707   /* Alignment is unsigned.   */
708   unsigned HOST_WIDE_INT align, max_align;
709   rtx x;
710 
711   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
712   gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
713 
714   x = plus_constant (virtual_stack_vars_rtx, offset);
715   x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
716 
717   if (TREE_CODE (decl) != SSA_NAME)
718     {
719       /* Set alignment we actually gave this decl if it isn't an SSA name.
720          If it is, we generate stack slots only accidentally, so it isn't as
721 	 important; we'll simply use the alignment that is already set.  */
722       offset -= frame_phase;
723       align = offset & -offset;
724       align *= BITS_PER_UNIT;
725       max_align = crtl->max_used_stack_slot_alignment;
726       if (align == 0 || align > max_align)
727 	align = max_align;
728 
729       DECL_ALIGN (decl) = align;
730       DECL_USER_ALIGN (decl) = 0;
731     }
732 
733   set_mem_attributes (x, SSAVAR (decl), true);
734   set_rtl (decl, x);
735 }
736 
737 /* A subroutine of expand_used_vars.  Give each partition representative
738    a unique location within the stack frame.  Update each partition member
739    with that location.  */
740 
741 static void
742 expand_stack_vars (bool (*pred) (tree))
743 {
744   size_t si, i, j, n = stack_vars_num;
745 
746   for (si = 0; si < n; ++si)
747     {
748       HOST_WIDE_INT offset;
749 
750       i = stack_vars_sorted[si];
751 
752       /* Skip variables that aren't partition representatives, for now.  */
753       if (stack_vars[i].representative != i)
754 	continue;
755 
756       /* Skip variables that have already had rtl assigned.  See also
757 	 add_stack_var where we perpetrate this pc_rtx hack.  */
758       if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
759 	   ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
760 	   : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
761 	continue;
762 
763       /* Check the predicate to see whether this variable should be
764 	 allocated in this pass.  */
765       if (pred && !pred (stack_vars[i].decl))
766 	continue;
767 
768       offset = alloc_stack_frame_space (stack_vars[i].size,
769 					stack_vars[i].alignb);
770 
771       /* Create rtl for each variable based on their location within the
772 	 partition.  */
773       for (j = i; j != EOC; j = stack_vars[j].next)
774 	{
775 	  gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
776 	  expand_one_stack_var_at (stack_vars[j].decl,
777 				   stack_vars[j].offset + offset);
778 	}
779     }
780 }
781 
782 /* Take into account all sizes of partitions and reset DECL_RTLs.  */
783 static HOST_WIDE_INT
784 account_stack_vars (void)
785 {
786   size_t si, j, i, n = stack_vars_num;
787   HOST_WIDE_INT size = 0;
788 
789   for (si = 0; si < n; ++si)
790     {
791       i = stack_vars_sorted[si];
792 
793       /* Skip variables that aren't partition representatives, for now.  */
794       if (stack_vars[i].representative != i)
795 	continue;
796 
797       size += stack_vars[i].size;
798       for (j = i; j != EOC; j = stack_vars[j].next)
799 	set_rtl (stack_vars[j].decl, NULL);
800     }
801   return size;
802 }
803 
804 /* A subroutine of expand_one_var.  Called to immediately assign rtl
805    to a variable to be allocated in the stack frame.  */
806 
807 static void
808 expand_one_stack_var (tree var)
809 {
810   HOST_WIDE_INT size, offset, align;
811 
812   size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
813   align = get_decl_align_unit (SSAVAR (var));
814   offset = alloc_stack_frame_space (size, align);
815 
816   expand_one_stack_var_at (var, offset);
817 }
818 
819 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
820    that will reside in a hard register.  */
821 
822 static void
823 expand_one_hard_reg_var (tree var)
824 {
825   rest_of_decl_compilation (var, 0, 0);
826 }
827 
828 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
829    that will reside in a pseudo register.  */
830 
831 static void
832 expand_one_register_var (tree var)
833 {
834   tree decl = SSAVAR (var);
835   tree type = TREE_TYPE (decl);
836   enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
837   rtx x = gen_reg_rtx (reg_mode);
838 
839   set_rtl (var, x);
840 
841   /* Note if the object is a user variable.  */
842   if (!DECL_ARTIFICIAL (decl))
843     mark_user_reg (x);
844 
845   if (POINTER_TYPE_P (type))
846     mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
847 }
848 
849 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
850    has some associated error, e.g. its type is error-mark.  We just need
851    to pick something that won't crash the rest of the compiler.  */
852 
853 static void
854 expand_one_error_var (tree var)
855 {
856   enum machine_mode mode = DECL_MODE (var);
857   rtx x;
858 
859   if (mode == BLKmode)
860     x = gen_rtx_MEM (BLKmode, const0_rtx);
861   else if (mode == VOIDmode)
862     x = const0_rtx;
863   else
864     x = gen_reg_rtx (mode);
865 
866   SET_DECL_RTL (var, x);
867 }
868 
869 /* A subroutine of expand_one_var.  VAR is a variable that will be
870    allocated to the local stack frame.  Return true if we wish to
871    add VAR to STACK_VARS so that it will be coalesced with other
872    variables.  Return false to allocate VAR immediately.
873 
874    This function is used to reduce the number of variables considered
875    for coalescing, which reduces the size of the quadratic problem.  */
876 
877 static bool
878 defer_stack_allocation (tree var, bool toplevel)
879 {
880   /* If stack protection is enabled, *all* stack variables must be deferred,
881      so that we can re-order the strings to the top of the frame.  */
882   if (flag_stack_protect)
883     return true;
884 
885   /* Variables in the outermost scope automatically conflict with
886      every other variable.  The only reason to want to defer them
887      at all is that, after sorting, we can more efficiently pack
888      small variables in the stack frame.  Continue to defer at -O2.  */
889   if (toplevel && optimize < 2)
890     return false;
891 
892   /* Without optimization, *most* variables are allocated from the
893      stack, which makes the quadratic problem large exactly when we
894      want compilation to proceed as quickly as possible.  On the
895      other hand, we don't want the function's stack frame size to
896      get completely out of hand.  So we avoid adding scalars and
897      "small" aggregates to the list at all.  */
898   if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
899     return false;
900 
901   return true;
902 }
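/* For instance (illustrative): without -fstack-protector at -O0, a
   variable in the outermost scope or any local smaller than 32 bytes is
   given a frame slot immediately, while a 64-byte buffer declared inside
   a nested block is still deferred so that it can share its slot with
   variables from sibling blocks.  With -fstack-protector everything is
   deferred, so the character buffers can be reordered to the top of the
   frame next to the guard.  */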
903 
904 /* A subroutine of expand_used_vars.  Expand one variable according to
905    its flavor.  Variables to be placed on the stack are not actually
906    expanded yet, merely recorded.
907    When REALLY_EXPAND is false, only add stack values to be allocated.
908    Return the stack usage this variable is supposed to take.
909 */
910 
911 static HOST_WIDE_INT
912 expand_one_var (tree var, bool toplevel, bool really_expand)
913 {
914   tree origvar = var;
915   var = SSAVAR (var);
916 
917   if (SUPPORTS_STACK_ALIGNMENT
918       && TREE_TYPE (var) != error_mark_node
919       && TREE_CODE (var) == VAR_DECL)
920     {
921       unsigned int align;
922 
923       /* Because we don't know if VAR will be in a register or on the stack,
924 	 we conservatively assume it will be on the stack even if VAR is
925 	 eventually put into a register after the RA pass.  For non-automatic
926 	 variables, which won't be on stack, we collect alignment of
927 	 type and ignore user specified alignment.  */
928       if (TREE_STATIC (var) || DECL_EXTERNAL (var))
929 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
930 				   TYPE_MODE (TREE_TYPE (var)),
931 				   TYPE_ALIGN (TREE_TYPE (var)));
932       else
933 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
934 
935       if (crtl->stack_alignment_estimated < align)
936         {
937           /* stack_alignment_estimated shouldn't change after stack
938              realign decision is made.  */
939           gcc_assert(!crtl->stack_realign_processed);
940 	  crtl->stack_alignment_estimated = align;
941 	}
942     }
943 
944   if (TREE_CODE (origvar) == SSA_NAME)
945     {
946       gcc_assert (TREE_CODE (var) != VAR_DECL
947 		  || (!DECL_EXTERNAL (var)
948 		      && !DECL_HAS_VALUE_EXPR_P (var)
949 		      && !TREE_STATIC (var)
950 		      && TREE_TYPE (var) != error_mark_node
951 		      && !DECL_HARD_REGISTER (var)
952 		      && really_expand));
953     }
954   if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
955     ;
956   else if (DECL_EXTERNAL (var))
957     ;
958   else if (DECL_HAS_VALUE_EXPR_P (var))
959     ;
960   else if (TREE_STATIC (var))
961     ;
962   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
963     ;
964   else if (TREE_TYPE (var) == error_mark_node)
965     {
966       if (really_expand)
967         expand_one_error_var (var);
968     }
969   else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
970     {
971       if (really_expand)
972         expand_one_hard_reg_var (var);
973     }
974   else if (use_register_for_decl (var))
975     {
976       if (really_expand)
977         expand_one_register_var (origvar);
978     }
979   else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
980     {
981       if (really_expand)
982 	{
983 	  error ("size of variable %q+D is too large", var);
984 	  expand_one_error_var (var);
985 	}
986     }
987   else if (defer_stack_allocation (var, toplevel))
988     add_stack_var (origvar);
989   else
990     {
991       if (really_expand)
992         expand_one_stack_var (origvar);
993       return tree_low_cst (DECL_SIZE_UNIT (var), 1);
994     }
995   return 0;
996 }
997 
998 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
999    expanding variables.  Those variables that can be put into registers
1000    are allocated pseudos; those that can't are put on the stack.
1001 
1002    TOPLEVEL is true if this is the outermost BLOCK.  */
1003 
1004 static void
1005 expand_used_vars_for_block (tree block, bool toplevel)
1006 {
1007   size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1008   tree t;
1009 
1010   old_sv_num = toplevel ? 0 : stack_vars_num;
1011 
1012   /* Expand all variables at this level.  */
1013   for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1014     if (TREE_USED (t))
1015       expand_one_var (t, toplevel, true);
1016 
1017   this_sv_num = stack_vars_num;
1018 
1019   /* Expand all variables at containing levels.  */
1020   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1021     expand_used_vars_for_block (t, false);
1022 
1023   /* Since we do not track exact variable lifetimes (which is not even
1024      possible for variables whose address escapes), we mirror the block
1025      tree in the interference graph.  Here we cause all variables at this
1026      level, and all sublevels, to conflict.  */
1027   if (old_sv_num < this_sv_num)
1028     {
1029       new_sv_num = stack_vars_num;
1030 
1031       for (i = old_sv_num; i < new_sv_num; ++i)
1032 	for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
1033 	  add_stack_var_conflict (i, j);
1034     }
1035 }
1036 
1037 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1038    and clear TREE_USED on all local variables.  */
1039 
1040 static void
1041 clear_tree_used (tree block)
1042 {
1043   tree t;
1044 
1045   for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1046     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1047       TREE_USED (t) = 0;
1048 
1049   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1050     clear_tree_used (t);
1051 }
1052 
1053 /* Examine TYPE and determine a bit mask of the following features.  */
1054 
1055 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1056 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1057 #define SPCT_HAS_ARRAY			4
1058 #define SPCT_HAS_AGGREGATE		8
1059 
1060 static unsigned int
1061 stack_protect_classify_type (tree type)
1062 {
1063   unsigned int ret = 0;
1064   tree t;
1065 
1066   switch (TREE_CODE (type))
1067     {
1068     case ARRAY_TYPE:
1069       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1070       if (t == char_type_node
1071 	  || t == signed_char_type_node
1072 	  || t == unsigned_char_type_node)
1073 	{
1074 	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1075 	  unsigned HOST_WIDE_INT len;
1076 
1077 	  if (!TYPE_SIZE_UNIT (type)
1078 	      || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1079 	    len = max;
1080 	  else
1081 	    len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1082 
1083 	  if (len == 0)
1084 	    ret = SPCT_HAS_ARRAY;
1085 	  else if (len < max)
1086 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1087 	  else
1088 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1089 	}
1090       else
1091 	ret = SPCT_HAS_ARRAY;
1092       break;
1093 
1094     case UNION_TYPE:
1095     case QUAL_UNION_TYPE:
1096     case RECORD_TYPE:
1097       ret = SPCT_HAS_AGGREGATE;
1098       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1099 	if (TREE_CODE (t) == FIELD_DECL)
1100 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1101       break;
1102 
1103     default:
1104       break;
1105     }
1106 
1107   return ret;
1108 }
1109 
1110 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1111    part of the local stack frame.  Remember if we ever return nonzero for
1112    any variable in this function.  The return value is the phase number in
1113    which the variable should be allocated.  */
1114 
1115 static int
1116 stack_protect_decl_phase (tree decl)
1117 {
1118   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1119   int ret = 0;
1120 
1121   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1122     has_short_buffer = true;
1123 
1124   if (flag_stack_protect == 2)
1125     {
1126       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1127 	  && !(bits & SPCT_HAS_AGGREGATE))
1128 	ret = 1;
1129       else if (bits & SPCT_HAS_ARRAY)
1130 	ret = 2;
1131     }
1132   else
1133     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1134 
1135   if (ret)
1136     has_protected_decls = true;
1137 
1138   return ret;
1139 }
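/* Illustrative classifications (assuming the default --param
   ssp-buffer-size=8): under plain -fstack-protector, "char buf[64]" is a
   large char array and goes to phase 1, "char buf[4]" only sets
   has_short_buffer (and stays in phase 0), and "int v[16]" also stays in
   phase 0.  Under -fstack-protector-all (flag_stack_protect == 2) the
   small char array moves to phase 1 and the int array to phase 2.  */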
1140 
1141 /* Two helper routines that check for phase 1 and phase 2.  These are used
1142    as callbacks for expand_stack_vars.  */
1143 
1144 static bool
1145 stack_protect_decl_phase_1 (tree decl)
1146 {
1147   return stack_protect_decl_phase (decl) == 1;
1148 }
1149 
1150 static bool
1151 stack_protect_decl_phase_2 (tree decl)
1152 {
1153   return stack_protect_decl_phase (decl) == 2;
1154 }
1155 
1156 /* Ensure that variables in different stack protection phases conflict
1157    so that they are not merged and share the same stack slot.  */
1158 
1159 static void
1160 add_stack_protection_conflicts (void)
1161 {
1162   size_t i, j, n = stack_vars_num;
1163   unsigned char *phase;
1164 
1165   phase = XNEWVEC (unsigned char, n);
1166   for (i = 0; i < n; ++i)
1167     phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1168 
1169   for (i = 0; i < n; ++i)
1170     {
1171       unsigned char ph_i = phase[i];
1172       for (j = 0; j < i; ++j)
1173 	if (ph_i != phase[j])
1174 	  add_stack_var_conflict (i, j);
1175     }
1176 
1177   XDELETEVEC (phase);
1178 }
1179 
1180 /* Create a decl for the guard at the top of the stack frame.  */
1181 
1182 static void
1183 create_stack_guard (void)
1184 {
1185   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1186 			   VAR_DECL, NULL, ptr_type_node);
1187   TREE_THIS_VOLATILE (guard) = 1;
1188   TREE_USED (guard) = 1;
1189   expand_one_stack_var (guard);
1190   crtl->stack_protect_guard = guard;
1191 }
1192 
1193 /* A subroutine of estimated_stack_frame_size.  Walk down through the
1194    BLOCK tree and account for the stack space that the variables in each
1195    block would need, without actually expanding them.
1196 
1197    TOPLEVEL is true if this is the outermost BLOCK.  */
1198 
1199 static HOST_WIDE_INT
1200 account_used_vars_for_block (tree block, bool toplevel)
1201 {
1202   tree t;
1203   HOST_WIDE_INT size = 0;
1204 
1205   /* Expand all variables at this level.  */
1206   for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1207     if (TREE_USED (t))
1208       size += expand_one_var (t, toplevel, false);
1209 
1210   /* Expand all variables at containing levels.  */
1211   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1212     size += account_used_vars_for_block (t, false);
1213 
1214   return size;
1215 }
1216 
1217 /* Prepare for expanding variables.  */
1218 static void
1219 init_vars_expansion (void)
1220 {
1221   tree t;
1222   /* Set TREE_USED on all variables in the local_decls.  */
1223   for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1224     TREE_USED (TREE_VALUE (t)) = 1;
1225 
1226   /* Clear TREE_USED on all variables associated with a block scope.  */
1227   clear_tree_used (DECL_INITIAL (current_function_decl));
1228 
1229   /* Initialize local stack smashing state.  */
1230   has_protected_decls = false;
1231   has_short_buffer = false;
1232 }
1233 
1234 /* Free up stack variable graph data.  */
1235 static void
1236 fini_vars_expansion (void)
1237 {
1238   size_t i, n = stack_vars_num;
1239   for (i = 0; i < n; i++)
1240     BITMAP_FREE (stack_vars[i].conflicts);
1241   XDELETEVEC (stack_vars);
1242   XDELETEVEC (stack_vars_sorted);
1243   stack_vars = NULL;
1244   stack_vars_alloc = stack_vars_num = 0;
1245 }
1246 
1247 /* Make a fair guess for the size of the stack frame of the current
1248    function.  This doesn't have to be exact, the result is only used
1249    in the inline heuristics.  So we don't want to run the full stack
1250    var packing algorithm (which is quadratic in the number of stack
1251    vars).  Instead, we calculate the total size of all stack vars.
1252    This turns out to be a pretty fair estimate -- packing of stack
1253    vars doesn't happen very often.  */
1254 
1255 HOST_WIDE_INT
1256 estimated_stack_frame_size (void)
1257 {
1258   HOST_WIDE_INT size = 0;
1259   size_t i;
1260   tree t, outer_block = DECL_INITIAL (current_function_decl);
1261 
1262   init_vars_expansion ();
1263 
1264   for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1265     {
1266       tree var = TREE_VALUE (t);
1267 
1268       if (TREE_USED (var))
1269         size += expand_one_var (var, true, false);
1270       TREE_USED (var) = 1;
1271     }
1272   size += account_used_vars_for_block (outer_block, true);
1273 
1274   if (stack_vars_num > 0)
1275     {
1276       /* Fake sorting the stack vars for account_stack_vars ().  */
1277       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1278       for (i = 0; i < stack_vars_num; ++i)
1279 	stack_vars_sorted[i] = i;
1280       size += account_stack_vars ();
1281       fini_vars_expansion ();
1282     }
1283 
1284   return size;
1285 }
1286 
1287 /* Expand all variables used in the function.  */
1288 
1289 static void
1290 expand_used_vars (void)
1291 {
1292   tree t, next, outer_block = DECL_INITIAL (current_function_decl);
1293   tree maybe_local_decls = NULL_TREE;
1294   unsigned i;
1295 
1296   /* Compute the phase of the stack frame for this function.  */
1297   {
1298     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1299     int off = STARTING_FRAME_OFFSET % align;
1300     frame_phase = off ? align - off : 0;
1301   }
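  /* E.g. (hypothetical target): with PREFERRED_STACK_BOUNDARY of 128 bits
     (align == 16) and STARTING_FRAME_OFFSET of 8, off is 8 and frame_phase
     becomes 8, so (STARTING_FRAME_OFFSET + frame_phase) is a multiple of
     16, matching the invariant described for frame_phase above.  */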
1302 
1303   init_vars_expansion ();
1304 
1305   for (i = 0; i < SA.map->num_partitions; i++)
1306     {
1307       tree var = partition_to_var (SA.map, i);
1308 
1309       gcc_assert (is_gimple_reg (var));
1310       if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1311 	expand_one_var (var, true, true);
1312       else
1313 	{
1314 	  /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
1315 	     contain the default def (representing the parm or result itself)
1316 	     we don't do anything here.  But those which don't contain the
1317 	     default def (representing a temporary based on the parm/result)
1318 	     we need to allocate space just like for normal VAR_DECLs.  */
1319 	  if (!bitmap_bit_p (SA.partition_has_default_def, i))
1320 	    {
1321 	      expand_one_var (var, true, true);
1322 	      gcc_assert (SA.partition_to_pseudo[i]);
1323 	    }
1324 	}
1325     }
1326 
1327   /* At this point all variables on the local_decls with TREE_USED
1328      set are not associated with any block scope.  Lay them out.  */
1329   t = cfun->local_decls;
1330   cfun->local_decls = NULL_TREE;
1331   for (; t; t = next)
1332     {
1333       tree var = TREE_VALUE (t);
1334       bool expand_now = false;
1335 
1336       next = TREE_CHAIN (t);
1337 
1338       /* Expanded above already.  */
1339       if (is_gimple_reg (var))
1340 	{
1341 	  TREE_USED (var) = 0;
1342 	  goto next;
1343 	}
1344       /* We didn't set a block for static or extern because it's hard
1345 	 to tell the difference between a global variable (re)declared
1346 	 in a local scope, and one that's really declared there to
1347 	 begin with.  And it doesn't really matter much, since we're
1348 	 not giving them stack space.  Expand them now.  */
1349       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1350 	expand_now = true;
1351 
1352       /* If the variable is not associated with any block, then it
1353 	 was created by the optimizers, and could be live anywhere
1354 	 in the function.  */
1355       else if (TREE_USED (var))
1356 	expand_now = true;
1357 
1358       /* Finally, mark all variables on the list as used.  We'll use
1359 	 this in a moment when we expand those associated with scopes.  */
1360       TREE_USED (var) = 1;
1361 
1362       if (expand_now)
1363 	expand_one_var (var, true, true);
1364 
1365     next:
1366       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1367 	{
1368 	  rtx rtl = DECL_RTL_IF_SET (var);
1369 
1370 	  /* Keep artificial non-ignored vars in cfun->local_decls
1371 	     chain until instantiate_decls.  */
1372 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1373 	    {
1374 	      TREE_CHAIN (t) = cfun->local_decls;
1375 	      cfun->local_decls = t;
1376 	      continue;
1377 	    }
1378 	  else if (rtl == NULL_RTX)
1379 	    {
1380 	      /* If rtl isn't set yet, which can happen e.g. with
1381 		 -fstack-protector, retry before returning from this
1382 		 function.  */
1383 	      TREE_CHAIN (t) = maybe_local_decls;
1384 	      maybe_local_decls = t;
1385 	      continue;
1386 	    }
1387 	}
1388 
1389       ggc_free (t);
1390     }
1391 
1392   /* At this point, all variables within the block tree with TREE_USED
1393      set are actually used by the optimized function.  Lay them out.  */
1394   expand_used_vars_for_block (outer_block, true);
1395 
1396   if (stack_vars_num > 0)
1397     {
1398       /* Due to the way alias sets work, no variables with non-conflicting
1399 	 alias sets may be assigned the same address.  Add conflicts to
1400 	 reflect this.  */
1401       add_alias_set_conflicts ();
1402 
1403       /* If stack protection is enabled, we don't share space between
1404 	 vulnerable data and non-vulnerable data.  */
1405       if (flag_stack_protect)
1406 	add_stack_protection_conflicts ();
1407 
1408       /* Now that we have collected all stack variables, and have computed a
1409 	 minimal interference graph, attempt to save some stack space.  */
1410       partition_stack_vars ();
1411       if (dump_file)
1412 	dump_stack_var_partition ();
1413     }
1414 
1415   /* There are several conditions under which we should create a
1416      stack guard: protect-all, alloca used, protected decls present.  */
1417   if (flag_stack_protect == 2
1418       || (flag_stack_protect
1419 	  && (cfun->calls_alloca || has_protected_decls)))
1420     create_stack_guard ();
1421 
1422   /* Assign rtl to each variable based on these partitions.  */
1423   if (stack_vars_num > 0)
1424     {
1425       /* Reorder decls to be protected by iterating over the variables
1426 	 array multiple times, and allocating out of each phase in turn.  */
1427       /* ??? We could probably integrate this into the qsort we did
1428 	 earlier, such that we naturally see these variables first,
1429 	 and thus naturally allocate things in the right order.  */
1430       if (has_protected_decls)
1431 	{
1432 	  /* Phase 1 contains only character arrays.  */
1433 	  expand_stack_vars (stack_protect_decl_phase_1);
1434 
1435 	  /* Phase 2 contains other kinds of arrays.  */
1436 	  if (flag_stack_protect == 2)
1437 	    expand_stack_vars (stack_protect_decl_phase_2);
1438 	}
1439 
1440       expand_stack_vars (NULL);
1441 
1442       fini_vars_expansion ();
1443     }
1444 
1445   /* If there were any artificial non-ignored vars without rtl
1446      found earlier, see if deferred stack allocation hasn't assigned
1447      rtl to them.  */
1448   for (t = maybe_local_decls; t; t = next)
1449     {
1450       tree var = TREE_VALUE (t);
1451       rtx rtl = DECL_RTL_IF_SET (var);
1452 
1453       next = TREE_CHAIN (t);
1454 
1455       /* Keep artificial non-ignored vars in cfun->local_decls
1456 	 chain until instantiate_decls.  */
1457       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1458 	{
1459 	  TREE_CHAIN (t) = cfun->local_decls;
1460 	  cfun->local_decls = t;
1461 	  continue;
1462 	}
1463 
1464       ggc_free (t);
1465     }
1466 
1467   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
1468   if (STACK_ALIGNMENT_NEEDED)
1469     {
1470       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1471       if (!FRAME_GROWS_DOWNWARD)
1472 	frame_offset += align - 1;
1473       frame_offset &= -align;
1474     }
1475 }
1476 
1477 
1478 /* If we need to produce a detailed dump, print the tree representation
1479    for STMT to the dump file.  SINCE is the last RTX after which the RTL
1480    generated for STMT should have been appended.  */
1481 
1482 static void
1483 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1484 {
1485   if (dump_file && (dump_flags & TDF_DETAILS))
1486     {
1487       fprintf (dump_file, "\n;; ");
1488       print_gimple_stmt (dump_file, stmt, 0,
1489 			 TDF_SLIM | (dump_flags & TDF_LINENO));
1490       fprintf (dump_file, "\n");
1491 
1492       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1493     }
1494 }
1495 
1496 /* Maps the blocks that do not contain tree labels to rtx labels.  */
1497 
1498 static struct pointer_map_t *lab_rtx_for_bb;
1499 
1500 /* Returns the label_rtx expression for a label starting basic block BB.  */
1501 
1502 static rtx
1503 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1504 {
1505   gimple_stmt_iterator gsi;
1506   tree lab;
1507   gimple lab_stmt;
1508   void **elt;
1509 
1510   if (bb->flags & BB_RTL)
1511     return block_label (bb);
1512 
1513   elt = pointer_map_contains (lab_rtx_for_bb, bb);
1514   if (elt)
1515     return (rtx) *elt;
1516 
1517   /* Find the tree label if it is present.  */
1518 
1519   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1520     {
1521       lab_stmt = gsi_stmt (gsi);
1522       if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1523 	break;
1524 
1525       lab = gimple_label_label (lab_stmt);
1526       if (DECL_NONLOCAL (lab))
1527 	break;
1528 
1529       return label_rtx (lab);
1530     }
1531 
1532   elt = pointer_map_insert (lab_rtx_for_bb, bb);
1533   *elt = gen_label_rtx ();
1534   return (rtx) *elt;
1535 }
1536 
1537 
1538 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
1539    of a basic block where we just expanded the conditional at the end,
1540    possibly clean up the CFG and instruction sequence.  LAST is the
1541    last instruction before the just emitted jump sequence.  */
1542 
1543 static void
1544 maybe_cleanup_end_of_block (edge e, rtx last)
1545 {
1546   /* Special case: when jumpif decides that the condition is
1547      trivial it emits an unconditional jump (and the necessary
1548      barrier).  But we still have two edges, the fallthru one is
1549      wrong.  purge_dead_edges would clean this up later.  Unfortunately
1550      we have to insert insns (and split edges) before
1551      find_many_sub_basic_blocks and hence before purge_dead_edges.
1552      But splitting edges might create new blocks which depend on the
1553      fact that if there are two edges there's no barrier.  So the
1554      barrier would get lost and verify_flow_info would ICE.  Instead
1555      of auditing all edge splitters to care for the barrier (which
1556      normally isn't there in a cleaned CFG), fix it here.  */
1557   if (BARRIER_P (get_last_insn ()))
1558     {
1559       rtx insn;
1560       remove_edge (e);
1561       /* Now, we have a single successor block, if we have insns to
1562 	 insert on the remaining edge we potentially will insert
1563 	 them at the end of this block (if the dest block isn't feasible)
1564 	 in order to avoid splitting the edge.  This insertion will take
1565 	 place in front of the last jump.  But we might have emitted
1566 	 multiple jumps (conditional and one unconditional) to the
1567 	 same destination.  Inserting in front of the last one then
1568 	 is a problem.  See PR 40021.  We fix this by deleting all
1569 	 jumps except the last unconditional one.  */
1570       insn = PREV_INSN (get_last_insn ());
1571       /* Make sure we have an unconditional jump.  Otherwise we're
1572 	 confused.  */
1573       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1574       for (insn = PREV_INSN (insn); insn != last;)
1575 	{
1576 	  insn = PREV_INSN (insn);
1577 	  if (JUMP_P (NEXT_INSN (insn)))
1578 	    {
1579 	      if (!any_condjump_p (NEXT_INSN (insn)))
1580 		{
1581 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1582 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1583 		}
1584 	      delete_insn (NEXT_INSN (insn));
1585 	    }
1586 	}
1587     }
1588 }
1589 
1590 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
1591    Returns a new basic block if we've terminated the current basic
1592    block and created a new one.  */
1593 
1594 static basic_block
1595 expand_gimple_cond (basic_block bb, gimple stmt)
1596 {
1597   basic_block new_bb, dest;
1598   edge new_edge;
1599   edge true_edge;
1600   edge false_edge;
1601   rtx last2, last;
1602   enum tree_code code;
1603   tree op0, op1;
1604 
1605   code = gimple_cond_code (stmt);
1606   op0 = gimple_cond_lhs (stmt);
1607   op1 = gimple_cond_rhs (stmt);
1608   /* We're sometimes presented with such code:
1609        D.123_1 = x < y;
1610        if (D.123_1 != 0)
1611          ...
1612      This would expand to two comparisons which then later might
1613      be cleaned up by combine.  But some pattern matchers like if-conversion
1614      work better when there's only one compare, so make up for this
1615      here as a special exception if TER would have made the same change.  */
1616   if (gimple_cond_single_var_p (stmt)
1617       && SA.values
1618       && TREE_CODE (op0) == SSA_NAME
1619       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1620     {
1621       gimple second = SSA_NAME_DEF_STMT (op0);
1622       if (gimple_code (second) == GIMPLE_ASSIGN)
1623 	{
1624 	  enum tree_code code2 = gimple_assign_rhs_code (second);
1625 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
1626 	    {
1627 	      code = code2;
1628 	      op0 = gimple_assign_rhs1 (second);
1629 	      op1 = gimple_assign_rhs2 (second);
1630 	    }
1631 	  /* If jumps are cheap, turn some more codes into
1632 	     jumpy sequences.  */
1633 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1634 	    {
1635 	      if ((code2 == BIT_AND_EXPR
1636 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1637 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1638 		  || code2 == TRUTH_AND_EXPR)
1639 		{
1640 		  code = TRUTH_ANDIF_EXPR;
1641 		  op0 = gimple_assign_rhs1 (second);
1642 		  op1 = gimple_assign_rhs2 (second);
1643 		}
1644 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1645 		{
1646 		  code = TRUTH_ORIF_EXPR;
1647 		  op0 = gimple_assign_rhs1 (second);
1648 		  op1 = gimple_assign_rhs2 (second);
1649 		}
1650 	    }
1651 	}
1652     }
1653 
1654   last2 = last = get_last_insn ();
1655 
1656   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1657   if (gimple_has_location (stmt))
1658     {
1659       set_curr_insn_source_location (gimple_location (stmt));
1660       set_curr_insn_block (gimple_block (stmt));
1661     }
1662 
1663   /* These flags have no purpose in RTL land.  */
1664   true_edge->flags &= ~EDGE_TRUE_VALUE;
1665   false_edge->flags &= ~EDGE_FALSE_VALUE;
1666 
1667   /* We can either have a pure conditional jump with one fallthru edge or
1668      a two-way jump that needs to be decomposed into two basic blocks.  */
1669   if (false_edge->dest == bb->next_bb)
1670     {
1671       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1672 		true_edge->probability);
1673       maybe_dump_rtl_for_gimple_stmt (stmt, last);
1674       if (true_edge->goto_locus)
1675 	{
1676 	  set_curr_insn_source_location (true_edge->goto_locus);
1677 	  set_curr_insn_block (true_edge->goto_block);
1678 	  true_edge->goto_locus = curr_insn_locator ();
1679 	}
1680       true_edge->goto_block = NULL;
1681       false_edge->flags |= EDGE_FALLTHRU;
1682       maybe_cleanup_end_of_block (false_edge, last);
1683       return NULL;
1684     }
1685   if (true_edge->dest == bb->next_bb)
1686     {
1687       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1688 		   false_edge->probability);
1689       maybe_dump_rtl_for_gimple_stmt (stmt, last);
1690       if (false_edge->goto_locus)
1691 	{
1692 	  set_curr_insn_source_location (false_edge->goto_locus);
1693 	  set_curr_insn_block (false_edge->goto_block);
1694 	  false_edge->goto_locus = curr_insn_locator ();
1695 	}
1696       false_edge->goto_block = NULL;
1697       true_edge->flags |= EDGE_FALLTHRU;
1698       maybe_cleanup_end_of_block (true_edge, last);
1699       return NULL;
1700     }
1701 
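  /* Neither successor starts the next block, so this is the two-way case.
     As an illustrative sketch (made-up names), for
       if (a_5 < b_6) goto <D.1>; else goto <D.2>;
     where neither destination is bb->next_bb, we emit a conditional jump
     to the true destination followed by an unconditional jump to the false
     destination; the unconditional jump is then split off into a new basic
     block and the false edge is redirected to it.  */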
1702   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1703 	    true_edge->probability);
1704   last = get_last_insn ();
1705   if (false_edge->goto_locus)
1706     {
1707       set_curr_insn_source_location (false_edge->goto_locus);
1708       set_curr_insn_block (false_edge->goto_block);
1709       false_edge->goto_locus = curr_insn_locator ();
1710     }
1711   false_edge->goto_block = NULL;
1712   emit_jump (label_rtx_for_bb (false_edge->dest));
1713 
1714   BB_END (bb) = last;
1715   if (BARRIER_P (BB_END (bb)))
1716     BB_END (bb) = PREV_INSN (BB_END (bb));
1717   update_bb_for_insn (bb);
1718 
1719   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1720   dest = false_edge->dest;
1721   redirect_edge_succ (false_edge, new_bb);
1722   false_edge->flags |= EDGE_FALLTHRU;
1723   new_bb->count = false_edge->count;
1724   new_bb->frequency = EDGE_FREQUENCY (false_edge);
1725   new_edge = make_edge (new_bb, dest, 0);
1726   new_edge->probability = REG_BR_PROB_BASE;
1727   new_edge->count = new_bb->count;
1728   if (BARRIER_P (BB_END (new_bb)))
1729     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1730   update_bb_for_insn (new_bb);
1731 
1732   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1733 
1734   if (true_edge->goto_locus)
1735     {
1736       set_curr_insn_source_location (true_edge->goto_locus);
1737       set_curr_insn_block (true_edge->goto_block);
1738       true_edge->goto_locus = curr_insn_locator ();
1739     }
1740   true_edge->goto_block = NULL;
1741 
1742   return new_bb;
1743 }
1744 
1745 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1746    statement STMT.  */
1747 
1748 static void
1749 expand_call_stmt (gimple stmt)
1750 {
1751   tree exp;
1752   tree lhs = gimple_call_lhs (stmt);
1753   size_t i;
1754   bool builtin_p;
1755   tree decl;
1756 
1757   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1758 
1759   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1760   decl = gimple_call_fndecl (stmt);
1761   builtin_p = decl && DECL_BUILT_IN (decl);
1762 
1763   TREE_TYPE (exp) = gimple_call_return_type (stmt);
1764   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1765 
1766   for (i = 0; i < gimple_call_num_args (stmt); i++)
1767     {
1768       tree arg = gimple_call_arg (stmt, i);
1769       gimple def;
1770       /* TER substitutes addresses into arguments of builtin functions so we
1771 	 have a chance to infer more precise alignment information.  See PR39954.  */
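      /* As a hypothetical example, for
	   tmp_1 = &s.f;
	   __builtin_memcpy (tmp_1, src_2, 4);
	 the TERed definition lets us pass the ADDR_EXPR &s.f directly as
	 the argument, so the builtin expander can see the alignment of s.f
	 rather than only that of a generic pointer.  */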
1772       if (builtin_p
1773 	  && TREE_CODE (arg) == SSA_NAME
1774 	  && (def = get_gimple_for_ssa_name (arg))
1775 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
1776 	arg = gimple_assign_rhs1 (def);
1777       CALL_EXPR_ARG (exp, i) = arg;
1778     }
1779 
1780   if (gimple_has_side_effects (stmt))
1781     TREE_SIDE_EFFECTS (exp) = 1;
1782 
1783   if (gimple_call_nothrow_p (stmt))
1784     TREE_NOTHROW (exp) = 1;
1785 
1786   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1787   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1788   CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1789   CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1790   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1791   SET_EXPR_LOCATION (exp, gimple_location (stmt));
1792   TREE_BLOCK (exp) = gimple_block (stmt);
1793 
1794   if (lhs)
1795     expand_assignment (lhs, exp, false);
1796   else
1797     expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1798 }
1799 
1800 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1801    STMT that doesn't require special handling for outgoing edges.  That
1802    is, no tailcalls and no GIMPLE_COND.  */
1803 
1804 static void
1805 expand_gimple_stmt_1 (gimple stmt)
1806 {
1807   tree op0;
1808   switch (gimple_code (stmt))
1809     {
1810     case GIMPLE_GOTO:
1811       op0 = gimple_goto_dest (stmt);
1812       if (TREE_CODE (op0) == LABEL_DECL)
1813 	expand_goto (op0);
1814       else
1815 	expand_computed_goto (op0);
1816       break;
1817     case GIMPLE_LABEL:
1818       expand_label (gimple_label_label (stmt));
1819       break;
1820     case GIMPLE_NOP:
1821     case GIMPLE_PREDICT:
1822       break;
1823     case GIMPLE_SWITCH:
1824       expand_case (stmt);
1825       break;
1826     case GIMPLE_ASM:
1827       expand_asm_stmt (stmt);
1828       break;
1829     case GIMPLE_CALL:
1830       expand_call_stmt (stmt);
1831       break;
1832 
1833     case GIMPLE_RETURN:
1834       op0 = gimple_return_retval (stmt);
1835 
1836       if (op0 && op0 != error_mark_node)
1837 	{
1838 	  tree result = DECL_RESULT (current_function_decl);
1839 
1840 	  /* If we are not returning the current function's RESULT_DECL,
1841 	     build an assignment to it.  */
1842 	  if (op0 != result)
1843 	    {
1844 	      /* I believe that a function's RESULT_DECL is unique.  */
1845 	      gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1846 
1847 	      /* ??? We'd like to use simply expand_assignment here,
1848 	         but this fails if the value is of BLKmode but the return
1849 		 decl is a register.  expand_return has special handling
1850 		 for this combination, which eventually should move
1851 		 to common code.  See comments there.  Until then, let's
1852 		 build a modify expression :-/  */
1853 	      op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1854 			    result, op0);
1855 	    }
1856 	}
1857       if (!op0)
1858 	expand_null_return ();
1859       else
1860 	expand_return (op0);
1861       break;
1862 
1863     case GIMPLE_ASSIGN:
1864       {
1865 	tree lhs = gimple_assign_lhs (stmt);
1866 
1867 	/* Tree expand used to fiddle with |= and &= of two bitfield
1868 	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
1869 	   of binary assigns must be a gimple reg.  */
1870 
1871 	if (TREE_CODE (lhs) != SSA_NAME
1872 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
1873 	       == GIMPLE_SINGLE_RHS)
1874 	  {
1875 	    tree rhs = gimple_assign_rhs1 (stmt);
1876 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1877 			== GIMPLE_SINGLE_RHS);
1878 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1879 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1880 	    expand_assignment (lhs, rhs,
1881 			       gimple_assign_nontemporal_move_p (stmt));
1882 	  }
1883 	else
1884 	  {
1885 	    rtx target, temp;
1886 	    bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1887 	    struct separate_ops ops;
1888 	    bool promoted = false;
1889 
1890 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1891 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1892 	      promoted = true;
1893 
1894 	    ops.code = gimple_assign_rhs_code (stmt);
1895 	    ops.type = TREE_TYPE (lhs);
1896 	    switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1897 	      {
1898 		case GIMPLE_BINARY_RHS:
1899 		  ops.op1 = gimple_assign_rhs2 (stmt);
1900 		  /* Fallthru */
1901 		case GIMPLE_UNARY_RHS:
1902 		  ops.op0 = gimple_assign_rhs1 (stmt);
1903 		  break;
1904 		default:
1905 		  gcc_unreachable ();
1906 	      }
1907 	    ops.location = gimple_location (stmt);
1908 
1909 	    /* If we want to use a nontemporal store, force the value into a
1910 	       register first.  If we store into a promoted register,
1911 	       don't directly expand to target.  */
1912 	    temp = nontemporal || promoted ? NULL_RTX : target;
1913 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
1914 				       EXPAND_NORMAL);
1915 
1916 	    if (temp == target)
1917 	      ;
1918 	    else if (promoted)
1919 	      {
1920 		int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
1921 		/* If TEMP is a VOIDmode constant, use convert_modes to make
1922 		   sure that we properly convert it.  */
1923 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1924 		  {
1925 		    temp = convert_modes (GET_MODE (target),
1926 					  TYPE_MODE (ops.type),
1927 					  temp, unsignedp);
1928 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
1929 					  GET_MODE (target), temp, unsignedp);
1930 		  }
1931 
1932 		convert_move (SUBREG_REG (target), temp, unsignedp);
1933 	      }
1934 	    else if (nontemporal && emit_storent_insn (target, temp))
1935 	      ;
1936 	    else
1937 	      {
1938 		temp = force_operand (temp, target);
1939 		if (temp != target)
1940 		  emit_move_insn (target, temp);
1941 	      }
1942 	  }
1943       }
1944       break;
1945 
1946     default:
1947       gcc_unreachable ();
1948     }
1949 }
1950 
1951 /* Expand one gimple statement STMT and return the last RTL instruction
1952    before any of the newly generated ones.
1953 
1954    In addition to generating the necessary RTL instructions this also
1955    sets REG_EH_REGION notes if necessary and sets the current source
1956    location for diagnostics.  */
1957 
1958 static rtx
1959 expand_gimple_stmt (gimple stmt)
1960 {
1961   int lp_nr = 0;
1962   rtx last = NULL;
1963   location_t saved_location = input_location;
1964 
1965   last = get_last_insn ();
1966 
1967   /* If this is an expression of some kind and it has an associated line
1968      number, then emit the line number before expanding the expression.
1969 
1970      We need to save and restore the file and line information so that
1971      errors discovered during expansion are emitted with the right
1972 	 information.  It would be better if the diagnostic routines
1973      used the file/line information embedded in the tree nodes rather
1974      than globals.  */
1975   gcc_assert (cfun);
1976 
1977   if (gimple_has_location (stmt))
1978     {
1979       input_location = gimple_location (stmt);
1980       set_curr_insn_source_location (input_location);
1981 
1982       /* Record where the insns produced belong.  */
1983       set_curr_insn_block (gimple_block (stmt));
1984     }
1985 
1986   expand_gimple_stmt_1 (stmt);
1987   /* Free any temporaries used to evaluate this statement.  */
1988   free_temp_slots ();
1989 
1990   input_location = saved_location;
1991 
1992   /* Mark all insns that may trap.  */
1993   lp_nr = lookup_stmt_eh_lp (stmt);
1994   if (lp_nr)
1995     {
1996       rtx insn;
1997       for (insn = next_real_insn (last); insn;
1998 	   insn = next_real_insn (insn))
1999 	{
2000 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2001 	      /* If we want exceptions for non-call insns, any
2002 		 may_trap_p instruction may throw.  */
2003 	      && GET_CODE (PATTERN (insn)) != CLOBBER
2004 	      && GET_CODE (PATTERN (insn)) != USE
2005 	      && insn_could_throw_p (insn))
2006 	    make_reg_eh_region_note (insn, 0, lp_nr);
2007 	}
2008     }
2009 
2010   return last;
2011 }
2012 
2013 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
2014    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
2015    generated a tail call (something that might be denied by the ABI
2016    rules governing the call; see calls.c).
2017 
2018    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2019    can still reach the rest of BB.  The case here is __builtin_sqrt,
2020    where the NaN result goes through the external function (with a
2021    tailcall) and the normal result happens via a sqrt instruction.  */
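
/* A rough, illustrative sketch of that __builtin_sqrt case:

     r = <sqrt insn> (x);
     if (r unordered r)		<- the result is a NaN
       return sqrt (x);		<- sibcall to the library routine
   lab:
     ... the rest of BB uses r ...

   The label starting the in-line path is what lets control fall through
   into the remainder of the block, i.e. *CAN_FALLTHRU is set to true.  */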
2022 
2023 static basic_block
2024 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2025 {
2026   rtx last2, last;
2027   edge e;
2028   edge_iterator ei;
2029   int probability;
2030   gcov_type count;
2031 
2032   last2 = last = expand_gimple_stmt (stmt);
2033 
2034   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2035     if (CALL_P (last) && SIBLING_CALL_P (last))
2036       goto found;
2037 
2038   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2039 
2040   *can_fallthru = true;
2041   return NULL;
2042 
2043  found:
2044   /* ??? Wouldn't it be better to just reset any pending stack adjust?
2045      Any instructions emitted here are about to be deleted.  */
2046   do_pending_stack_adjust ();
2047 
2048   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
2049   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
2050      EH or abnormal edges, we shouldn't have created a tail call in
2051      the first place.  So it seems to me we should just be removing
2052      all edges here, or redirecting the existing fallthru edge to
2053      the exit block.  */
2054 
2055   probability = 0;
2056   count = 0;
2057 
2058   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2059     {
2060       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2061 	{
2062 	  if (e->dest != EXIT_BLOCK_PTR)
2063 	    {
2064 	      e->dest->count -= e->count;
2065 	      e->dest->frequency -= EDGE_FREQUENCY (e);
2066 	      if (e->dest->count < 0)
2067 		e->dest->count = 0;
2068 	      if (e->dest->frequency < 0)
2069 		e->dest->frequency = 0;
2070 	    }
2071 	  count += e->count;
2072 	  probability += e->probability;
2073 	  remove_edge (e);
2074 	}
2075       else
2076 	ei_next (&ei);
2077     }
2078 
2079   /* This is somewhat ugly: the call_expr expander often emits instructions
2080      after the sibcall (to perform the function return).  These confuse the
2081      find_many_sub_basic_blocks code, so we need to get rid of them.  */
2082   last = NEXT_INSN (last);
2083   gcc_assert (BARRIER_P (last));
2084 
2085   *can_fallthru = false;
2086   while (NEXT_INSN (last))
2087     {
2088       /* For instance, the sqrt builtin expander expands an if with a
2089 	 sibcall in the then-arm and a label for the else-arm.  */
2090       if (LABEL_P (NEXT_INSN (last)))
2091 	{
2092 	  *can_fallthru = true;
2093 	  break;
2094 	}
2095       delete_insn (NEXT_INSN (last));
2096     }
2097 
2098   e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2099   e->probability += probability;
2100   e->count += count;
2101   BB_END (bb) = last;
2102   update_bb_for_insn (bb);
2103 
2104   if (NEXT_INSN (last))
2105     {
2106       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2107 
2108       last = BB_END (bb);
2109       if (BARRIER_P (last))
2110 	BB_END (bb) = PREV_INSN (last);
2111     }
2112 
2113   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2114 
2115   return bb;
2116 }
2117 
2118 /* Return the difference between the floor and the truncated result of
2119    a signed division by OP1 with remainder MOD.  */
2120 static rtx
2121 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2122 {
2123   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2124   return gen_rtx_IF_THEN_ELSE
2125     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2126      gen_rtx_IF_THEN_ELSE
2127      (mode, gen_rtx_LT (BImode,
2128 			gen_rtx_DIV (mode, op1, mod),
2129 			const0_rtx),
2130       constm1_rtx, const0_rtx),
2131      const0_rtx);
2132 }
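
/* A worked example of the formula above (values are illustrative):
   for -7 / 2 the truncated quotient is -3 with remainder MOD = -1.
   MOD != 0 and OP1 / MOD = 2 / -1 < 0, so the adjustment is -1 and the
   floor result is -3 + -1 = -4, i.e. floor (-3.5).  For 7 / 2,
   OP1 / MOD > 0, the adjustment is 0 and the result stays 3.  */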
2133 
2134 /* Return the difference between the ceil and the truncated result of
2135    a signed division by OP1 with remainder MOD.  */
2136 static rtx
2137 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2138 {
2139   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2140   return gen_rtx_IF_THEN_ELSE
2141     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2142      gen_rtx_IF_THEN_ELSE
2143      (mode, gen_rtx_GT (BImode,
2144 			gen_rtx_DIV (mode, op1, mod),
2145 			const0_rtx),
2146       const1_rtx, const0_rtx),
2147      const0_rtx);
2148 }
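
/* Worked example (illustrative): for 7 / 2 the truncated quotient is 3
   with MOD = 1; OP1 / MOD = 2 > 0, so the adjustment is +1 and the ceil
   result is 4.  For -7 / 2, OP1 / MOD < 0, so the adjustment is 0 and
   the result stays -3, i.e. ceil (-3.5).  */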
2149 
2150 /* Return the difference between the ceil and the truncated result of
2151    an unsigned division by OP1 with remainder MOD.  */
2152 static rtx
2153 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2154 {
2155   /* (mod != 0 ? 1 : 0) */
2156   return gen_rtx_IF_THEN_ELSE
2157     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2158      const1_rtx, const0_rtx);
2159 }
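
/* Worked example (illustrative): for 7u / 2u the truncated quotient is 3
   with MOD = 1; MOD != 0, so the adjustment is +1 and the ceil result is 4.
   When MOD is 0 the division is exact and no adjustment is needed.  */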
2160 
2161 /* Return the difference between the rounded and the truncated result
2162    of a signed division by OP1 with remainder MOD.  Halfway cases are
2163    rounded away from zero, rather than to the nearest even number.  */
2164 static rtx
2165 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2166 {
2167   /* (abs (mod) >= abs (op1) - abs (mod)
2168       ? (op1 / mod > 0 ? 1 : -1)
2169       : 0) */
2170   return gen_rtx_IF_THEN_ELSE
2171     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2172 		       gen_rtx_MINUS (mode,
2173 				      gen_rtx_ABS (mode, op1),
2174 				      gen_rtx_ABS (mode, mod))),
2175      gen_rtx_IF_THEN_ELSE
2176      (mode, gen_rtx_GT (BImode,
2177 			gen_rtx_DIV (mode, op1, mod),
2178 			const0_rtx),
2179       const1_rtx, constm1_rtx),
2180      const0_rtx);
2181 }
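
/* Worked example (illustrative): for 7 / 2, MOD = 1 and
   abs (MOD) >= abs (OP1) - abs (MOD) holds (1 >= 1); OP1 / MOD > 0, so the
   adjustment is +1 and 3.5 rounds to 4.  For -7 / 2, MOD = -1 and
   OP1 / MOD < 0, so the adjustment is -1 and -3.5 rounds to -4; both
   halfway cases round away from zero.  */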
2182 
2183 /* Return the difference between the rounded and the truncated result
2184    of an unsigned division by OP1 with remainder MOD.  Halfway cases
2185    are rounded away from zero, rather than to the nearest even
2186    number.  */
2187 static rtx
2188 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2189 {
2190   /* (mod >= op1 - mod ? 1 : 0) */
2191   return gen_rtx_IF_THEN_ELSE
2192     (mode, gen_rtx_GE (BImode, mod,
2193 		       gen_rtx_MINUS (mode, op1, mod)),
2194      const1_rtx, const0_rtx);
2195 }
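
/* Worked example (illustrative): for 7u / 2u, MOD = 1 and
   MOD >= OP1 - MOD holds (1 >= 1), so the adjustment is +1 and the result
   is 4; for 5u / 3u, MOD = 2 >= 1, so the result rounds up to 2.  */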
2196 
2197 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2198    any rtl.  */
2199 
2200 static rtx
2201 convert_debug_memory_address (enum machine_mode mode, rtx x)
2202 {
2203   enum machine_mode xmode = GET_MODE (x);
2204 
2205 #ifndef POINTERS_EXTEND_UNSIGNED
2206   gcc_assert (mode == Pmode);
2207   gcc_assert (xmode == mode || xmode == VOIDmode);
2208 #else
2209   gcc_assert (mode == Pmode || mode == ptr_mode);
2210 
2211   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2212     return x;
2213 
2214   if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2215     x = simplify_gen_subreg (mode, x, xmode,
2216 			     subreg_lowpart_offset
2217 			     (mode, xmode));
2218   else if (POINTERS_EXTEND_UNSIGNED > 0)
2219     x = gen_rtx_ZERO_EXTEND (mode, x);
2220   else if (!POINTERS_EXTEND_UNSIGNED)
2221     x = gen_rtx_SIGN_EXTEND (mode, x);
2222   else
2223     gcc_unreachable ();
2224 #endif /* POINTERS_EXTEND_UNSIGNED */
2225 
2226   return x;
2227 }
2228 
2229 /* Return an RTX equivalent to the value of the tree expression
2230    EXP.  */
2231 
2232 static rtx
2233 expand_debug_expr (tree exp)
2234 {
2235   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2236   enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2237   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2238   addr_space_t as;
2239   enum machine_mode address_mode;
2240 
2241   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2242     {
2243     case tcc_expression:
2244       switch (TREE_CODE (exp))
2245 	{
2246 	case COND_EXPR:
2247 	case DOT_PROD_EXPR:
2248 	  goto ternary;
2249 
2250 	case TRUTH_ANDIF_EXPR:
2251 	case TRUTH_ORIF_EXPR:
2252 	case TRUTH_AND_EXPR:
2253 	case TRUTH_OR_EXPR:
2254 	case TRUTH_XOR_EXPR:
2255 	  goto binary;
2256 
2257 	case TRUTH_NOT_EXPR:
2258 	  goto unary;
2259 
2260 	default:
2261 	  break;
2262 	}
2263       break;
2264 
2265     ternary:
2266       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2267       if (!op2)
2268 	return NULL_RTX;
2269       /* Fall through.  */
2270 
2271     binary:
2272     case tcc_binary:
2273     case tcc_comparison:
2274       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2275       if (!op1)
2276 	return NULL_RTX;
2277       /* Fall through.  */
2278 
2279     unary:
2280     case tcc_unary:
2281       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2282       if (!op0)
2283 	return NULL_RTX;
2284       break;
2285 
2286     case tcc_type:
2287     case tcc_statement:
2288       gcc_unreachable ();
2289 
2290     case tcc_constant:
2291     case tcc_exceptional:
2292     case tcc_declaration:
2293     case tcc_reference:
2294     case tcc_vl_exp:
2295       break;
2296     }
2297 
2298   switch (TREE_CODE (exp))
2299     {
2300     case STRING_CST:
2301       if (!lookup_constant_def (exp))
2302 	{
2303 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
2304 	      != (size_t) TREE_STRING_LENGTH (exp))
2305 	    return NULL_RTX;
2306 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2307 	  op0 = gen_rtx_MEM (BLKmode, op0);
2308 	  set_mem_attributes (op0, exp, 0);
2309 	  return op0;
2310 	}
2311       /* Fall through...  */
2312 
2313     case INTEGER_CST:
2314     case REAL_CST:
2315     case FIXED_CST:
2316       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2317       return op0;
2318 
2319     case COMPLEX_CST:
2320       gcc_assert (COMPLEX_MODE_P (mode));
2321       op0 = expand_debug_expr (TREE_REALPART (exp));
2322       op1 = expand_debug_expr (TREE_IMAGPART (exp));
2323       return gen_rtx_CONCAT (mode, op0, op1);
2324 
2325     case DEBUG_EXPR_DECL:
2326       op0 = DECL_RTL_IF_SET (exp);
2327 
2328       if (op0)
2329 	return op0;
2330 
2331       op0 = gen_rtx_DEBUG_EXPR (mode);
2332       DEBUG_EXPR_TREE_DECL (op0) = exp;
2333       SET_DECL_RTL (exp, op0);
2334 
2335       return op0;
2336 
2337     case VAR_DECL:
2338     case PARM_DECL:
2339     case FUNCTION_DECL:
2340     case LABEL_DECL:
2341     case CONST_DECL:
2342     case RESULT_DECL:
2343       op0 = DECL_RTL_IF_SET (exp);
2344 
2345       /* This decl was probably optimized away.  */
2346       if (!op0)
2347 	{
2348 	  if (TREE_CODE (exp) != VAR_DECL
2349 	      || DECL_EXTERNAL (exp)
2350 	      || !TREE_STATIC (exp)
2351 	      || !DECL_NAME (exp)
2352 	      || DECL_HARD_REGISTER (exp)
2353 	      || mode == VOIDmode)
2354 	    return NULL;
2355 
2356 	  op0 = make_decl_rtl_for_debug (exp);
2357 	  if (!MEM_P (op0)
2358 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2359 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2360 	    return NULL;
2361 	}
2362       else
2363 	op0 = copy_rtx (op0);
2364 
2365       if (GET_MODE (op0) == BLKmode
2366 	  /* If op0 is not BLKmode but MODE is, adjust_mode
2367 	     below would ICE.  While it is likely a FE bug,
2368 	     try to be robust here.  See PR43166.  */
2369 	  || mode == BLKmode
2370 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2371 	{
2372 	  gcc_assert (MEM_P (op0));
2373 	  op0 = adjust_address_nv (op0, mode, 0);
2374 	  return op0;
2375 	}
2376 
2377       /* Fall through.  */
2378 
2379     adjust_mode:
2380     case PAREN_EXPR:
2381     case NOP_EXPR:
2382     case CONVERT_EXPR:
2383       {
2384 	enum machine_mode inner_mode = GET_MODE (op0);
2385 
2386 	if (mode == inner_mode)
2387 	  return op0;
2388 
2389 	if (inner_mode == VOIDmode)
2390 	  {
2391 	    if (TREE_CODE (exp) == SSA_NAME)
2392 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
2393 	    else
2394 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2395 	    if (mode == inner_mode)
2396 	      return op0;
2397 	  }
2398 
2399 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2400 	  {
2401 	    if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2402 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2403 	    else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2404 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2405 	    else
2406 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2407 	  }
2408 	else if (FLOAT_MODE_P (mode))
2409 	  {
2410 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
2411 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2412 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2413 	    else
2414 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2415 	  }
2416 	else if (FLOAT_MODE_P (inner_mode))
2417 	  {
2418 	    if (unsignedp)
2419 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2420 	    else
2421 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2422 	  }
2423 	else if (CONSTANT_P (op0)
2424 		 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2425 	  op0 = simplify_gen_subreg (mode, op0, inner_mode,
2426 				     subreg_lowpart_offset (mode,
2427 							    inner_mode));
2428 	else if (unsignedp)
2429 	  op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2430 	else
2431 	  op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2432 
2433 	return op0;
2434       }
2435 
2436     case INDIRECT_REF:
2437     case ALIGN_INDIRECT_REF:
2438     case MISALIGNED_INDIRECT_REF:
2439       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2440       if (!op0)
2441 	return NULL;
2442 
2443       if (POINTER_TYPE_P (TREE_TYPE (exp)))
2444 	{
2445 	  as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2446 	  address_mode = targetm.addr_space.address_mode (as);
2447 	}
2448       else
2449 	{
2450 	  as = ADDR_SPACE_GENERIC;
2451 	  address_mode = Pmode;
2452 	}
2453 
2454       if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
2455 	{
2456 	  int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
2457 	  op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
2458 	}
2459 
2460       op0 = gen_rtx_MEM (mode, op0);
2461 
2462       set_mem_attributes (op0, exp, 0);
2463       set_mem_addr_space (op0, as);
2464 
2465       return op0;
2466 
2467     case TARGET_MEM_REF:
2468       if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2469 	return NULL;
2470 
2471       op0 = expand_debug_expr
2472 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2473       if (!op0)
2474 	return NULL;
2475 
2476       as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2477 
2478       op0 = gen_rtx_MEM (mode, op0);
2479 
2480       set_mem_attributes (op0, exp, 0);
2481       set_mem_addr_space (op0, as);
2482 
2483       return op0;
2484 
2485     case ARRAY_REF:
2486     case ARRAY_RANGE_REF:
2487     case COMPONENT_REF:
2488     case BIT_FIELD_REF:
2489     case REALPART_EXPR:
2490     case IMAGPART_EXPR:
2491     case VIEW_CONVERT_EXPR:
2492       {
2493 	enum machine_mode mode1;
2494 	HOST_WIDE_INT bitsize, bitpos;
2495 	tree offset;
2496 	int volatilep = 0;
2497 	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2498 					&mode1, &unsignedp, &volatilep, false);
2499 	rtx orig_op0;
2500 
2501 	if (bitsize == 0)
2502 	  return NULL;
2503 
2504 	orig_op0 = op0 = expand_debug_expr (tem);
2505 
2506 	if (!op0)
2507 	  return NULL;
2508 
2509 	if (offset)
2510 	  {
2511 	    enum machine_mode addrmode, offmode;
2512 
2513 	    if (!MEM_P (op0))
2514 	      return NULL;
2515 
2516 	    op0 = XEXP (op0, 0);
2517 	    addrmode = GET_MODE (op0);
2518 	    if (addrmode == VOIDmode)
2519 	      addrmode = Pmode;
2520 
2521 	    op1 = expand_debug_expr (offset);
2522 	    if (!op1)
2523 	      return NULL;
2524 
2525 	    offmode = GET_MODE (op1);
2526 	    if (offmode == VOIDmode)
2527 	      offmode = TYPE_MODE (TREE_TYPE (offset));
2528 
2529 	    if (addrmode != offmode)
2530 	      op1 = simplify_gen_subreg (addrmode, op1, offmode,
2531 					 subreg_lowpart_offset (addrmode,
2532 								offmode));
2533 
2534 	    /* Don't use offset_address here; we don't need a
2535 	       recognizable address, and we don't want to generate
2536 	       code.  */
2537 	    op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2538 	  }
2539 
2540 	if (MEM_P (op0))
2541 	  {
2542 	    if (mode1 == VOIDmode)
2543 	      /* Bitfield.  */
2544 	      mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2545 	    if (bitpos >= BITS_PER_UNIT)
2546 	      {
2547 		op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2548 		bitpos %= BITS_PER_UNIT;
2549 	      }
2550 	    else if (bitpos < 0)
2551 	      {
2552 		HOST_WIDE_INT units
2553 		  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2554 		op0 = adjust_address_nv (op0, mode1, units);
2555 		bitpos += units * BITS_PER_UNIT;
2556 	      }
2557 	    else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2558 	      op0 = adjust_address_nv (op0, mode, 0);
2559 	    else if (GET_MODE (op0) != mode1)
2560 	      op0 = adjust_address_nv (op0, mode1, 0);
2561 	    else
2562 	      op0 = copy_rtx (op0);
2563 	    if (op0 == orig_op0)
2564 	      op0 = shallow_copy_rtx (op0);
2565 	    set_mem_attributes (op0, exp, 0);
2566 	  }
2567 
2568 	if (bitpos == 0 && mode == GET_MODE (op0))
2569 	  return op0;
2570 
2571         if (bitpos < 0)
2572           return NULL;
2573 
2574 	if (GET_MODE (op0) == BLKmode)
2575 	  return NULL;
2576 
2577 	if ((bitpos % BITS_PER_UNIT) == 0
2578 	    && bitsize == GET_MODE_BITSIZE (mode1))
2579 	  {
2580 	    enum machine_mode opmode = GET_MODE (op0);
2581 
2582 	    if (opmode == VOIDmode)
2583 	      opmode = TYPE_MODE (TREE_TYPE (tem));
2584 
2585 	    /* This condition may hold if we're expanding the address
2586 	       right past the end of an array that turned out not to
2587 	       be addressable (i.e., the address was only computed in
2588 	       debug stmts).  The gen_subreg below would rightfully
2589 	       crash, and the address doesn't really exist, so just
2590 	       drop it.  */
2591 	    if (bitpos >= GET_MODE_BITSIZE (opmode))
2592 	      return NULL;
2593 
2594 	    if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2595 	      return simplify_gen_subreg (mode, op0, opmode,
2596 					  bitpos / BITS_PER_UNIT);
2597 	  }
2598 
2599 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2600 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
2601 				     ? SIGN_EXTRACT
2602 				     : ZERO_EXTRACT, mode,
2603 				     GET_MODE (op0) != VOIDmode
2604 				     ? GET_MODE (op0)
2605 				     : TYPE_MODE (TREE_TYPE (tem)),
2606 				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
2607       }
2608 
2609     case ABS_EXPR:
2610       return gen_rtx_ABS (mode, op0);
2611 
2612     case NEGATE_EXPR:
2613       return gen_rtx_NEG (mode, op0);
2614 
2615     case BIT_NOT_EXPR:
2616       return gen_rtx_NOT (mode, op0);
2617 
2618     case FLOAT_EXPR:
2619       if (unsignedp)
2620 	return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2621       else
2622 	return gen_rtx_FLOAT (mode, op0);
2623 
2624     case FIX_TRUNC_EXPR:
2625       if (unsignedp)
2626 	return gen_rtx_UNSIGNED_FIX (mode, op0);
2627       else
2628 	return gen_rtx_FIX (mode, op0);
2629 
2630     case POINTER_PLUS_EXPR:
2631       /* For the rare target where pointers are not the same size as
2632 	 size_t, we need to check for mis-matched modes and correct
2633 	 the addend.  */
2634       if (op0 && op1
2635 	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2636 	  && GET_MODE (op0) != GET_MODE (op1))
2637 	{
2638 	  if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2639 	    op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2640 	  else
2641 	    /* We always sign-extend, regardless of the signedness of
2642 	       the operand, because the operand is always unsigned
2643 	       here even if the original C expression is signed.  */
2644 	    op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2645 	}
2646       /* Fall through.  */
2647     case PLUS_EXPR:
2648       return gen_rtx_PLUS (mode, op0, op1);
2649 
2650     case MINUS_EXPR:
2651       return gen_rtx_MINUS (mode, op0, op1);
2652 
2653     case MULT_EXPR:
2654       return gen_rtx_MULT (mode, op0, op1);
2655 
2656     case RDIV_EXPR:
2657     case TRUNC_DIV_EXPR:
2658     case EXACT_DIV_EXPR:
2659       if (unsignedp)
2660 	return gen_rtx_UDIV (mode, op0, op1);
2661       else
2662 	return gen_rtx_DIV (mode, op0, op1);
2663 
2664     case TRUNC_MOD_EXPR:
2665       if (unsignedp)
2666 	return gen_rtx_UMOD (mode, op0, op1);
2667       else
2668 	return gen_rtx_MOD (mode, op0, op1);
2669 
2670     case FLOOR_DIV_EXPR:
2671       if (unsignedp)
2672 	return gen_rtx_UDIV (mode, op0, op1);
2673       else
2674 	{
2675 	  rtx div = gen_rtx_DIV (mode, op0, op1);
2676 	  rtx mod = gen_rtx_MOD (mode, op0, op1);
2677 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
2678 	  return gen_rtx_PLUS (mode, div, adj);
2679 	}
2680 
2681     case FLOOR_MOD_EXPR:
2682       if (unsignedp)
2683 	return gen_rtx_UMOD (mode, op0, op1);
2684       else
2685 	{
2686 	  rtx mod = gen_rtx_MOD (mode, op0, op1);
2687 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
2688 	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2689 	  return gen_rtx_PLUS (mode, mod, adj);
2690 	}
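
      /* Worked example for the signed branch above (illustrative):
	 for -7 and 2, MOD = -1 and the floor_sdiv adjustment is -1, so
	 ADJ becomes -(-1 * 2) = 2 and the floor modulus is -1 + 2 = 1,
	 matching -7 - 2 * floor (-7 / 2) = -7 - 2 * -4 = 1.  */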
2691 
2692     case CEIL_DIV_EXPR:
2693       if (unsignedp)
2694 	{
2695 	  rtx div = gen_rtx_UDIV (mode, op0, op1);
2696 	  rtx mod = gen_rtx_UMOD (mode, op0, op1);
2697 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
2698 	  return gen_rtx_PLUS (mode, div, adj);
2699 	}
2700       else
2701 	{
2702 	  rtx div = gen_rtx_DIV (mode, op0, op1);
2703 	  rtx mod = gen_rtx_MOD (mode, op0, op1);
2704 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2705 	  return gen_rtx_PLUS (mode, div, adj);
2706 	}
2707 
2708     case CEIL_MOD_EXPR:
2709       if (unsignedp)
2710 	{
2711 	  rtx mod = gen_rtx_UMOD (mode, op0, op1);
2712 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
2713 	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2714 	  return gen_rtx_PLUS (mode, mod, adj);
2715 	}
2716       else
2717 	{
2718 	  rtx mod = gen_rtx_MOD (mode, op0, op1);
2719 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2720 	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2721 	  return gen_rtx_PLUS (mode, mod, adj);
2722 	}
2723 
2724     case ROUND_DIV_EXPR:
2725       if (unsignedp)
2726 	{
2727 	  rtx div = gen_rtx_UDIV (mode, op0, op1);
2728 	  rtx mod = gen_rtx_UMOD (mode, op0, op1);
2729 	  rtx adj = round_udiv_adjust (mode, mod, op1);
2730 	  return gen_rtx_PLUS (mode, div, adj);
2731 	}
2732       else
2733 	{
2734 	  rtx div = gen_rtx_DIV (mode, op0, op1);
2735 	  rtx mod = gen_rtx_MOD (mode, op0, op1);
2736 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
2737 	  return gen_rtx_PLUS (mode, div, adj);
2738 	}
2739 
2740     case ROUND_MOD_EXPR:
2741       if (unsignedp)
2742 	{
2743 	  rtx mod = gen_rtx_UMOD (mode, op0, op1);
2744 	  rtx adj = round_udiv_adjust (mode, mod, op1);
2745 	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2746 	  return gen_rtx_PLUS (mode, mod, adj);
2747 	}
2748       else
2749 	{
2750 	  rtx mod = gen_rtx_MOD (mode, op0, op1);
2751 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
2752 	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2753 	  return gen_rtx_PLUS (mode, mod, adj);
2754 	}
2755 
2756     case LSHIFT_EXPR:
2757       return gen_rtx_ASHIFT (mode, op0, op1);
2758 
2759     case RSHIFT_EXPR:
2760       if (unsignedp)
2761 	return gen_rtx_LSHIFTRT (mode, op0, op1);
2762       else
2763 	return gen_rtx_ASHIFTRT (mode, op0, op1);
2764 
2765     case LROTATE_EXPR:
2766       return gen_rtx_ROTATE (mode, op0, op1);
2767 
2768     case RROTATE_EXPR:
2769       return gen_rtx_ROTATERT (mode, op0, op1);
2770 
2771     case MIN_EXPR:
2772       if (unsignedp)
2773 	return gen_rtx_UMIN (mode, op0, op1);
2774       else
2775 	return gen_rtx_SMIN (mode, op0, op1);
2776 
2777     case MAX_EXPR:
2778       if (unsignedp)
2779 	return gen_rtx_UMAX (mode, op0, op1);
2780       else
2781 	return gen_rtx_SMAX (mode, op0, op1);
2782 
2783     case BIT_AND_EXPR:
2784     case TRUTH_AND_EXPR:
2785       return gen_rtx_AND (mode, op0, op1);
2786 
2787     case BIT_IOR_EXPR:
2788     case TRUTH_OR_EXPR:
2789       return gen_rtx_IOR (mode, op0, op1);
2790 
2791     case BIT_XOR_EXPR:
2792     case TRUTH_XOR_EXPR:
2793       return gen_rtx_XOR (mode, op0, op1);
2794 
2795     case TRUTH_ANDIF_EXPR:
2796       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2797 
2798     case TRUTH_ORIF_EXPR:
2799       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2800 
2801     case TRUTH_NOT_EXPR:
2802       return gen_rtx_EQ (mode, op0, const0_rtx);
2803 
2804     case LT_EXPR:
2805       if (unsignedp)
2806 	return gen_rtx_LTU (mode, op0, op1);
2807       else
2808 	return gen_rtx_LT (mode, op0, op1);
2809 
2810     case LE_EXPR:
2811       if (unsignedp)
2812 	return gen_rtx_LEU (mode, op0, op1);
2813       else
2814 	return gen_rtx_LE (mode, op0, op1);
2815 
2816     case GT_EXPR:
2817       if (unsignedp)
2818 	return gen_rtx_GTU (mode, op0, op1);
2819       else
2820 	return gen_rtx_GT (mode, op0, op1);
2821 
2822     case GE_EXPR:
2823       if (unsignedp)
2824 	return gen_rtx_GEU (mode, op0, op1);
2825       else
2826 	return gen_rtx_GE (mode, op0, op1);
2827 
2828     case EQ_EXPR:
2829       return gen_rtx_EQ (mode, op0, op1);
2830 
2831     case NE_EXPR:
2832       return gen_rtx_NE (mode, op0, op1);
2833 
2834     case UNORDERED_EXPR:
2835       return gen_rtx_UNORDERED (mode, op0, op1);
2836 
2837     case ORDERED_EXPR:
2838       return gen_rtx_ORDERED (mode, op0, op1);
2839 
2840     case UNLT_EXPR:
2841       return gen_rtx_UNLT (mode, op0, op1);
2842 
2843     case UNLE_EXPR:
2844       return gen_rtx_UNLE (mode, op0, op1);
2845 
2846     case UNGT_EXPR:
2847       return gen_rtx_UNGT (mode, op0, op1);
2848 
2849     case UNGE_EXPR:
2850       return gen_rtx_UNGE (mode, op0, op1);
2851 
2852     case UNEQ_EXPR:
2853       return gen_rtx_UNEQ (mode, op0, op1);
2854 
2855     case LTGT_EXPR:
2856       return gen_rtx_LTGT (mode, op0, op1);
2857 
2858     case COND_EXPR:
2859       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2860 
2861     case COMPLEX_EXPR:
2862       gcc_assert (COMPLEX_MODE_P (mode));
2863       if (GET_MODE (op0) == VOIDmode)
2864 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2865       if (GET_MODE (op1) == VOIDmode)
2866 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2867       return gen_rtx_CONCAT (mode, op0, op1);
2868 
2869     case CONJ_EXPR:
2870       if (GET_CODE (op0) == CONCAT)
2871 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2872 			       gen_rtx_NEG (GET_MODE_INNER (mode),
2873 					    XEXP (op0, 1)));
2874       else
2875 	{
2876 	  enum machine_mode imode = GET_MODE_INNER (mode);
2877 	  rtx re, im;
2878 
2879 	  if (MEM_P (op0))
2880 	    {
2881 	      re = adjust_address_nv (op0, imode, 0);
2882 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2883 	    }
2884 	  else
2885 	    {
2886 	      enum machine_mode ifmode = int_mode_for_mode (mode);
2887 	      enum machine_mode ihmode = int_mode_for_mode (imode);
2888 	      rtx halfsize;
2889 	      if (ifmode == BLKmode || ihmode == BLKmode)
2890 		return NULL;
2891 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2892 	      re = op0;
2893 	      if (mode != ifmode)
2894 		re = gen_rtx_SUBREG (ifmode, re, 0);
2895 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2896 	      if (imode != ihmode)
2897 		re = gen_rtx_SUBREG (imode, re, 0);
2898 	      im = copy_rtx (op0);
2899 	      if (mode != ifmode)
2900 		im = gen_rtx_SUBREG (ifmode, im, 0);
2901 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2902 	      if (imode != ihmode)
2903 		im = gen_rtx_SUBREG (imode, im, 0);
2904 	    }
2905 	  im = gen_rtx_NEG (imode, im);
2906 	  return gen_rtx_CONCAT (mode, re, im);
2907 	}
2908 
2909     case ADDR_EXPR:
2910       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2911       if (!op0 || !MEM_P (op0))
2912 	return NULL;
2913 
2914       op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2915 
2916       return op0;
2917 
2918     case VECTOR_CST:
2919       exp = build_constructor_from_list (TREE_TYPE (exp),
2920 					 TREE_VECTOR_CST_ELTS (exp));
2921       /* Fall through.  */
2922 
2923     case CONSTRUCTOR:
2924       if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2925 	{
2926 	  unsigned i;
2927 	  tree val;
2928 
2929 	  op0 = gen_rtx_CONCATN
2930 	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2931 
2932 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2933 	    {
2934 	      op1 = expand_debug_expr (val);
2935 	      if (!op1)
2936 		return NULL;
2937 	      XVECEXP (op0, 0, i) = op1;
2938 	    }
2939 
2940 	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2941 	    {
2942 	      op1 = expand_debug_expr
2943 		(fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2944 
2945 	      if (!op1)
2946 		return NULL;
2947 
2948 	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2949 		XVECEXP (op0, 0, i) = op1;
2950 	    }
2951 
2952 	  return op0;
2953 	}
2954       else
2955 	goto flag_unsupported;
2956 
2957     case CALL_EXPR:
2958       /* ??? Maybe handle some builtins?  */
2959       return NULL;
2960 
2961     case SSA_NAME:
2962       {
2963 	gimple g = get_gimple_for_ssa_name (exp);
2964 	if (g)
2965 	  {
2966 	    op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
2967 	    if (!op0)
2968 	      return NULL;
2969 	  }
2970 	else
2971 	  {
2972 	    int part = var_to_partition (SA.map, exp);
2973 
2974 	    if (part == NO_PARTITION)
2975 	      return NULL;
2976 
2977 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
2978 
2979 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
2980 	  }
2981 	goto adjust_mode;
2982       }
2983 
2984     case ERROR_MARK:
2985       return NULL;
2986 
2987     /* Vector stuff.  For most of the codes we don't have rtl codes.  */
2988     case REALIGN_LOAD_EXPR:
2989     case REDUC_MAX_EXPR:
2990     case REDUC_MIN_EXPR:
2991     case REDUC_PLUS_EXPR:
2992     case VEC_COND_EXPR:
2993     case VEC_EXTRACT_EVEN_EXPR:
2994     case VEC_EXTRACT_ODD_EXPR:
2995     case VEC_INTERLEAVE_HIGH_EXPR:
2996     case VEC_INTERLEAVE_LOW_EXPR:
2997     case VEC_LSHIFT_EXPR:
2998     case VEC_PACK_FIX_TRUNC_EXPR:
2999     case VEC_PACK_SAT_EXPR:
3000     case VEC_PACK_TRUNC_EXPR:
3001     case VEC_RSHIFT_EXPR:
3002     case VEC_UNPACK_FLOAT_HI_EXPR:
3003     case VEC_UNPACK_FLOAT_LO_EXPR:
3004     case VEC_UNPACK_HI_EXPR:
3005     case VEC_UNPACK_LO_EXPR:
3006     case VEC_WIDEN_MULT_HI_EXPR:
3007     case VEC_WIDEN_MULT_LO_EXPR:
3008       return NULL;
3009 
3010     /* Misc codes.  */
3011     case ADDR_SPACE_CONVERT_EXPR:
3012     case FIXED_CONVERT_EXPR:
3013     case OBJ_TYPE_REF:
3014     case WITH_SIZE_EXPR:
3015       return NULL;
3016 
3017     case DOT_PROD_EXPR:
3018       if (SCALAR_INT_MODE_P (GET_MODE (op0))
3019 	  && SCALAR_INT_MODE_P (mode))
3020 	{
3021 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3022 	    op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3023 	  else
3024 	    op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3025 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3026 	    op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3027 	  else
3028 	    op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3029 	  op0 = gen_rtx_MULT (mode, op0, op1);
3030 	  return gen_rtx_PLUS (mode, op0, op2);
3031 	}
3032       return NULL;
3033 
3034     case WIDEN_MULT_EXPR:
3035       if (SCALAR_INT_MODE_P (GET_MODE (op0))
3036 	  && SCALAR_INT_MODE_P (mode))
3037 	{
3038 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3039 	    op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3040 	  else
3041 	    op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3042 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3043 	    op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3044 	  else
3045 	    op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3046 	  return gen_rtx_MULT (mode, op0, op1);
3047 	}
3048       return NULL;
3049 
3050     case WIDEN_SUM_EXPR:
3051       if (SCALAR_INT_MODE_P (GET_MODE (op0))
3052 	  && SCALAR_INT_MODE_P (mode))
3053 	{
3054 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3055 	    op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3056 	  else
3057 	    op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3058 	  return gen_rtx_PLUS (mode, op0, op1);
3059 	}
3060       return NULL;
3061 
3062     default:
3063     flag_unsupported:
3064 #ifdef ENABLE_CHECKING
3065       debug_tree (exp);
3066       gcc_unreachable ();
3067 #else
3068       return NULL;
3069 #endif
3070     }
3071 }
3072 
3073 /* Expand the _LOCs in debug insns.  We run this after expanding all
3074    regular insns, so that any variables referenced in the function
3075    will have their DECL_RTLs set.  */
3076 
3077 static void
3078 expand_debug_locations (void)
3079 {
3080   rtx insn;
3081   rtx last = get_last_insn ();
3082   int save_strict_alias = flag_strict_aliasing;
3083 
3084   /* New alias sets while setting up memory attributes cause
3085      -fcompare-debug failures, even though they don't bring about any
3086      codegen changes.  */
3087   flag_strict_aliasing = 0;
3088 
3089   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3090     if (DEBUG_INSN_P (insn))
3091       {
3092 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3093 	rtx val;
3094 	enum machine_mode mode;
3095 
3096 	if (value == NULL_TREE)
3097 	  val = NULL_RTX;
3098 	else
3099 	  {
3100 	    val = expand_debug_expr (value);
3101 	    gcc_assert (last == get_last_insn ());
3102 	  }
3103 
3104 	if (!val)
3105 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
3106 	else
3107 	  {
3108 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
3109 
3110 	    gcc_assert (mode == GET_MODE (val)
3111 			|| (GET_MODE (val) == VOIDmode
3112 			    && (CONST_INT_P (val)
3113 				|| GET_CODE (val) == CONST_FIXED
3114 				|| GET_CODE (val) == CONST_DOUBLE
3115 				|| GET_CODE (val) == LABEL_REF)));
3116 	  }
3117 
3118 	INSN_VAR_LOCATION_LOC (insn) = val;
3119       }
3120 
3121   flag_strict_aliasing = save_strict_alias;
3122 }
3123 
3124 /* Expand basic block BB from GIMPLE trees to RTL.  */
3125 
3126 static basic_block
3127 expand_gimple_basic_block (basic_block bb)
3128 {
3129   gimple_stmt_iterator gsi;
3130   gimple_seq stmts;
3131   gimple stmt = NULL;
3132   rtx note, last;
3133   edge e;
3134   edge_iterator ei;
3135   void **elt;
3136 
3137   if (dump_file)
3138     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3139 	     bb->index);
3140 
3141   /* Note that since we are now transitioning from GIMPLE to RTL, we
3142      cannot use the gsi_*_bb() routines because they expect the basic
3143      block to be in GIMPLE, instead of RTL.  Therefore, we need to
3144      access the BB sequence directly.  */
3145   stmts = bb_seq (bb);
3146   bb->il.gimple = NULL;
3147   rtl_profile_for_bb (bb);
3148   init_rtl_bb_info (bb);
3149   bb->flags |= BB_RTL;
3150 
3151   /* Remove the RETURN_EXPR if we may fall through to the exit
3152      instead.  */
3153   gsi = gsi_last (stmts);
3154   if (!gsi_end_p (gsi)
3155       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3156     {
3157       gimple ret_stmt = gsi_stmt (gsi);
3158 
3159       gcc_assert (single_succ_p (bb));
3160       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3161 
3162       if (bb->next_bb == EXIT_BLOCK_PTR
3163 	  && !gimple_return_retval (ret_stmt))
3164 	{
3165 	  gsi_remove (&gsi, false);
3166 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3167 	}
3168     }
3169 
3170   gsi = gsi_start (stmts);
3171   if (!gsi_end_p (gsi))
3172     {
3173       stmt = gsi_stmt (gsi);
3174       if (gimple_code (stmt) != GIMPLE_LABEL)
3175 	stmt = NULL;
3176     }
3177 
3178   elt = pointer_map_contains (lab_rtx_for_bb, bb);
3179 
3180   if (stmt || elt)
3181     {
3182       last = get_last_insn ();
3183 
3184       if (stmt)
3185 	{
3186 	  expand_gimple_stmt (stmt);
3187 	  gsi_next (&gsi);
3188 	}
3189 
3190       if (elt)
3191 	emit_label ((rtx) *elt);
3192 
3193       /* Java emits line number notes at the top of labels.
3194 	 ??? Make this go away once line number notes are obsoleted.  */
3195       BB_HEAD (bb) = NEXT_INSN (last);
3196       if (NOTE_P (BB_HEAD (bb)))
3197 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3198       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3199 
3200       maybe_dump_rtl_for_gimple_stmt (stmt, last);
3201     }
3202   else
3203     note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3204 
3205   NOTE_BASIC_BLOCK (note) = bb;
3206 
3207   for (; !gsi_end_p (gsi); gsi_next (&gsi))
3208     {
3209       basic_block new_bb;
3210 
3211       stmt = gsi_stmt (gsi);
3212 
3213       /* If this statement is a non-debug one, and we generate debug
3214 	 insns, then this one might be the last real use of a TERed
3215 	 SSA_NAME, while there are still some debug uses further
3216 	 down.  Expanding the current SSA name in such further debug
3217 	 uses by their RHS might lead to wrong debug info, as coalescing
3218 	 might make the operands of such RHS be placed into the same
3219 	 pseudo as something else.  Like so:
3220 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
3221 	   use(a_1);
3222 	   a_2 = ...
3223            #DEBUG ... => a_1
3224 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3225 	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3226 	 the write to a_2 would actually have clobbered the place which
3227 	 formerly held a_0.
3228 
3229 	 So, instead of that, we recognize the situation, and generate
3230 	 debug temporaries at the last real use of TERed SSA names:
3231 	   a_1 = a_0 + 1;
3232            #DEBUG #D1 => a_1
3233 	   use(a_1);
3234 	   a_2 = ...
3235            #DEBUG ... => #D1
3236 	 */
3237       if (MAY_HAVE_DEBUG_INSNS
3238 	  && SA.values
3239 	  && !is_gimple_debug (stmt))
3240 	{
3241 	  ssa_op_iter iter;
3242 	  tree op;
3243 	  gimple def;
3244 
3245 	  location_t sloc = get_curr_insn_source_location ();
3246 	  tree sblock = get_curr_insn_block ();
3247 
3248 	  /* Look for SSA names that have their last use here (TERed
3249 	     names always have only one real use).  */
3250 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3251 	    if ((def = get_gimple_for_ssa_name (op)))
3252 	      {
3253 		imm_use_iterator imm_iter;
3254 		use_operand_p use_p;
3255 		bool have_debug_uses = false;
3256 
3257 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3258 		  {
3259 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
3260 		      {
3261 			have_debug_uses = true;
3262 			break;
3263 		      }
3264 		  }
3265 
3266 		if (have_debug_uses)
3267 		  {
3268 		    /* OP is a TERed SSA name, DEF is its defining
3269 		       statement, and OP is used in further debug
3270 		       instructions.  Generate a debug temporary, and
3271 		       replace all uses of OP in debug insns with that
3272 		       temporary.  */
3273 		    gimple debugstmt;
3274 		    tree value = gimple_assign_rhs_to_tree (def);
3275 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
3276 		    rtx val;
3277 		    enum machine_mode mode;
3278 
3279 		    set_curr_insn_source_location (gimple_location (def));
3280 		    set_curr_insn_block (gimple_block (def));
3281 
3282 		    DECL_ARTIFICIAL (vexpr) = 1;
3283 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
3284 		    if (DECL_P (value))
3285 		      mode = DECL_MODE (value);
3286 		    else
3287 		      mode = TYPE_MODE (TREE_TYPE (value));
3288 		    DECL_MODE (vexpr) = mode;
3289 
3290 		    val = gen_rtx_VAR_LOCATION
3291 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3292 
3293 		    val = emit_debug_insn (val);
3294 
3295 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3296 		      {
3297 			if (!gimple_debug_bind_p (debugstmt))
3298 			  continue;
3299 
3300 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3301 			  SET_USE (use_p, vexpr);
3302 
3303 			update_stmt (debugstmt);
3304 		      }
3305 		  }
3306 	      }
3307 	  set_curr_insn_source_location (sloc);
3308 	  set_curr_insn_block (sblock);
3309 	}
3310 
3311       currently_expanding_gimple_stmt = stmt;
3312 
3313       /* Expand this statement, then evaluate the resulting RTL and
3314 	 fixup the CFG accordingly.  */
3315       if (gimple_code (stmt) == GIMPLE_COND)
3316 	{
3317 	  new_bb = expand_gimple_cond (bb, stmt);
3318 	  if (new_bb)
3319 	    return new_bb;
3320 	}
3321       else if (gimple_debug_bind_p (stmt))
3322 	{
3323 	  location_t sloc = get_curr_insn_source_location ();
3324 	  tree sblock = get_curr_insn_block ();
3325 	  gimple_stmt_iterator nsi = gsi;
3326 
3327 	  for (;;)
3328 	    {
3329 	      tree var = gimple_debug_bind_get_var (stmt);
3330 	      tree value;
3331 	      rtx val;
3332 	      enum machine_mode mode;
3333 
3334 	      if (gimple_debug_bind_has_value_p (stmt))
3335 		value = gimple_debug_bind_get_value (stmt);
3336 	      else
3337 		value = NULL_TREE;
3338 
3339 	      last = get_last_insn ();
3340 
3341 	      set_curr_insn_source_location (gimple_location (stmt));
3342 	      set_curr_insn_block (gimple_block (stmt));
3343 
3344 	      if (DECL_P (var))
3345 		mode = DECL_MODE (var);
3346 	      else
3347 		mode = TYPE_MODE (TREE_TYPE (var));
3348 
3349 	      val = gen_rtx_VAR_LOCATION
3350 		(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3351 
3352 	      val = emit_debug_insn (val);
3353 
3354 	      if (dump_file && (dump_flags & TDF_DETAILS))
3355 		{
3356 		  /* We can't dump the insn with a TREE where an RTX
3357 		     is expected.  */
3358 		  INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3359 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
3360 		  INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3361 		}
3362 
3363 	      /* In order not to generate too many debug temporaries,
3364 	         we delink all uses of debug statements we already expanded.
3365 		 Therefore debug statements between definition and real
3366 		 use of TERed SSA names will continue to use the SSA name,
3367 		 and not be replaced with debug temps.  */
3368 	      delink_stmt_imm_use (stmt);
3369 
3370 	      gsi = nsi;
3371 	      gsi_next (&nsi);
3372 	      if (gsi_end_p (nsi))
3373 		break;
3374 	      stmt = gsi_stmt (nsi);
3375 	      if (!gimple_debug_bind_p (stmt))
3376 		break;
3377 	    }
3378 
3379 	  set_curr_insn_source_location (sloc);
3380 	  set_curr_insn_block (sblock);
3381 	}
3382       else
3383 	{
3384 	  if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3385 	    {
3386 	      bool can_fallthru;
3387 	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3388 	      if (new_bb)
3389 		{
3390 		  if (can_fallthru)
3391 		    bb = new_bb;
3392 		  else
3393 		    return new_bb;
3394 		}
3395 	    }
3396 	  else
3397 	    {
3398 	      def_operand_p def_p;
3399 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3400 
3401 	      if (def_p != NULL)
3402 		{
3403 		  /* Ignore this stmt if it is in the list of
3404 		     replaceable expressions.  */
3405 		  if (SA.values
3406 		      && bitmap_bit_p (SA.values,
3407 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3408 		    continue;
3409 		}
3410 	      last = expand_gimple_stmt (stmt);
3411 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
3412 	    }
3413 	}
3414     }
3415 
3416   currently_expanding_gimple_stmt = NULL;
3417 
3418   /* Expand implicit goto and convert goto_locus.  */
3419   FOR_EACH_EDGE (e, ei, bb->succs)
3420     {
3421       if (e->goto_locus && e->goto_block)
3422 	{
3423 	  set_curr_insn_source_location (e->goto_locus);
3424 	  set_curr_insn_block (e->goto_block);
3425 	  e->goto_locus = curr_insn_locator ();
3426 	}
3427       e->goto_block = NULL;
3428       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3429 	{
3430 	  emit_jump (label_rtx_for_bb (e->dest));
3431 	  e->flags &= ~EDGE_FALLTHRU;
3432 	}
3433     }
3434 
3435   /* Expanded RTL can create a jump in the last instruction of the block.  This
3436      might later be assumed to be a jump to the successor and break edge
3437      insertion.  We need to insert a dummy move to prevent this.  See PR41440.  */
3438   if (single_succ_p (bb)
3439       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3440       && (last = get_last_insn ())
3441       && JUMP_P (last))
3442     {
3443       rtx dummy = gen_reg_rtx (SImode);
3444       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3445     }
3446 
3447   do_pending_stack_adjust ();
3448 
3449   /* Find the block tail.  The last insn in the block is the insn
3450      before a barrier and/or table jump insn.  */
3451   last = get_last_insn ();
3452   if (BARRIER_P (last))
3453     last = PREV_INSN (last);
3454   if (JUMP_TABLE_DATA_P (last))
3455     last = PREV_INSN (PREV_INSN (last));
3456   BB_END (bb) = last;
3457 
3458   update_bb_for_insn (bb);
3459 
3460   return bb;
3461 }
3462 
3463 
3464 /* Create a basic block for initialization code.  */
3465 
3466 static basic_block
3467 construct_init_block (void)
3468 {
3469   basic_block init_block, first_block;
3470   edge e = NULL;
3471   int flags;
3472 
3473   /* Multiple entry points not supported yet.  */
3474   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3475   init_rtl_bb_info (ENTRY_BLOCK_PTR);
3476   init_rtl_bb_info (EXIT_BLOCK_PTR);
3477   ENTRY_BLOCK_PTR->flags |= BB_RTL;
3478   EXIT_BLOCK_PTR->flags |= BB_RTL;
3479 
3480   e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3481 
3482   /* When the entry edge points to the first basic block, we don't need
3483      a jump; otherwise we have to jump to the proper target.  */
3484   if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3485     {
3486       tree label = gimple_block_label (e->dest);
3487 
3488       emit_jump (label_rtx (label));
3489       flags = 0;
3490     }
3491   else
3492     flags = EDGE_FALLTHRU;
3493 
3494   init_block = create_basic_block (NEXT_INSN (get_insns ()),
3495 				   get_last_insn (),
3496 				   ENTRY_BLOCK_PTR);
3497   init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3498   init_block->count = ENTRY_BLOCK_PTR->count;
3499   if (e)
3500     {
3501       first_block = e->dest;
3502       redirect_edge_succ (e, init_block);
3503       e = make_edge (init_block, first_block, flags);
3504     }
3505   else
3506     e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3507   e->probability = REG_BR_PROB_BASE;
3508   e->count = ENTRY_BLOCK_PTR->count;
3509 
3510   update_bb_for_insn (init_block);
3511   return init_block;
3512 }
3513 
3514 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3515    found in the block tree.  */
3516 
3517 static void
3518 set_block_levels (tree block, int level)
3519 {
3520   while (block)
3521     {
3522       BLOCK_NUMBER (block) = level;
3523       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3524       block = BLOCK_CHAIN (block);
3525     }
3526 }
3527 
3528 /* Create the exit block, containing the function-exit code and similar stuff.  */
3529 
3530 static void
3531 construct_exit_block (void)
3532 {
3533   rtx head = get_last_insn ();
3534   rtx end;
3535   basic_block exit_block;
3536   edge e, e2;
3537   unsigned ix;
3538   edge_iterator ei;
3539   rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3540 
3541   rtl_profile_for_bb (EXIT_BLOCK_PTR);
3542 
3543   /* Make sure the locus is set to the end of the function, so that
3544      epilogue line numbers and warnings are set properly.  */
3545   if (cfun->function_end_locus != UNKNOWN_LOCATION)
3546     input_location = cfun->function_end_locus;
3547 
3548   /* The following insns belong to the top scope.  */
3549   set_curr_insn_block (DECL_INITIAL (current_function_decl));
3550 
3551   /* Generate rtl for function exit.  */
3552   expand_function_end ();
3553 
3554   end = get_last_insn ();
3555   if (head == end)
3556     return;
3557   /* While emitting the function end we may have moved the end of the
3558      last basic block; restore it.  */
3559   BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3560   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3561     head = NEXT_INSN (head);
3562   exit_block = create_basic_block (NEXT_INSN (head), end,
3563 				   EXIT_BLOCK_PTR->prev_bb);
3564   exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3565   exit_block->count = EXIT_BLOCK_PTR->count;
3566 
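  /* Redirect every non-abnormal predecessor edge of the exit block to the
     newly created block; abnormal edges keep pointing at EXIT_BLOCK_PTR.  */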
3567   ix = 0;
3568   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3569     {
3570       e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3571       if (!(e->flags & EDGE_ABNORMAL))
3572 	redirect_edge_succ (e, exit_block);
3573       else
3574 	ix++;
3575     }
3576 
3577   e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3578   e->probability = REG_BR_PROB_BASE;
3579   e->count = EXIT_BLOCK_PTR->count;
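  /* Whatever count does not flow in through the remaining (abnormal)
     predecessors is attributed to the new fallthru edge; the clamping
     below keeps the counts and frequency from going negative.  */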
3580   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3581     if (e2 != e)
3582       {
3583 	e->count -= e2->count;
3584 	exit_block->count -= e2->count;
3585 	exit_block->frequency -= EDGE_FREQUENCY (e2);
3586       }
3587   if (e->count < 0)
3588     e->count = 0;
3589   if (exit_block->count < 0)
3590     exit_block->count = 0;
3591   if (exit_block->frequency < 0)
3592     exit_block->frequency = 0;
3593   update_bb_for_insn (exit_block);
3594 }
3595 
3596 /* Helper function for discover_nonconstant_array_refs.
3597    Look for ARRAY_REF nodes with non-constant indexes and mark their
3598    base variables addressable.  */
3599 
3600 static tree
3601 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3602 				   void *data ATTRIBUTE_UNUSED)
3603 {
3604   tree t = *tp;
3605 
3606   if (IS_TYPE_OR_DECL_P (t))
3607     *walk_subtrees = 0;
3608   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3609     {
3610       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3611 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3612 	      && (!TREE_OPERAND (t, 2)
3613 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3614 	     || (TREE_CODE (t) == COMPONENT_REF
3615 		 && (!TREE_OPERAND (t,2)
3616 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3617 	     || TREE_CODE (t) == BIT_FIELD_REF
3618 	     || TREE_CODE (t) == REALPART_EXPR
3619 	     || TREE_CODE (t) == IMAGPART_EXPR
3620 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
3621 	     || CONVERT_EXPR_P (t))
3622 	t = TREE_OPERAND (t, 0);
3623 
3624       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3625 	{
3626 	  t = get_base_address (t);
3627 	  if (t && DECL_P (t)
3628               && DECL_MODE (t) != BLKmode)
3629 	    TREE_ADDRESSABLE (t) = 1;
3630 	}
3631 
3632       *walk_subtrees = 0;
3633     }
3634 
3635   return NULL_TREE;
3636 }
3637 
3638 /* RTL expansion is not able to compile array references with variable
3639    offsets for arrays stored in a single register.  Discover such
3640    expressions and mark the variables as addressable to avoid this
3641    scenario.  */
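/* For instance, a hypothetical

     struct { int v[2]; } s;

   may be small enough to live in a single register; an access such as
   s.v[i] with a non-constant I therefore forces S to be marked
   addressable, so the indexed access can be expanded as a memory
   reference.  */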
3642 
3643 static void
3644 discover_nonconstant_array_refs (void)
3645 {
3646   basic_block bb;
3647   gimple_stmt_iterator gsi;
3648 
3649   FOR_EACH_BB (bb)
3650     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3651       {
3652 	gimple stmt = gsi_stmt (gsi);
3653 	if (!is_gimple_debug (stmt))
3654 	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
3655       }
3656 }
3657 
3658 /* This function sets crtl->args.internal_arg_pointer to a virtual
3659    register if DRAP is needed.  The local register allocator will replace
3660    virtual_incoming_args_rtx with the virtual register.  */
3661 
3662 static void
3663 expand_stack_alignment (void)
3664 {
3665   rtx drap_rtx;
3666   unsigned int preferred_stack_boundary;
3667 
3668   if (! SUPPORTS_STACK_ALIGNMENT)
3669     return;
3670 
3671   if (cfun->calls_alloca
3672       || cfun->has_nonlocal_label
3673       || crtl->has_nonlocal_goto)
3674     crtl->need_drap = true;
3675 
3676   /* Call update_stack_boundary here again to update incoming stack
3677      boundary.  It may set incoming stack alignment to a different
3678      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
3679      use the minimum incoming stack alignment to check if it is OK
3680      to perform sibcall optimization since sibcall optimization will
3681      only align the outgoing stack to incoming stack boundary.  */
3682   if (targetm.calls.update_stack_boundary)
3683     targetm.calls.update_stack_boundary ();
3684 
3685   /* The incoming stack frame has to be aligned at least at
3686      parm_stack_boundary.  */
3687   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3688 
3689   /* Update crtl->stack_alignment_estimated and use it later to align
3690      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
3691      exceptions since callgraph doesn't collect incoming stack alignment
3692      in this case.  */
3693   if (flag_non_call_exceptions
3694       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3695     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3696   else
3697     preferred_stack_boundary = crtl->preferred_stack_boundary;
3698   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3699     crtl->stack_alignment_estimated = preferred_stack_boundary;
3700   if (preferred_stack_boundary > crtl->stack_alignment_needed)
3701     crtl->stack_alignment_needed = preferred_stack_boundary;
3702 
3703   gcc_assert (crtl->stack_alignment_needed
3704 	      <= crtl->stack_alignment_estimated);
3705 
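  /* Realignment is needed whenever the estimated alignment exceeds what
     the incoming stack boundary already guarantees.  */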
3706   crtl->stack_realign_needed
3707     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3708   crtl->stack_realign_tried = crtl->stack_realign_needed;
3709 
3710   crtl->stack_realign_processed = true;
3711 
3712   /* The target has to redefine TARGET_GET_DRAP_RTX to support stack
3713      alignment.  */
3714   gcc_assert (targetm.calls.get_drap_rtx != NULL);
3715   drap_rtx = targetm.calls.get_drap_rtx ();
3716 
3717   /* stack_realign_drap and drap_rtx must match.  */
3718   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3719 
3720   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
3721   if (NULL != drap_rtx)
3722     {
3723       crtl->args.internal_arg_pointer = drap_rtx;
3724 
3725       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3726          needed. */
3727       fixup_tail_calls ();
3728     }
3729 }
3730 
3731 /* Translate the intermediate representation contained in the CFG
3732    from GIMPLE trees to RTL.
3733 
3734    We do the conversion per basic block and preserve/update the tree CFG.
3735    This implies we have to do some magic as the CFG can simultaneously
3736    consist of basic blocks containing RTL and GIMPLE trees.  This can
3737    confuse the CFG hooks, so be careful not to manipulate the CFG during
3738    the expansion.  */
3739 
3740 static unsigned int
3741 gimple_expand_cfg (void)
3742 {
3743   basic_block bb, init_block;
3744   sbitmap blocks;
3745   edge_iterator ei;
3746   edge e;
3747   unsigned i;
3748 
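  /* Compute the out-of-SSA data: coalescing partitions and the set of
     TER-replaceable expressions (SA.values).  partition_to_pseudo will
     record the RTL object created for each partition during expansion.  */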
3749   rewrite_out_of_ssa (&SA);
3750   SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3751 					   sizeof (rtx));
3752 
3753   /* Some backends want to know that we are expanding to RTL.  */
3754   currently_expanding_to_rtl = 1;
3755 
3756   rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3757 
3758   insn_locators_alloc ();
3759   if (!DECL_IS_BUILTIN (current_function_decl))
3760     {
3761       /* Eventually, all FEs should explicitly set function_start_locus.  */
3762       if (cfun->function_start_locus == UNKNOWN_LOCATION)
3763        set_curr_insn_source_location
3764          (DECL_SOURCE_LOCATION (current_function_decl));
3765       else
3766        set_curr_insn_source_location (cfun->function_start_locus);
3767     }
3768   set_curr_insn_block (DECL_INITIAL (current_function_decl));
3769   prologue_locator = curr_insn_locator ();
3770 
3771   /* Make sure the first insn is a note even if we don't want line numbers.
3772      This ensures the first insn will never be deleted.
3773      Also, final expects a note to appear there.  */
3774   emit_note (NOTE_INSN_DELETED);
3775 
3776   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
3777   discover_nonconstant_array_refs ();
3778 
3779   targetm.expand_to_rtl_hook ();
3780   crtl->stack_alignment_needed = STACK_BOUNDARY;
3781   crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3782   crtl->stack_alignment_estimated = 0;
3783   crtl->preferred_stack_boundary = STACK_BOUNDARY;
3784   cfun->cfg->max_jumptable_ents = 0;
3785 
3786 
3787   /* Expand the variables recorded during gimple lowering.  */
3788   expand_used_vars ();
3789 
3790   /* Honor stack protection warnings.  */
3791   if (warn_stack_protect)
3792     {
3793       if (cfun->calls_alloca)
3794 	warning (OPT_Wstack_protector,
3795 		 "not protecting local variables: variable length buffer");
3796       if (has_short_buffer && !crtl->stack_protect_guard)
3797 	warning (OPT_Wstack_protector,
3798 		 "not protecting function: no buffer at least %d bytes long",
3799 		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3800     }
3801 
3802   /* Set up parameters and prepare for return, for the function.  */
3803   expand_function_start (current_function_decl);
3804 
3805   /* Now that we also have the parameter RTXs, copy them over to our
3806      partitions.  */
3807   for (i = 0; i < SA.map->num_partitions; i++)
3808     {
3809       tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3810 
3811       if (TREE_CODE (var) != VAR_DECL
3812 	  && !SA.partition_to_pseudo[i])
3813 	SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3814       gcc_assert (SA.partition_to_pseudo[i]);
3815 
3816       /* If this decl was marked as living in multiple places, reset
3817          this now to NULL.  */
3818       if (DECL_RTL_IF_SET (var) == pc_rtx)
3819 	SET_DECL_RTL (var, NULL);
3820 
3821       /* Some RTL parts really want to look at DECL_RTL(x) when x
3822          was a decl marked in REG_ATTR or MEM_ATTR.  We could use
3823 	 SET_DECL_RTL here to make this available, but that would mean
3824 	 selecting one of the potentially many RTLs for one DECL.  Instead
3825 	 of doing that we simply reset the MEM_EXPR of the RTL in question,
3826 	 so that nobody can get at it and hence nobody can call DECL_RTL on it.  */
3827       if (!DECL_RTL_SET_P (var))
3828 	{
3829 	  if (MEM_P (SA.partition_to_pseudo[i]))
3830 	    set_mem_expr (SA.partition_to_pseudo[i], NULL);
3831 	}
3832     }
3833 
3834   /* If this function is `main', emit a call to `__main'
3835      to run global initializers, etc.  */
3836   if (DECL_NAME (current_function_decl)
3837       && MAIN_NAME_P (DECL_NAME (current_function_decl))
3838       && DECL_FILE_SCOPE_P (current_function_decl))
3839     expand_main_function ();
3840 
3841   /* Initialize the stack_protect_guard field.  This must happen after the
3842      call to __main (if any) so that the external decl is initialized.  */
3843   if (crtl->stack_protect_guard)
3844     stack_protect_prologue ();
3845 
3846   expand_phi_nodes (&SA);
3847 
3848   /* Register rtl specific functions for cfg.  */
3849   rtl_register_cfg_hooks ();
3850 
3851   init_block = construct_init_block ();
3852 
3853   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleared from the
3854      remaining edges later.  */
3855   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3856     e->flags &= ~EDGE_EXECUTABLE;
3857 
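  /* Expand the remaining blocks in layout order.  expand_gimple_basic_block
     may create new blocks (e.g. for tail calls) and returns the block after
     which the walk should continue.  */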
3858   lab_rtx_for_bb = pointer_map_create ();
3859   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
3860     bb = expand_gimple_basic_block (bb);
3861 
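  /* Debug bind insns were emitted with their values still in TREE form;
     turn those values into RTL now.  */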
3862   if (MAY_HAVE_DEBUG_INSNS)
3863     expand_debug_locations ();
3864 
3865   execute_free_datastructures ();
3866   finish_out_of_ssa (&SA);
3867 
3868   /* We are no longer in SSA form.  */
3869   cfun->gimple_df->in_ssa_p = false;
3870 
3871   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
3872      conservatively to true until they are all profile-aware.  */
3873   pointer_map_destroy (lab_rtx_for_bb);
3874   free_histograms ();
3875 
3876   construct_exit_block ();
3877   set_curr_insn_block (DECL_INITIAL (current_function_decl));
3878   insn_locators_finalize ();
3879 
3880   /* Zap the tree EH table.  */
3881   set_eh_throw_stmt_table (cfun, NULL);
3882 
3883   rebuild_jump_labels (get_insns ());
3884 
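  /* Commit any insn sequences that were queued on edges during expansion
     (typically the copies generated when eliminating PHI nodes).  */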
3885   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3886     {
3887       edge e;
3888       edge_iterator ei;
3889       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3890 	{
3891 	  if (e->insns.r)
3892 	    commit_one_edge_insertion (e);
3893 	  else
3894 	    ei_next (&ei);
3895 	}
3896     }
3897 
3898   /* We're done expanding trees to RTL.  */
3899   currently_expanding_to_rtl = 0;
3900 
3901   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
3902     {
3903       edge e;
3904       edge_iterator ei;
3905       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3906 	{
3907 	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
3908 	  e->flags &= ~EDGE_EXECUTABLE;
3909 
3910 	  /* At the moment not all abnormal edges match the RTL
3911 	     representation.  It is safe to remove them here as
3912 	     find_many_sub_basic_blocks will rediscover them.
3913 	     In the future we should get this fixed properly.  */
3914 	  if ((e->flags & EDGE_ABNORMAL)
3915 	      && !(e->flags & EDGE_SIBCALL))
3916 	    remove_edge (e);
3917 	  else
3918 	    ei_next (&ei);
3919 	}
3920     }
3921 
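  /* Expansion may have introduced new control flow inside what used to be
     single GIMPLE basic blocks; rediscover the sub-basic-blocks for every
     block and get rid of edges that turned out to be dead.  */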
3922   blocks = sbitmap_alloc (last_basic_block);
3923   sbitmap_ones (blocks);
3924   find_many_sub_basic_blocks (blocks);
3925   sbitmap_free (blocks);
3926   purge_all_dead_edges ();
3927 
3928   compact_blocks ();
3929 
3930   expand_stack_alignment ();
3931 
3932 #ifdef ENABLE_CHECKING
3933   verify_flow_info ();
3934 #endif
3935 
3936   /* There's no need to defer outputting this function any more; we
3937      know we want to output it.  */
3938   DECL_DEFER_OUTPUT (current_function_decl) = 0;
3939 
3940   /* Now that we're done expanding trees to RTL, we shouldn't have any
3941      more CONCATs anywhere.  */
3942   generating_concat_p = 0;
3943 
3944   if (dump_file)
3945     {
3946       fprintf (dump_file,
3947 	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
3948       /* And the pass manager will dump RTL for us.  */
3949     }
3950 
3951   /* If we're emitting a nested function, make sure its parent gets
3952      emitted as well.  Doing otherwise confuses debug info.  */
3953   {
3954     tree parent;
3955     for (parent = DECL_CONTEXT (current_function_decl);
3956 	 parent != NULL_TREE;
3957 	 parent = get_containing_scope (parent))
3958       if (TREE_CODE (parent) == FUNCTION_DECL)
3959 	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
3960   }
3961 
3962   /* We are now committed to emitting code for this function.  Do any
3963      preparation, such as emitting abstract debug info for the inlined
3964      function before it gets mangled by optimization.  */
3965   if (cgraph_function_possibly_inlined_p (current_function_decl))
3966     (*debug_hooks->outlining_inline_function) (current_function_decl);
3967 
3968   TREE_ASM_WRITTEN (current_function_decl) = 1;
3969 
3970   /* After expanding, the return labels are no longer needed. */
3971   return_label = NULL;
3972   naked_return_label = NULL;
3973   /* Tag the blocks with a depth number so that change_scope can find
3974      the common parent easily.  */
3975   set_block_levels (DECL_INITIAL (cfun->decl), 0);
3976   default_rtl_profile ();
3977   return 0;
3978 }
3979 
3980 struct rtl_opt_pass pass_expand =
3981 {
3982  {
3983   RTL_PASS,
3984   "expand",				/* name */
3985   NULL,                                 /* gate */
3986   gimple_expand_cfg,			/* execute */
3987   NULL,                                 /* sub */
3988   NULL,                                 /* next */
3989   0,                                    /* static_pass_number */
3990   TV_EXPAND,				/* tv_id */
3991   PROP_ssa | PROP_gimple_leh | PROP_cfg
3992     | PROP_gimple_lcx,			/* properties_required */
3993   PROP_rtl,                             /* properties_provided */
3994   PROP_ssa | PROP_trees,		/* properties_destroyed */
3995   TODO_verify_ssa | TODO_verify_flow
3996     | TODO_verify_stmts,		/* todo_flags_start */
3997   TODO_dump_func
3998   | TODO_ggc_collect			/* todo_flags_finish */
3999  }
4000 };
4001