xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/gimplify.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
2    tree representation into the GIMPLE form.
3    Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4    Free Software Foundation, Inc.
5    Major work done by Sebastian Pop <s.pop@laposte.net>,
6    Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 
8 This file is part of GCC.
9 
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14 
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
18 for more details.
19 
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3.  If not see
22 <http://www.gnu.org/licenses/>.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-inline.h"
34 #include "diagnostic.h"
35 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h"
38 #include "cgraph.h"
39 #include "timevar.h"
40 #include "except.h"
41 #include "hashtab.h"
42 #include "flags.h"
43 #include "real.h"
44 #include "function.h"
45 #include "output.h"
46 #include "expr.h"
47 #include "ggc.h"
48 #include "toplev.h"
49 #include "target.h"
50 #include "optabs.h"
51 #include "pointer-set.h"
52 #include "splay-tree.h"
53 #include "vec.h"
54 #include "gimple.h"
55 #include "tree-pass.h"
56 
57 
58 enum gimplify_omp_var_data
59 {
60   GOVD_SEEN = 1,
61   GOVD_EXPLICIT = 2,
62   GOVD_SHARED = 4,
63   GOVD_PRIVATE = 8,
64   GOVD_FIRSTPRIVATE = 16,
65   GOVD_LASTPRIVATE = 32,
66   GOVD_REDUCTION = 64,
67   GOVD_LOCAL = 128,
68   GOVD_DEBUG_PRIVATE = 256,
69   GOVD_PRIVATE_OUTER_REF = 512,
70   GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
71 			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
72 };
73 
74 
75 enum omp_region_type
76 {
77   ORT_WORKSHARE = 0,
78   ORT_PARALLEL = 2,
79   ORT_COMBINED_PARALLEL = 3,
80   ORT_TASK = 4,
81   ORT_UNTIED_TASK = 5
82 };
83 
84 struct gimplify_omp_ctx
85 {
86   struct gimplify_omp_ctx *outer_context;
87   splay_tree variables;
88   struct pointer_set_t *privatized_types;
89   location_t location;
90   enum omp_clause_default_kind default_kind;
91   enum omp_region_type region_type;
92 };
93 
94 static struct gimplify_ctx *gimplify_ctxp;
95 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
96 
97 
98 /* Formal (expression) temporary table handling: Multiple occurrences of
99    the same scalar expression are evaluated into the same temporary.  */
100 
101 typedef struct gimple_temp_hash_elt
102 {
103   tree val;   /* Key */
104   tree temp;  /* Value */
105 } elt_t;
106 
107 /* Forward declarations.  */
108 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
109 
110 /* Mark X addressable.  Unlike the langhook we expect X to be in gimple
111    form and we don't do any syntax checking.  */
112 void
113 mark_addressable (tree x)
114 {
115   while (handled_component_p (x))
116     x = TREE_OPERAND (x, 0);
117   if (TREE_CODE (x) != VAR_DECL
118       && TREE_CODE (x) != PARM_DECL
119       && TREE_CODE (x) != RESULT_DECL)
120   return;
121   TREE_ADDRESSABLE (x) = 1;
122 }
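
/* Illustrative example (not in the original sources): for a reference such
   as a.b[i].c, the loop above strips the COMPONENT_REF/ARRAY_REF wrappers
   down to the base object `a' and sets TREE_ADDRESSABLE on it; bases other
   than a VAR_DECL, PARM_DECL or RESULT_DECL are left untouched.  */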
123 
124 /* Return a hash value for a formal temporary table entry.  */
125 
126 static hashval_t
127 gimple_tree_hash (const void *p)
128 {
129   tree t = ((const elt_t *) p)->val;
130   return iterative_hash_expr (t, 0);
131 }
132 
133 /* Compare two formal temporary table entries.  */
134 
135 static int
136 gimple_tree_eq (const void *p1, const void *p2)
137 {
138   tree t1 = ((const elt_t *) p1)->val;
139   tree t2 = ((const elt_t *) p2)->val;
140   enum tree_code code = TREE_CODE (t1);
141 
142   if (TREE_CODE (t2) != code
143       || TREE_TYPE (t1) != TREE_TYPE (t2))
144     return 0;
145 
146   if (!operand_equal_p (t1, t2, 0))
147     return 0;
148 
149   /* Only allow them to compare equal if they also hash equal; otherwise
150      results are nondeterministic, and we fail bootstrap comparison.  */
151   gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
152 
153   return 1;
154 }
155 
156 /* Link gimple statement GS to the end of the sequence *SEQ_P.  If
157    *SEQ_P is NULL, a new sequence is allocated.  This function is
158    similar to gimple_seq_add_stmt, but does not scan the operands.
159    During gimplification, we need to manipulate statement sequences
160    before the def/use vectors have been constructed.  */
161 
162 void
163 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
164 {
165   gimple_stmt_iterator si;
166 
167   if (gs == NULL)
168     return;
169 
170   if (*seq_p == NULL)
171     *seq_p = gimple_seq_alloc ();
172 
173   si = gsi_last (*seq_p);
174 
175   gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
176 }
177 
178 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
179    NULL, a new sequence is allocated.   This function is
180    similar to gimple_seq_add_seq, but does not scan the operands.
181    During gimplification, we need to manipulate statement sequences
182    before the def/use vectors have been constructed.  */
183 
184 static void
185 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
186 {
187   gimple_stmt_iterator si;
188 
189   if (src == NULL)
190     return;
191 
192   if (*dst_p == NULL)
193     *dst_p = gimple_seq_alloc ();
194 
195   si = gsi_last (*dst_p);
196   gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
197 }
198 
199 /* Set up a context for the gimplifier.  */
200 
201 void
202 push_gimplify_context (struct gimplify_ctx *c)
203 {
204   memset (c, '\0', sizeof (*c));
205   c->prev_context = gimplify_ctxp;
206   gimplify_ctxp = c;
207 }
208 
209 /* Tear down a context for the gimplifier.  If BODY is non-null, then
210    put the temporaries into the outer BIND_EXPR.  Otherwise, put them
211    in the local_decls.
212 
213    BODY is not a sequence, but the first tuple in a sequence.  */
214 
215 void
216 pop_gimplify_context (gimple body)
217 {
218   struct gimplify_ctx *c = gimplify_ctxp;
219 
220   gcc_assert (c && (c->bind_expr_stack == NULL
221 		    || VEC_empty (gimple, c->bind_expr_stack)));
222   VEC_free (gimple, heap, c->bind_expr_stack);
223   gimplify_ctxp = c->prev_context;
224 
225   if (body)
226     declare_vars (c->temps, body, false);
227   else
228     record_vars (c->temps);
229 
230   if (c->temp_htab)
231     htab_delete (c->temp_htab);
232 }
233 
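/* Record GIMPLE_BIND on the stack of GIMPLE_BINDs currently being
   gimplified.  */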
234 static void
235 gimple_push_bind_expr (gimple gimple_bind)
236 {
237   if (gimplify_ctxp->bind_expr_stack == NULL)
238     gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
239   VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
240 }
241 
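/* Pop the innermost GIMPLE_BIND off the bind expression stack.  */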
242 static void
243 gimple_pop_bind_expr (void)
244 {
245   VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
246 }
247 
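/* Return the innermost GIMPLE_BIND being gimplified, i.e. the one most
   recently pushed.  */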
248 gimple
249 gimple_current_bind_expr (void)
250 {
251   return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
252 }
253 
254 /* Return the stack of GIMPLE_BINDs created during gimplification.  */
255 
256 VEC(gimple, heap) *
257 gimple_bind_expr_stack (void)
258 {
259   return gimplify_ctxp->bind_expr_stack;
260 }
261 
262 /* Returns true iff there is a COND_EXPR between us and the innermost
263    CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */
264 
265 static bool
266 gimple_conditional_context (void)
267 {
268   return gimplify_ctxp->conditions > 0;
269 }
270 
271 /* Note that we've entered a COND_EXPR.  */
272 
273 static void
274 gimple_push_condition (void)
275 {
276 #ifdef ENABLE_GIMPLE_CHECKING
277   if (gimplify_ctxp->conditions == 0)
278     gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
279 #endif
280   ++(gimplify_ctxp->conditions);
281 }
282 
283 /* Note that we've left a COND_EXPR.  If we're back at unconditional scope
284    now, add any conditional cleanups we've seen to the prequeue.  */
285 
286 static void
287 gimple_pop_condition (gimple_seq *pre_p)
288 {
289   int conds = --(gimplify_ctxp->conditions);
290 
291   gcc_assert (conds >= 0);
292   if (conds == 0)
293     {
294       gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
295       gimplify_ctxp->conditional_cleanups = NULL;
296     }
297 }
298 
299 /* A stable comparison routine for use with splay trees and DECLs.  */
300 
301 static int
302 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
303 {
304   tree a = (tree) xa;
305   tree b = (tree) xb;
306 
307   return DECL_UID (a) - DECL_UID (b);
308 }
309 
310 /* Create a new omp construct that deals with variable remapping.  */
311 
312 static struct gimplify_omp_ctx *
313 new_omp_context (enum omp_region_type region_type)
314 {
315   struct gimplify_omp_ctx *c;
316 
317   c = XCNEW (struct gimplify_omp_ctx);
318   c->outer_context = gimplify_omp_ctxp;
319   c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
320   c->privatized_types = pointer_set_create ();
321   c->location = input_location;
322   c->region_type = region_type;
323   if ((region_type & ORT_TASK) == 0)
324     c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
325   else
326     c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
327 
328   return c;
329 }
330 
331 /* Destroy an omp construct that deals with variable remapping.  */
332 
333 static void
334 delete_omp_context (struct gimplify_omp_ctx *c)
335 {
336   splay_tree_delete (c->variables);
337   pointer_set_destroy (c->privatized_types);
338   XDELETE (c);
339 }
340 
341 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
342 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
343 
344 /* A subroutine of append_to_statement_list{,_force}.  T is not NULL.  */
345 
346 static void
347 append_to_statement_list_1 (tree t, tree *list_p)
348 {
349   tree list = *list_p;
350   tree_stmt_iterator i;
351 
352   if (!list)
353     {
354       if (t && TREE_CODE (t) == STATEMENT_LIST)
355 	{
356 	  *list_p = t;
357 	  return;
358 	}
359       *list_p = list = alloc_stmt_list ();
360     }
361 
362   i = tsi_last (list);
363   tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
364 }
365 
366 /* Add T to the end of the list container pointed to by LIST_P.
367    If T is an expression with no effects, it is ignored.  */
368 
369 void
370 append_to_statement_list (tree t, tree *list_p)
371 {
372   if (t && TREE_SIDE_EFFECTS (t))
373     append_to_statement_list_1 (t, list_p);
374 }
375 
376 /* Similar, but the statement is always added, regardless of side effects.  */
377 
378 void
379 append_to_statement_list_force (tree t, tree *list_p)
380 {
381   if (t != NULL_TREE)
382     append_to_statement_list_1 (t, list_p);
383 }
384 
385 /* Both gimplify the statement T and append it to *SEQ_P.  This function
386    behaves exactly as gimplify_stmt, but you don't have to pass T as a
387    reference.  */
388 
389 void
390 gimplify_and_add (tree t, gimple_seq *seq_p)
391 {
392   gimplify_stmt (&t, seq_p);
393 }
394 
395 /* Gimplify statement T into sequence *SEQ_P, and return the first
396    tuple in the sequence of generated tuples for this statement.
397    Return NULL if gimplifying T produced no tuples.  */
398 
399 static gimple
400 gimplify_and_return_first (tree t, gimple_seq *seq_p)
401 {
402   gimple_stmt_iterator last = gsi_last (*seq_p);
403 
404   gimplify_and_add (t, seq_p);
405 
406   if (!gsi_end_p (last))
407     {
408       gsi_next (&last);
409       return gsi_stmt (last);
410     }
411   else
412     return gimple_seq_first_stmt (*seq_p);
413 }
414 
415 /* Strip off a legitimate source ending from the input string NAME of
416    length LEN.  Rather than having to know the names used by all of
417    our front ends, we strip off an ending of a period followed by
418    up to six characters.  (Java uses ".class".)  */
419 
420 static inline void
421 remove_suffix (char *name, int len)
422 {
423   int i;
424 
425   for (i = 2;  i < 8 && len > i;  i++)
426     {
427       if (name[len - i] == '.')
428 	{
429 	  name[len - i] = '\0';
430 	  break;
431 	}
432     }
433 }
434 
435 /* Create a new temporary name with PREFIX.  Returns an identifier.  */
436 
437 static GTY(()) unsigned int tmp_var_id_num;
438 
439 tree
440 create_tmp_var_name (const char *prefix)
441 {
442   char *tmp_name;
443 
444   if (prefix)
445     {
446       char *preftmp = ASTRDUP (prefix);
447 
448       remove_suffix (preftmp, strlen (preftmp));
449       prefix = preftmp;
450     }
451 
452   ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
453   return get_identifier (tmp_name);
454 }
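
/* Illustrative sketch (not in the original sources): a call such as
   create_tmp_var_name ("foo.c") first reduces the prefix to "foo" via
   remove_suffix and then hands it to ASM_FORMAT_PRIVATE_NAME, producing a
   target-dependent identifier along the lines of "foo.123", with the
   trailing number taken from tmp_var_id_num.  */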
455 
456 
457 /* Create a new temporary variable declaration of type TYPE.
458    Does NOT push it into the current binding.  */
459 
460 tree
461 create_tmp_var_raw (tree type, const char *prefix)
462 {
463   tree tmp_var;
464   tree new_type;
465 
466   /* Make the type of the variable writable.  */
467   new_type = build_type_variant (type, 0, 0);
468   TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
469 
470   tmp_var = build_decl (input_location,
471 			VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
472 			type);
473 
474   /* The variable was declared by the compiler.  */
475   DECL_ARTIFICIAL (tmp_var) = 1;
476   /* And we don't want debug info for it.  */
477   DECL_IGNORED_P (tmp_var) = 1;
478 
479   /* Make the variable writable.  */
480   TREE_READONLY (tmp_var) = 0;
481 
482   DECL_EXTERNAL (tmp_var) = 0;
483   TREE_STATIC (tmp_var) = 0;
484   TREE_USED (tmp_var) = 1;
485 
486   return tmp_var;
487 }
488 
489 /* Create a new temporary variable declaration of type TYPE.  DOES push the
490    variable into the current binding.  Further, assume that this is called
491    only from gimplification or optimization, at which point the creation of
492    certain types is a bug.  */
493 
494 tree
495 create_tmp_var (tree type, const char *prefix)
496 {
497   tree tmp_var;
498 
499   /* We don't allow types that are addressable (meaning we can't make copies),
500      or incomplete.  We also used to reject all variable-sized objects here,
501      but now support those for which a constant upper bound can be obtained.
502      The processing for variable sizes is performed in gimple_add_tmp_var,
503      the point at which it really matters, possibly reached via paths not going
504      through this function, e.g. after direct calls to create_tmp_var_raw.  */
505   gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
506 
507   tmp_var = create_tmp_var_raw (type, prefix);
508   gimple_add_tmp_var (tmp_var);
509   return tmp_var;
510 }
511 
512 /* Create a temporary with a name derived from VAL.  Subroutine of
513    lookup_tmp_var; nobody else should call this function.  */
514 
515 static inline tree
516 create_tmp_from_val (tree val)
517 {
518   return create_tmp_var (TREE_TYPE (val), get_name (val));
519 }
520 
521 /* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
522    an existing expression temporary.  */
523 
524 static tree
525 lookup_tmp_var (tree val, bool is_formal)
526 {
527   tree ret;
528 
529   /* If not optimizing, never really reuse a temporary.  local-alloc
530      won't allocate any variable that is used in more than one basic
531      block, which means it will go into memory, causing much extra
532      work in reload and final and poorer code generation, outweighing
533      the extra memory allocation here.  */
534   if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
535     ret = create_tmp_from_val (val);
536   else
537     {
538       elt_t elt, *elt_p;
539       void **slot;
540 
541       elt.val = val;
542       if (gimplify_ctxp->temp_htab == NULL)
543         gimplify_ctxp->temp_htab
544 	  = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
545       slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
546       if (*slot == NULL)
547 	{
548 	  elt_p = XNEW (elt_t);
549 	  elt_p->val = val;
550 	  elt_p->temp = ret = create_tmp_from_val (val);
551 	  *slot = (void *) elt_p;
552 	}
553       else
554 	{
555 	  elt_p = (elt_t *) *slot;
556           ret = elt_p->temp;
557 	}
558     }
559 
560   return ret;
561 }
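
/* Illustrative sketch (not in the original sources): with optimization
   enabled, gimplifying two occurrences of the same formal expression, say
   a + b, yields two assignments to one shared temporary

     D.1234 = a + b;
     ...
     D.1234 = a + b;

   rather than two distinct temporaries, which makes the redundancy trivial
   for later passes to clean up.  When not optimizing, when the temporary is
   not formal, or when VAL has side effects, a fresh temporary is created
   each time.  */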
562 
563 
564 /* Return true if T is a CALL_EXPR or an expression that can be
565    assigned to a temporary.  Note that this predicate should only be
566    used during gimplification.  See the rationale for this in
567    gimplify_modify_expr.  */
568 
569 static bool
570 is_gimple_reg_rhs_or_call (tree t)
571 {
572   return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
573 	  || TREE_CODE (t) == CALL_EXPR);
574 }
575 
576 /* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
577    this predicate should only be used during gimplification.  See the
578    rationale for this in gimplify_modify_expr.  */
579 
580 static bool
581 is_gimple_mem_rhs_or_call (tree t)
582 {
583   /* If we're dealing with a renamable type, either source or dest must be
584      a renamed variable.  */
585   if (is_gimple_reg_type (TREE_TYPE (t)))
586     return is_gimple_val (t);
587   else
588     return (is_gimple_val (t) || is_gimple_lvalue (t)
589 	    || TREE_CODE (t) == CALL_EXPR);
590 }
591 
592 /* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */
593 
594 static tree
595 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
596                       bool is_formal)
597 {
598   tree t, mod;
599 
600   /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
601      can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
602   gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
603 		 fb_rvalue);
604 
605   t = lookup_tmp_var (val, is_formal);
606 
607   if (is_formal
608       && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
609 	  || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
610     DECL_GIMPLE_REG_P (t) = 1;
611 
612   mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
613 
614   if (EXPR_HAS_LOCATION (val))
615     SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
616   else
617     SET_EXPR_LOCATION (mod, input_location);
618 
619   /* gimplify_modify_expr might want to reduce this further.  */
620   gimplify_and_add (mod, pre_p);
621   ggc_free (mod);
622 
623   /* If we're gimplifying into ssa, gimplify_modify_expr will have
624      given our temporary an SSA name.  Find and return it.  */
625   if (gimplify_ctxp->into_ssa)
626     {
627       gimple last = gimple_seq_last_stmt (*pre_p);
628       t = gimple_get_lhs (last);
629     }
630 
631   return t;
632 }
633 
634 /* Returns a formal temporary variable initialized with VAL.  PRE_P is as
635    in gimplify_expr.  Only use this function if:
636 
637    1) The value of the unfactored expression represented by VAL will not
638       change between the initialization and use of the temporary, and
639    2) The temporary will not be otherwise modified.
640 
641    For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
642    and #2 means it is inappropriate for && temps.
643 
644    For other cases, use get_initialized_tmp_var instead.  */
645 
646 tree
647 get_formal_tmp_var (tree val, gimple_seq *pre_p)
648 {
649   return internal_get_tmp_var (val, pre_p, NULL, true);
650 }
651 
652 /* Returns a temporary variable initialized with VAL.  PRE_P and POST_P
653    are as in gimplify_expr.  */
654 
655 tree
656 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
657 {
658   return internal_get_tmp_var (val, pre_p, post_p, false);
659 }
660 
661 /* Declares all the variables in VARS in SCOPE.  If DEBUG_INFO is
662    true, generate debug info for them; otherwise don't.  */
663 
664 void
665 declare_vars (tree vars, gimple scope, bool debug_info)
666 {
667   tree last = vars;
668   if (last)
669     {
670       tree temps, block;
671 
672       gcc_assert (gimple_code (scope) == GIMPLE_BIND);
673 
674       temps = nreverse (last);
675 
676       block = gimple_bind_block (scope);
677       gcc_assert (!block || TREE_CODE (block) == BLOCK);
678       if (!block || !debug_info)
679 	{
680 	  TREE_CHAIN (last) = gimple_bind_vars (scope);
681 	  gimple_bind_set_vars (scope, temps);
682 	}
683       else
684 	{
685 	  /* We need to attach the nodes both to the BIND_EXPR and to its
686 	     associated BLOCK for debugging purposes.  The key point here
687 	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
688 	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
689 	  if (BLOCK_VARS (block))
690 	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
691 	  else
692 	    {
693 	      gimple_bind_set_vars (scope,
694 	      			    chainon (gimple_bind_vars (scope), temps));
695 	      BLOCK_VARS (block) = temps;
696 	    }
697 	}
698     }
699 }
700 
701 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
702    for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
703    no such upper bound can be obtained.  */
704 
705 static void
706 force_constant_size (tree var)
707 {
708   /* The only attempt we make is by querying the maximum size of objects
709      of the variable's type.  */
710 
711   HOST_WIDE_INT max_size;
712 
713   gcc_assert (TREE_CODE (var) == VAR_DECL);
714 
715   max_size = max_int_size_in_bytes (TREE_TYPE (var));
716 
717   gcc_assert (max_size >= 0);
718 
719   DECL_SIZE_UNIT (var)
720     = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
721   DECL_SIZE (var)
722     = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
723 }
724 
725 void
726 gimple_add_tmp_var (tree tmp)
727 {
728   gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
729 
730   /* Later processing assumes that the object size is constant, which might
731      not be true at this point.  Force the use of a constant upper bound in
732      this case.  */
733   if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
734     force_constant_size (tmp);
735 
736   DECL_CONTEXT (tmp) = current_function_decl;
737   DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
738 
739   if (gimplify_ctxp)
740     {
741       TREE_CHAIN (tmp) = gimplify_ctxp->temps;
742       gimplify_ctxp->temps = tmp;
743 
744       /* Mark temporaries local within the nearest enclosing parallel.  */
745       if (gimplify_omp_ctxp)
746 	{
747 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
748 	  while (ctx && ctx->region_type == ORT_WORKSHARE)
749 	    ctx = ctx->outer_context;
750 	  if (ctx)
751 	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
752 	}
753     }
754   else if (cfun)
755     record_vars (tmp);
756   else
757     {
758       gimple_seq body_seq;
759 
760       /* This case is for nested functions.  We need to expose the locals
761 	 they create.  */
762       body_seq = gimple_body (current_function_decl);
763       declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
764     }
765 }
766 
767 /* Determines whether to assign a location to the statement GS.  */
768 
769 static bool
770 should_carry_location_p (gimple gs)
771 {
772   /* Don't emit a line note for a label.  We particularly don't want to
773      emit one for the break label, since it doesn't actually correspond
774      to the beginning of the loop/switch.  */
775   if (gimple_code (gs) == GIMPLE_LABEL)
776     return false;
777 
778   return true;
779 }
780 
781 
782 /* Return true if a location should not be emitted for this statement
783    by annotate_one_with_location.  */
784 
785 static inline bool
786 gimple_do_not_emit_location_p (gimple g)
787 {
788   return gimple_plf (g, GF_PLF_1);
789 }
790 
791 /* Mark statement G so a location will not be emitted by
792    annotate_one_with_location.  */
793 
794 static inline void
795 gimple_set_do_not_emit_location (gimple g)
796 {
797   /* The PLF flags are initialized to 0 when a new tuple is created,
798      so no need to initialize it anywhere.  */
799   gimple_set_plf (g, GF_PLF_1, true);
800 }
801 
802 /* Set the location for gimple statement GS to LOCATION.  */
803 
804 static void
805 annotate_one_with_location (gimple gs, location_t location)
806 {
807   if (!gimple_has_location (gs)
808       && !gimple_do_not_emit_location_p (gs)
809       && should_carry_location_p (gs))
810     gimple_set_location (gs, location);
811 }
812 
813 
814 /* Set LOCATION for all the statements after iterator GSI in sequence
815    SEQ.  If GSI is pointing to the end of the sequence, start with the
816    first statement in SEQ.  */
817 
818 static void
819 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
820 				  location_t location)
821 {
822   if (gsi_end_p (gsi))
823     gsi = gsi_start (seq);
824   else
825     gsi_next (&gsi);
826 
827   for (; !gsi_end_p (gsi); gsi_next (&gsi))
828     annotate_one_with_location (gsi_stmt (gsi), location);
829 }
830 
831 
832 /* Set the location for all the statements in a sequence STMT_P to LOCATION.  */
833 
834 void
835 annotate_all_with_location (gimple_seq stmt_p, location_t location)
836 {
837   gimple_stmt_iterator i;
838 
839   if (gimple_seq_empty_p (stmt_p))
840     return;
841 
842   for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
843     {
844       gimple gs = gsi_stmt (i);
845       annotate_one_with_location (gs, location);
846     }
847 }
848 
849 
850 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
851    These nodes model computations that should only be done once.  If we
852    were to unshare something like SAVE_EXPR(i++), the gimplification
853    process would create wrong code.  */
854 
855 static tree
856 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
857 {
858   enum tree_code code = TREE_CODE (*tp);
859   /* Don't unshare types, decls, constants and SAVE_EXPR nodes.  */
860   if (TREE_CODE_CLASS (code) == tcc_type
861       || TREE_CODE_CLASS (code) == tcc_declaration
862       || TREE_CODE_CLASS (code) == tcc_constant
863       || code == SAVE_EXPR || code == TARGET_EXPR
864       /* We can't do anything sensible with a BLOCK used as an expression,
865 	 but we also can't just die when we see it because of non-expression
866 	 uses.  So just avert our eyes and cross our fingers.  Silly Java.  */
867       || code == BLOCK)
868     *walk_subtrees = 0;
869   else
870     {
871       gcc_assert (code != BIND_EXPR);
872       copy_tree_r (tp, walk_subtrees, data);
873     }
874 
875   return NULL_TREE;
876 }
877 
878 /* Callback for walk_tree to unshare most of the shared trees rooted at
879    *TP.  If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
880    then *TP is deep copied by calling copy_tree_r.
881 
882    This unshares the same trees as copy_tree_r with the exception of
883    SAVE_EXPR nodes.  These nodes model computations that should only be
884    done once.  If we were to unshare something like SAVE_EXPR(i++), the
885    gimplification process would create wrong code.  */
886 
887 static tree
888 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
889 		  void *data ATTRIBUTE_UNUSED)
890 {
891   tree t = *tp;
892   enum tree_code code = TREE_CODE (t);
893 
894   /* Skip types, decls, and constants.  But we do want to look at their
895      types and the bounds of types.  Mark them as visited so we properly
896      unmark their subtrees on the unmark pass.  If we've already seen them,
897      don't look down further.  */
898   if (TREE_CODE_CLASS (code) == tcc_type
899       || TREE_CODE_CLASS (code) == tcc_declaration
900       || TREE_CODE_CLASS (code) == tcc_constant)
901     {
902       if (TREE_VISITED (t))
903 	*walk_subtrees = 0;
904       else
905 	TREE_VISITED (t) = 1;
906     }
907 
908   /* If this node has been visited already, unshare it and don't look
909      any deeper.  */
910   else if (TREE_VISITED (t))
911     {
912       walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
913       *walk_subtrees = 0;
914     }
915 
916   /* Otherwise, mark the tree as visited and keep looking.  */
917   else
918     TREE_VISITED (t) = 1;
919 
920   return NULL_TREE;
921 }
922 
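/* Callback for walk_tree to clear the TREE_VISITED flag set by
   copy_if_shared_r; stop descending as soon as an unvisited node is
   reached.  */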
923 static tree
924 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
925 		  void *data ATTRIBUTE_UNUSED)
926 {
927   if (TREE_VISITED (*tp))
928     TREE_VISITED (*tp) = 0;
929   else
930     *walk_subtrees = 0;
931 
932   return NULL_TREE;
933 }
934 
935 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
936    bodies of any nested functions if we are unsharing the entire body of
937    FNDECL.  */
938 
939 static void
940 unshare_body (tree *body_p, tree fndecl)
941 {
942   struct cgraph_node *cgn = cgraph_node (fndecl);
943 
944   walk_tree (body_p, copy_if_shared_r, NULL, NULL);
945   if (body_p == &DECL_SAVED_TREE (fndecl))
946     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
947       unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
948 }
949 
950 /* Likewise, but mark all trees as not visited.  */
951 
952 static void
953 unvisit_body (tree *body_p, tree fndecl)
954 {
955   struct cgraph_node *cgn = cgraph_node (fndecl);
956 
957   walk_tree (body_p, unmark_visited_r, NULL, NULL);
958   if (body_p == &DECL_SAVED_TREE (fndecl))
959     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
960       unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
961 }
962 
963 /* Unconditionally make an unshared copy of EXPR.  This is used for
964    stored expressions which span multiple functions, such as BINFO_VTABLE,
965    as the normal unsharing process can't tell that they're shared.  */
966 
967 tree
968 unshare_expr (tree expr)
969 {
970   walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
971   return expr;
972 }
973 
974 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
975    contain statements and have a value.  Assign its value to a temporary
976    and give it void_type_node.  Returns the temporary, or NULL_TREE if
977    WRAPPER was already void.  */
978 
979 tree
980 voidify_wrapper_expr (tree wrapper, tree temp)
981 {
982   tree type = TREE_TYPE (wrapper);
983   if (type && !VOID_TYPE_P (type))
984     {
985       tree *p;
986 
987       /* Set p to point to the body of the wrapper.  Loop until we find
988 	 something that isn't a wrapper.  */
989       for (p = &wrapper; p && *p; )
990 	{
991 	  switch (TREE_CODE (*p))
992 	    {
993 	    case BIND_EXPR:
994 	      TREE_SIDE_EFFECTS (*p) = 1;
995 	      TREE_TYPE (*p) = void_type_node;
996 	      /* For a BIND_EXPR, the body is operand 1.  */
997 	      p = &BIND_EXPR_BODY (*p);
998 	      break;
999 
1000 	    case CLEANUP_POINT_EXPR:
1001 	    case TRY_FINALLY_EXPR:
1002 	    case TRY_CATCH_EXPR:
1003 	      TREE_SIDE_EFFECTS (*p) = 1;
1004 	      TREE_TYPE (*p) = void_type_node;
1005 	      p = &TREE_OPERAND (*p, 0);
1006 	      break;
1007 
1008 	    case STATEMENT_LIST:
1009 	      {
1010 		tree_stmt_iterator i = tsi_last (*p);
1011 		TREE_SIDE_EFFECTS (*p) = 1;
1012 		TREE_TYPE (*p) = void_type_node;
1013 		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1014 	      }
1015 	      break;
1016 
1017 	    case COMPOUND_EXPR:
1018 	      /* Advance to the last statement.  Set all container types to void.  */
1019 	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1020 		{
1021 		  TREE_SIDE_EFFECTS (*p) = 1;
1022 		  TREE_TYPE (*p) = void_type_node;
1023 		}
1024 	      break;
1025 
1026 	    default:
1027 	      goto out;
1028 	    }
1029 	}
1030 
1031     out:
1032       if (p == NULL || IS_EMPTY_STMT (*p))
1033 	temp = NULL_TREE;
1034       else if (temp)
1035 	{
1036 	  /* The wrapper is on the RHS of an assignment that we're pushing
1037 	     down.  */
1038 	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
1039 		      || TREE_CODE (temp) == MODIFY_EXPR);
1040 	  TREE_OPERAND (temp, 1) = *p;
1041 	  *p = temp;
1042 	}
1043       else
1044 	{
1045 	  temp = create_tmp_var (type, "retval");
1046 	  *p = build2 (INIT_EXPR, type, temp, *p);
1047 	}
1048 
1049       return temp;
1050     }
1051 
1052   return NULL_TREE;
1053 }
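
/* Illustrative sketch (not in the original sources): for a wrapper used for
   its value, such as the statement expression in

     x = ({ int i; i = f (); i; });

   the loop above descends to the final value expression `i', replaces it
   with an assignment to a "retval" temporary (or to the LHS assignment
   being pushed down, when TEMP is given), gives every enclosing container
   void type, and returns the temporary so the caller can use it in place
   of the wrapper's value.  */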
1054 
1055 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1056    a temporary through which they communicate.  */
1057 
1058 static void
1059 build_stack_save_restore (gimple *save, gimple *restore)
1060 {
1061   tree tmp_var;
1062 
1063   *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1064   tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1065   gimple_call_set_lhs (*save, tmp_var);
1066 
1067   *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1068 			    1, tmp_var);
1069 }
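
/* Illustrative sketch (not in the original sources): when a GIMPLE_BIND
   needs its stack level restored (e.g. because its body declared a VLA),
   gimplify_bind_expr below uses these two calls to wrap the body roughly as

     saved_stack.1 = __builtin_stack_save ();
     try
       {
         ... body ...
       }
     finally
       {
         __builtin_stack_restore (saved_stack.1);
       }
*/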
1070 
1071 /* Gimplify a BIND_EXPR.  Just voidify and recurse.  */
1072 
1073 static enum gimplify_status
1074 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1075 {
1076   tree bind_expr = *expr_p;
1077   bool old_save_stack = gimplify_ctxp->save_stack;
1078   tree t;
1079   gimple gimple_bind;
1080   gimple_seq body;
1081 
1082   tree temp = voidify_wrapper_expr (bind_expr, NULL);
1083 
1084   /* Mark variables seen in this bind expr.  */
1085   for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1086     {
1087       if (TREE_CODE (t) == VAR_DECL)
1088 	{
1089 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1090 
1091 	  /* Mark variable as local.  */
1092 	  if (ctx && !is_global_var (t)
1093 	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1094 		  || splay_tree_lookup (ctx->variables,
1095 					(splay_tree_key) t) == NULL))
1096 	    omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1097 
1098 	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1099 
1100 	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1101 	    cfun->has_local_explicit_reg_vars = true;
1102 	}
1103 
1104       /* Preliminarily mark non-addressed complex variables as eligible
1105 	 for promotion to gimple registers.  We'll transform their uses
1106 	 as we find them.
1107 	 We exclude complex types if not optimizing because they can be
1108 	 subject to partial stores in GNU C by means of the __real__ and
1109 	 __imag__ operators and we cannot promote them to total stores
1110 	 (see gimplify_modify_expr_complex_part).  */
1111       if (optimize
1112 	  && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1113 	      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1114 	  && !TREE_THIS_VOLATILE (t)
1115 	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1116 	  && !needs_to_live_in_memory (t))
1117 	DECL_GIMPLE_REG_P (t) = 1;
1118     }
1119 
1120   gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1121                                    BIND_EXPR_BLOCK (bind_expr));
1122   gimple_push_bind_expr (gimple_bind);
1123 
1124   gimplify_ctxp->save_stack = false;
1125 
1126   /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
1127   body = NULL;
1128   gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1129   gimple_bind_set_body (gimple_bind, body);
1130 
1131   if (gimplify_ctxp->save_stack)
1132     {
1133       gimple stack_save, stack_restore, gs;
1134       gimple_seq cleanup, new_body;
1135 
1136       /* Save stack on entry and restore it on exit.  Add a try_finally
1137 	 block to achieve this.  Note that mudflap depends on the
1138 	 format of the emitted code: see mx_register_decls().  */
1139       build_stack_save_restore (&stack_save, &stack_restore);
1140 
1141       cleanup = new_body = NULL;
1142       gimplify_seq_add_stmt (&cleanup, stack_restore);
1143       gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1144 	  		     GIMPLE_TRY_FINALLY);
1145 
1146       gimplify_seq_add_stmt (&new_body, stack_save);
1147       gimplify_seq_add_stmt (&new_body, gs);
1148       gimple_bind_set_body (gimple_bind, new_body);
1149     }
1150 
1151   gimplify_ctxp->save_stack = old_save_stack;
1152   gimple_pop_bind_expr ();
1153 
1154   gimplify_seq_add_stmt (pre_p, gimple_bind);
1155 
1156   if (temp)
1157     {
1158       *expr_p = temp;
1159       return GS_OK;
1160     }
1161 
1162   *expr_p = NULL_TREE;
1163   return GS_ALL_DONE;
1164 }
1165 
1166 /* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
1167    GIMPLE value, it is assigned to a new temporary and the statement is
1168    re-written to return the temporary.
1169 
1170    PRE_P points to the sequence where side effects that must happen before
1171    STMT should be stored.  */
1172 
1173 static enum gimplify_status
1174 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1175 {
1176   gimple ret;
1177   tree ret_expr = TREE_OPERAND (stmt, 0);
1178   tree result_decl, result;
1179 
1180   if (ret_expr == error_mark_node)
1181     return GS_ERROR;
1182 
1183   if (!ret_expr
1184       || TREE_CODE (ret_expr) == RESULT_DECL
1185       || ret_expr == error_mark_node)
1186     {
1187       gimple ret = gimple_build_return (ret_expr);
1188       gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1189       gimplify_seq_add_stmt (pre_p, ret);
1190       return GS_ALL_DONE;
1191     }
1192 
1193   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1194     result_decl = NULL_TREE;
1195   else
1196     {
1197       result_decl = TREE_OPERAND (ret_expr, 0);
1198 
1199       /* See through a return by reference.  */
1200       if (TREE_CODE (result_decl) == INDIRECT_REF)
1201 	result_decl = TREE_OPERAND (result_decl, 0);
1202 
1203       gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1204 		   || TREE_CODE (ret_expr) == INIT_EXPR)
1205 		  && TREE_CODE (result_decl) == RESULT_DECL);
1206     }
1207 
1208   /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1209      Recall that aggregate_value_p is FALSE for any aggregate type that is
1210      returned in registers.  If we're returning values in registers, then
1211      we don't want to extend the lifetime of the RESULT_DECL, particularly
1212      across another call.  In addition, for those aggregates for which
1213      hard_function_value generates a PARALLEL, we'll die during normal
1214      expansion of structure assignments; there's special code in expand_return
1215      to handle this case that does not exist in expand_expr.  */
1216   if (!result_decl
1217       || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1218     result = result_decl;
1219   else if (gimplify_ctxp->return_temp)
1220     result = gimplify_ctxp->return_temp;
1221   else
1222     {
1223       result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1224       if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1225           || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1226         DECL_GIMPLE_REG_P (result) = 1;
1227 
1228       /* ??? With complex control flow (usually involving abnormal edges),
1229 	 we can wind up warning about an uninitialized value for this.  Due
1230 	 to how this variable is constructed and initialized, this is never
1231 	 true.  Give up and never warn.  */
1232       TREE_NO_WARNING (result) = 1;
1233 
1234       gimplify_ctxp->return_temp = result;
1235     }
1236 
1237   /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1238      Then gimplify the whole thing.  */
1239   if (result != result_decl)
1240     TREE_OPERAND (ret_expr, 0) = result;
1241 
1242   gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1243 
1244   ret = gimple_build_return (result);
1245   gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1246   gimplify_seq_add_stmt (pre_p, ret);
1247 
1248   return GS_ALL_DONE;
1249 }
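
/* Illustrative sketch (not in the original sources): for a function whose
   value is returned in registers, a statement such as

     return a + b;

   arrives here as a RETURN_EXPR of a MODIFY_EXPR or INIT_EXPR assigning to
   the RESULT_DECL and is rewritten as roughly

     retval.2 = a + b;
     return retval.2;

   with the same temporary reused for every return statement in the
   function via gimplify_ctxp->return_temp.  */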
1250 
1251 static void
1252 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1253 {
1254   /* This is a variable-sized decl.  Simplify its size and mark it
1255      for deferred expansion.  Note that mudflap depends on the format
1256      of the emitted code: see mx_register_decls().  */
1257   tree t, addr, ptr_type;
1258 
1259   gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1260   gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1261 
1262   /* All occurrences of this decl in final gimplified code will be
1263      replaced by indirection.  Setting DECL_VALUE_EXPR does two
1264      things: First, it lets the rest of the gimplifier know what
1265      replacement to use.  Second, it lets the debug info know
1266      where to find the value.  */
1267   ptr_type = build_pointer_type (TREE_TYPE (decl));
1268   addr = create_tmp_var (ptr_type, get_name (decl));
1269   DECL_IGNORED_P (addr) = 0;
1270   t = build_fold_indirect_ref (addr);
1271   SET_DECL_VALUE_EXPR (decl, t);
1272   DECL_HAS_VALUE_EXPR_P (decl) = 1;
1273 
1274   t = built_in_decls[BUILT_IN_ALLOCA];
1275   t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1276   t = fold_convert (ptr_type, t);
1277   t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1278 
1279   gimplify_and_add (t, seq_p);
1280 
1281   /* Indicate that we need to restore the stack level when the
1282      enclosing BIND_EXPR is exited.  */
1283   gimplify_ctxp->save_stack = true;
1284 }
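
/* Illustrative sketch (not in the original sources): for a declaration such
   as `char buf[n];' this emits roughly

     buf.3 = __builtin_alloca (<gimplified size of buf>);

   sets DECL_VALUE_EXPR (buf) to *buf.3 so that later references to `buf'
   are rewritten as indirections through the pointer, and requests a stack
   save/restore pair around the enclosing GIMPLE_BIND.  */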
1285 
1286 
1287 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1288    and initialization explicit.  */
1289 
1290 static enum gimplify_status
1291 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1292 {
1293   tree stmt = *stmt_p;
1294   tree decl = DECL_EXPR_DECL (stmt);
1295 
1296   *stmt_p = NULL_TREE;
1297 
1298   if (TREE_TYPE (decl) == error_mark_node)
1299     return GS_ERROR;
1300 
1301   if ((TREE_CODE (decl) == TYPE_DECL
1302        || TREE_CODE (decl) == VAR_DECL)
1303       && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1304     gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1305 
1306   if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1307     {
1308       tree init = DECL_INITIAL (decl);
1309 
1310       if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1311 	  || (!TREE_STATIC (decl)
1312 	      && flag_stack_check == GENERIC_STACK_CHECK
1313 	      && compare_tree_int (DECL_SIZE_UNIT (decl),
1314 				   STACK_CHECK_MAX_VAR_SIZE) > 0))
1315 	gimplify_vla_decl (decl, seq_p);
1316 
1317       if (init && init != error_mark_node)
1318 	{
1319 	  if (!TREE_STATIC (decl))
1320 	    {
1321 	      DECL_INITIAL (decl) = NULL_TREE;
1322 	      init = build2 (INIT_EXPR, void_type_node, decl, init);
1323 	      gimplify_and_add (init, seq_p);
1324 	      ggc_free (init);
1325 	    }
1326 	  else
1327 	    /* We must still examine initializers for static variables
1328 	       as they may contain a label address.  */
1329 	    walk_tree (&init, force_labels_r, NULL, NULL);
1330 	}
1331 
1332       /* Some front ends do not explicitly declare all anonymous
1333 	 artificial variables.  We compensate here by declaring the
1334 	 variables, though it would be better if the front ends would
1335 	 explicitly declare them.  */
1336       if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1337 	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1338 	gimple_add_tmp_var (decl);
1339     }
1340 
1341   return GS_ALL_DONE;
1342 }
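
/* Illustrative sketch (not in the original sources): a local declaration
   with an initializer, say `int i = f ();', reaches here as a DECL_EXPR;
   its DECL_INITIAL is cleared and re-emitted as an explicit assignment

     i = f ();

   in *SEQ_P, whereas a static initializer is left in place and only walked
   to force any labels whose addresses it takes.  */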
1343 
1344 /* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
1345    and replacing the LOOP_EXPR with a goto, but if the loop contains an
1346    EXIT_EXPR, we need to append a label for it to jump to.  */
1347 
1348 static enum gimplify_status
1349 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1350 {
1351   tree saved_label = gimplify_ctxp->exit_label;
1352   tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1353 
1354   gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1355 
1356   gimplify_ctxp->exit_label = NULL_TREE;
1357 
1358   gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1359 
1360   gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1361 
1362   if (gimplify_ctxp->exit_label)
1363     gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1364 
1365   gimplify_ctxp->exit_label = saved_label;
1366 
1367   *expr_p = NULL;
1368   return GS_ALL_DONE;
1369 }
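
/* Illustrative sketch (not in the original sources): a LOOP_EXPR whose body
   contains EXIT_EXPR <cond> gimplifies to roughly

     <start>:
       ... body before the exit ...
       if (cond) goto <exit>;
       ... rest of the body ...
       goto <start>;
     <exit>:

   where the <exit> label is only emitted if gimplify_exit_expr recorded one
   in gimplify_ctxp->exit_label.  */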
1370 
1371 /* Gimplifies a statement list onto a sequence.  These may be created either
1372    by an enlightened front-end, or by shortcut_cond_expr.  */
1373 
1374 static enum gimplify_status
1375 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1376 {
1377   tree temp = voidify_wrapper_expr (*expr_p, NULL);
1378 
1379   tree_stmt_iterator i = tsi_start (*expr_p);
1380 
1381   while (!tsi_end_p (i))
1382     {
1383       gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1384       tsi_delink (&i);
1385     }
1386 
1387   if (temp)
1388     {
1389       *expr_p = temp;
1390       return GS_OK;
1391     }
1392 
1393   return GS_ALL_DONE;
1394 }
1395 
1396 /* Compare two case labels.  Because the front end should already have
1397    made sure that case ranges do not overlap, it is enough to only compare
1398    the CASE_LOW values of each case label.  */
1399 
1400 static int
1401 compare_case_labels (const void *p1, const void *p2)
1402 {
1403   const_tree const case1 = *(const_tree const*)p1;
1404   const_tree const case2 = *(const_tree const*)p2;
1405 
1406   /* The 'default' case label always goes first.  */
1407   if (!CASE_LOW (case1))
1408     return -1;
1409   else if (!CASE_LOW (case2))
1410     return 1;
1411   else
1412     return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1413 }
1414 
1415 
1416 /* Sort the case labels in LABEL_VEC in place in ascending order.  */
1417 
1418 void
1419 sort_case_labels (VEC(tree,heap)* label_vec)
1420 {
1421   size_t len = VEC_length (tree, label_vec);
1422   qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1423          compare_case_labels);
1424 }
1425 
1426 
1427 /* Gimplify a SWITCH_EXPR, and collect a vector of the labels it can
1428    branch to.  */
1429 
1430 static enum gimplify_status
1431 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1432 {
1433   tree switch_expr = *expr_p;
1434   gimple_seq switch_body_seq = NULL;
1435   enum gimplify_status ret;
1436 
1437   ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1438                        fb_rvalue);
1439   if (ret == GS_ERROR || ret == GS_UNHANDLED)
1440     return ret;
1441 
1442   if (SWITCH_BODY (switch_expr))
1443     {
1444       VEC (tree,heap) *labels;
1445       VEC (tree,heap) *saved_labels;
1446       tree default_case = NULL_TREE;
1447       size_t i, len;
1448       gimple gimple_switch;
1449 
1450       /* If someone can be bothered to fill in the labels, they can
1451 	 be bothered to null out the body too.  */
1452       gcc_assert (!SWITCH_LABELS (switch_expr));
1453 
1454       /* Save the old labels, get new ones from the body, then restore the
1455          old labels.  Save everything from the switch body to append afterwards.  */
1456       saved_labels = gimplify_ctxp->case_labels;
1457       gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1458 
1459       gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1460       labels = gimplify_ctxp->case_labels;
1461       gimplify_ctxp->case_labels = saved_labels;
1462 
1463       i = 0;
1464       while (i < VEC_length (tree, labels))
1465 	{
1466 	  tree elt = VEC_index (tree, labels, i);
1467 	  tree low = CASE_LOW (elt);
1468 	  bool remove_element = FALSE;
1469 
1470 	  if (low)
1471 	    {
1472 	      /* Discard empty ranges.  */
1473 	      tree high = CASE_HIGH (elt);
1474 	      if (high && tree_int_cst_lt (high, low))
1475 	        remove_element = TRUE;
1476 	    }
1477 	  else
1478 	    {
1479 	      /* The default case must be the last label in the list.  */
1480 	      gcc_assert (!default_case);
1481 	      default_case = elt;
1482 	      remove_element = TRUE;
1483 	    }
1484 
1485 	  if (remove_element)
1486 	    VEC_ordered_remove (tree, labels, i);
1487 	  else
1488 	    i++;
1489 	}
1490       len = i;
1491 
1492       if (!VEC_empty (tree, labels))
1493 	sort_case_labels (labels);
1494 
1495       if (!default_case)
1496 	{
1497 	  tree type = TREE_TYPE (switch_expr);
1498 
1499 	  /* If the switch has no default label, add one, so that we jump
1500 	     around the switch body.  If the labels already cover the whole
1501 	     range of the type, add the default label pointing to one of the
1502 	     existing labels.  */
1503 	  if (type == void_type_node)
1504 	    type = TREE_TYPE (SWITCH_COND (switch_expr));
1505 	  if (len
1506 	      && INTEGRAL_TYPE_P (type)
1507 	      && TYPE_MIN_VALUE (type)
1508 	      && TYPE_MAX_VALUE (type)
1509 	      && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1510 				     TYPE_MIN_VALUE (type)))
1511 	    {
1512 	      tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1513 	      if (!high)
1514 		high = CASE_LOW (VEC_index (tree, labels, len - 1));
1515 	      if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
1516 		{
1517 		  for (i = 1; i < len; i++)
1518 		    {
1519 		      high = CASE_LOW (VEC_index (tree, labels, i));
1520 		      low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1521 		      if (!low)
1522 			low = CASE_LOW (VEC_index (tree, labels, i - 1));
1523 		      if ((TREE_INT_CST_LOW (low) + 1
1524 			   != TREE_INT_CST_LOW (high))
1525 			  || (TREE_INT_CST_HIGH (low)
1526 			      + (TREE_INT_CST_LOW (high) == 0)
1527 			      != TREE_INT_CST_HIGH (high)))
1528 			break;
1529 		    }
1530 		  if (i == len)
1531 		    default_case = build3 (CASE_LABEL_EXPR, void_type_node,
1532 					   NULL_TREE, NULL_TREE,
1533 					   CASE_LABEL (VEC_index (tree,
1534 								  labels, 0)));
1535 		}
1536 	    }
1537 
1538 	  if (!default_case)
1539 	    {
1540 	      gimple new_default;
1541 
1542 	      default_case
1543 		= build3 (CASE_LABEL_EXPR, void_type_node,
1544 			  NULL_TREE, NULL_TREE,
1545 			  create_artificial_label (UNKNOWN_LOCATION));
1546 	      new_default = gimple_build_label (CASE_LABEL (default_case));
1547 	      gimplify_seq_add_stmt (&switch_body_seq, new_default);
1548 	    }
1549 	}
1550 
1551       gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1552                                                default_case, labels);
1553       gimplify_seq_add_stmt (pre_p, gimple_switch);
1554       gimplify_seq_add_seq (pre_p, switch_body_seq);
1555       VEC_free(tree, heap, labels);
1556     }
1557   else
1558     gcc_assert (SWITCH_LABELS (switch_expr));
1559 
1560   return GS_ALL_DONE;
1561 }
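
/* Illustrative sketch (not in the original sources): for

     switch (x) { case 1: ...; case 2: ...; }

   the body is gimplified first, the case labels it registered are sorted
   and become the label vector of a GIMPLE_SWITCH, and since no default was
   written an artificial default label jumping past the body is appended,
   giving roughly

     switch (x) <default: <D.3>, case 1: <L1>, case 2: <L2>>
     <L1>: ...
     <L2>: ...
     <D.3>:
*/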
1562 
1563 
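/* Gimplify a CASE_LABEL_EXPR: emit a GIMPLE_LABEL for it and record the
   label in the innermost gimplification context that has a case label
   vector (skipping any intervening contexts that have none, as can happen
   with invalid OpenMP code).  */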
1564 static enum gimplify_status
1565 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1566 {
1567   struct gimplify_ctx *ctxp;
1568   gimple gimple_label;
1569 
1570   /* Invalid OpenMP programs can play Duff's Device type games with
1571      #pragma omp parallel.  At least in the C front end, we don't
1572      detect such invalid branches until after gimplification.  */
1573   for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1574     if (ctxp->case_labels)
1575       break;
1576 
1577   gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1578   VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1579   gimplify_seq_add_stmt (pre_p, gimple_label);
1580 
1581   return GS_ALL_DONE;
1582 }
1583 
1584 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1585    if necessary.  */
1586 
1587 tree
1588 build_and_jump (tree *label_p)
1589 {
1590   if (label_p == NULL)
1591     /* If there's nowhere to jump, just fall through.  */
1592     return NULL_TREE;
1593 
1594   if (*label_p == NULL_TREE)
1595     {
1596       tree label = create_artificial_label (UNKNOWN_LOCATION);
1597       *label_p = label;
1598     }
1599 
1600   return build1 (GOTO_EXPR, void_type_node, *label_p);
1601 }
1602 
1603 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1604    This also involves building a label to jump to and communicating it to
1605    gimplify_loop_expr through gimplify_ctxp->exit_label.  */
1606 
1607 static enum gimplify_status
1608 gimplify_exit_expr (tree *expr_p)
1609 {
1610   tree cond = TREE_OPERAND (*expr_p, 0);
1611   tree expr;
1612 
1613   expr = build_and_jump (&gimplify_ctxp->exit_label);
1614   expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1615   *expr_p = expr;
1616 
1617   return GS_OK;
1618 }
1619 
1620 /* A helper function to be called via walk_tree.  Mark all labels under *TP
1621    as being forced.  To be called for DECL_INITIAL of static variables.  */
1622 
1623 tree
1624 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1625 {
1626   if (TYPE_P (*tp))
1627     *walk_subtrees = 0;
1628   if (TREE_CODE (*tp) == LABEL_DECL)
1629     FORCED_LABEL (*tp) = 1;
1630 
1631   return NULL_TREE;
1632 }
1633 
1634 /* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
1635    different from its canonical type, wrap the whole thing inside a
1636    NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1637    type.
1638 
1639    The canonical type of a COMPONENT_REF is the type of the field being
1640    referenced--unless the field is a bit-field which can be read directly
1641    in a smaller mode, in which case the canonical type is the
1642    sign-appropriate type corresponding to that mode.  */
1643 
1644 static void
1645 canonicalize_component_ref (tree *expr_p)
1646 {
1647   tree expr = *expr_p;
1648   tree type;
1649 
1650   gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1651 
1652   if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1653     type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1654   else
1655     type = TREE_TYPE (TREE_OPERAND (expr, 1));
1656 
1657   /* One could argue that all the stuff below is not necessary for
1658      the non-bitfield case and declare it a FE error if type
1659      adjustment would be needed.  */
1660   if (TREE_TYPE (expr) != type)
1661     {
1662 #ifdef ENABLE_TYPES_CHECKING
1663       tree old_type = TREE_TYPE (expr);
1664 #endif
1665       int type_quals;
1666 
1667       /* We need to preserve qualifiers and propagate them from
1668 	 operand 0.  */
1669       type_quals = TYPE_QUALS (type)
1670 	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1671       if (TYPE_QUALS (type) != type_quals)
1672 	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1673 
1674       /* Set the type of the COMPONENT_REF to the underlying type.  */
1675       TREE_TYPE (expr) = type;
1676 
1677 #ifdef ENABLE_TYPES_CHECKING
1678       /* It is now a FE error, if the conversion from the canonical
1679 	 type to the original expression type is not useless.  */
1680       gcc_assert (useless_type_conversion_p (old_type, type));
1681 #endif
1682     }
1683 }
1684 
1685 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1686    to foo, embed that change in the ADDR_EXPR by converting
1687       T array[U];
1688       (T *)&array
1689    ==>
1690       &array[L]
1691    where L is the lower bound.  For simplicity, only do this for constant
1692    lower bound.
1693    The constraint is that the type of &array[L] is trivially convertible
1694    to T *.  */
1695 
1696 static void
1697 canonicalize_addr_expr (tree *expr_p)
1698 {
1699   tree expr = *expr_p;
1700   tree addr_expr = TREE_OPERAND (expr, 0);
1701   tree datype, ddatype, pddatype;
1702 
1703   /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
1704   if (!POINTER_TYPE_P (TREE_TYPE (expr))
1705       || TREE_CODE (addr_expr) != ADDR_EXPR)
1706     return;
1707 
1708   /* The addr_expr type should be a pointer to an array.  */
1709   datype = TREE_TYPE (TREE_TYPE (addr_expr));
1710   if (TREE_CODE (datype) != ARRAY_TYPE)
1711     return;
1712 
1713   /* The pointer to element type shall be trivially convertible to
1714      the expression pointer type.  */
1715   ddatype = TREE_TYPE (datype);
1716   pddatype = build_pointer_type (ddatype);
1717   if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1718 				  pddatype))
1719     return;
1720 
1721   /* The lower bound and element sizes must be constant.  */
1722   if (!TYPE_SIZE_UNIT (ddatype)
1723       || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1724       || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1725       || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1726     return;
1727 
1728   /* All checks succeeded.  Build a new node to merge the cast.  */
1729   *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1730 		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1731 		    NULL_TREE, NULL_TREE);
1732   *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1733 
1734   /* We may have stripped a required restrict qualifier above.  */
1735   if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1736     *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1737 }
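
/* Illustrative sketch for canonicalize_addr_expr above (hypothetical
   example):

     char buf[16];
     char *p = (char *) &buf;

   Here the operand of the conversion is an ADDR_EXPR of type
   char (*)[16]; the element type is char, char * is trivially
   convertible to the conversion's target type, and both the lower
   bound (0) and the element size (1) are INTEGER_CSTs, so the operand
   is rewritten as &buf[0] and the cast becomes unnecessary.  */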
1738 
1739 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
1740    underneath as appropriate.  */
1741 
1742 static enum gimplify_status
1743 gimplify_conversion (tree *expr_p)
1744 {
1745   tree tem;
1746   location_t loc = EXPR_LOCATION (*expr_p);
1747   gcc_assert (CONVERT_EXPR_P (*expr_p));
1748 
1749   /* Then strip away all but the outermost conversion.  */
1750   STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1751 
1752   /* And remove the outermost conversion if it's useless.  */
1753   if (tree_ssa_useless_type_conversion (*expr_p))
1754     *expr_p = TREE_OPERAND (*expr_p, 0);
1755 
1756   /* Attempt to avoid NOP_EXPR by producing a reference to a subtype.
1757      For example this folds (subclass *)&A into &A->subclass, avoiding
1758      the need for a separate statement.  */
1759   if (CONVERT_EXPR_P (*expr_p)
1760       && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1761       && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
1762       && (tem = maybe_fold_offset_to_address
1763 	  (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
1764 	   integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
1765     *expr_p = tem;
1766 
1767   /* If we still have a conversion at the toplevel,
1768      then canonicalize some constructs.  */
1769   if (CONVERT_EXPR_P (*expr_p))
1770     {
1771       tree sub = TREE_OPERAND (*expr_p, 0);
1772 
1773       /* If a NOP conversion is changing the type of a COMPONENT_REF
1774 	 expression, then canonicalize its type now in order to expose more
1775 	 redundant conversions.  */
1776       if (TREE_CODE (sub) == COMPONENT_REF)
1777 	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1778 
1779       /* If a NOP conversion is changing a pointer to array of foo
1780 	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
1781       else if (TREE_CODE (sub) == ADDR_EXPR)
1782 	canonicalize_addr_expr (expr_p);
1783     }
1784 
1785   /* If we have a conversion to a non-register type force the
1786      use of a VIEW_CONVERT_EXPR instead.  */
1787   if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1788     *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1789 			       TREE_OPERAND (*expr_p, 0));
1790 
1791   return GS_OK;
1792 }
1793 
1794 /* Nonlocal VLAs seen in the current function.  */
1795 static struct pointer_set_t *nonlocal_vlas;
1796 
1797 /* Gimplify a VAR_DECL or PARM_DECL.  Returns GS_OK if we expanded a
1798    DECL_VALUE_EXPR, and it's worth re-examining things.  */
1799 
1800 static enum gimplify_status
1801 gimplify_var_or_parm_decl (tree *expr_p)
1802 {
1803   tree decl = *expr_p;
1804 
1805   /* ??? If this is a local variable, and it has not been seen in any
1806      outer BIND_EXPR, then it's probably the result of a duplicate
1807      declaration, for which we've already issued an error.  It would
1808      be really nice if the front end wouldn't leak these at all.
1809      Currently the only known culprit is C++ destructors, as seen
1810      in g++.old-deja/g++.jason/binding.C.  */
1811   if (TREE_CODE (decl) == VAR_DECL
1812       && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1813       && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1814       && decl_function_context (decl) == current_function_decl)
1815     {
1816       gcc_assert (errorcount || sorrycount);
1817       return GS_ERROR;
1818     }
1819 
1820   /* When within an OpenMP context, notice uses of variables.  */
1821   if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1822     return GS_ALL_DONE;
1823 
1824   /* If the decl is an alias for another expression, substitute it now.  */
1825   if (DECL_HAS_VALUE_EXPR_P (decl))
1826     {
1827       tree value_expr = DECL_VALUE_EXPR (decl);
1828 
1829       /* For referenced nonlocal VLAs add a decl for debugging purposes
1830 	 to the current function.  */
1831       if (TREE_CODE (decl) == VAR_DECL
1832 	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1833 	  && nonlocal_vlas != NULL
1834 	  && TREE_CODE (value_expr) == INDIRECT_REF
1835 	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1836 	  && decl_function_context (decl) != current_function_decl)
1837 	{
1838 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1839 	  while (ctx && ctx->region_type == ORT_WORKSHARE)
1840 	    ctx = ctx->outer_context;
1841 	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1842 	    {
1843 	      tree copy = copy_node (decl), block;
1844 
1845 	      lang_hooks.dup_lang_specific_decl (copy);
1846 	      SET_DECL_RTL (copy, NULL_RTX);
1847 	      TREE_USED (copy) = 1;
1848 	      block = DECL_INITIAL (current_function_decl);
1849 	      TREE_CHAIN (copy) = BLOCK_VARS (block);
1850 	      BLOCK_VARS (block) = copy;
1851 	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1852 	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
1853 	    }
1854 	}
1855 
1856       *expr_p = unshare_expr (value_expr);
1857       return GS_OK;
1858     }
1859 
1860   return GS_ALL_DONE;
1861 }
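
/* Illustrative sketch of the nonlocal-VLA case handled above, using a
   GNU C nested function (hypothetical example):

     void
     outer (int n)
     {
       char buf[n];
       void inner (void) { buf[0] = 1; }
       inner ();
     }

   buf has variable size, so it carries a DECL_VALUE_EXPR that
   dereferences a hidden pointer variable.  While gimplifying inner,
   the use of buf is replaced by that DECL_VALUE_EXPR; and since buf
   belongs to outer, a copy of the decl is chained onto inner's
   BLOCK_VARS so the debug information for inner still describes buf.  */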
1862 
1863 
1864 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1865    node *EXPR_P.
1866 
1867       compound_lval
1868 	      : min_lval '[' val ']'
1869 	      | min_lval '.' ID
1870 	      | compound_lval '[' val ']'
1871 	      | compound_lval '.' ID
1872 
1873    This is not part of the original SIMPLE definition, which separates
1874    array and member references, but it seems reasonable to handle them
1875    together.  Also, this way we don't run into problems with union
1876    aliasing; gcc requires that for accesses through a union to alias, the
1877    union reference must be explicit, which was not always the case when we
1878    were splitting up array and member refs.
1879 
1880    PRE_P points to the sequence where side effects that must happen before
1881      *EXPR_P should be stored.
1882 
1883    POST_P points to the sequence where side effects that must happen after
1884      *EXPR_P should be stored.  */
1885 
1886 static enum gimplify_status
1887 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1888 			fallback_t fallback)
1889 {
1890   tree *p;
1891   VEC(tree,heap) *stack;
1892   enum gimplify_status ret = GS_OK, tret;
1893   int i;
1894   location_t loc = EXPR_LOCATION (*expr_p);
1895 
1896   /* Create a stack of the subexpressions so later we can walk them in
1897      order from inner to outer.  */
1898   stack = VEC_alloc (tree, heap, 10);
1899 
1900   /* We can handle anything that get_inner_reference can deal with.  */
1901   for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1902     {
1903     restart:
1904       /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
1905       if (TREE_CODE (*p) == INDIRECT_REF)
1906 	*p = fold_indirect_ref_loc (loc, *p);
1907 
1908       if (handled_component_p (*p))
1909 	;
1910       /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
1911 	 additional COMPONENT_REFs.  */
1912       else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1913 	       && gimplify_var_or_parm_decl (p) == GS_OK)
1914 	goto restart;
1915       else
1916 	break;
1917 
1918       VEC_safe_push (tree, heap, stack, *p);
1919     }
1920 
1921   gcc_assert (VEC_length (tree, stack));
1922 
1923   /* Now STACK is a stack of pointers to all the refs we've walked through
1924      and P points to the innermost expression.
1925 
1926      Java requires that we elaborate nodes in source order.  That
1927      means we must gimplify the inner expression followed by each of
1928      the indices, in order.  But we can't gimplify the inner
1929      expression until we deal with any variable bounds, sizes, or
1930      positions in order to deal with PLACEHOLDER_EXPRs.
1931 
1932      So we do this in three steps.  First we deal with the annotations
1933      for any variables in the components, then we gimplify the base,
1934      then we gimplify any indices, from left to right.  */
1935   for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1936     {
1937       tree t = VEC_index (tree, stack, i);
1938 
1939       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1940 	{
1941 	  /* Gimplify the low bound and element type size and put them into
1942 	     the ARRAY_REF.  If these values are set, they have already been
1943 	     gimplified.  */
1944 	  if (TREE_OPERAND (t, 2) == NULL_TREE)
1945 	    {
1946 	      tree low = unshare_expr (array_ref_low_bound (t));
1947 	      if (!is_gimple_min_invariant (low))
1948 		{
1949 		  TREE_OPERAND (t, 2) = low;
1950 		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1951 					post_p, is_gimple_reg,
1952 					fb_rvalue);
1953 		  ret = MIN (ret, tret);
1954 		}
1955 	    }
1956 	  else
1957 	    {
1958 	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1959 				    is_gimple_reg, fb_rvalue);
1960 	      ret = MIN (ret, tret);
1961 	    }
1962 
1963 	  if (TREE_OPERAND (t, 3) == NULL_TREE)
1964 	    {
1965 	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1966 	      tree elmt_size = unshare_expr (array_ref_element_size (t));
1967 	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1968 
1969 	      /* Divide the element size by the alignment of the element
1970 		 type (above).  */
1971 	      elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
1972 
1973 	      if (!is_gimple_min_invariant (elmt_size))
1974 		{
1975 		  TREE_OPERAND (t, 3) = elmt_size;
1976 		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
1977 					post_p, is_gimple_reg,
1978 					fb_rvalue);
1979 		  ret = MIN (ret, tret);
1980 		}
1981 	    }
1982 	  else
1983 	    {
1984 	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1985 				    is_gimple_reg, fb_rvalue);
1986 	      ret = MIN (ret, tret);
1987 	    }
1988 	}
1989       else if (TREE_CODE (t) == COMPONENT_REF)
1990 	{
1991 	  /* Set the field offset into T and gimplify it.  */
1992 	  if (TREE_OPERAND (t, 2) == NULL_TREE)
1993 	    {
1994 	      tree offset = unshare_expr (component_ref_field_offset (t));
1995 	      tree field = TREE_OPERAND (t, 1);
1996 	      tree factor
1997 		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1998 
1999 	      /* Divide the offset by its alignment.  */
2000 	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2001 
2002 	      if (!is_gimple_min_invariant (offset))
2003 		{
2004 		  TREE_OPERAND (t, 2) = offset;
2005 		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2006 					post_p, is_gimple_reg,
2007 					fb_rvalue);
2008 		  ret = MIN (ret, tret);
2009 		}
2010 	    }
2011 	  else
2012 	    {
2013 	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2014 				    is_gimple_reg, fb_rvalue);
2015 	      ret = MIN (ret, tret);
2016 	    }
2017 	}
2018     }
2019 
2020   /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
2021      so as to match the min_lval predicate.  Failure to do so may result
2022      in the creation of large aggregate temporaries.  */
2023   tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2024 			fallback | fb_lvalue);
2025   ret = MIN (ret, tret);
2026 
2027   /* And finally, the indices and operands to BIT_FIELD_REF.  During this
2028      loop we also remove any useless conversions.  */
2029   for (; VEC_length (tree, stack) > 0; )
2030     {
2031       tree t = VEC_pop (tree, stack);
2032 
2033       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2034 	{
2035 	  /* Gimplify the dimension.  */
2036 	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2037 	    {
2038 	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2039 				    is_gimple_val, fb_rvalue);
2040 	      ret = MIN (ret, tret);
2041 	    }
2042 	}
2043       else if (TREE_CODE (t) == BIT_FIELD_REF)
2044 	{
2045 	  tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2046 				is_gimple_val, fb_rvalue);
2047 	  ret = MIN (ret, tret);
2048 	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2049 				is_gimple_val, fb_rvalue);
2050 	  ret = MIN (ret, tret);
2051 	}
2052 
2053       STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2054 
2055       /* The innermost expression P may have originally had
2056 	 TREE_SIDE_EFFECTS set which would have caused all the outer
2057 	 expressions in *EXPR_P leading to P to also have had
2058 	 TREE_SIDE_EFFECTS set.  */
2059       recalculate_side_effects (t);
2060     }
2061 
2062   /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
2063   if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2064     {
2065       canonicalize_component_ref (expr_p);
2066       ret = MIN (ret, GS_OK);
2067     }
2068 
2069   VEC_free (tree, heap, stack);
2070 
2071   return ret;
2072 }
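
/* Worked illustration for gimplify_compound_lval above (temporary
   names are hypothetical).  For an rvalue use of

     s.a[i + 1].f

   the walk pushes the COMPONENT_REF .f, the ARRAY_REF [i + 1] and the
   COMPONENT_REF .a onto STACK.  Step one gimplifies any variable low
   bounds, element sizes and field offsets into operands 2 and 3 of the
   refs (for an ordinary fixed-size struct they are constants and
   nothing is emitted); step two gimplifies the base s as a min_lval;
   step three gimplifies the index:

     D.1 = i + 1;
     ... = s.a[D.1].f;

   and, because the outermost node is a COMPONENT_REF used as an
   rvalue, its type is finally canonicalized as well.  */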
2073 
2074 /*  Gimplify the self-modifying expression pointed to by EXPR_P
2075     (++ and --, both prefix and postfix).
2076 
2077     PRE_P points to the list where side effects that must happen before
2078 	*EXPR_P should be stored.
2079 
2080     POST_P points to the list where side effects that must happen after
2081 	*EXPR_P should be stored.
2082 
2083     WANT_VALUE is nonzero iff we want to use the value of this expression
2084 	in another expression.  */
2085 
2086 static enum gimplify_status
2087 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2088 			bool want_value)
2089 {
2090   enum tree_code code;
2091   tree lhs, lvalue, rhs, t1;
2092   gimple_seq post = NULL, *orig_post_p = post_p;
2093   bool postfix;
2094   enum tree_code arith_code;
2095   enum gimplify_status ret;
2096   location_t loc = EXPR_LOCATION (*expr_p);
2097 
2098   code = TREE_CODE (*expr_p);
2099 
2100   gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2101 	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2102 
2103   /* Prefix or postfix?  */
2104   if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2105     /* Faster to treat as prefix if result is not used.  */
2106     postfix = want_value;
2107   else
2108     postfix = false;
2109 
2110   /* For postfix, make sure the inner expression's post side effects
2111      are executed after side effects from this expression.  */
2112   if (postfix)
2113     post_p = &post;
2114 
2115   /* Add or subtract?  */
2116   if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2117     arith_code = PLUS_EXPR;
2118   else
2119     arith_code = MINUS_EXPR;
2120 
2121   /* Gimplify the LHS into a GIMPLE lvalue.  */
2122   lvalue = TREE_OPERAND (*expr_p, 0);
2123   ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2124   if (ret == GS_ERROR)
2125     return ret;
2126 
2127   /* Extract the operands to the arithmetic operation.  */
2128   lhs = lvalue;
2129   rhs = TREE_OPERAND (*expr_p, 1);
2130 
2131   /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2132      that both as the result value and in the post-queue operation.  We also
2133      make sure lvalue is a minimal lval; see
2134      gcc.c-torture/execute/20040313-1.c for an example where this matters.  */
2135   if (postfix)
2136     {
2137       if (!is_gimple_min_lval (lvalue))
2138 	{
2139 	  mark_addressable (lvalue);
2140 	  lvalue = build_fold_addr_expr_loc (input_location, lvalue);
2141 	  gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
2142 	  lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
2143 	}
2144       ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2145       if (ret == GS_ERROR)
2146 	return ret;
2147     }
2148 
2149   /* For pointer increments and decrements, use POINTER_PLUS_EXPR.  */
2150   if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2151     {
2152       rhs = fold_convert_loc (loc, sizetype, rhs);
2153       if (arith_code == MINUS_EXPR)
2154 	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2155       arith_code = POINTER_PLUS_EXPR;
2156     }
2157 
2158   t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
2159 
2160   if (postfix)
2161     {
2162       gimplify_assign (lvalue, t1, orig_post_p);
2163       gimplify_seq_add_seq (orig_post_p, post);
2164       *expr_p = lhs;
2165       return GS_ALL_DONE;
2166     }
2167   else
2168     {
2169       *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2170       return GS_OK;
2171     }
2172 }
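
/* Illustrative sketch for gimplify_self_mod_expr above (approximate
   GIMPLE):

     x = i++;      becomes roughly      x = i;
                                        i = i + 1;

     x = ++i;      becomes roughly      i = i + 1;
                                        x = i;

   so the postfix update lands in the post queue after the use, while
   the prefix form is rewritten into a MODIFY_EXPR and re-gimplified.
   For a pointer, e.g. int *p, the update is built with
   POINTER_PLUS_EXPR and a sizetype offset, so "p++" becomes roughly
   p = p + 4 on a target with 4-byte int.  */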
2173 
2174 
2175 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */
2176 
2177 static void
2178 maybe_with_size_expr (tree *expr_p)
2179 {
2180   tree expr = *expr_p;
2181   tree type = TREE_TYPE (expr);
2182   tree size;
2183 
2184   /* If we've already wrapped this or the type is error_mark_node, we can't do
2185      anything.  */
2186   if (TREE_CODE (expr) == WITH_SIZE_EXPR
2187       || type == error_mark_node)
2188     return;
2189 
2190   /* If the size isn't known or is a constant, we have nothing to do.  */
2191   size = TYPE_SIZE_UNIT (type);
2192   if (!size || TREE_CODE (size) == INTEGER_CST)
2193     return;
2194 
2195   /* Otherwise, make a WITH_SIZE_EXPR.  */
2196   size = unshare_expr (size);
2197   size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2198   *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2199 }
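
/* Illustrative sketch for maybe_with_size_expr above: if EXPR has a
   variably sized type (a C99 VLA, or a variable-sized record from
   another front end), TYPE_SIZE_UNIT is not an INTEGER_CST and the
   expression is rewritten as

     WITH_SIZE_EXPR <expr, size>

   where size is the unshared TYPE_SIZE_UNIT with any PLACEHOLDER_EXPRs
   substituted.  Consumers such as gimplify_arg and the memcpy/memset
   paths further down can then still tell how many bytes the object
   occupies.  */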
2200 
2201 
2202 /* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
2203    Store any side-effects in PRE_P.  CALL_LOCATION is the location of
2204    the CALL_EXPR.  */
2205 
2206 static enum gimplify_status
2207 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2208 {
2209   bool (*test) (tree);
2210   fallback_t fb;
2211 
2212   /* In general, we allow lvalues for function arguments to avoid
2213      extra overhead of copying large aggregates out of even larger
2214      aggregates into temporaries only to copy the temporaries to
2215      the argument list.  Make optimizers happy by pulling out to
2216      temporaries those types that fit in registers.  */
2217   if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2218     test = is_gimple_val, fb = fb_rvalue;
2219   else
2220     test = is_gimple_lvalue, fb = fb_either;
2221 
2222   /* If this is a variable sized type, we must remember the size.  */
2223   maybe_with_size_expr (arg_p);
2224 
2225   /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
2226   /* Make sure arguments have the same location as the function call
2227      itself.  */
2228   protected_set_expr_location (*arg_p, call_location);
2229 
2230   /* There is a sequence point before a function call.  Side effects in
2231      the argument list must occur before the actual call. So, when
2232      gimplifying arguments, force gimplify_expr to use an internal
2233      post queue which is then appended to the end of PRE_P.  */
2234   return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2235 }
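
/* Illustrative example for gimplify_arg above (hypothetical call,
   temporary name invented): for

     g (s, i + 1);

   where s is a large struct and i an int, the aggregate argument may
   stay an lvalue (is_gimple_lvalue, fb_either) so it is not copied
   through an extra temporary, while the register-sized expression is
   pulled into a temporary (is_gimple_val, fb_rvalue):

     D.2 = i + 1;
     g (s, D.2);  */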
2236 
2237 
2238 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2239    WANT_VALUE is true if the result of the call is desired.  */
2240 
2241 static enum gimplify_status
2242 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2243 {
2244   tree fndecl, parms, p;
2245   enum gimplify_status ret;
2246   int i, nargs;
2247   gimple call;
2248   bool builtin_va_start_p = FALSE;
2249   location_t loc = EXPR_LOCATION (*expr_p);
2250 
2251   gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2252 
2253   /* For reliable diagnostics during inlining, it is necessary that
2254      every call_expr be annotated with file and line.  */
2255   if (! EXPR_HAS_LOCATION (*expr_p))
2256     SET_EXPR_LOCATION (*expr_p, input_location);
2257 
2258   /* This may be a call to a builtin function.
2259 
2260      Builtin function calls may be transformed into different
2261      (and more efficient) builtin function calls under certain
2262      circumstances.  Unfortunately, gimplification can muck things
2263      up enough that the builtin expanders are not aware that certain
2264      transformations are still valid.
2265 
2266      So we attempt transformation/gimplification of the call before
2267      we gimplify the CALL_EXPR.  At this time we do not manage to
2268      transform all calls in the same manner as the expanders do, but
2269      we do transform most of them.  */
2270   fndecl = get_callee_fndecl (*expr_p);
2271   if (fndecl && DECL_BUILT_IN (fndecl))
2272     {
2273       tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2274 
2275       if (new_tree && new_tree != *expr_p)
2276 	{
2277 	  /* There was a transformation of this call which computes the
2278 	     same value, but in a more efficient way.  Return and try
2279 	     again.  */
2280 	  *expr_p = new_tree;
2281 	  return GS_OK;
2282 	}
2283 
2284       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2285 	  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2286         {
2287 	  builtin_va_start_p = TRUE;
2288 	  if (call_expr_nargs (*expr_p) < 2)
2289 	    {
2290 	      error ("too few arguments to function %<va_start%>");
2291 	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2292 	      return GS_OK;
2293 	    }
2294 
2295 	  if (fold_builtin_next_arg (*expr_p, true))
2296 	    {
2297 	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2298 	      return GS_OK;
2299 	    }
2300 	}
2301     }
2302 
2303   /* There is a sequence point before the call, so any side effects in
2304      the calling expression must occur before the actual call.  Force
2305      gimplify_expr to use an internal post queue.  */
2306   ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2307 		       is_gimple_call_addr, fb_rvalue);
2308 
2309   nargs = call_expr_nargs (*expr_p);
2310 
2311   /* Get argument types for verification.  */
2312   fndecl = get_callee_fndecl (*expr_p);
2313   parms = NULL_TREE;
2314   if (fndecl)
2315     parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2316   else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2317     parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2318 
2319   if (fndecl && DECL_ARGUMENTS (fndecl))
2320     p = DECL_ARGUMENTS (fndecl);
2321   else if (parms)
2322     p = parms;
2323   else
2324     p = NULL_TREE;
2325   for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2326     ;
2327 
2328   /* If the last argument is __builtin_va_arg_pack () and it is not
2329      passed as a named argument, decrease the number of CALL_EXPR
2330      arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
2331   if (!p
2332       && i < nargs
2333       && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2334     {
2335       tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2336       tree last_arg_fndecl = get_callee_fndecl (last_arg);
2337 
2338       if (last_arg_fndecl
2339 	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2340 	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2341 	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2342 	{
2343 	  tree call = *expr_p;
2344 
2345 	  --nargs;
2346 	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2347 					  CALL_EXPR_FN (call),
2348 					  nargs, CALL_EXPR_ARGP (call));
2349 
2350 	  /* Copy all CALL_EXPR flags, location and block, except
2351 	     CALL_EXPR_VA_ARG_PACK flag.  */
2352 	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2353 	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2354 	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2355 	    = CALL_EXPR_RETURN_SLOT_OPT (call);
2356 	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2357 	  CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
2358 	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2359 	  TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2360 
2361 	  /* Set CALL_EXPR_VA_ARG_PACK.  */
2362 	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2363 	}
2364     }
2365 
2366   /* Finally, gimplify the function arguments.  */
2367   if (nargs > 0)
2368     {
2369       for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2370            PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2371            PUSH_ARGS_REVERSED ? i-- : i++)
2372         {
2373           enum gimplify_status t;
2374 
2375           /* Avoid gimplifying the second argument to va_start, which needs to
2376              be the plain PARM_DECL.  */
2377           if ((i != 1) || !builtin_va_start_p)
2378             {
2379               t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2380 				EXPR_LOCATION (*expr_p));
2381 
2382               if (t == GS_ERROR)
2383                 ret = GS_ERROR;
2384             }
2385         }
2386     }
2387 
2388   /* Verify the function result.  */
2389   if (want_value && fndecl
2390       && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl))))
2391     {
2392       error_at (loc, "using result of function returning %<void%>");
2393       ret = GS_ERROR;
2394     }
2395 
2396   /* Try this again in case gimplification exposed something.  */
2397   if (ret != GS_ERROR)
2398     {
2399       tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2400 
2401       if (new_tree && new_tree != *expr_p)
2402 	{
2403 	  /* There was a transformation of this call which computes the
2404 	     same value, but in a more efficient way.  Return and try
2405 	     again.  */
2406 	  *expr_p = new_tree;
2407 	  return GS_OK;
2408 	}
2409     }
2410   else
2411     {
2412       *expr_p = error_mark_node;
2413       return GS_ERROR;
2414     }
2415 
2416   /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on
2417      the call expression itself.  This allows us to eliminate redundant or
2418      useless calls to "const" functions.  */
2419   if (TREE_CODE (*expr_p) == CALL_EXPR)
2420     {
2421       int flags = call_expr_flags (*expr_p);
2422       if (flags & (ECF_CONST | ECF_PURE)
2423 	  /* An infinite loop is considered a side effect.  */
2424 	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2425 	TREE_SIDE_EFFECTS (*expr_p) = 0;
2426     }
2427 
2428   /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2429      and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
2430      form and delegate the creation of a GIMPLE_CALL to
2431      gimplify_modify_expr.  This is always possible because when
2432      WANT_VALUE is true, the caller wants the result of this call into
2433      a temporary, which means that we will emit an INIT_EXPR in
2434      internal_get_tmp_var which will then be handled by
2435      gimplify_modify_expr.  */
2436   if (!want_value)
2437     {
2438       /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2439 	 have to do is replicate it as a GIMPLE_CALL tuple.  */
2440       call = gimple_build_call_from_tree (*expr_p);
2441       gimplify_seq_add_stmt (pre_p, call);
2442       *expr_p = NULL_TREE;
2443     }
2444 
2445   return ret;
2446 }
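
/* Illustrative use of the __builtin_va_arg_pack handling above
   (hypothetical wrapper, following the documented use of the builtin):

     static inline __attribute__ ((always_inline)) int
     log_err (const char *fmt, ...)
     {
       return fprintf (stderr, fmt, __builtin_va_arg_pack ());
     }

   When the fprintf call is gimplified, the trailing
   __builtin_va_arg_pack () argument is dropped from the argument list,
   CALL_EXPR_VA_ARG_PACK is set on the rebuilt CALL_EXPR, and the other
   call flags, location and block are copied over, so the caller's
   variadic arguments are substituted when log_err is inlined.  */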
2447 
2448 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2449    rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2450 
2451    TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2452    condition is true or false, respectively.  If null, we should generate
2453    our own to skip over the evaluation of this specific expression.
2454 
2455    LOCUS is the source location of the COND_EXPR.
2456 
2457    This function is the tree equivalent of do_jump.
2458 
2459    shortcut_cond_r should only be called by shortcut_cond_expr.  */
2460 
2461 static tree
2462 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2463 		 location_t locus)
2464 {
2465   tree local_label = NULL_TREE;
2466   tree t, expr = NULL;
2467 
2468   /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2469      retain the shortcut semantics.  Just insert the gotos here;
2470      shortcut_cond_expr will append the real blocks later.  */
2471   if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2472     {
2473       location_t new_locus;
2474 
2475       /* Turn if (a && b) into
2476 
2477 	 if (a); else goto no;
2478 	 if (b) goto yes; else goto no;
2479 	 (no:) */
2480 
2481       if (false_label_p == NULL)
2482 	false_label_p = &local_label;
2483 
2484       /* Keep the original source location on the first 'if'.  */
2485       t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2486       append_to_statement_list (t, &expr);
2487 
2488       /* Set the source location of the && on the second 'if'.  */
2489       new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2490       t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2491 			   new_locus);
2492       append_to_statement_list (t, &expr);
2493     }
2494   else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2495     {
2496       location_t new_locus;
2497 
2498       /* Turn if (a || b) into
2499 
2500 	 if (a) goto yes;
2501 	 if (b) goto yes; else goto no;
2502 	 (yes:) */
2503 
2504       if (true_label_p == NULL)
2505 	true_label_p = &local_label;
2506 
2507       /* Keep the original source location on the first 'if'.  */
2508       t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2509       append_to_statement_list (t, &expr);
2510 
2511       /* Set the source location of the || on the second 'if'.  */
2512       new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2513       t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2514 			   new_locus);
2515       append_to_statement_list (t, &expr);
2516     }
2517   else if (TREE_CODE (pred) == COND_EXPR
2518 	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2519 	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2520     {
2521       location_t new_locus;
2522 
2523       /* As long as we're messing with gotos, turn if (a ? b : c) into
2524 	 if (a)
2525 	   if (b) goto yes; else goto no;
2526 	 else
2527 	   if (c) goto yes; else goto no;
2528 
2529 	 Don't do this if one of the arms has void type, which can happen
2530 	 in C++ when the arm is throw.  */
2531 
2532       /* Keep the original source location on the first 'if'.  Set the source
2533 	 location of the ? on the second 'if'.  */
2534       new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2535       expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2536 		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2537 				      false_label_p, locus),
2538 		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2539 				      false_label_p, new_locus));
2540     }
2541   else
2542     {
2543       expr = build3 (COND_EXPR, void_type_node, pred,
2544 		     build_and_jump (true_label_p),
2545 		     build_and_jump (false_label_p));
2546       SET_EXPR_LOCATION (expr, locus);
2547     }
2548 
2549   if (local_label)
2550     {
2551       t = build1 (LABEL_EXPR, void_type_node, local_label);
2552       append_to_statement_list (t, &expr);
2553     }
2554 
2555   return expr;
2556 }
2557 
2558 /* Given a conditional expression EXPR with short-circuit boolean
2559    predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2560    predicate apart into the equivalent sequence of conditionals.  */
2561 
2562 static tree
2563 shortcut_cond_expr (tree expr)
2564 {
2565   tree pred = TREE_OPERAND (expr, 0);
2566   tree then_ = TREE_OPERAND (expr, 1);
2567   tree else_ = TREE_OPERAND (expr, 2);
2568   tree true_label, false_label, end_label, t;
2569   tree *true_label_p;
2570   tree *false_label_p;
2571   bool emit_end, emit_false, jump_over_else;
2572   bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2573   bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2574 
2575   /* First do simple transformations.  */
2576   if (!else_se)
2577     {
2578       /* If there is no 'else', turn
2579 	   if (a && b) then c
2580 	 into
2581 	   if (a) if (b) then c.  */
2582       while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2583 	{
2584 	  /* Keep the original source location on the first 'if'.  */
2585 	  location_t locus = EXPR_HAS_LOCATION (expr)
2586 			     ? EXPR_LOCATION (expr) : input_location;
2587 	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2588 	  /* Set the source location of the && on the second 'if'.  */
2589 	  if (EXPR_HAS_LOCATION (pred))
2590 	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2591 	  then_ = shortcut_cond_expr (expr);
2592 	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
2593 	  pred = TREE_OPERAND (pred, 0);
2594 	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2595 	  SET_EXPR_LOCATION (expr, locus);
2596 	}
2597     }
2598 
2599   if (!then_se)
2600     {
2601       /* If there is no 'then', turn
2602 	   if (a || b); else d
2603 	 into
2604 	   if (a); else if (b); else d.  */
2605       while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2606 	{
2607 	  /* Keep the original source location on the first 'if'.  */
2608 	  location_t locus = EXPR_HAS_LOCATION (expr)
2609 			     ? EXPR_LOCATION (expr) : input_location;
2610 	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2611 	  /* Set the source location of the || on the second 'if'.  */
2612 	  if (EXPR_HAS_LOCATION (pred))
2613 	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2614 	  else_ = shortcut_cond_expr (expr);
2615 	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
2616 	  pred = TREE_OPERAND (pred, 0);
2617 	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2618 	  SET_EXPR_LOCATION (expr, locus);
2619 	}
2620     }
2621 
2622   /* If we're done, great.  */
2623   if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2624       && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2625     return expr;
2626 
2627   /* Otherwise we need to mess with gotos.  Change
2628        if (a) c; else d;
2629      to
2630        if (a); else goto no;
2631        c; goto end;
2632        no: d; end:
2633      and recursively gimplify the condition.  */
2634 
2635   true_label = false_label = end_label = NULL_TREE;
2636 
2637   /* If our arms just jump somewhere, hijack those labels so we don't
2638      generate jumps to jumps.  */
2639 
2640   if (then_
2641       && TREE_CODE (then_) == GOTO_EXPR
2642       && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2643     {
2644       true_label = GOTO_DESTINATION (then_);
2645       then_ = NULL;
2646       then_se = false;
2647     }
2648 
2649   if (else_
2650       && TREE_CODE (else_) == GOTO_EXPR
2651       && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2652     {
2653       false_label = GOTO_DESTINATION (else_);
2654       else_ = NULL;
2655       else_se = false;
2656     }
2657 
2658   /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
2659   if (true_label)
2660     true_label_p = &true_label;
2661   else
2662     true_label_p = NULL;
2663 
2664   /* The 'else' branch also needs a label if it contains interesting code.  */
2665   if (false_label || else_se)
2666     false_label_p = &false_label;
2667   else
2668     false_label_p = NULL;
2669 
2670   /* If there was nothing else in our arms, just forward the label(s).  */
2671   if (!then_se && !else_se)
2672     return shortcut_cond_r (pred, true_label_p, false_label_p,
2673 			    EXPR_HAS_LOCATION (expr)
2674 			    ? EXPR_LOCATION (expr) : input_location);
2675 
2676   /* If our last subexpression already has a terminal label, reuse it.  */
2677   if (else_se)
2678     t = expr_last (else_);
2679   else if (then_se)
2680     t = expr_last (then_);
2681   else
2682     t = NULL;
2683   if (t && TREE_CODE (t) == LABEL_EXPR)
2684     end_label = LABEL_EXPR_LABEL (t);
2685 
2686   /* If we don't care about jumping to the 'else' branch, jump to the end
2687      if the condition is false.  */
2688   if (!false_label_p)
2689     false_label_p = &end_label;
2690 
2691   /* We only want to emit these labels if we aren't hijacking them.  */
2692   emit_end = (end_label == NULL_TREE);
2693   emit_false = (false_label == NULL_TREE);
2694 
2695   /* We only emit the jump over the else clause if we have to--if the
2696      then clause may fall through.  Otherwise we can wind up with a
2697      useless jump and a useless label at the end of gimplified code,
2698      which will cause us to think that this conditional as a whole
2699      falls through even if it doesn't.  If we then inline a function
2700      which ends with such a condition, that can cause us to issue an
2701      inappropriate warning about control reaching the end of a
2702      non-void function.  */
2703   jump_over_else = block_may_fallthru (then_);
2704 
2705   pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2706 			  EXPR_HAS_LOCATION (expr)
2707 			  ? EXPR_LOCATION (expr) : input_location);
2708 
2709   expr = NULL;
2710   append_to_statement_list (pred, &expr);
2711 
2712   append_to_statement_list (then_, &expr);
2713   if (else_se)
2714     {
2715       if (jump_over_else)
2716 	{
2717 	  tree last = expr_last (expr);
2718 	  t = build_and_jump (&end_label);
2719 	  if (EXPR_HAS_LOCATION (last))
2720 	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2721 	  append_to_statement_list (t, &expr);
2722 	}
2723       if (emit_false)
2724 	{
2725 	  t = build1 (LABEL_EXPR, void_type_node, false_label);
2726 	  append_to_statement_list (t, &expr);
2727 	}
2728       append_to_statement_list (else_, &expr);
2729     }
2730   if (emit_end && end_label)
2731     {
2732       t = build1 (LABEL_EXPR, void_type_node, end_label);
2733       append_to_statement_list (t, &expr);
2734     }
2735 
2736   return expr;
2737 }
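
/* End-to-end sketch for shortcut_cond_expr above (labels renamed for
   readability).  With side effects in both arms,

     if (a && b) f (); else g ();

   is rewritten into roughly

     if (a) ; else goto no;
     if (b) ; else goto no;
     f ();
     goto end;
   no:
     g ();
   end:
     ;

   after which every remaining COND_EXPR has a simple predicate and can
   be lowered directly to a GIMPLE_COND.  */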
2738 
2739 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */
2740 
2741 tree
2742 gimple_boolify (tree expr)
2743 {
2744   tree type = TREE_TYPE (expr);
2745   location_t loc = EXPR_LOCATION (expr);
2746 
2747   if (TREE_CODE (expr) == NE_EXPR
2748       && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2749       && integer_zerop (TREE_OPERAND (expr, 1)))
2750     {
2751       tree call = TREE_OPERAND (expr, 0);
2752       tree fn = get_callee_fndecl (call);
2753 
2754       /* For __builtin_expect ((long) (x), y) recurse into x as well
2755 	 if x is truth_value_p.  */
2756       if (fn
2757 	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2758 	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2759 	  && call_expr_nargs (call) == 2)
2760 	{
2761 	  tree arg = CALL_EXPR_ARG (call, 0);
2762 	  if (arg)
2763 	    {
2764 	      if (TREE_CODE (arg) == NOP_EXPR
2765 		  && TREE_TYPE (arg) == TREE_TYPE (call))
2766 		arg = TREE_OPERAND (arg, 0);
2767 	      if (truth_value_p (TREE_CODE (arg)))
2768 		{
2769 		  arg = gimple_boolify (arg);
2770 		  CALL_EXPR_ARG (call, 0)
2771 		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
2772 		}
2773 	    }
2774 	}
2775     }
2776 
2777   if (TREE_CODE (type) == BOOLEAN_TYPE)
2778     return expr;
2779 
2780   switch (TREE_CODE (expr))
2781     {
2782     case TRUTH_AND_EXPR:
2783     case TRUTH_OR_EXPR:
2784     case TRUTH_XOR_EXPR:
2785     case TRUTH_ANDIF_EXPR:
2786     case TRUTH_ORIF_EXPR:
2787       /* Also boolify the arguments of truth exprs.  */
2788       TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2789       /* FALLTHRU */
2790 
2791     case TRUTH_NOT_EXPR:
2792       TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2793       /* FALLTHRU */
2794 
2795     case EQ_EXPR: case NE_EXPR:
2796     case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2797       /* These expressions always produce boolean results.  */
2798       TREE_TYPE (expr) = boolean_type_node;
2799       return expr;
2800 
2801     default:
2802       /* Other expressions that get here must have boolean values, but
2803 	 might need to be converted to the appropriate mode.  */
2804       return fold_convert_loc (loc, boolean_type_node, expr);
2805     }
2806 }
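
/* Illustrative example for gimple_boolify above: in C the controlling
   expression of

     if (a < b && c != 0) ...

   has type int.  gimple_boolify retypes the TRUTH_ANDIF_EXPR and,
   recursively, both comparisons to boolean_type_node, while an operand
   that is not already a truth expression falls through to the default
   case and is wrapped in a conversion to boolean_type_node.  */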
2807 
2808 /* Given a conditional expression *EXPR_P without side effects, gimplify
2809    its operands.  New statements are inserted to PRE_P.  */
2810 
2811 static enum gimplify_status
2812 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2813 {
2814   tree expr = *expr_p, cond;
2815   enum gimplify_status ret, tret;
2816   enum tree_code code;
2817 
2818   cond = gimple_boolify (COND_EXPR_COND (expr));
2819 
2820   /* We need to handle && and || specially, as their gimplification
2821      creates pure COND_EXPRs and would otherwise lead to an infinite cycle.  */
2822   code = TREE_CODE (cond);
2823   if (code == TRUTH_ANDIF_EXPR)
2824     TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2825   else if (code == TRUTH_ORIF_EXPR)
2826     TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2827   ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2828   COND_EXPR_COND (*expr_p) = cond;
2829 
2830   tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2831 				   is_gimple_val, fb_rvalue);
2832   ret = MIN (ret, tret);
2833   tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2834 				   is_gimple_val, fb_rvalue);
2835 
2836   return MIN (ret, tret);
2837 }
2838 
2839 /* Returns true if evaluating EXPR could trap.
2840    EXPR is GENERIC, while tree_could_trap_p can be called
2841    only on GIMPLE.  */
2842 
2843 static bool
2844 generic_expr_could_trap_p (tree expr)
2845 {
2846   unsigned i, n;
2847 
2848   if (!expr || is_gimple_val (expr))
2849     return false;
2850 
2851   if (!EXPR_P (expr) || tree_could_trap_p (expr))
2852     return true;
2853 
2854   n = TREE_OPERAND_LENGTH (expr);
2855   for (i = 0; i < n; i++)
2856     if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2857       return true;
2858 
2859   return false;
2860 }
2861 
2862 /*  Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2863     into
2864 
2865     if (p)			if (p)
2866       t1 = a;			  a;
2867     else		or	else
2868       t1 = b;			  b;
2869     t1;
2870 
2871     The second form is used when *EXPR_P is of type void.
2872 
2873     PRE_P points to the list where side effects that must happen before
2874       *EXPR_P should be stored.  */
2875 
2876 static enum gimplify_status
2877 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2878 {
2879   tree expr = *expr_p;
2880   tree tmp, type, arm1, arm2;
2881   enum gimplify_status ret;
2882   tree label_true, label_false, label_cont;
2883   bool have_then_clause_p, have_else_clause_p;
2884   gimple gimple_cond;
2885   enum tree_code pred_code;
2886   gimple_seq seq = NULL;
2887   location_t loc = EXPR_LOCATION (*expr_p);
2888 
2889   type = TREE_TYPE (expr);
2890 
2891   /* If this COND_EXPR has a value, copy the values into a temporary within
2892      the arms.  */
2893   if (! VOID_TYPE_P (type))
2894     {
2895       tree result;
2896 
2897       /* If an rvalue is ok or we do not require an lvalue, avoid creating
2898 	 an addressable temporary.  */
2899       if (((fallback & fb_rvalue)
2900 	   || !(fallback & fb_lvalue))
2901 	  && !TREE_ADDRESSABLE (type))
2902 	{
2903 	  if (gimplify_ctxp->allow_rhs_cond_expr
2904 	      /* If either branch has side effects or could trap, it can't be
2905 		 evaluated unconditionally.  */
2906 	      && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
2907 	      && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
2908 	      && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
2909 	      && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
2910 	    return gimplify_pure_cond_expr (expr_p, pre_p);
2911 
2912 	  result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2913 	  ret = GS_ALL_DONE;
2914 	}
2915       else
2916 	{
2917 	  tree type = build_pointer_type (TREE_TYPE (expr));
2918 
2919 	  if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2920 	    TREE_OPERAND (expr, 1) =
2921 	      build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 1));
2922 
2923 	  if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2924 	    TREE_OPERAND (expr, 2) =
2925 	      build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 2));
2926 
2927 	  tmp = create_tmp_var (type, "iftmp");
2928 
2929 	  expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2930 			 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2931 
2932 	  result = build_fold_indirect_ref_loc (loc, tmp);
2933 	}
2934 
2935       /* Build the then clause, 't1 = a;'.  But don't build an assignment
2936 	 if this branch is void; in C++ it can be, if it's a throw.  */
2937       if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2938 	TREE_OPERAND (expr, 1)
2939 	  = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));
2940 
2941       /* Build the else clause, 't1 = b;'.  */
2942       if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2943 	TREE_OPERAND (expr, 2)
2944 	  = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));
2945 
2946       TREE_TYPE (expr) = void_type_node;
2947       recalculate_side_effects (expr);
2948 
2949       /* Move the COND_EXPR to the prequeue.  */
2950       gimplify_stmt (&expr, pre_p);
2951 
2952       *expr_p = result;
2953       return GS_ALL_DONE;
2954     }
2955 
2956   /* Make sure the condition has BOOLEAN_TYPE.  */
2957   TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2958 
2959   /* Break apart && and || conditions.  */
2960   if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2961       || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2962     {
2963       expr = shortcut_cond_expr (expr);
2964 
2965       if (expr != *expr_p)
2966 	{
2967 	  *expr_p = expr;
2968 
2969 	  /* We can't rely on gimplify_expr to re-gimplify the expanded
2970 	     form properly, as cleanups might cause the target labels to be
2971 	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
2972 	     set up a conditional context.  */
2973 	  gimple_push_condition ();
2974 	  gimplify_stmt (expr_p, &seq);
2975 	  gimple_pop_condition (pre_p);
2976 	  gimple_seq_add_seq (pre_p, seq);
2977 
2978 	  return GS_ALL_DONE;
2979 	}
2980     }
2981 
2982   /* Now do the normal gimplification.  */
2983 
2984   /* Gimplify condition.  */
2985   ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2986 		       fb_rvalue);
2987   if (ret == GS_ERROR)
2988     return GS_ERROR;
2989   gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2990 
2991   gimple_push_condition ();
2992 
2993   have_then_clause_p = have_else_clause_p = false;
2994   if (TREE_OPERAND (expr, 1) != NULL
2995       && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2996       && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2997       && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2998 	  == current_function_decl)
2999       /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3000 	 have different locations, otherwise we end up with incorrect
3001 	 location information on the branches.  */
3002       && (optimize
3003 	  || !EXPR_HAS_LOCATION (expr)
3004 	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3005 	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3006     {
3007       label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3008       have_then_clause_p = true;
3009     }
3010   else
3011     label_true = create_artificial_label (UNKNOWN_LOCATION);
3012   if (TREE_OPERAND (expr, 2) != NULL
3013       && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3014       && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3015       && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3016 	  == current_function_decl)
3017       /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3018 	 have different locations, otherwise we end up with incorrect
3019 	 location information on the branches.  */
3020       && (optimize
3021 	  || !EXPR_HAS_LOCATION (expr)
3022 	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3023 	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3024     {
3025       label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3026       have_else_clause_p = true;
3027     }
3028   else
3029     label_false = create_artificial_label (UNKNOWN_LOCATION);
3030 
3031   gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3032 				 &arm2);
3033 
3034   gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3035                                    label_false);
3036 
3037   gimplify_seq_add_stmt (&seq, gimple_cond);
3038   label_cont = NULL_TREE;
3039   if (!have_then_clause_p)
3040     {
3041       /* For if (...) {} else { code; } put label_true after
3042 	 the else block.  */
3043       if (TREE_OPERAND (expr, 1) == NULL_TREE
3044 	  && !have_else_clause_p
3045 	  && TREE_OPERAND (expr, 2) != NULL_TREE)
3046 	label_cont = label_true;
3047       else
3048 	{
3049 	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3050 	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3051 	  /* For if (...) { code; } else {} or
3052 	     if (...) { code; } else goto label; or
3053 	     if (...) { code; return; } else { ... }
3054 	     label_cont isn't needed.  */
3055 	  if (!have_else_clause_p
3056 	      && TREE_OPERAND (expr, 2) != NULL_TREE
3057 	      && gimple_seq_may_fallthru (seq))
3058 	    {
3059 	      gimple g;
3060 	      label_cont = create_artificial_label (UNKNOWN_LOCATION);
3061 
3062 	      g = gimple_build_goto (label_cont);
3063 
3064 	      /* GIMPLE_COND's are very low level; they have embedded
3065 		 gotos.  This particular embedded goto should not be marked
3066 		 with the location of the original COND_EXPR, as it would
3067 		 correspond to the COND_EXPR's condition, not the ELSE or the
3068 		 THEN arms.  To avoid marking it with the wrong location, flag
3069 		 it as "no location".  */
3070 	      gimple_set_do_not_emit_location (g);
3071 
3072 	      gimplify_seq_add_stmt (&seq, g);
3073 	    }
3074 	}
3075     }
3076   if (!have_else_clause_p)
3077     {
3078       gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3079       have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3080     }
3081   if (label_cont)
3082     gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3083 
3084   gimple_pop_condition (pre_p);
3085   gimple_seq_add_seq (pre_p, seq);
3086 
3087   if (ret == GS_ERROR)
3088     ; /* Do nothing.  */
3089   else if (have_then_clause_p || have_else_clause_p)
3090     ret = GS_ALL_DONE;
3091   else
3092     {
3093       /* Both arms are empty; replace the COND_EXPR with its predicate.  */
3094       expr = TREE_OPERAND (expr, 0);
3095       gimplify_stmt (&expr, pre_p);
3096     }
3097 
3098   *expr_p = NULL;
3099   return ret;
3100 }
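
/* Sketch of the two stages above for a value-producing COND_EXPR
   (labels and the temporary shown here are illustrative):

     x = p ? a : b;

   first becomes, per the comment before gimplify_cond_expr,

     if (p) iftmp.0 = a; else iftmp.0 = b;
     x = iftmp.0;

   and the void COND_EXPR is then lowered to an explicit GIMPLE_COND:

     if (p) goto L_true; else goto L_false;
   L_true:
     iftmp.0 = a;
     goto L_cont;
   L_false:
     iftmp.0 = b;
   L_cont:
     x = iftmp.0;  */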
3101 
3102 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3103    to be marked addressable.
3104 
3105    We cannot rely on such an expression being directly markable if a temporary
3106    has been created by the gimplification.  In this case, we create another
3107    temporary and initialize it with a copy, which will become a store after we
3108    mark it addressable.  This can happen if the front-end passed us something
3109    that it could not mark addressable yet, like a Fortran pass-by-reference
3110    parameter (int) floatvar.  */
3111 
3112 static void
3113 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3114 {
3115   while (handled_component_p (*expr_p))
3116     expr_p = &TREE_OPERAND (*expr_p, 0);
3117   if (is_gimple_reg (*expr_p))
3118     *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3119 }
3120 
3121 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3122    a call to __builtin_memcpy.  */
3123 
3124 static enum gimplify_status
3125 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3126     				gimple_seq *seq_p)
3127 {
3128   tree t, to, to_ptr, from, from_ptr;
3129   gimple gs;
3130   location_t loc = EXPR_LOCATION (*expr_p);
3131 
3132   to = TREE_OPERAND (*expr_p, 0);
3133   from = TREE_OPERAND (*expr_p, 1);
3134 
3135   /* Mark the RHS addressable.  Beware that it may not be possible to do so
3136      directly if a temporary has been created by the gimplification.  */
3137   prepare_gimple_addressable (&from, seq_p);
3138 
3139   mark_addressable (from);
3140   from_ptr = build_fold_addr_expr_loc (loc, from);
3141   gimplify_arg (&from_ptr, seq_p, loc);
3142 
3143   mark_addressable (to);
3144   to_ptr = build_fold_addr_expr_loc (loc, to);
3145   gimplify_arg (&to_ptr, seq_p, loc);
3146 
3147   t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3148 
3149   gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3150 
3151   if (want_value)
3152     {
3153       /* tmp = memcpy() */
3154       t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3155       gimple_call_set_lhs (gs, t);
3156       gimplify_seq_add_stmt (seq_p, gs);
3157 
3158       *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3159       return GS_ALL_DONE;
3160     }
3161 
3162   gimplify_seq_add_stmt (seq_p, gs);
3163   *expr_p = NULL;
3164   return GS_ALL_DONE;
3165 }
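
/* Illustrative sketch for gimplify_modify_expr_to_memcpy above: this
   path is typically reached when the object being copied has variable
   size (the RHS was wrapped in a WITH_SIZE_EXPR), so "to = from" is
   emitted as roughly

     __builtin_memcpy (&to, &from, size);

   and, when the value of the assignment is wanted, the pointer the
   call returns is dereferenced to stand in for it.  */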
3166 
3167 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3168    a call to __builtin_memset.  In this case we know that the RHS is
3169    a CONSTRUCTOR with an empty element list.  */
3170 
3171 static enum gimplify_status
3172 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3173     				gimple_seq *seq_p)
3174 {
3175   tree t, from, to, to_ptr;
3176   gimple gs;
3177   location_t loc = EXPR_LOCATION (*expr_p);
3178 
3179   /* Assert our assumptions, to abort instead of producing wrong code
3180      silently if they are not met.  Beware that the RHS CONSTRUCTOR might
3181      not be immediately exposed.  */
3182   from = TREE_OPERAND (*expr_p, 1);
3183   if (TREE_CODE (from) == WITH_SIZE_EXPR)
3184     from = TREE_OPERAND (from, 0);
3185 
3186   gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3187 	      && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3188 
3189   /* Now proceed.  */
3190   to = TREE_OPERAND (*expr_p, 0);
3191 
3192   to_ptr = build_fold_addr_expr_loc (loc, to);
3193   gimplify_arg (&to_ptr, seq_p, loc);
3194   t = implicit_built_in_decls[BUILT_IN_MEMSET];
3195 
3196   gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3197 
3198   if (want_value)
3199     {
3200       /* tmp = memset() */
3201       t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3202       gimple_call_set_lhs (gs, t);
3203       gimplify_seq_add_stmt (seq_p, gs);
3204 
3205       *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3206       return GS_ALL_DONE;
3207     }
3208 
3209   gimplify_seq_add_stmt (seq_p, gs);
3210   *expr_p = NULL;
3211   return GS_ALL_DONE;
3212 }
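
/* Companion sketch for gimplify_modify_expr_to_memset above: clearing
   a variable-sized object with an empty CONSTRUCTOR (e.g. an aggregate
   assigned from an empty initializer list) is emitted as roughly

     __builtin_memset (&to, 0, size);

   again with the returned pointer dereferenced when the value of the
   assignment is wanted.  */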
3213 
3214 /* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
3215    determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3216    assignment.  Returns non-null if we detect a potential overlap.  */
3217 
3218 struct gimplify_init_ctor_preeval_data
3219 {
3220   /* The base decl of the lhs object.  May be NULL, in which case we
3221      have to assume the lhs is indirect.  */
3222   tree lhs_base_decl;
3223 
3224   /* The alias set of the lhs object.  */
3225   alias_set_type lhs_alias_set;
3226 };
3227 
3228 static tree
3229 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3230 {
3231   struct gimplify_init_ctor_preeval_data *data
3232     = (struct gimplify_init_ctor_preeval_data *) xdata;
3233   tree t = *tp;
3234 
3235   /* If we find the base object, obviously we have overlap.  */
3236   if (data->lhs_base_decl == t)
3237     return t;
3238 
3239   /* If the constructor component is indirect, determine if we have a
3240      potential overlap with the lhs.  The only bits of information we
3241      have to go on at this point are addressability and alias sets.  */
3242   if (TREE_CODE (t) == INDIRECT_REF
3243       && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3244       && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3245     return t;
3246 
3247   /* If the constructor component is a call, determine if it can hide a
3248      potential overlap with the lhs through an INDIRECT_REF like above.  */
3249   if (TREE_CODE (t) == CALL_EXPR)
3250     {
3251       tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3252 
3253       for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3254 	if (POINTER_TYPE_P (TREE_VALUE (type))
3255 	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3256 	    && alias_sets_conflict_p (data->lhs_alias_set,
3257 				      get_alias_set
3258 				        (TREE_TYPE (TREE_VALUE (type)))))
3259 	  return t;
3260     }
3261 
3262   if (IS_TYPE_OR_DECL_P (t))
3263     *walk_subtrees = 0;
3264   return NULL;
3265 }
3266 
3267 /* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
3268    force values that overlap with the lhs (as described by *DATA)
3269    into temporaries.  */
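
/* For illustration, a rough sketch of the kind of case this handles
   (the struct and field names are made up):

     struct A a;
     ...
     a = (struct A) { .x = a.y, .y = a.x };

   The constructor elements read from the object being initialized, so
   each overlapping element is forced into a temporary first; otherwise
   the element stores emitted later could clobber values that are still
   needed.  */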
3270 
3271 static void
3272 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3273 			    struct gimplify_init_ctor_preeval_data *data)
3274 {
3275   enum gimplify_status one;
3276 
3277   /* If the value is constant, then there's nothing to pre-evaluate.  */
3278   if (TREE_CONSTANT (*expr_p))
3279     {
3280       /* Ensure it does not have side effects, it might contain a reference to
3281 	 the object we're initializing.  */
3282       gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3283       return;
3284     }
3285 
3286   /* If the type has non-trivial constructors, we can't pre-evaluate.  */
3287   if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3288     return;
3289 
3290   /* Recurse for nested constructors.  */
3291   if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3292     {
3293       unsigned HOST_WIDE_INT ix;
3294       constructor_elt *ce;
3295       VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3296 
3297       for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
3298 	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3299 
3300       return;
3301     }
3302 
3303   /* If this is a variable sized type, we must remember the size.  */
3304   maybe_with_size_expr (expr_p);
3305 
3306   /* Gimplify the constructor element to something appropriate for the rhs
3307      of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
3308      the gimplifier will consider this a store to memory.  Doing this
3309      gimplification now means that we won't have to deal with complicated
3310      language-specific trees, nor trees like SAVE_EXPR that can induce
3311      exponential search behavior.  */
3312   one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3313   if (one == GS_ERROR)
3314     {
3315       *expr_p = NULL;
3316       return;
3317     }
3318 
3319   /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3320      with the lhs, since "a = { .x=a }" doesn't make sense.  This will
3321      always be true for all scalars, since is_gimple_mem_rhs insists on a
3322      temporary variable for them.  */
3323   if (DECL_P (*expr_p))
3324     return;
3325 
3326   /* If this is of variable size, we have no choice but to assume it doesn't
3327      overlap since we can't make a temporary for it.  */
3328   if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3329     return;
3330 
3331   /* Otherwise, we must search for overlap ...  */
3332   if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3333     return;
3334 
3335   /* ... and if found, force the value into a temporary.  */
3336   *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3337 }
3338 
3339 /* A subroutine of gimplify_init_ctor_eval.  Create a loop for
3340    a RANGE_EXPR in a CONSTRUCTOR for an array.
3341 
3342       var = lower;
3343     loop_entry:
3344       object[var] = value;
3345       if (var == upper)
3346 	goto loop_exit;
3347       var = var + 1;
3348       goto loop_entry;
3349     loop_exit:
3350 
3351    We increment var _after_ the loop exit check because we might otherwise
3352    fail if upper == TYPE_MAX_VALUE (the type of upper).
3353 
3354    Note that we never have to deal with SAVE_EXPRs here, because this has
3355    already been taken care of for us, in gimplify_init_ctor_preeval().  */
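
/* As an informal example (the array and values are made up), a GNU C
   range designator such as

     int a[8] = { [2 ... 5] = 7 };

   can reach this function as a RANGE_EXPR index with LOWER == 2,
   UPPER == 5 and VALUE == 7, and is then lowered to the loop shown
   above rather than to four separate element stores.  */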
3356 
3357 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3358 				     gimple_seq *, bool);
3359 
3360 static void
3361 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3362 			       tree value, tree array_elt_type,
3363 			       gimple_seq *pre_p, bool cleared)
3364 {
3365   tree loop_entry_label, loop_exit_label, fall_thru_label;
3366   tree var, var_type, cref, tmp;
3367 
3368   loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3369   loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3370   fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3371 
3372   /* Create and initialize the index variable.  */
3373   var_type = TREE_TYPE (upper);
3374   var = create_tmp_var (var_type, NULL);
3375   gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3376 
3377   /* Add the loop entry label.  */
3378   gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3379 
3380   /* Build the reference.  */
3381   cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3382 		 var, NULL_TREE, NULL_TREE);
3383 
3384   /* If VALUE is itself a CONSTRUCTOR, just call gimplify_init_ctor_eval
3385      to do the stores.  Otherwise just assign VALUE to the reference.  */
3386 
3387   if (TREE_CODE (value) == CONSTRUCTOR)
3388     /* NB we might have to call ourselves recursively through
3389        gimplify_init_ctor_eval if the value is a constructor.  */
3390     gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3391 			     pre_p, cleared);
3392   else
3393     gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3394 
3395   /* We exit the loop when the index var is equal to the upper bound.  */
3396   gimplify_seq_add_stmt (pre_p,
3397 			 gimple_build_cond (EQ_EXPR, var, upper,
3398 					    loop_exit_label, fall_thru_label));
3399 
3400   gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3401 
3402   /* Otherwise, increment the index var...  */
3403   tmp = build2 (PLUS_EXPR, var_type, var,
3404 		fold_convert (var_type, integer_one_node));
3405   gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3406 
3407   /* ...and jump back to the loop entry.  */
3408   gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3409 
3410   /* Add the loop exit label.  */
3411   gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3412 }
3413 
3414 /* Return true if FDECL is a FIELD_DECL whose size is zero.  */
3415 
3416 static bool
3417 zero_sized_field_decl (const_tree fdecl)
3418 {
3419   if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3420       && integer_zerop (DECL_SIZE (fdecl)))
3421     return true;
3422   return false;
3423 }
3424 
3425 /* Return true if TYPE is zero sized.  */
3426 
3427 static bool
3428 zero_sized_type (const_tree type)
3429 {
3430   if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3431       && integer_zerop (TYPE_SIZE (type)))
3432     return true;
3433   return false;
3434 }
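
/* For instance (under the GNU C zero-length array extension), a
   structure whose only member is a zero-length array has a zero
   TYPE_SIZE:

     struct empty { char pad[0]; };

   The two predicates above are used by gimplify_init_ctor_eval and
   gimplify_modify_expr below to skip stores of such zero-sized fields
   and objects.  */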
3435 
3436 /* A subroutine of gimplify_init_constructor.  Generate individual
3437    MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
3438    assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
3439    CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
3440    zeroed first.  */
3441 
3442 static void
3443 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3444 			 gimple_seq *pre_p, bool cleared)
3445 {
3446   tree array_elt_type = NULL;
3447   unsigned HOST_WIDE_INT ix;
3448   tree purpose, value;
3449 
3450   if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3451     array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3452 
3453   FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3454     {
3455       tree cref;
3456 
3457       /* NULL values are created above for gimplification errors.  */
3458       if (value == NULL)
3459 	continue;
3460 
3461       if (cleared && initializer_zerop (value))
3462 	continue;
3463 
3464       /* ??? Here's to hoping the front end fills in all of the indices,
3465 	 so we don't have to figure out what's missing ourselves.  */
3466       gcc_assert (purpose);
3467 
3468       /* Skip zero-sized fields, unless value has side-effects.  This can
3469 	 happen with calls to functions returning a zero-sized type, which
3470 	 we shouldn't discard.  As a number of downstream passes don't
3471 	 expect sets of zero-sized fields, we rely on the gimplification of
3472 	 the MODIFY_EXPR we make below to drop the assignment statement.  */
3473       if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3474 	continue;
3475 
3476       /* If we have a RANGE_EXPR, we have to build a loop to assign the
3477 	 whole range.  */
3478       if (TREE_CODE (purpose) == RANGE_EXPR)
3479 	{
3480 	  tree lower = TREE_OPERAND (purpose, 0);
3481 	  tree upper = TREE_OPERAND (purpose, 1);
3482 
3483 	  /* If the lower bound is equal to upper, just treat it as if
3484 	     upper was the index.  */
3485 	  if (simple_cst_equal (lower, upper))
3486 	    purpose = upper;
3487 	  else
3488 	    {
3489 	      gimplify_init_ctor_eval_range (object, lower, upper, value,
3490 					     array_elt_type, pre_p, cleared);
3491 	      continue;
3492 	    }
3493 	}
3494 
3495       if (array_elt_type)
3496 	{
3497 	  /* Do not use bitsizetype for ARRAY_REF indices.  */
3498 	  if (TYPE_DOMAIN (TREE_TYPE (object)))
3499 	    purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3500 				    purpose);
3501 	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3502 			 purpose, NULL_TREE, NULL_TREE);
3503 	}
3504       else
3505 	{
3506 	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3507 	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3508 			 unshare_expr (object), purpose, NULL_TREE);
3509 	}
3510 
3511       if (TREE_CODE (value) == CONSTRUCTOR
3512 	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3513 	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3514 				 pre_p, cleared);
3515       else
3516 	{
3517 	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3518 	  gimplify_and_add (init, pre_p);
3519 	  ggc_free (init);
3520 	}
3521     }
3522 }
3523 
3524 
3525 /* Returns the appropriate RHS predicate for this LHS.  */
3526 
3527 gimple_predicate
3528 rhs_predicate_for (tree lhs)
3529 {
3530   if (is_gimple_reg (lhs))
3531     return is_gimple_reg_rhs_or_call;
3532   else
3533     return is_gimple_mem_rhs_or_call;
3534 }
3535 
3536 /* Gimplify a C99 compound literal expression.  This just means adding
3537    the DECL_EXPR before the current statement and using its anonymous
3538    decl instead.  */
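
/* A rough example of the transformation (identifiers made up):

     int *p = (int[]) { 1, 2, 3 };

   The COMPOUND_LITERAL_EXPR wraps a DECL_EXPR for an anonymous array
   variable initialized to { 1, 2, 3 }; that DECL_EXPR is emitted
   before the current statement and the expression itself is replaced
   by the anonymous decl.  */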
3539 
3540 static enum gimplify_status
3541 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3542 {
3543   tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3544   tree decl = DECL_EXPR_DECL (decl_s);
3545   /* Mark the decl as addressable if the compound literal
3546      expression is addressable now; otherwise it would be marked too
3547      late, after we gimplify the initialization expression.  */
3548   if (TREE_ADDRESSABLE (*expr_p))
3549     TREE_ADDRESSABLE (decl) = 1;
3550 
3551   /* Preliminarily mark non-addressed complex variables as eligible
3552      for promotion to gimple registers.  We'll transform their uses
3553      as we find them.  */
3554   if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3555        || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3556       && !TREE_THIS_VOLATILE (decl)
3557       && !needs_to_live_in_memory (decl))
3558     DECL_GIMPLE_REG_P (decl) = 1;
3559 
3560   /* This decl isn't mentioned in the enclosing block, so add it to the
3561      list of temps.  FIXME it seems a bit of a kludge to say that
3562      anonymous artificial vars aren't pushed, but everything else is.  */
3563   if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3564     gimple_add_tmp_var (decl);
3565 
3566   gimplify_and_add (decl_s, pre_p);
3567   *expr_p = decl;
3568   return GS_OK;
3569 }
3570 
3571 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3572    return a new CONSTRUCTOR if something changed.  */
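
/* Informally (types and fields made up): given

     struct S s = { .t = (struct T) { 1, 2 } };

   if neither the compound literal nor its anonymous decl has its
   address taken, the literal's DECL_INITIAL is substituted directly,
   as if the source had been written { .t = { 1, 2 } }, avoiding a
   separate temporary for the compound literal.  */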
3573 
3574 static tree
3575 optimize_compound_literals_in_ctor (tree orig_ctor)
3576 {
3577   tree ctor = orig_ctor;
3578   VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3579   unsigned int idx, num = VEC_length (constructor_elt, elts);
3580 
3581   for (idx = 0; idx < num; idx++)
3582     {
3583       tree value = VEC_index (constructor_elt, elts, idx)->value;
3584       tree newval = value;
3585       if (TREE_CODE (value) == CONSTRUCTOR)
3586 	newval = optimize_compound_literals_in_ctor (value);
3587       else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3588 	{
3589 	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3590 	  tree decl = DECL_EXPR_DECL (decl_s);
3591 	  tree init = DECL_INITIAL (decl);
3592 
3593 	  if (!TREE_ADDRESSABLE (value)
3594 	      && !TREE_ADDRESSABLE (decl)
3595 	      && init)
3596 	    newval = optimize_compound_literals_in_ctor (init);
3597 	}
3598       if (newval == value)
3599 	continue;
3600 
3601       if (ctor == orig_ctor)
3602 	{
3603 	  ctor = copy_node (orig_ctor);
3604 	  CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3605 	  elts = CONSTRUCTOR_ELTS (ctor);
3606 	}
3607       VEC_index (constructor_elt, elts, idx)->value = newval;
3608     }
3609   return ctor;
3610 }
3611 
3612 
3613 
3614 /* A subroutine of gimplify_modify_expr.  Break out elements of a
3615    CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3616 
3617    Note that we still need to clear any elements that don't have explicit
3618    initializers, so if not all elements are initialized we keep the
3619    original MODIFY_EXPR and just remove all of the constructor elements.
3620 
3621    If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3622    GS_ERROR if we would have to create a temporary when gimplifying
3623    this constructor.  Otherwise, return GS_OK.
3624 
3625    If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */
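
/* A rough example of the common case:

     int a[4] = { 1 };

   Not every element has an explicit initializer, so the original
   MODIFY_EXPR is kept with an emptied CONSTRUCTOR (clearing the whole
   object) and is followed by the single element assignment a[0] = 1
   produced by gimplify_init_ctor_eval.  */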
3626 
3627 static enum gimplify_status
3628 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3629 			   bool want_value, bool notify_temp_creation)
3630 {
3631   tree object, ctor, type;
3632   enum gimplify_status ret;
3633   VEC(constructor_elt,gc) *elts;
3634 
3635   gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3636 
3637   if (!notify_temp_creation)
3638     {
3639       ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3640 			   is_gimple_lvalue, fb_lvalue);
3641       if (ret == GS_ERROR)
3642 	return ret;
3643     }
3644 
3645   object = TREE_OPERAND (*expr_p, 0);
3646   ctor = TREE_OPERAND (*expr_p, 1) =
3647     optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3648   type = TREE_TYPE (ctor);
3649   elts = CONSTRUCTOR_ELTS (ctor);
3650   ret = GS_ALL_DONE;
3651 
3652   switch (TREE_CODE (type))
3653     {
3654     case RECORD_TYPE:
3655     case UNION_TYPE:
3656     case QUAL_UNION_TYPE:
3657     case ARRAY_TYPE:
3658       {
3659 	struct gimplify_init_ctor_preeval_data preeval_data;
3660 	HOST_WIDE_INT num_type_elements, num_ctor_elements;
3661 	HOST_WIDE_INT num_nonzero_elements;
3662 	bool cleared, valid_const_initializer;
3663 
3664 	/* Aggregate types must lower constructors to initialization of
3665 	   individual elements.  The exception is that a CONSTRUCTOR node
3666 	   with no elements indicates zero-initialization of the whole.  */
3667 	if (VEC_empty (constructor_elt, elts))
3668 	  {
3669 	    if (notify_temp_creation)
3670 	      return GS_OK;
3671 	    break;
3672 	  }
3673 
3674 	/* Fetch information about the constructor to direct later processing.
3675 	   We might want to make static versions of it in various cases, and
3676 	   can only do so if it known to be a valid constant initializer.  */
3677 	   can only do so if it is known to be a valid constant initializer.  */
3678 	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
3679 				      &num_ctor_elements, &cleared);
3680 
3681 	/* If a const aggregate variable is being initialized, then it
3682 	   should never be a lose to promote the variable to be static.  */
3683 	   should never be a loss to promote the variable to be static.  */
3684 	    && num_nonzero_elements > 1
3685 	    && TREE_READONLY (object)
3686 	    && TREE_CODE (object) == VAR_DECL
3687 	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3688 	  {
3689 	    if (notify_temp_creation)
3690 	      return GS_ERROR;
3691 	    DECL_INITIAL (object) = ctor;
3692 	    TREE_STATIC (object) = 1;
3693 	    if (!DECL_NAME (object))
3694 	      DECL_NAME (object) = create_tmp_var_name ("C");
3695 	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3696 
3697 	    /* ??? C++ doesn't automatically append a .<number> to the
3698 	       assembler name, and even when it does, it looks at FE-private
3699 	       data structures, which are not set for this variable, to figure
3700 	       out what that number should be.  I suppose this is
3701 	       important for local statics for inline functions, which aren't
3702 	       "local" in the object file sense.  So in order to get a unique
3703 	       TU-local symbol, we must invoke the lhd version now.  */
3704 	    lhd_set_decl_assembler_name (object);
3705 
3706 	    *expr_p = NULL_TREE;
3707 	    break;
3708 	  }
3709 
3710 	/* If there are "lots" of initialized elements, even discounting
3711 	   those that are not address constants (and thus *must* be
3712 	   computed at runtime), then partition the constructor into
3713 	   constant and non-constant parts.  Block copy the constant
3714 	   parts in, then generate code for the non-constant parts.  */
3715 	/* TODO.  There's code in cp/typeck.c to do this.  */
3716 
3717 	num_type_elements = count_type_elements (type, true);
3718 
3719 	/* If count_type_elements could not determine number of type elements
3720 	   for a constant-sized object, assume clearing is needed.
3721 	   Don't do this for variable-sized objects, as store_constructor
3722 	   will ignore the clearing of variable-sized objects.  */
3723 	if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3724 	  cleared = true;
3725 	/* If there are "lots" of zeros, then block clear the object first.  */
3726 	else if (num_type_elements - num_nonzero_elements
3727 		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3728 		 && num_nonzero_elements < num_type_elements/4)
3729 	  cleared = true;
3730 	/* ??? This bit ought not be needed.  For any element not present
3731 	   in the initializer, we should simply set it to zero.  Except
3732 	   we'd need to *find* the elements that are not present, and that
3733 	   requires trickery to avoid quadratic compile-time behavior in
3734 	   large cases or excessive memory use in small cases.  */
3735 	else if (num_ctor_elements < num_type_elements)
3736 	  cleared = true;
3737 
3738 	/* If there are "lots" of initialized elements, and all of them
3739 	   are valid address constants, then the entire initializer can
3740 	   be dropped to memory, and then memcpy'd out.  Don't do this
3741 	   for sparse arrays, though, as it's more efficient to follow
3742 	   the standard CONSTRUCTOR behavior of memset followed by
3743 	   individual element initialization.  Also don't do this for small
3744 	   all-zero initializers (which aren't big enough to merit
3745 	   clearing), and don't try to make bitwise copies of
3746 	   TREE_ADDRESSABLE types.  */
3747 	if (valid_const_initializer
3748 	    && !(cleared || num_nonzero_elements == 0)
3749 	    && !TREE_ADDRESSABLE (type))
3750 	  {
3751 	    HOST_WIDE_INT size = int_size_in_bytes (type);
3752 	    unsigned int align;
3753 
3754 	    /* ??? We can still get unbounded array types, at least
3755 	       from the C++ front end.  This seems wrong, but attempt
3756 	       to work around it for now.  */
3757 	    if (size < 0)
3758 	      {
3759 		size = int_size_in_bytes (TREE_TYPE (object));
3760 		if (size >= 0)
3761 		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
3762 	      }
3763 
3764 	    /* Find the maximum alignment we can assume for the object.  */
3765 	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
3766 	    if (DECL_P (object))
3767 	      align = DECL_ALIGN (object);
3768 	    else
3769 	      align = TYPE_ALIGN (type);
3770 
3771 	    if (size > 0
3772 		&& num_nonzero_elements > 1
3773 		&& !can_move_by_pieces (size, align))
3774 	      {
3775 		tree new_tree;
3776 
3777 		if (notify_temp_creation)
3778 		  return GS_ERROR;
3779 
3780 		new_tree = create_tmp_var_raw (type, "C");
3781 
3782 		gimple_add_tmp_var (new_tree);
3783 		TREE_STATIC (new_tree) = 1;
3784 		TREE_READONLY (new_tree) = 1;
3785 		DECL_INITIAL (new_tree) = ctor;
3786 		if (align > DECL_ALIGN (new_tree))
3787 		  {
3788 		    DECL_ALIGN (new_tree) = align;
3789 		    DECL_USER_ALIGN (new_tree) = 1;
3790 		  }
3791 	        walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL);
3792 
3793 		TREE_OPERAND (*expr_p, 1) = new_tree;
3794 
3795 		/* This is no longer an assignment of a CONSTRUCTOR, but
3796 		   we still may have processing to do on the LHS.  So
3797 		   pretend we didn't do anything here to let that happen.  */
3798 		return GS_UNHANDLED;
3799 	      }
3800 	  }
3801 
3802 	/* If the target is volatile, we have non-zero elements and more than
3803 	   one field to assign, initialize the target from a temporary.  */
3804 	if (TREE_THIS_VOLATILE (object)
3805 	    && !TREE_ADDRESSABLE (type)
3806 	    && num_nonzero_elements > 0
3807 	    && VEC_length (constructor_elt, elts) > 1)
3808 	  {
3809 	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3810 	    TREE_OPERAND (*expr_p, 0) = temp;
3811 	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3812 			      *expr_p,
3813 			      build2 (MODIFY_EXPR, void_type_node,
3814 				      object, temp));
3815 	    return GS_OK;
3816 	  }
3817 
3818 	if (notify_temp_creation)
3819 	  return GS_OK;
3820 
3821 	/* If there are nonzero elements, pre-evaluate to capture elements
3822 	   overlapping with the lhs into temporaries.  We must do this before
3823 	   clearing to fetch the values before they are zeroed-out.  */
3824 	if (num_nonzero_elements > 0)
3825 	  {
3826 	    preeval_data.lhs_base_decl = get_base_address (object);
3827 	    if (!DECL_P (preeval_data.lhs_base_decl))
3828 	      preeval_data.lhs_base_decl = NULL;
3829 	    preeval_data.lhs_alias_set = get_alias_set (object);
3830 
3831 	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3832 					pre_p, post_p, &preeval_data);
3833 	  }
3834 
3835 	if (cleared)
3836 	  {
3837 	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
3838 	       Note that we still have to gimplify, in order to handle the
3839 	       case of variable sized types.  Avoid shared tree structures.  */
3840 	    CONSTRUCTOR_ELTS (ctor) = NULL;
3841 	    TREE_SIDE_EFFECTS (ctor) = 0;
3842 	    object = unshare_expr (object);
3843 	    gimplify_stmt (expr_p, pre_p);
3844 	  }
3845 
3846 	/* If we have not block cleared the object, or if there are nonzero
3847 	   elements in the constructor, add assignments to the individual
3848 	   scalar fields of the object.  */
3849 	if (!cleared || num_nonzero_elements > 0)
3850 	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3851 
3852 	*expr_p = NULL_TREE;
3853       }
3854       break;
3855 
3856     case COMPLEX_TYPE:
3857       {
3858 	tree r, i;
3859 
3860 	if (notify_temp_creation)
3861 	  return GS_OK;
3862 
3863 	/* Extract the real and imaginary parts out of the ctor.  */
3864 	gcc_assert (VEC_length (constructor_elt, elts) == 2);
3865 	r = VEC_index (constructor_elt, elts, 0)->value;
3866 	i = VEC_index (constructor_elt, elts, 1)->value;
3867 	if (r == NULL || i == NULL)
3868 	  {
3869 	    tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3870 	    if (r == NULL)
3871 	      r = zero;
3872 	    if (i == NULL)
3873 	      i = zero;
3874 	  }
3875 
3876 	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3877 	   represent creation of a complex value.  */
3878 	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3879 	  {
3880 	    ctor = build_complex (type, r, i);
3881 	    TREE_OPERAND (*expr_p, 1) = ctor;
3882 	  }
3883 	else
3884 	  {
3885 	    ctor = build2 (COMPLEX_EXPR, type, r, i);
3886 	    TREE_OPERAND (*expr_p, 1) = ctor;
3887 	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3888 				 pre_p,
3889 				 post_p,
3890 				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3891 				 fb_rvalue);
3892 	  }
3893       }
3894       break;
3895 
3896     case VECTOR_TYPE:
3897       {
3898 	unsigned HOST_WIDE_INT ix;
3899 	constructor_elt *ce;
3900 
3901 	if (notify_temp_creation)
3902 	  return GS_OK;
3903 
3904 	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
3905 	if (TREE_CONSTANT (ctor))
3906 	  {
3907 	    bool constant_p = true;
3908 	    tree value;
3909 
3910 	    /* Even when ctor is constant, it might contain non-*_CST
3911 	       elements, such as addresses or trapping values like
3912 	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
3913 	       in VECTOR_CST nodes.  */
3914 	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3915 	      if (!CONSTANT_CLASS_P (value))
3916 		{
3917 		  constant_p = false;
3918 		  break;
3919 		}
3920 
3921 	    if (constant_p)
3922 	      {
3923 		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3924 		break;
3925 	      }
3926 
3927 	    /* Don't reduce an initializer constant even if we can't
3928 	       make a VECTOR_CST.  It won't do anything for us, and it'll
3929 	       prevent us from representing it as a single constant.  */
3930 	    if (initializer_constant_valid_p (ctor, type))
3931 	      break;
3932 
3933 	    TREE_CONSTANT (ctor) = 0;
3934 	  }
3935 
3936 	/* Vector types use CONSTRUCTOR all the way through gimple
3937 	   compilation as a general initializer.  */
3938 	for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3939 	  {
3940 	    enum gimplify_status tret;
3941 	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3942 				  fb_rvalue);
3943 	    if (tret == GS_ERROR)
3944 	      ret = GS_ERROR;
3945 	  }
3946 	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3947 	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3948       }
3949       break;
3950 
3951     default:
3952       /* So how did we get a CONSTRUCTOR for a scalar type?  */
3953       gcc_unreachable ();
3954     }
3955 
3956   if (ret == GS_ERROR)
3957     return GS_ERROR;
3958   else if (want_value)
3959     {
3960       *expr_p = object;
3961       return GS_OK;
3962     }
3963   else
3964     {
3965       /* If we have gimplified both sides of the initializer but have
3966 	 not emitted an assignment, do so now.  */
3967       if (*expr_p)
3968 	{
3969 	  tree lhs = TREE_OPERAND (*expr_p, 0);
3970 	  tree rhs = TREE_OPERAND (*expr_p, 1);
3971 	  gimple init = gimple_build_assign (lhs, rhs);
3972 	  gimplify_seq_add_stmt (pre_p, init);
3973 	  *expr_p = NULL;
3974 	}
3975 
3976       return GS_ALL_DONE;
3977     }
3978 }
3979 
3980 /* Given a pointer value OP0, return a simplified version of an
3981    indirection through OP0, or NULL_TREE if no simplification is
3982    possible.  Note that the resulting type may be different from
3983    the type pointed to in the sense that it is still compatible
3984    from the langhooks point of view. */
3985 
3986 tree
3987 gimple_fold_indirect_ref (tree t)
3988 {
3989   tree type = TREE_TYPE (TREE_TYPE (t));
3990   tree sub = t;
3991   tree subtype;
3992 
3993   STRIP_NOPS (sub);
3994   subtype = TREE_TYPE (sub);
3995   if (!POINTER_TYPE_P (subtype))
3996     return NULL_TREE;
3997 
3998   if (TREE_CODE (sub) == ADDR_EXPR)
3999     {
4000       tree op = TREE_OPERAND (sub, 0);
4001       tree optype = TREE_TYPE (op);
4002       /* *&p => p */
4003       if (useless_type_conversion_p (type, optype))
4004         return op;
4005 
4006       /* *(foo *)&fooarray => fooarray[0] */
4007       if (TREE_CODE (optype) == ARRAY_TYPE
4008 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
4009 	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
4010        {
4011          tree type_domain = TYPE_DOMAIN (optype);
4012          tree min_val = size_zero_node;
4013          if (type_domain && TYPE_MIN_VALUE (type_domain))
4014            min_val = TYPE_MIN_VALUE (type_domain);
4015 	 if (TREE_CODE (min_val) == INTEGER_CST)
4016 	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
4017        }
4018       /* *(foo *)&complexfoo => __real__ complexfoo */
4019       else if (TREE_CODE (optype) == COMPLEX_TYPE
4020                && useless_type_conversion_p (type, TREE_TYPE (optype)))
4021         return fold_build1 (REALPART_EXPR, type, op);
4022       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
4023       else if (TREE_CODE (optype) == VECTOR_TYPE
4024                && useless_type_conversion_p (type, TREE_TYPE (optype)))
4025         {
4026           tree part_width = TYPE_SIZE (type);
4027           tree index = bitsize_int (0);
4028           return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4029         }
4030     }
4031 
4032   /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
4033   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4034       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4035     {
4036       tree op00 = TREE_OPERAND (sub, 0);
4037       tree op01 = TREE_OPERAND (sub, 1);
4038       tree op00type;
4039 
4040       STRIP_NOPS (op00);
4041       op00type = TREE_TYPE (op00);
4042       if (TREE_CODE (op00) == ADDR_EXPR
4043 	  && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
4044 	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
4045 	{
4046 	  HOST_WIDE_INT offset = tree_low_cst (op01, 0);
4047 	  tree part_width = TYPE_SIZE (type);
4048 	  unsigned HOST_WIDE_INT part_widthi
4049 	    = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4050 	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4051 	  tree index = bitsize_int (indexi);
4052 	  if (offset / part_widthi
4053 	      <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
4054 	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
4055 				part_width, index);
4056 	}
4057     }
4058 
4059   /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
4060   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4061       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4062     {
4063       tree op00 = TREE_OPERAND (sub, 0);
4064       tree op01 = TREE_OPERAND (sub, 1);
4065       tree op00type;
4066 
4067       STRIP_NOPS (op00);
4068       op00type = TREE_TYPE (op00);
4069       if (TREE_CODE (op00) == ADDR_EXPR
4070 	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
4071 	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
4072 	{
4073 	  tree size = TYPE_SIZE_UNIT (type);
4074 	  if (tree_int_cst_equal (size, op01))
4075 	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
4076 	}
4077     }
4078 
4079   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4080   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4081       && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4082       && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4083     {
4084       tree type_domain;
4085       tree min_val = size_zero_node;
4086       tree osub = sub;
4087       sub = gimple_fold_indirect_ref (sub);
4088       if (! sub)
4089 	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4090       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4091       if (type_domain && TYPE_MIN_VALUE (type_domain))
4092         min_val = TYPE_MIN_VALUE (type_domain);
4093       if (TREE_CODE (min_val) == INTEGER_CST)
4094 	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4095     }
4096 
4097   return NULL_TREE;
4098 }
4099 
4100 /* Given a pointer value OP0, return a simplified version of an
4101    indirection through OP0, or NULL_TREE if no simplification is
4102    possible.  This may only be applied to a rhs of an expression.
4103    Note that the resulting type may be different from the type pointed
4104    to in the sense that it is still compatible from the langhooks
4105    point of view. */
4106 
4107 static tree
4108 gimple_fold_indirect_ref_rhs (tree t)
4109 {
4110   return gimple_fold_indirect_ref (t);
4111 }
4112 
4113 /* Subroutine of gimplify_modify_expr to do simplifications of
4114    MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
4115    something changes.  */
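
/* A small illustration of the VAR_DECL case below (declarations made
   up):

     static const struct S init = { 1, 2 };
     struct S s = init;

   Provided neither side is volatile and no temporary would be needed,
   the initializer of "init" is substituted into the RHS so the
   assignment can be broken into per-field stores instead of a copy
   from the read-only variable.  */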
4116 
4117 static enum gimplify_status
4118 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4119 			  gimple_seq *pre_p, gimple_seq *post_p,
4120 			  bool want_value)
4121 {
4122   enum gimplify_status ret = GS_UNHANDLED;
4123   bool changed;
4124 
4125   do
4126     {
4127       changed = false;
4128       switch (TREE_CODE (*from_p))
4129 	{
4130 	case VAR_DECL:
4131 	  /* If we're assigning from a read-only variable initialized with
4132 	     a constructor, do the direct assignment from the constructor,
4133 	     but only if neither source nor target is volatile, since this
4134 	     latter assignment might end up being done on a per-field basis.  */
4135 	  if (DECL_INITIAL (*from_p)
4136 	      && TREE_READONLY (*from_p)
4137 	      && !TREE_THIS_VOLATILE (*from_p)
4138 	      && !TREE_THIS_VOLATILE (*to_p)
4139 	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4140 	    {
4141 	      tree old_from = *from_p;
4142 	      enum gimplify_status subret;
4143 
4144 	      /* Move the constructor into the RHS.  */
4145 	      *from_p = unshare_expr (DECL_INITIAL (*from_p));
4146 
4147 	      /* Let's see if gimplify_init_constructor will need to put
4148 		 it in memory.  */
4149 	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
4150 						  false, true);
4151 	      if (subret == GS_ERROR)
4152 		{
4153 		  /* If so, revert the change.  */
4154 		  *from_p = old_from;
4155 		}
4156 	      else
4157 		{
4158 		  ret = GS_OK;
4159 		  changed = true;
4160 		}
4161 	    }
4162 	  break;
4163 	case INDIRECT_REF:
4164 	  {
4165 	    /* If we have code like
4166 
4167 	     *(const A*)(A*)&x
4168 
4169 	     where the type of "x" is a (possibly cv-qualified) variant
4170 	     of "A", treat the entire expression as identical to "x".
4171 	     This kind of code arises in C++ when an object is bound
4172 	     to a const reference, and if "x" is a TARGET_EXPR we want
4173 	     to take advantage of the optimization below.  */
4174 	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4175 	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4176 	    if (t
4177 		&& (TREE_THIS_VOLATILE (t) == volatile_p
4178 		    || REFERENCE_CLASS_P (t)))
4179 	      {
4180 		TREE_THIS_VOLATILE (t) = volatile_p;
4181 		*from_p = t;
4182 		ret = GS_OK;
4183 		changed = true;
4184 	      }
4185 	    break;
4186 	  }
4187 
4188 	case TARGET_EXPR:
4189 	  {
4190 	    /* If we are initializing something from a TARGET_EXPR, strip the
4191 	       TARGET_EXPR and initialize it directly, if possible.  This can't
4192 	       be done if the initializer is void, since that implies that the
4193 	       temporary is set in some non-trivial way.
4194 
4195 	       ??? What about code that pulls out the temp and uses it
4196 	       elsewhere? I think that such code never uses the TARGET_EXPR as
4197 	       an initializer.  If I'm wrong, we'll die because the temp won't
4198 	       have any RTL.  In that case, I guess we'll need to replace
4199 	       references somehow.  */
4200 	    tree init = TARGET_EXPR_INITIAL (*from_p);
4201 
4202 	    if (init
4203 		&& !VOID_TYPE_P (TREE_TYPE (init)))
4204 	      {
4205 		*from_p = init;
4206 		ret = GS_OK;
4207 		changed = true;
4208 	      }
4209 	  }
4210 	  break;
4211 
4212 	case COMPOUND_EXPR:
4213 	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4214 	     caught.  */
4215 	  gimplify_compound_expr (from_p, pre_p, true);
4216 	  ret = GS_OK;
4217 	  changed = true;
4218 	  break;
4219 
4220 	case CONSTRUCTOR:
4221 	  /* If we're initializing from a CONSTRUCTOR, break this into
4222 	     individual MODIFY_EXPRs.  */
4223 	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4224 					    false);
4225 
4226 	case COND_EXPR:
4227 	  /* If we're assigning to a non-register type, push the assignment
4228 	     down into the branches.  This is mandatory for ADDRESSABLE types,
4229 	     since we cannot generate temporaries for such, but it saves a
4230 	     copy in other cases as well.  */
4231 	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4232 	    {
4233 	      /* This code should mirror the code in gimplify_cond_expr. */
4234 	      enum tree_code code = TREE_CODE (*expr_p);
4235 	      tree cond = *from_p;
4236 	      tree result = *to_p;
4237 
4238 	      ret = gimplify_expr (&result, pre_p, post_p,
4239 				   is_gimple_lvalue, fb_lvalue);
4240 	      if (ret != GS_ERROR)
4241 		ret = GS_OK;
4242 
4243 	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4244 		TREE_OPERAND (cond, 1)
4245 		  = build2 (code, void_type_node, result,
4246 			    TREE_OPERAND (cond, 1));
4247 	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4248 		TREE_OPERAND (cond, 2)
4249 		  = build2 (code, void_type_node, unshare_expr (result),
4250 			    TREE_OPERAND (cond, 2));
4251 
4252 	      TREE_TYPE (cond) = void_type_node;
4253 	      recalculate_side_effects (cond);
4254 
4255 	      if (want_value)
4256 		{
4257 		  gimplify_and_add (cond, pre_p);
4258 		  *expr_p = unshare_expr (result);
4259 		}
4260 	      else
4261 		*expr_p = cond;
4262 	      return ret;
4263 	    }
4264 	  break;
4265 
4266 	case CALL_EXPR:
4267 	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
4268 	     return slot so that we don't generate a temporary.  */
4269 	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4270 	      && aggregate_value_p (*from_p, *from_p))
4271 	    {
4272 	      bool use_target;
4273 
4274 	      if (!(rhs_predicate_for (*to_p))(*from_p))
4275 		/* If we need a temporary, *to_p isn't accurate.  */
4276 		use_target = false;
4277 	      else if (TREE_CODE (*to_p) == RESULT_DECL
4278 		       && DECL_NAME (*to_p) == NULL_TREE
4279 		       && needs_to_live_in_memory (*to_p))
4280 		/* It's OK to use the return slot directly unless it's an NRV. */
4281 		use_target = true;
4282 	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4283 		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4284 		/* Don't force regs into memory.  */
4285 		use_target = false;
4286 	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
4287 		/* It's OK to use the target directly if it's being
4288 		   initialized. */
4289 		use_target = true;
4290 	      else if (!is_gimple_non_addressable (*to_p))
4291 		/* Don't use the original target if it's already addressable;
4292 		   if its address escapes, and the called function uses the
4293 		   NRV optimization, a conforming program could see *to_p
4294 		   change before the called function returns; see c++/19317.
4295 		   When optimizing, the return_slot pass marks more functions
4296 		   as safe after we have escape info.  */
4297 		use_target = false;
4298 	      else
4299 		use_target = true;
4300 
4301 	      if (use_target)
4302 		{
4303 		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4304 		  mark_addressable (*to_p);
4305 		}
4306 	    }
4307 	  break;
4308 
4309 	  /* If we're initializing from a container, push the initialization
4310 	     inside it.  */
4311 	case CLEANUP_POINT_EXPR:
4312 	case BIND_EXPR:
4313 	case STATEMENT_LIST:
4314 	  {
4315 	    tree wrap = *from_p;
4316 	    tree t;
4317 
4318 	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4319 				 fb_lvalue);
4320 	    if (ret != GS_ERROR)
4321 	      ret = GS_OK;
4322 
4323 	    t = voidify_wrapper_expr (wrap, *expr_p);
4324 	    gcc_assert (t == *expr_p);
4325 
4326 	    if (want_value)
4327 	      {
4328 		gimplify_and_add (wrap, pre_p);
4329 		*expr_p = unshare_expr (*to_p);
4330 	      }
4331 	    else
4332 	      *expr_p = wrap;
4333 	    return GS_OK;
4334 	  }
4335 
4336 	case COMPOUND_LITERAL_EXPR:
4337 	  {
4338 	    tree complit = TREE_OPERAND (*expr_p, 1);
4339 	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4340 	    tree decl = DECL_EXPR_DECL (decl_s);
4341 	    tree init = DECL_INITIAL (decl);
4342 
4343 	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4344 	       into struct T x = { 0, 1, 2 } if the address of the
4345 	       compound literal has never been taken.  */
4346 	    if (!TREE_ADDRESSABLE (complit)
4347 		&& !TREE_ADDRESSABLE (decl)
4348 		&& init)
4349 	      {
4350 		*expr_p = copy_node (*expr_p);
4351 		TREE_OPERAND (*expr_p, 1) = init;
4352 		return GS_OK;
4353 	      }
4354 	  }
4355 
4356 	default:
4357 	  break;
4358 	}
4359     }
4360   while (changed);
4361 
4362   return ret;
4363 }
4364 
4365 
4366 /* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
4367    a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4368    DECL_GIMPLE_REG_P set.
4369 
4370    IMPORTANT NOTE: This promotion is performed by introducing a load of the
4371    other, unmodified part of the complex object just before the total store.
4372    As a consequence, if the object is still uninitialized, an undefined value
4373    will be loaded into a register, which may result in a spurious exception
4374    if the register is floating-point and the value happens to be a signaling
4375    NaN for example.  Then the fully-fledged complex operations lowering pass
4376    followed by a DCE pass are necessary in order to fix things up.  */
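
/* A sketch of the promotion, where "c" is a complex variable with
   DECL_GIMPLE_REG_P set and D.1 stands for a formal temporary (both
   names made up):

     __real__ c = x;

   becomes, approximately,

     D.1 = __imag__ c;
     c = COMPLEX_EXPR <x, D.1>;

   so the whole object is stored at once.  */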
4377 
4378 static enum gimplify_status
4379 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4380                                    bool want_value)
4381 {
4382   enum tree_code code, ocode;
4383   tree lhs, rhs, new_rhs, other, realpart, imagpart;
4384 
4385   lhs = TREE_OPERAND (*expr_p, 0);
4386   rhs = TREE_OPERAND (*expr_p, 1);
4387   code = TREE_CODE (lhs);
4388   lhs = TREE_OPERAND (lhs, 0);
4389 
4390   ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4391   other = build1 (ocode, TREE_TYPE (rhs), lhs);
4392   other = get_formal_tmp_var (other, pre_p);
4393 
4394   realpart = code == REALPART_EXPR ? rhs : other;
4395   imagpart = code == REALPART_EXPR ? other : rhs;
4396 
4397   if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4398     new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4399   else
4400     new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4401 
4402   gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4403   *expr_p = (want_value) ? rhs : NULL_TREE;
4404 
4405   return GS_ALL_DONE;
4406 }
4407 
4408 
4409 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4410 
4411       modify_expr
4412 	      : varname '=' rhs
4413 	      | '*' ID '=' rhs
4414 
4415     PRE_P points to the list where side effects that must happen before
4416 	*EXPR_P should be stored.
4417 
4418     POST_P points to the list where side effects that must happen after
4419 	*EXPR_P should be stored.
4420 
4421     WANT_VALUE is nonzero iff we want to use the value of this expression
4422 	in another expression.  */
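
/* For example, when the assignment itself is used as a value, as in

     b = (a = x);

   the inner assignment is emitted into PRE_P as a statement and the
   expression is replaced by an (unshared) copy of "a", per WANT_VALUE
   below; otherwise *EXPR_P simply becomes NULL.  */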
4423 
4424 static enum gimplify_status
4425 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4426 		      bool want_value)
4427 {
4428   tree *from_p = &TREE_OPERAND (*expr_p, 1);
4429   tree *to_p = &TREE_OPERAND (*expr_p, 0);
4430   enum gimplify_status ret = GS_UNHANDLED;
4431   gimple assign;
4432   location_t loc = EXPR_LOCATION (*expr_p);
4433 
4434   gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4435 	      || TREE_CODE (*expr_p) == INIT_EXPR);
4436 
4437   /* Insert pointer conversions required by the middle-end that are not
4438      required by the frontend.  This fixes middle-end type checking for,
4439      for example, gcc.dg/redecl-6.c.  */
4440   if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4441     {
4442       STRIP_USELESS_TYPE_CONVERSION (*from_p);
4443       if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4444 	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4445     }
4446 
4447   /* See if any simplifications can be done based on what the RHS is.  */
4448   ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4449 				  want_value);
4450   if (ret != GS_UNHANDLED)
4451     return ret;
4452 
4453   /* For zero sized types only gimplify the left hand side and right hand
4454      side as statements and throw away the assignment.  Do this after
4455      gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4456      types properly.  */
4457   if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4458     {
4459       gimplify_stmt (from_p, pre_p);
4460       gimplify_stmt (to_p, pre_p);
4461       *expr_p = NULL_TREE;
4462       return GS_ALL_DONE;
4463     }
4464 
4465   /* If the value being copied is of variable width, compute the length
4466      of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
4467      before gimplifying any of the operands so that we can resolve any
4468      PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
4469      the size of the expression to be copied, not of the destination, so
4470      that is what we must do here.  */
4471   maybe_with_size_expr (from_p);
4472 
4473   ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4474   if (ret == GS_ERROR)
4475     return ret;
4476 
4477   /* As a special case, we have to temporarily allow for assignments
4478      with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
4479      a toplevel statement, when gimplifying the GENERIC expression
4480      MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4481      GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4482 
4483      Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
4484      prevent gimplify_expr from trying to create a new temporary for
4485      foo's LHS, we tell it that it should only gimplify until it
4486      reaches the CALL_EXPR.  On return from gimplify_expr, the newly
4487      created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4488      and all we need to do here is set 'a' to be its LHS.  */
4489   ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4490 		       fb_rvalue);
4491   if (ret == GS_ERROR)
4492     return ret;
4493 
4494   /* Now see if the above changed *from_p to something we handle specially.  */
4495   ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4496 				  want_value);
4497   if (ret != GS_UNHANDLED)
4498     return ret;
4499 
4500   /* If we've got a variable sized assignment between two lvalues (i.e. does
4501      not involve a call), then we can make things a bit more straightforward
4502      by converting the assignment to memcpy or memset.  */
4503   if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4504     {
4505       tree from = TREE_OPERAND (*from_p, 0);
4506       tree size = TREE_OPERAND (*from_p, 1);
4507 
4508       if (TREE_CODE (from) == CONSTRUCTOR)
4509 	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4510 
4511       if (is_gimple_addressable (from))
4512 	{
4513 	  *from_p = from;
4514 	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4515 	      					 pre_p);
4516 	}
4517     }
4518 
4519   /* Transform partial stores to non-addressable complex variables into
4520      total stores.  This allows us to use real instead of virtual operands
4521      for these variables, which improves optimization.  */
4522   if ((TREE_CODE (*to_p) == REALPART_EXPR
4523        || TREE_CODE (*to_p) == IMAGPART_EXPR)
4524       && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4525     return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4526 
4527   /* Try to alleviate the effects of the gimplification creating artificial
4528      temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
4529   if (!gimplify_ctxp->into_ssa
4530       && DECL_P (*from_p)
4531       && DECL_IGNORED_P (*from_p)
4532       && DECL_P (*to_p)
4533       && !DECL_IGNORED_P (*to_p))
4534     {
4535       if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4536 	DECL_NAME (*from_p)
4537 	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4538       DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4539       SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4540    }
4541 
4542   if (TREE_CODE (*from_p) == CALL_EXPR)
4543     {
4544       /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4545 	 instead of a GIMPLE_ASSIGN.  */
4546       assign = gimple_build_call_from_tree (*from_p);
4547       if (!gimple_call_noreturn_p (assign))
4548 	gimple_call_set_lhs (assign, *to_p);
4549     }
4550   else
4551     {
4552       assign = gimple_build_assign (*to_p, *from_p);
4553       gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4554     }
4555 
4556   gimplify_seq_add_stmt (pre_p, assign);
4557 
4558   if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4559     {
4560       /* If we've somehow already got an SSA_NAME on the LHS, then
4561 	 we've probably modified it twice.  Not good.  */
4562       gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4563       *to_p = make_ssa_name (*to_p, assign);
4564       gimple_set_lhs (assign, *to_p);
4565     }
4566 
4567   if (want_value)
4568     {
4569       *expr_p = unshare_expr (*to_p);
4570       return GS_OK;
4571     }
4572   else
4573     *expr_p = NULL;
4574 
4575   return GS_ALL_DONE;
4576 }
4577 
4578 /*  Gimplify a comparison between two variable-sized objects.  Do this
4579     with a call to BUILT_IN_MEMCMP.  */
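
/* Roughly: for OP0 and OP1 of the same variable-sized type, as front
   ends that allow aggregate equality (e.g. Ada) can produce,

     OP0 == OP1

   is rewritten as

     memcmp (&OP0, &OP1, <size of OP0's type>) == 0

   and "!=" is handled the same way, keeping the original comparison
   code against zero.  */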
4580 
4581 static enum gimplify_status
4582 gimplify_variable_sized_compare (tree *expr_p)
4583 {
4584   tree op0 = TREE_OPERAND (*expr_p, 0);
4585   tree op1 = TREE_OPERAND (*expr_p, 1);
4586   tree t, arg, dest, src;
4587   location_t loc = EXPR_LOCATION (*expr_p);
4588 
4589   arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4590   arg = unshare_expr (arg);
4591   arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4592   src = build_fold_addr_expr_loc (loc, op1);
4593   dest = build_fold_addr_expr_loc (loc, op0);
4594   t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4595   t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4596   *expr_p
4597     = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4598 
4599   return GS_OK;
4600 }
4601 
4602 /*  Gimplify a comparison between two aggregate objects of integral scalar
4603     mode as a comparison between the bitwise equivalent scalar values.  */
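
/* For instance, if OP0 and OP1 are 4-byte structures whose TYPE_MODE
   is a 32-bit integer mode, the comparison is rewritten, roughly, as

     VIEW_CONVERT_EXPR<uint32>(OP0) == VIEW_CONVERT_EXPR<uint32>(OP1)

   i.e. the aggregates are compared as their bitwise-equivalent scalar
   values ("uint32" here stands for whatever unsigned type the language
   hook returns for that mode).  */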
4604 
4605 static enum gimplify_status
4606 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4607 {
4608   location_t loc = EXPR_LOCATION (*expr_p);
4609   tree op0 = TREE_OPERAND (*expr_p, 0);
4610   tree op1 = TREE_OPERAND (*expr_p, 1);
4611 
4612   tree type = TREE_TYPE (op0);
4613   tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4614 
4615   op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4616   op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4617 
4618   *expr_p
4619     = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4620 
4621   return GS_OK;
4622 }
4623 
4624 /*  Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions.  EXPR_P
4625     points to the expression to gimplify.
4626 
4627     Expressions of the form 'a && b' are gimplified to:
4628 
4629 	a && b ? true : false
4630 
4631     LOCUS is the source location to be put on the generated COND_EXPR.
4632     gimplify_cond_expr will do the rest.  */
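
/* The COND_EXPR built here is later turned into explicit control flow
   by gimplify_cond_expr; for "t = a && b" the end result is roughly
   (labels and temporary made up):

       if (a) goto L1; else goto L3;
     L1:
       if (b) goto L2; else goto L3;
     L2:
       t = 1;
       goto L4;
     L3:
       t = 0;
     L4:
       ...

   This is only a sketch of the eventual lowering, not code emitted by
   this function itself.  */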
4633 
4634 static enum gimplify_status
4635 gimplify_boolean_expr (tree *expr_p, location_t locus)
4636 {
4637   /* Preserve the original type of the expression.  */
4638   tree type = TREE_TYPE (*expr_p);
4639 
4640   *expr_p = build3 (COND_EXPR, type, *expr_p,
4641 		    fold_convert_loc (locus, type, boolean_true_node),
4642 		    fold_convert_loc (locus, type, boolean_false_node));
4643 
4644   SET_EXPR_LOCATION (*expr_p, locus);
4645 
4646   return GS_OK;
4647 }
4648 
4649 /* Gimplifies an expression sequence.  This function gimplifies each
4650    expression and rewrites the original expression with the last
4651    expression of the sequence in GIMPLE form.
4652 
4653    PRE_P points to the list where the side effects for all the
4654        expressions in the sequence will be emitted.
4655 
4656    WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */
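
/* For example, gimplifying

     (x = 1, y = 2, z)

   emits "x = 1;" and "y = 2;" into PRE_P; when WANT_VALUE is true the
   expression is replaced by "z", otherwise "z" is gimplified as a
   statement as well.  */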
4657 
4658 static enum gimplify_status
4659 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4660 {
4661   tree t = *expr_p;
4662 
4663   do
4664     {
4665       tree *sub_p = &TREE_OPERAND (t, 0);
4666 
4667       if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4668 	gimplify_compound_expr (sub_p, pre_p, false);
4669       else
4670 	gimplify_stmt (sub_p, pre_p);
4671 
4672       t = TREE_OPERAND (t, 1);
4673     }
4674   while (TREE_CODE (t) == COMPOUND_EXPR);
4675 
4676   *expr_p = t;
4677   if (want_value)
4678     return GS_OK;
4679   else
4680     {
4681       gimplify_stmt (expr_p, pre_p);
4682       return GS_ALL_DONE;
4683     }
4684 }
4685 
4686 
4687 /* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
4688    gimplify.  After gimplification, EXPR_P will point to a new temporary
4689    that holds the original value of the SAVE_EXPR node.
4690 
4691    PRE_P points to the list where side effects that must happen before
4692       *EXPR_P should be stored.  */
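
/* Informally, for a SAVE_EXPR <n + 1> that appears several times in an
   expression, the first gimplification evaluates "n + 1" once into a
   temporary and marks the SAVE_EXPR resolved; every occurrence then
   reduces to that temporary, so the operand's side effects happen
   exactly once.  */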
4693 
4694 static enum gimplify_status
4695 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4696 {
4697   enum gimplify_status ret = GS_ALL_DONE;
4698   tree val;
4699 
4700   gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4701   val = TREE_OPERAND (*expr_p, 0);
4702 
4703   /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
4704   if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4705     {
4706       /* The operand may be a void-valued expression such as SAVE_EXPRs
4707 	 generated by the Java frontend for class initialization.  It is
4708 	 being executed only for its side-effects.  */
4709       if (TREE_TYPE (val) == void_type_node)
4710 	{
4711 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4712 			       is_gimple_stmt, fb_none);
4713 	  val = NULL;
4714 	}
4715       else
4716 	val = get_initialized_tmp_var (val, pre_p, post_p);
4717 
4718       TREE_OPERAND (*expr_p, 0) = val;
4719       SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4720     }
4721 
4722   *expr_p = val;
4723 
4724   return ret;
4725 }
4726 
4727 /*  Re-write the ADDR_EXPR node pointed to by EXPR_P
4728 
4729       unary_expr
4730 	      : ...
4731 	      | '&' varname
4732 	      ...
4733 
4734     PRE_P points to the list where side effects that must happen before
4735 	*EXPR_P should be stored.
4736 
4737     POST_P points to the list where side effects that must happen after
4738 	*EXPR_P should be stored.  */
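
/* Two small examples of the cases below:  "&*p" simplifies back to
   "p" (with a conversion added if cv-qualification differs), and
   "&VIEW_CONVERT_EXPR<T>(x)" becomes, approximately, "(T *) &x".  */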
4739 
4740 static enum gimplify_status
4741 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4742 {
4743   tree expr = *expr_p;
4744   tree op0 = TREE_OPERAND (expr, 0);
4745   enum gimplify_status ret;
4746   location_t loc = EXPR_LOCATION (*expr_p);
4747 
4748   switch (TREE_CODE (op0))
4749     {
4750     case INDIRECT_REF:
4751     case MISALIGNED_INDIRECT_REF:
4752     do_indirect_ref:
4753       /* Check if we are dealing with an expression of the form '&*ptr'.
4754 	 While the front end folds away '&*ptr' into 'ptr', these
4755 	 expressions may be generated internally by the compiler (e.g.,
4756 	 builtins like __builtin_va_end).  */
4757       /* Caution: the silent array decomposition semantics we allow for
4758 	 ADDR_EXPR means we can't always discard the pair.  */
4759       /* Gimplification of the ADDR_EXPR operand may drop
4760 	 cv-qualification conversions, so make sure we add them if
4761 	 needed.  */
4762       {
4763 	tree op00 = TREE_OPERAND (op0, 0);
4764 	tree t_expr = TREE_TYPE (expr);
4765 	tree t_op00 = TREE_TYPE (op00);
4766 
4767         if (!useless_type_conversion_p (t_expr, t_op00))
4768 	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4769         *expr_p = op00;
4770         ret = GS_OK;
4771       }
4772       break;
4773 
4774     case VIEW_CONVERT_EXPR:
4775       /* Take the address of our operand and then convert it to the type of
4776 	 this ADDR_EXPR.
4777 
4778 	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
4779 	 all clear.  The impact of this transformation is even less clear.  */
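      /* Roughly: '&VIEW_CONVERT_EXPR<T>(X)' is rewritten here as '(T *) &X',
	 where 'T *' is the type of the original ADDR_EXPR.  */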
4780 
4781       /* If the operand is a useless conversion, look through it.  Doing so
4782 	 guarantees that the ADDR_EXPR and its operand will remain of the
4783 	 same type.  */
4784       if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4785 	op0 = TREE_OPERAND (op0, 0);
4786 
4787       *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4788 				  build_fold_addr_expr_loc (loc,
4789 							TREE_OPERAND (op0, 0)));
4790       ret = GS_OK;
4791       break;
4792 
4793     default:
4794       /* We use fb_either here because the C frontend sometimes takes
4795 	 the address of a call that returns a struct; see
4796 	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
4797 	 the implied temporary explicit.  */
4798 
4799       /* Make the operand addressable.  */
4800       ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4801 			   is_gimple_addressable, fb_either);
4802       if (ret == GS_ERROR)
4803 	break;
4804 
4805       /* Then mark it.  Beware that it may not be possible to do so directly
4806 	 if a temporary has been created by the gimplification.  */
4807       prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4808 
4809       op0 = TREE_OPERAND (expr, 0);
4810 
4811       /* For various reasons, the gimplification of the expression
4812 	 may have made a new INDIRECT_REF.  */
4813       if (TREE_CODE (op0) == INDIRECT_REF)
4814 	goto do_indirect_ref;
4815 
4816       mark_addressable (TREE_OPERAND (expr, 0));
4817 
4818       /* The FEs may end up building ADDR_EXPRs early on a decl with
4819 	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
4820 	 here.  */
4821       if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4822 	*expr_p = build_fold_addr_expr (op0);
4823 
4824       /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
4825       recompute_tree_invariant_for_addr_expr (*expr_p);
4826 
4827       /* If we re-built the ADDR_EXPR, add a conversion to the original type
4828          if required.  */
4829       if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4830 	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4831 
4832       break;
4833     }
4834 
4835   return ret;
4836 }
4837 
4838 /* Gimplify the operands of an ASM_EXPR.  Input operands should be gimple
4839    values; output operands should be gimple lvalues.  */
4840 
4841 static enum gimplify_status
4842 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4843 {
4844   tree expr;
4845   int noutputs;
4846   const char **oconstraints;
4847   int i;
4848   tree link;
4849   const char *constraint;
4850   bool allows_mem, allows_reg, is_inout;
4851   enum gimplify_status ret, tret;
4852   gimple stmt;
4853   VEC(tree, gc) *inputs;
4854   VEC(tree, gc) *outputs;
4855   VEC(tree, gc) *clobbers;
4856   VEC(tree, gc) *labels;
4857   tree link_next;
4858 
4859   expr = *expr_p;
4860   noutputs = list_length (ASM_OUTPUTS (expr));
4861   oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4862 
4863   inputs = outputs = clobbers = labels = NULL;
4864 
4865   ret = GS_ALL_DONE;
4866   link_next = NULL_TREE;
4867   for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4868     {
4869       bool ok;
4870       size_t constraint_len;
4871 
4872       link_next = TREE_CHAIN (link);
4873 
4874       oconstraints[i]
4875 	= constraint
4876 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4877       constraint_len = strlen (constraint);
4878       if (constraint_len == 0)
4879         continue;
4880 
4881       ok = parse_output_constraint (&constraint, i, 0, 0,
4882 				    &allows_mem, &allows_reg, &is_inout);
4883       if (!ok)
4884 	{
4885 	  ret = GS_ERROR;
4886 	  is_inout = false;
4887 	}
4888 
4889       if (!allows_reg && allows_mem)
4890 	mark_addressable (TREE_VALUE (link));
4891 
4892       tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4893 			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4894 			    fb_lvalue | fb_mayfail);
4895       if (tret == GS_ERROR)
4896 	{
4897 	  error ("invalid lvalue in asm output %d", i);
4898 	  ret = tret;
4899 	}
4900 
4901       VEC_safe_push (tree, gc, outputs, link);
4902       TREE_CHAIN (link) = NULL_TREE;
4903 
4904       if (is_inout)
4905 	{
4906 	  /* An input/output operand.  To give the optimizers more
4907 	     flexibility, split it into separate input and output
4908  	     operands.  */
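	  /* For example (a sketch): an output "+r" (x) becomes the output
	     "=r" (x) plus a matching input "N" (x), where N is this
	     operand's number.  */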
4909 	  tree input;
4910 	  char buf[10];
4911 
4912 	  /* Turn the in/out constraint into an output constraint.  */
4913 	  char *p = xstrdup (constraint);
4914 	  p[0] = '=';
4915 	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4916 
4917 	  /* And add a matching input constraint.  */
4918 	  if (allows_reg)
4919 	    {
4920 	      sprintf (buf, "%d", i);
4921 
4922 	      /* If there are multiple alternatives in the constraint,
4923 		 handle each of them individually.  Those that allow a register
4924 		 will be replaced with the operand number; the others will stay
4925 		 unchanged.  */
4926 	      if (strchr (p, ',') != NULL)
4927 		{
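		  /* Two passes over the alternatives: the first computes the
		     length needed for the rewritten constraint, the second
		     builds it, substituting the operand number for each
		     alternative that allows a register.  */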
4928 		  size_t len = 0, buflen = strlen (buf);
4929 		  char *beg, *end, *str, *dst;
4930 
4931 		  for (beg = p + 1;;)
4932 		    {
4933 		      end = strchr (beg, ',');
4934 		      if (end == NULL)
4935 			end = strchr (beg, '\0');
4936 		      if ((size_t) (end - beg) < buflen)
4937 			len += buflen + 1;
4938 		      else
4939 			len += end - beg + 1;
4940 		      if (*end)
4941 			beg = end + 1;
4942 		      else
4943 			break;
4944 		    }
4945 
4946 		  str = (char *) alloca (len);
4947 		  for (beg = p + 1, dst = str;;)
4948 		    {
4949 		      const char *tem;
4950 		      bool mem_p, reg_p, inout_p;
4951 
4952 		      end = strchr (beg, ',');
4953 		      if (end)
4954 			*end = '\0';
4955 		      beg[-1] = '=';
4956 		      tem = beg - 1;
4957 		      parse_output_constraint (&tem, i, 0, 0,
4958 					       &mem_p, &reg_p, &inout_p);
4959 		      if (dst != str)
4960 			*dst++ = ',';
4961 		      if (reg_p)
4962 			{
4963 			  memcpy (dst, buf, buflen);
4964 			  dst += buflen;
4965 			}
4966 		      else
4967 			{
4968 			  if (end)
4969 			    len = end - beg;
4970 			  else
4971 			    len = strlen (beg);
4972 			  memcpy (dst, beg, len);
4973 			  dst += len;
4974 			}
4975 		      if (end)
4976 			beg = end + 1;
4977 		      else
4978 			break;
4979 		    }
4980 		  *dst = '\0';
4981 		  input = build_string (dst - str, str);
4982 		}
4983 	      else
4984 		input = build_string (strlen (buf), buf);
4985 	    }
4986 	  else
4987 	    input = build_string (constraint_len - 1, constraint + 1);
4988 
4989 	  free (p);
4990 
4991 	  input = build_tree_list (build_tree_list (NULL_TREE, input),
4992 				   unshare_expr (TREE_VALUE (link)));
4993 	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4994 	}
4995     }
4996 
4997   link_next = NULL_TREE;
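  /* Note that I is not reset here: asm operands are numbered consecutively
     across outputs and inputs, so the diagnostics below continue counting
     from the last output operand.  */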
4998   for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4999     {
5000       link_next = TREE_CHAIN (link);
5001       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5002       parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5003 			      oconstraints, &allows_mem, &allows_reg);
5004 
5005       /* If we can't make copies, we can only accept memory.  */
5006       if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5007 	{
5008 	  if (allows_mem)
5009 	    allows_reg = 0;
5010 	  else
5011 	    {
5012 	      error ("impossible constraint in %<asm%>");
5013 	      error ("non-memory input %d must stay in memory", i);
5014 	      return GS_ERROR;
5015 	    }
5016 	}
5017 
5018       /* If the operand is a memory input, it should be an lvalue.  */
5019       if (!allows_reg && allows_mem)
5020 	{
5021 	  tree inputv = TREE_VALUE (link);
5022 	  STRIP_NOPS (inputv);
5023 	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5024 	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
5025 	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5026 	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5027 	    TREE_VALUE (link) = error_mark_node;
5028 	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5029 				is_gimple_lvalue, fb_lvalue | fb_mayfail);
5030 	  mark_addressable (TREE_VALUE (link));
5031 	  if (tret == GS_ERROR)
5032 	    {
5033 	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5034 	        input_location = EXPR_LOCATION (TREE_VALUE (link));
5035 	      error ("memory input %d is not directly addressable", i);
5036 	      ret = tret;
5037 	    }
5038 	}
5039       else
5040 	{
5041 	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5042 				is_gimple_asm_val, fb_rvalue);
5043 	  if (tret == GS_ERROR)
5044 	    ret = tret;
5045 	}
5046 
5047       TREE_CHAIN (link) = NULL_TREE;
5048       VEC_safe_push (tree, gc, inputs, link);
5049     }
5050 
5051   for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
5052     VEC_safe_push (tree, gc, clobbers, link);
5053 
5054   for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
5055     VEC_safe_push (tree, gc, labels, link);
5056 
5057   /* Do not add ASMs with errors to the gimple IL stream.  */
5058   if (ret != GS_ERROR)
5059     {
5060       stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5061 				   inputs, outputs, clobbers, labels);
5062 
5063       gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5064       gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5065 
5066       gimplify_seq_add_stmt (pre_p, stmt);
5067     }
5068 
5069   return ret;
5070 }
5071 
5072 /* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
5073    GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5074    gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5075    return to this function.
5076 
5077    FIXME should we complexify the prequeue handling instead?  Or use flags
5078    for all the cleanups and let the optimizer tighten them up?  The current
5079    code seems pretty fragile; it will break on a cleanup within any
5080    non-conditional nesting.  But any such nesting would be broken, anyway;
5081    we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5082    and continues out of it.  We can do that at the RTL level, though, so
5083    having an optimizer to tighten up try/finally regions would be a Good
5084    Thing.  */
5085 
5086 static enum gimplify_status
5087 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5088 {
5089   gimple_stmt_iterator iter;
5090   gimple_seq body_sequence = NULL;
5091 
5092   tree temp = voidify_wrapper_expr (*expr_p, NULL);
5093 
5094   /* We only care about the number of conditions between the innermost
5095      CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
5096      any cleanups collected outside the CLEANUP_POINT_EXPR.  */
5097   int old_conds = gimplify_ctxp->conditions;
5098   gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5099   gimplify_ctxp->conditions = 0;
5100   gimplify_ctxp->conditional_cleanups = NULL;
5101 
5102   gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5103 
5104   gimplify_ctxp->conditions = old_conds;
5105   gimplify_ctxp->conditional_cleanups = old_cleanups;
5106 
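  /* Convert each GIMPLE_WITH_CLEANUP_EXPR in the gimplified body into a
     GIMPLE_TRY protecting the statements that follow it.  Roughly,

	  stmt1; WCE <cleanup>; stmt2; stmt3;

     becomes

	  stmt1; try { stmt2; stmt3; } finally { cleanup; }

     (a TRY_CATCH instead for EH-only cleanups).  A WCE that is the last
     statement simply has its cleanup spliced in place.  */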
5107   for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5108     {
5109       gimple wce = gsi_stmt (iter);
5110 
5111       if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5112 	{
5113 	  if (gsi_one_before_end_p (iter))
5114 	    {
5115               /* Note that gsi_insert_seq_before and gsi_remove do not
5116                  scan operands, unlike some other sequence mutators.  */
5117 	      gsi_insert_seq_before_without_update (&iter,
5118                                                     gimple_wce_cleanup (wce),
5119                                                     GSI_SAME_STMT);
5120 	      gsi_remove (&iter, true);
5121 	      break;
5122 	    }
5123 	  else
5124 	    {
5125 	      gimple gtry;
5126 	      gimple_seq seq;
5127 	      enum gimple_try_flags kind;
5128 
5129 	      if (gimple_wce_cleanup_eh_only (wce))
5130 		kind = GIMPLE_TRY_CATCH;
5131 	      else
5132 		kind = GIMPLE_TRY_FINALLY;
5133 	      seq = gsi_split_seq_after (iter);
5134 
5135 	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5136               /* Do not use gsi_replace here, as it may scan operands.
5137                  We want to do a simple structural modification only.  */
5138               *gsi_stmt_ptr (&iter) = gtry;
5139 	      iter = gsi_start (seq);
5140 	    }
5141 	}
5142       else
5143 	gsi_next (&iter);
5144     }
5145 
5146   gimplify_seq_add_seq (pre_p, body_sequence);
5147   if (temp)
5148     {
5149       *expr_p = temp;
5150       return GS_OK;
5151     }
5152   else
5153     {
5154       *expr_p = NULL;
5155       return GS_ALL_DONE;
5156     }
5157 }
5158 
5159 /* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
5160    is the cleanup action required.  EH_ONLY is true if the cleanup should
5161    only be executed if an exception is thrown, not on normal exit.  */
5162 
5163 static void
5164 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5165 {
5166   gimple wce;
5167   gimple_seq cleanup_stmts = NULL;
5168 
5169   /* Errors can result in improperly nested cleanups, which leads to
5170      confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
5171   if (errorcount || sorrycount)
5172     return;
5173 
5174   if (gimple_conditional_context ())
5175     {
5176       /* If we're in a conditional context, this is more complex.  We only
5177 	 want to run the cleanup if we actually ran the initialization that
5178 	 necessitates it, but we want to run it after the end of the
5179 	 conditional context.  So we wrap the try/finally around the
5180 	 condition and use a flag to determine whether or not to actually
5181 	 run the destructor.  Thus
5182 
5183 	   test ? f(A()) : 0
5184 
5185 	 becomes (approximately)
5186 
5187 	   flag = 0;
5188 	   try {
5189 	     if (test) { A::A(temp); flag = 1; val = f(temp); }
5190 	     else { val = 0; }
5191 	   } finally {
5192 	     if (flag) A::~A(temp);
5193 	   }
5194 	   val
5195       */
5196       tree flag = create_tmp_var (boolean_type_node, "cleanup");
5197       gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5198       gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5199 
5200       cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5201       gimplify_stmt (&cleanup, &cleanup_stmts);
5202       wce = gimple_build_wce (cleanup_stmts);
5203 
5204       gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5205       gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5206       gimplify_seq_add_stmt (pre_p, ftrue);
5207 
5208       /* Because of this manipulation, and the EH edges that jump
5209 	 threading cannot redirect, the temporary (VAR) will appear
5210 	 to be used uninitialized.  Don't warn.  */
5211       TREE_NO_WARNING (var) = 1;
5212     }
5213   else
5214     {
5215       gimplify_stmt (&cleanup, &cleanup_stmts);
5216       wce = gimple_build_wce (cleanup_stmts);
5217       gimple_wce_set_cleanup_eh_only (wce, eh_only);
5218       gimplify_seq_add_stmt (pre_p, wce);
5219     }
5220 }
5221 
5222 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */
5223 
5224 static enum gimplify_status
5225 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5226 {
5227   tree targ = *expr_p;
5228   tree temp = TARGET_EXPR_SLOT (targ);
5229   tree init = TARGET_EXPR_INITIAL (targ);
5230   enum gimplify_status ret;
5231 
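  /* A sketch of the transformation: for 'TARGET_EXPR <D.1234, init>',
     'D.1234 = init' (or just 'init' when it is void) is gimplified into
     PRE_P and the expression itself is replaced by the slot D.1234
     (D.1234 being an illustrative temporary name).  */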
5232   if (init)
5233     {
5234       /* TARGET_EXPR temps aren't part of the enclosing block, so add them
5235 	 to the temps list.  Also handle variable-length TARGET_EXPRs.  */
5236       if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5237 	{
5238 	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5239 	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5240 	  gimplify_vla_decl (temp, pre_p);
5241 	}
5242       else
5243 	gimple_add_tmp_var (temp);
5244 
5245       /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5246 	 expression is supposed to initialize the slot.  */
5247       if (VOID_TYPE_P (TREE_TYPE (init)))
5248 	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5249       else
5250 	{
5251 	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5252 	  init = init_expr;
5253 	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5254 	  init = NULL;
5255 	  ggc_free (init_expr);
5256 	}
5257       if (ret == GS_ERROR)
5258 	{
5259 	  /* PR c++/28266 Make sure this is expanded only once. */
5260 	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5261 	  return GS_ERROR;
5262 	}
5263       if (init)
5264 	gimplify_and_add (init, pre_p);
5265 
5266       /* If needed, push the cleanup for the temp.  */
5267       if (TARGET_EXPR_CLEANUP (targ))
5268 	gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5269 			     CLEANUP_EH_ONLY (targ), pre_p);
5270 
5271       /* Only expand this once.  */
5272       TREE_OPERAND (targ, 3) = init;
5273       TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5274     }
5275   else
5276     /* We should have expanded this before.  */
5277     gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5278 
5279   *expr_p = temp;
5280   return GS_OK;
5281 }
5282 
5283 /* Gimplification of expression trees.  */
5284 
5285 /* Gimplify an expression which appears at statement context.  The
5286    corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
5287    NULL, a new sequence is allocated.
5288 
5289    Return true if we actually added a statement to the queue.  */
5290 
5291 bool
5292 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5293 {
5294   gimple_seq_node last;
5295 
5296   if (!*seq_p)
5297     *seq_p = gimple_seq_alloc ();
5298 
5299   last = gimple_seq_last (*seq_p);
5300   gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5301   return last != gimple_seq_last (*seq_p);
5302 }
5303 
5304 
5305 /* Add FIRSTPRIVATE entries for DECL in CTX and the surrounding OpenMP
5306    parallel contexts.  If entries already exist, force them to be some
5307    flavor of private.  If there is no enclosing parallel, do nothing.  */
5308 
5309 void
5310 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5311 {
5312   splay_tree_node n;
5313 
5314   if (decl == NULL || !DECL_P (decl))
5315     return;
5316 
5317   do
5318     {
5319       n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5320       if (n != NULL)
5321 	{
5322 	  if (n->value & GOVD_SHARED)
5323 	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5324 	  else
5325 	    return;
5326 	}
5327       else if (ctx->region_type != ORT_WORKSHARE)
5328 	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5329 
5330       ctx = ctx->outer_context;
5331     }
5332   while (ctx);
5333 }
5334 
5335 /* Similarly for each of the type sizes of TYPE.  */
5336 
5337 static void
5338 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5339 {
5340   if (type == NULL || type == error_mark_node)
5341     return;
5342   type = TYPE_MAIN_VARIANT (type);
5343 
5344   if (pointer_set_insert (ctx->privatized_types, type))
5345     return;
5346 
5347   switch (TREE_CODE (type))
5348     {
5349     case INTEGER_TYPE:
5350     case ENUMERAL_TYPE:
5351     case BOOLEAN_TYPE:
5352     case REAL_TYPE:
5353     case FIXED_POINT_TYPE:
5354       omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5355       omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5356       break;
5357 
5358     case ARRAY_TYPE:
5359       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5360       omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5361       break;
5362 
5363     case RECORD_TYPE:
5364     case UNION_TYPE:
5365     case QUAL_UNION_TYPE:
5366       {
5367 	tree field;
5368 	for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5369 	  if (TREE_CODE (field) == FIELD_DECL)
5370 	    {
5371 	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5372 	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5373 	    }
5374       }
5375       break;
5376 
5377     case POINTER_TYPE:
5378     case REFERENCE_TYPE:
5379       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5380       break;
5381 
5382     default:
5383       break;
5384     }
5385 
5386   omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5387   omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5388   lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5389 }
5390 
5391 /* Add an entry for DECL in the OpenMP context CTX with FLAGS.  */
5392 
5393 static void
5394 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5395 {
5396   splay_tree_node n;
5397   unsigned int nflags;
5398   tree t;
5399 
5400   if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5401     return;
5402 
5403   /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
5404      there are constructors involved somewhere.  */
5405   if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5406       || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5407     flags |= GOVD_SEEN;
5408 
5409   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5410   if (n != NULL)
5411     {
5412       /* We shouldn't be re-adding the decl with the same data
5413 	 sharing class.  */
5414       gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5415       /* The only combination of data sharing classes we should see is
5416 	 FIRSTPRIVATE and LASTPRIVATE.  */
5417       nflags = n->value | flags;
5418       gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5419 		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5420       n->value = nflags;
5421       return;
5422     }
5423 
5424   /* When adding a variable-sized variable, we have to handle all sorts
5425      of additional bits of data: the pointer replacement variable, and
5426      the parameters of the type.  */
5427   if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5428     {
5429       /* Add the pointer replacement variable as PRIVATE if the variable
5430 	 replacement is private, else FIRSTPRIVATE since we'll need the
5431 	 address of the original variable either for SHARED, or for the
5432 	 copy into or out of the context.  */
5433       if (!(flags & GOVD_LOCAL))
5434 	{
5435 	  nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5436 	  nflags |= flags & GOVD_SEEN;
5437 	  t = DECL_VALUE_EXPR (decl);
5438 	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5439 	  t = TREE_OPERAND (t, 0);
5440 	  gcc_assert (DECL_P (t));
5441 	  omp_add_variable (ctx, t, nflags);
5442 	}
5443 
5444       /* Add all of the variable and type parameters (which should have
5445 	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
5446       omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5447       omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5448       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5449 
5450       /* The variable-sized variable itself is never SHARED, only some form
5451 	 of PRIVATE.  The sharing would take place via the pointer variable
5452 	 which we remapped above.  */
5453       if (flags & GOVD_SHARED)
5454 	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5455 		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5456 
5457       /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5458 	 alloca statement we generate for the variable, so make sure it
5459 	 is available.  This isn't automatically needed for the SHARED
5460 	 case, since we won't be allocating local storage then.
5461 	 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
5462 	 in that case omp_notice_variable will be called later on, when
5463 	 it is gimplified.  */
5464       else if (! (flags & GOVD_LOCAL)
5465 	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5466 	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5467     }
5468   else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5469     {
5470       gcc_assert ((flags & GOVD_LOCAL) == 0);
5471       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5472 
5473       /* Similar to the direct variable sized case above, we'll need the
5474 	 size of references being privatized.  */
5475       if ((flags & GOVD_SHARED) == 0)
5476 	{
5477 	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5478 	  if (TREE_CODE (t) != INTEGER_CST)
5479 	    omp_notice_variable (ctx, t, true);
5480 	}
5481     }
5482 
5483   splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5484 }
5485 
5486 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5487    This just prints out diagnostics about threadprivate variable uses
5488    in untied tasks.  If DECL2 is non-NULL, prevent this warning
5489    on that variable.  */
5490 
5491 static bool
5492 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5493 				   tree decl2)
5494 {
5495   splay_tree_node n;
5496 
5497   if (ctx->region_type != ORT_UNTIED_TASK)
5498     return false;
5499   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5500   if (n == NULL)
5501     {
5502       error ("threadprivate variable %qE used in untied task", DECL_NAME (decl));
5503       error_at (ctx->location, "enclosing task");
5504       splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5505     }
5506   if (decl2)
5507     splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5508   return false;
5509 }
5510 
5511 /* Record the fact that DECL was used within the OpenMP context CTX.
5512    IN_CODE is true when real code uses DECL, and false when we should
5513    merely emit default(none) errors.  Return true if DECL is going to
5514    be remapped and thus DECL shouldn't be gimplified into its
5515    DECL_VALUE_EXPR (if any).  */
5516 
5517 static bool
5518 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5519 {
5520   splay_tree_node n;
5521   unsigned flags = in_code ? GOVD_SEEN : 0;
5522   bool ret = false, shared;
5523 
5524   if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5525     return false;
5526 
5527   /* Threadprivate variables are predetermined.  */
5528   if (is_global_var (decl))
5529     {
5530       if (DECL_THREAD_LOCAL_P (decl))
5531 	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5532 
5533       if (DECL_HAS_VALUE_EXPR_P (decl))
5534 	{
5535 	  tree value = get_base_address (DECL_VALUE_EXPR (decl));
5536 
5537 	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5538 	    return omp_notice_threadprivate_variable (ctx, decl, value);
5539 	}
5540     }
5541 
5542   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5543   if (n == NULL)
5544     {
5545       enum omp_clause_default_kind default_kind, kind;
5546       struct gimplify_omp_ctx *octx;
5547 
5548       if (ctx->region_type == ORT_WORKSHARE)
5549 	goto do_outer;
5550 
5551       /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5552 	 remapped firstprivate instead of shared.  To some extent this is
5553 	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
5554       default_kind = ctx->default_kind;
5555       kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5556       if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5557 	default_kind = kind;
5558 
5559       switch (default_kind)
5560 	{
5561 	case OMP_CLAUSE_DEFAULT_NONE:
5562 	  error ("%qE not specified in enclosing parallel",
5563 		 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5564 	  if ((ctx->region_type & ORT_TASK) != 0)
5565 	    error_at (ctx->location, "enclosing task");
5566 	  else
5567 	    error_at (ctx->location, "enclosing parallel");
5568 	  /* FALLTHRU */
5569 	case OMP_CLAUSE_DEFAULT_SHARED:
5570 	  flags |= GOVD_SHARED;
5571 	  break;
5572 	case OMP_CLAUSE_DEFAULT_PRIVATE:
5573 	  flags |= GOVD_PRIVATE;
5574 	  break;
5575 	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5576 	  flags |= GOVD_FIRSTPRIVATE;
5577 	  break;
5578 	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5579 	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
5580 	  gcc_assert ((ctx->region_type & ORT_TASK) != 0);
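	  /* Roughly, the OpenMP implicit data-sharing rule for tasks:
	     the variable is firstprivate if it is determined private (in
	     any flavor) in some enclosing context up to and including
	     the innermost enclosing parallel, or if there is no
	     enclosing parallel at all and the variable is local to the
	     current function; otherwise it is shared.  */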
5581 	  if (ctx->outer_context)
5582 	    omp_notice_variable (ctx->outer_context, decl, in_code);
5583 	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5584 	    {
5585 	      splay_tree_node n2;
5586 
5587 	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5588 	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5589 		{
5590 		  flags |= GOVD_FIRSTPRIVATE;
5591 		  break;
5592 		}
5593 	      if ((octx->region_type & ORT_PARALLEL) != 0)
5594 		break;
5595 	    }
5596 	  if (flags & GOVD_FIRSTPRIVATE)
5597 	    break;
5598 	  if (octx == NULL
5599 	      && (TREE_CODE (decl) == PARM_DECL
5600 		  || (!is_global_var (decl)
5601 		      && DECL_CONTEXT (decl) == current_function_decl)))
5602 	    {
5603 	      flags |= GOVD_FIRSTPRIVATE;
5604 	      break;
5605 	    }
5606 	  flags |= GOVD_SHARED;
5607 	  break;
5608 	default:
5609 	  gcc_unreachable ();
5610 	}
5611 
5612       if ((flags & GOVD_PRIVATE)
5613 	  && lang_hooks.decls.omp_private_outer_ref (decl))
5614 	flags |= GOVD_PRIVATE_OUTER_REF;
5615 
5616       omp_add_variable (ctx, decl, flags);
5617 
5618       shared = (flags & GOVD_SHARED) != 0;
5619       ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5620       goto do_outer;
5621     }
5622 
5623   if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5624       && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5625       && DECL_SIZE (decl)
5626       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5627     {
5628       splay_tree_node n2;
5629       tree t = DECL_VALUE_EXPR (decl);
5630       gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5631       t = TREE_OPERAND (t, 0);
5632       gcc_assert (DECL_P (t));
5633       n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5634       n2->value |= GOVD_SEEN;
5635     }
5636 
5637   shared = ((flags | n->value) & GOVD_SHARED) != 0;
5638   ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5639 
5640   /* If nothing changed, there's nothing left to do.  */
5641   if ((n->value & flags) == flags)
5642     return ret;
5643   flags |= n->value;
5644   n->value = flags;
5645 
5646  do_outer:
5647   /* If the variable is private in the current context, then we don't
5648      need to propagate anything to an outer context.  */
5649   if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5650     return ret;
5651   if (ctx->outer_context
5652       && omp_notice_variable (ctx->outer_context, decl, in_code))
5653     return true;
5654   return ret;
5655 }
5656 
5657 /* Verify that DECL is private within CTX.  If there's specific information
5658    to the contrary in the innermost scope, generate an error.  */
5659 
5660 static bool
5661 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5662 {
5663   splay_tree_node n;
5664 
5665   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5666   if (n != NULL)
5667     {
5668       if (n->value & GOVD_SHARED)
5669 	{
5670 	  if (ctx == gimplify_omp_ctxp)
5671 	    {
5672 	      error ("iteration variable %qE should be private",
5673 		     DECL_NAME (decl));
5674 	      n->value = GOVD_PRIVATE;
5675 	      return true;
5676 	    }
5677 	  else
5678 	    return false;
5679 	}
5680       else if ((n->value & GOVD_EXPLICIT) != 0
5681 	       && (ctx == gimplify_omp_ctxp
5682 		   || (ctx->region_type == ORT_COMBINED_PARALLEL
5683 		       && gimplify_omp_ctxp->outer_context == ctx)))
5684 	{
5685 	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5686 	    error ("iteration variable %qE should not be firstprivate",
5687 		   DECL_NAME (decl));
5688 	  else if ((n->value & GOVD_REDUCTION) != 0)
5689 	    error ("iteration variable %qE should not be reduction",
5690 		   DECL_NAME (decl));
5691 	}
5692       return (ctx == gimplify_omp_ctxp
5693 	      || (ctx->region_type == ORT_COMBINED_PARALLEL
5694 		  && gimplify_omp_ctxp->outer_context == ctx));
5695     }
5696 
5697   if (ctx->region_type != ORT_WORKSHARE)
5698     return false;
5699   else if (ctx->outer_context)
5700     return omp_is_private (ctx->outer_context, decl);
5701   return false;
5702 }
5703 
5704 /* Return true if DECL is private within a parallel region that binds
5705    to the current construct's context, or appears in that parallel
5706    region's REDUCTION clause.  */
5707 
5708 static bool
5709 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5710 {
5711   splay_tree_node n;
5712 
5713   do
5714     {
5715       ctx = ctx->outer_context;
5716       if (ctx == NULL)
5717 	return !(is_global_var (decl)
5718 		 /* References might be private, but might be shared too.  */
5719 		 || lang_hooks.decls.omp_privatize_by_reference (decl));
5720 
5721       n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5722       if (n != NULL)
5723 	return (n->value & GOVD_SHARED) == 0;
5724     }
5725   while (ctx->region_type == ORT_WORKSHARE);
5726   return false;
5727 }
5728 
5729 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5730    omp context as well as into the enclosing omp contexts.  */
5731 
5732 static void
5733 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5734 			   enum omp_region_type region_type)
5735 {
5736   struct gimplify_omp_ctx *ctx, *outer_ctx;
5737   struct gimplify_ctx gctx;
5738   tree c;
5739 
5740   ctx = new_omp_context (region_type);
5741   outer_ctx = ctx->outer_context;
5742 
5743   while ((c = *list_p) != NULL)
5744     {
5745       bool remove = false;
5746       bool notice_outer = true;
5747       const char *check_non_private = NULL;
5748       unsigned int flags;
5749       tree decl;
5750 
5751       switch (OMP_CLAUSE_CODE (c))
5752 	{
5753 	case OMP_CLAUSE_PRIVATE:
5754 	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5755 	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5756 	    {
5757 	      flags |= GOVD_PRIVATE_OUTER_REF;
5758 	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5759 	    }
5760 	  else
5761 	    notice_outer = false;
5762 	  goto do_add;
5763 	case OMP_CLAUSE_SHARED:
5764 	  flags = GOVD_SHARED | GOVD_EXPLICIT;
5765 	  goto do_add;
5766 	case OMP_CLAUSE_FIRSTPRIVATE:
5767 	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5768 	  check_non_private = "firstprivate";
5769 	  goto do_add;
5770 	case OMP_CLAUSE_LASTPRIVATE:
5771 	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5772 	  check_non_private = "lastprivate";
5773 	  goto do_add;
5774 	case OMP_CLAUSE_REDUCTION:
5775 	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5776 	  check_non_private = "reduction";
5777 	  goto do_add;
5778 
5779 	do_add:
5780 	  decl = OMP_CLAUSE_DECL (c);
5781 	  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5782 	    {
5783 	      remove = true;
5784 	      break;
5785 	    }
5786 	  omp_add_variable (ctx, decl, flags);
5787 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5788 	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5789 	    {
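	      /* Gimplify the clause's REDUCTION_INIT and REDUCTION_MERGE
		 statements into GIMPLE sequences now, each inside its own
		 gimplification context.  */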
5790 	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5791 				GOVD_LOCAL | GOVD_SEEN);
5792 	      gimplify_omp_ctxp = ctx;
5793 	      push_gimplify_context (&gctx);
5794 
5795 	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5796 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5797 
5798 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5799 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5800 	      pop_gimplify_context
5801 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5802 	      push_gimplify_context (&gctx);
5803 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5804 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5805 	      pop_gimplify_context
5806 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5807 	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5808 	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5809 
5810 	      gimplify_omp_ctxp = outer_ctx;
5811 	    }
5812 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5813 		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5814 	    {
5815 	      gimplify_omp_ctxp = ctx;
5816 	      push_gimplify_context (&gctx);
5817 	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5818 		{
5819 		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5820 				      NULL, NULL);
5821 		  TREE_SIDE_EFFECTS (bind) = 1;
5822 		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5823 		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5824 		}
5825 	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5826 				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5827 	      pop_gimplify_context
5828 		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5829 	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5830 
5831 	      gimplify_omp_ctxp = outer_ctx;
5832 	    }
5833 	  if (notice_outer)
5834 	    goto do_notice;
5835 	  break;
5836 
5837 	case OMP_CLAUSE_COPYIN:
5838 	case OMP_CLAUSE_COPYPRIVATE:
5839 	  decl = OMP_CLAUSE_DECL (c);
5840 	  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5841 	    {
5842 	      remove = true;
5843 	      break;
5844 	    }
5845 	do_notice:
5846 	  if (outer_ctx)
5847 	    omp_notice_variable (outer_ctx, decl, true);
5848 	  if (check_non_private
5849 	      && region_type == ORT_WORKSHARE
5850 	      && omp_check_private (ctx, decl))
5851 	    {
5852 	      error ("%s variable %qE is private in outer context",
5853 		     check_non_private, DECL_NAME (decl));
5854 	      remove = true;
5855 	    }
5856 	  break;
5857 
5858 	case OMP_CLAUSE_IF:
5859 	  OMP_CLAUSE_OPERAND (c, 0)
5860 	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5861 	  /* Fall through.  */
5862 
5863 	case OMP_CLAUSE_SCHEDULE:
5864 	case OMP_CLAUSE_NUM_THREADS:
5865 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5866 			     is_gimple_val, fb_rvalue) == GS_ERROR)
5867 	    remove = true;
5868 	  break;
5869 
5870 	case OMP_CLAUSE_NOWAIT:
5871 	case OMP_CLAUSE_ORDERED:
5872 	case OMP_CLAUSE_UNTIED:
5873 	case OMP_CLAUSE_COLLAPSE:
5874 	  break;
5875 
5876 	case OMP_CLAUSE_DEFAULT:
5877 	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5878 	  break;
5879 
5880 	default:
5881 	  gcc_unreachable ();
5882 	}
5883 
5884       if (remove)
5885 	*list_p = OMP_CLAUSE_CHAIN (c);
5886       else
5887 	list_p = &OMP_CLAUSE_CHAIN (c);
5888     }
5889 
5890   gimplify_omp_ctxp = ctx;
5891 }
5892 
5893 /* Add an implicit data-sharing clause to the list in DATA for each variable
5894    that was actually used within the context but has no explicit clause.  */
5895 
5896 static int
5897 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5898 {
5899   tree *list_p = (tree *) data;
5900   tree decl = (tree) n->key;
5901   unsigned flags = n->value;
5902   enum omp_clause_code code;
5903   tree clause;
5904   bool private_debug;
5905 
5906   if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5907     return 0;
5908   if ((flags & GOVD_SEEN) == 0)
5909     return 0;
5910   if (flags & GOVD_DEBUG_PRIVATE)
5911     {
5912       gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5913       private_debug = true;
5914     }
5915   else
5916     private_debug
5917       = lang_hooks.decls.omp_private_debug_clause (decl,
5918 						   !!(flags & GOVD_SHARED));
5919   if (private_debug)
5920     code = OMP_CLAUSE_PRIVATE;
5921   else if (flags & GOVD_SHARED)
5922     {
5923       if (is_global_var (decl))
5924 	{
5925 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5926 	  while (ctx != NULL)
5927 	    {
5928 	      splay_tree_node on
5929 		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5930 	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5931 				      | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5932 		break;
5933 	      ctx = ctx->outer_context;
5934 	    }
5935 	  if (ctx == NULL)
5936 	    return 0;
5937 	}
5938       code = OMP_CLAUSE_SHARED;
5939     }
5940   else if (flags & GOVD_PRIVATE)
5941     code = OMP_CLAUSE_PRIVATE;
5942   else if (flags & GOVD_FIRSTPRIVATE)
5943     code = OMP_CLAUSE_FIRSTPRIVATE;
5944   else
5945     gcc_unreachable ();
5946 
5947   clause = build_omp_clause (input_location, code);
5948   OMP_CLAUSE_DECL (clause) = decl;
5949   OMP_CLAUSE_CHAIN (clause) = *list_p;
5950   if (private_debug)
5951     OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5952   else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5953     OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
5954   *list_p = clause;
5955   lang_hooks.decls.omp_finish_clause (clause);
5956 
5957   return 0;
5958 }
5959 
5960 static void
5961 gimplify_adjust_omp_clauses (tree *list_p)
5962 {
5963   struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
5964   tree c, decl;
5965 
5966   while ((c = *list_p) != NULL)
5967     {
5968       splay_tree_node n;
5969       bool remove = false;
5970 
5971       switch (OMP_CLAUSE_CODE (c))
5972 	{
5973 	case OMP_CLAUSE_PRIVATE:
5974 	case OMP_CLAUSE_SHARED:
5975 	case OMP_CLAUSE_FIRSTPRIVATE:
5976 	  decl = OMP_CLAUSE_DECL (c);
5977 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5978 	  remove = !(n->value & GOVD_SEEN);
5979 	  if (! remove)
5980 	    {
5981 	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
5982 	      if ((n->value & GOVD_DEBUG_PRIVATE)
5983 		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
5984 		{
5985 		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
5986 			      || ((n->value & GOVD_DATA_SHARE_CLASS)
5987 				  == GOVD_PRIVATE));
5988 		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
5989 		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
5990 		}
5991 	    }
5992 	  break;
5993 
5994 	case OMP_CLAUSE_LASTPRIVATE:
5995 	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
5996 	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
5997 	  decl = OMP_CLAUSE_DECL (c);
5998 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5999 	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6000 	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
6001 	  break;
6002 
6003 	case OMP_CLAUSE_REDUCTION:
6004 	case OMP_CLAUSE_COPYIN:
6005 	case OMP_CLAUSE_COPYPRIVATE:
6006 	case OMP_CLAUSE_IF:
6007 	case OMP_CLAUSE_NUM_THREADS:
6008 	case OMP_CLAUSE_SCHEDULE:
6009 	case OMP_CLAUSE_NOWAIT:
6010 	case OMP_CLAUSE_ORDERED:
6011 	case OMP_CLAUSE_DEFAULT:
6012 	case OMP_CLAUSE_UNTIED:
6013 	case OMP_CLAUSE_COLLAPSE:
6014 	  break;
6015 
6016 	default:
6017 	  gcc_unreachable ();
6018 	}
6019 
6020       if (remove)
6021 	*list_p = OMP_CLAUSE_CHAIN (c);
6022       else
6023 	list_p = &OMP_CLAUSE_CHAIN (c);
6024     }
6025 
6026   /* Add in any implicit data sharing.  */
6027   splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
6028 
6029   gimplify_omp_ctxp = ctx->outer_context;
6030   delete_omp_context (ctx);
6031 }
6032 
6033 /* Gimplify the contents of an OMP_PARALLEL statement.  This involves
6034    gimplification of the body, as well as scanning the body for used
6035    variables.  We need to do this scan now, because variable-sized
6036    decls will be decomposed during gimplification.  */
6037 
6038 static void
6039 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6040 {
6041   tree expr = *expr_p;
6042   gimple g;
6043   gimple_seq body = NULL;
6044   struct gimplify_ctx gctx;
6045 
6046   gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6047 			     OMP_PARALLEL_COMBINED (expr)
6048 			     ? ORT_COMBINED_PARALLEL
6049 			     : ORT_PARALLEL);
6050 
6051   push_gimplify_context (&gctx);
6052 
6053   g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6054   if (gimple_code (g) == GIMPLE_BIND)
6055     pop_gimplify_context (g);
6056   else
6057     pop_gimplify_context (NULL);
6058 
6059   gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6060 
6061   g = gimple_build_omp_parallel (body,
6062 				 OMP_PARALLEL_CLAUSES (expr),
6063 				 NULL_TREE, NULL_TREE);
6064   if (OMP_PARALLEL_COMBINED (expr))
6065     gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6066   gimplify_seq_add_stmt (pre_p, g);
6067   *expr_p = NULL_TREE;
6068 }
6069 
6070 /* Gimplify the contents of an OMP_TASK statement.  This involves
6071    gimplification of the body, as well as scanning the body for used
6072    variables.  We need to do this scan now, because variable-sized
6073    decls will be decomposed during gimplification.  */
6074 
6075 static void
6076 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6077 {
6078   tree expr = *expr_p;
6079   gimple g;
6080   gimple_seq body = NULL;
6081   struct gimplify_ctx gctx;
6082 
6083   gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6084 			     find_omp_clause (OMP_TASK_CLAUSES (expr),
6085 					      OMP_CLAUSE_UNTIED)
6086 			     ? ORT_UNTIED_TASK : ORT_TASK);
6087 
6088   push_gimplify_context (&gctx);
6089 
6090   g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6091   if (gimple_code (g) == GIMPLE_BIND)
6092     pop_gimplify_context (g);
6093   else
6094     pop_gimplify_context (NULL);
6095 
6096   gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6097 
6098   g = gimple_build_omp_task (body,
6099 			     OMP_TASK_CLAUSES (expr),
6100 			     NULL_TREE, NULL_TREE,
6101 			     NULL_TREE, NULL_TREE, NULL_TREE);
6102   gimplify_seq_add_stmt (pre_p, g);
6103   *expr_p = NULL_TREE;
6104 }
6105 
6106 /* Gimplify the gross structure of an OMP_FOR statement.  */
6107 
6108 static enum gimplify_status
6109 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6110 {
6111   tree for_stmt, decl, var, t;
6112   enum gimplify_status ret = GS_ALL_DONE;
6113   enum gimplify_status tret;
6114   gimple gfor;
6115   gimple_seq for_body, for_pre_body;
6116   int i;
6117 
6118   for_stmt = *expr_p;
6119 
6120   gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6121 			     ORT_WORKSHARE);
6122 
6123   /* Handle OMP_FOR_INIT.  */
6124   for_pre_body = NULL;
6125   gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6126   OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6127 
6128   for_body = gimple_seq_alloc ();
6129   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6130 	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6131   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6132 	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6133   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6134     {
6135       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6136       gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6137       decl = TREE_OPERAND (t, 0);
6138       gcc_assert (DECL_P (decl));
6139       gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6140 		  || POINTER_TYPE_P (TREE_TYPE (decl)));
6141 
6142       /* Make sure the iteration variable is private.  */
6143       if (omp_is_private (gimplify_omp_ctxp, decl))
6144 	omp_notice_variable (gimplify_omp_ctxp, decl, true);
6145       else
6146 	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6147 
6148       /* If DECL is not a gimple register, create a temporary variable to act
6149 	 as an iteration counter.  This is valid, since DECL cannot be
6150 	 modified in the body of the loop.  */
6151       if (!is_gimple_reg (decl))
6152 	{
6153 	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6154 	  TREE_OPERAND (t, 0) = var;
6155 
6156 	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6157 
6158 	  omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6159 	}
6160       else
6161 	var = decl;
6162 
6163       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6164 			    is_gimple_val, fb_rvalue);
6165       ret = MIN (ret, tret);
6166       if (ret == GS_ERROR)
6167 	return ret;
6168 
6169       /* Handle OMP_FOR_COND.  */
6170       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6171       gcc_assert (COMPARISON_CLASS_P (t));
6172       gcc_assert (TREE_OPERAND (t, 0) == decl);
6173 
6174       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6175 			    is_gimple_val, fb_rvalue);
6176       ret = MIN (ret, tret);
6177 
6178       /* Handle OMP_FOR_INCR.  */
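      /* The increment is canonicalized onto VAR; e.g. 'i++' and '--i'
	 become 'i = i + 1' and 'i = i - 1' (a PLUS_EXPR with constant
	 1 or -1).  */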
6179       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6180       switch (TREE_CODE (t))
6181 	{
6182 	case PREINCREMENT_EXPR:
6183 	case POSTINCREMENT_EXPR:
6184 	  t = build_int_cst (TREE_TYPE (decl), 1);
6185 	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6186 	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6187 	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6188 	  break;
6189 
6190 	case PREDECREMENT_EXPR:
6191 	case POSTDECREMENT_EXPR:
6192 	  t = build_int_cst (TREE_TYPE (decl), -1);
6193 	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6194 	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6195 	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6196 	  break;
6197 
6198 	case MODIFY_EXPR:
6199 	  gcc_assert (TREE_OPERAND (t, 0) == decl);
6200 	  TREE_OPERAND (t, 0) = var;
6201 
6202 	  t = TREE_OPERAND (t, 1);
6203 	  switch (TREE_CODE (t))
6204 	    {
6205 	    case PLUS_EXPR:
6206 	      if (TREE_OPERAND (t, 1) == decl)
6207 		{
6208 		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6209 		  TREE_OPERAND (t, 0) = var;
6210 		  break;
6211 		}
6212 
6213 	      /* Fallthru.  */
6214 	    case MINUS_EXPR:
6215 	    case POINTER_PLUS_EXPR:
6216 	      gcc_assert (TREE_OPERAND (t, 0) == decl);
6217 	      TREE_OPERAND (t, 0) = var;
6218 	      break;
6219 	    default:
6220 	      gcc_unreachable ();
6221 	    }
6222 
6223 	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6224 				is_gimple_val, fb_rvalue);
6225 	  ret = MIN (ret, tret);
6226 	  break;
6227 
6228 	default:
6229 	  gcc_unreachable ();
6230 	}
6231 
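      /* For any LASTPRIVATE clause on DECL that does not yet have a GIMPLE
	 sequence (the iteration variable was remapped to a temporary, or
	 this is a collapsed loop), record an extra 'decl = decl OP step'
	 assignment for the lastprivate handling to emit.  */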
6232       if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6233 	{
6234 	  tree c;
6235 	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6236 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6237 		&& OMP_CLAUSE_DECL (c) == decl
6238 		&& OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6239 	      {
6240 		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6241 		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6242 		gcc_assert (TREE_OPERAND (t, 0) == var);
6243 		t = TREE_OPERAND (t, 1);
6244 		gcc_assert (TREE_CODE (t) == PLUS_EXPR
6245 			    || TREE_CODE (t) == MINUS_EXPR
6246 			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
6247 		gcc_assert (TREE_OPERAND (t, 0) == var);
6248 		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6249 			    TREE_OPERAND (t, 1));
6250 		gimplify_assign (decl, t,
6251 				 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6252 	      }
6253 	}
6254     }
6255 
6256   gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6257 
6258   gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6259 
6260   gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6261 			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6262 			       for_pre_body);
6263 
6264   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6265     {
6266       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6267       gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6268       gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6269       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6270       gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6271       gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6272       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6273       gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6274     }
6275 
6276   gimplify_seq_add_stmt (pre_p, gfor);
6277   return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
6278 }
6279 
6280 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6281    In particular, OMP_SECTIONS and OMP_SINGLE.  */
6282 
6283 static void
6284 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6285 {
6286   tree expr = *expr_p;
6287   gimple stmt;
6288   gimple_seq body = NULL;
6289 
6290   gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6291   gimplify_and_add (OMP_BODY (expr), &body);
6292   gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6293 
6294   if (TREE_CODE (expr) == OMP_SECTIONS)
6295     stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6296   else if (TREE_CODE (expr) == OMP_SINGLE)
6297     stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6298   else
6299     gcc_unreachable ();
6300 
6301   gimplify_seq_add_stmt (pre_p, stmt);
6302 }
6303 
6304 /* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
6305    stabilized the lhs of the atomic operation as *ADDR.  Return true if
6306    EXPR is this stabilized form.  */
6307 
6308 static bool
6309 goa_lhs_expr_p (tree expr, tree addr)
6310 {
6311   /* Also include casts to other type variants.  The C front end is fond
6312      of adding these for e.g. volatile variables.  This is like
6313      STRIP_TYPE_NOPS but includes the main variant lookup.  */
6314   STRIP_USELESS_TYPE_CONVERSION (expr);
6315 
6316   if (TREE_CODE (expr) == INDIRECT_REF)
6317     {
6318       expr = TREE_OPERAND (expr, 0);
6319       while (expr != addr
6320 	     && (CONVERT_EXPR_P (expr)
6321 		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6322 	     && TREE_CODE (expr) == TREE_CODE (addr)
6323 	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6324 	{
6325 	  expr = TREE_OPERAND (expr, 0);
6326 	  addr = TREE_OPERAND (addr, 0);
6327 	}
6328       if (expr == addr)
6329 	return true;
6330       return (TREE_CODE (addr) == ADDR_EXPR
6331 	      && TREE_CODE (expr) == ADDR_EXPR
6332 	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6333     }
6334   if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6335     return true;
6336   return false;
6337 }
6338 
6339 /* Walk *EXPR_P and replace
6340    appearances of *LHS_ADDR with LHS_VAR.  If an expression does not involve
6341    the lhs, evaluate it into a temporary.  Return 1 if the lhs appeared as
6342    a subexpression, 0 if it did not, or -1 if an error was encountered.  */
6343 
6344 static int
6345 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6346 		    tree lhs_var)
6347 {
6348   tree expr = *expr_p;
6349   int saw_lhs;
6350 
6351   if (goa_lhs_expr_p (expr, lhs_addr))
6352     {
6353       *expr_p = lhs_var;
6354       return 1;
6355     }
6356   if (is_gimple_val (expr))
6357     return 0;
6358 
6359   saw_lhs = 0;
6360   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6361     {
6362     case tcc_binary:
6363     case tcc_comparison:
6364       saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6365 				     lhs_var);
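      /* FALLTHRU */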
6366     case tcc_unary:
6367       saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6368 				     lhs_var);
6369       break;
6370     case tcc_expression:
6371       switch (TREE_CODE (expr))
6372 	{
6373 	case TRUTH_ANDIF_EXPR:
6374 	case TRUTH_ORIF_EXPR:
6375 	case TRUTH_AND_EXPR:
6376 	case TRUTH_OR_EXPR:
6377 	case TRUTH_XOR_EXPR:
6378 	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6379 					 lhs_addr, lhs_var);
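	  /* FALLTHRU */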
6380 	case TRUTH_NOT_EXPR:
6381 	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6382 					 lhs_addr, lhs_var);
6383 	  break;
6384 	default:
6385 	  break;
6386 	}
6387       break;
6388     default:
6389       break;
6390     }
6391 
6392   if (saw_lhs == 0)
6393     {
6394       enum gimplify_status gs;
6395       gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6396       if (gs != GS_ALL_DONE)
6397 	saw_lhs = -1;
6398     }
6399 
6400   return saw_lhs;
6401 }
6402 
6403 
6404 /* Gimplify an OMP_ATOMIC statement.  */
6405 
6406 static enum gimplify_status
6407 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6408 {
6409   tree addr = TREE_OPERAND (*expr_p, 0);
6410   tree rhs = TREE_OPERAND (*expr_p, 1);
6411   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6412   tree tmp_load;
6413 
6414    tmp_load = create_tmp_var (type, NULL);
6415   tmp_load = create_tmp_var (type, NULL);
6416   if (TREE_CODE (type) == COMPLEX_TYPE || TREE_CODE (type) == VECTOR_TYPE)
6417     DECL_GIMPLE_REG_P (tmp_load) = 1;
6418   if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6419     return GS_ERROR;
6420 
6421   if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6422       != GS_ALL_DONE)
6423     return GS_ERROR;
6424 
6425   gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
6426   if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6427       != GS_ALL_DONE)
6428     return GS_ERROR;
6429   gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6430   *expr_p = NULL;
6431 
6432   return GS_ALL_DONE;
6433 
6434 
6435 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE.  If the
6436    expression produces a value to be used as an operand inside a GIMPLE
6437    statement, the value will be stored back in *EXPR_P.  This value will
6438    be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6439    an SSA_NAME.  The corresponding sequence of GIMPLE statements is
6440    emitted in PRE_P and POST_P.
6441 
6442    Additionally, this process may overwrite parts of the input
6443    expression during gimplification.  Ideally, it should be
6444    possible to do non-destructive gimplification.
6445 
6446    EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
6447       the expression needs to evaluate to a value to be used as
6448       an operand in a GIMPLE statement, this value will be stored in
6449       *EXPR_P on exit.  This happens when the caller specifies one
6450       of fb_lvalue or fb_rvalue fallback flags.
6451 
6452    PRE_P will contain the sequence of GIMPLE statements corresponding
6453        to the evaluation of EXPR and all the side-effects that must
6454        be executed before the main expression.  On exit, the last
6455        statement of PRE_P is the core statement being gimplified.  For
6456        instance, when gimplifying 'if (++a)' the last statement in
6457        PRE_P will be 'if (t.1)' where t.1 is the result of
6458        pre-incrementing 'a'.
6459 
6460    POST_P will contain the sequence of GIMPLE statements corresponding
6461        to the evaluation of all the side-effects that must be executed
6462        after the main expression.  If this is NULL, the post
6463        side-effects are stored at the end of PRE_P.
6464 
6465        The reason why the output is split in two is to handle post
6466        side-effects explicitly.  In some cases, an expression may have
6467        inner and outer post side-effects which need to be emitted in
6468        an order different from the one given by the recursive
6469        traversal.  For instance, for the expression (*p--)++ the post
6470        side-effects of '--' must actually occur *after* the post
6471        side-effects of '++'.  However, gimplification will first visit
6472        the inner expression, so if a separate POST sequence was not
6473        used, the resulting sequence would be:
6474 
6475        	    1	t.1 = *p
6476        	    2	p = p - 1
6477        	    3	t.2 = t.1 + 1
6478        	    4	*p = t.2
6479 
6480        However, the post-decrement operation in line #2 must not be
6481        evaluated until after the store to *p at line #4, so the
6482        correct sequence should be:
6483 
6484        	    1	t.1 = *p
6485        	    2	t.2 = t.1 + 1
6486        	    3	*p = t.2
6487        	    4	p = p - 1
6488 
6489        So, by specifying a separate post queue, it is possible
6490        to emit the post side-effects in the correct order.
6491        If POST_P is NULL, an internal queue will be used.  Before
6492        returning to the caller, the sequence POST_P is appended to
6493        the main output sequence PRE_P.
6494 
6495    GIMPLE_TEST_F points to a function that takes a tree T and
6496        returns nonzero if T is in the GIMPLE form requested by the
6497        caller.  The GIMPLE predicates are in tree-gimple.c.
6498 
6499    FALLBACK tells the function what sort of a temporary we want if
6500        gimplification cannot produce an expression that complies with
6501        GIMPLE_TEST_F.
6502 
6503        fb_none means that no temporary should be generated
6504        fb_rvalue means that an rvalue is OK to generate
6505        fb_lvalue means that an lvalue is OK to generate
6506        fb_either means that either is OK, but an lvalue is preferable.
6507        fb_mayfail means that gimplification may fail (in which case
6508        GS_ERROR will be returned)
6509 
6510    The return value is either GS_ERROR or GS_ALL_DONE, since this
6511    function iterates until EXPR is completely gimplified or an error
6512    occurs.  */
6513 
6514 enum gimplify_status
6515 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6516 	       bool (*gimple_test_f) (tree), fallback_t fallback)
6517 {
6518   tree tmp;
6519   gimple_seq internal_pre = NULL;
6520   gimple_seq internal_post = NULL;
6521   tree save_expr;
6522   bool is_statement;
6523   location_t saved_location;
6524   enum gimplify_status ret;
6525   gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6526 
6527   save_expr = *expr_p;
6528   if (save_expr == NULL_TREE)
6529     return GS_ALL_DONE;
6530 
6531   /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
6532   is_statement = gimple_test_f == is_gimple_stmt;
6533   if (is_statement)
6534     gcc_assert (pre_p);
6535 
6536   /* Consistency checks.  */
6537   if (gimple_test_f == is_gimple_reg)
6538     gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6539   else if (gimple_test_f == is_gimple_val
6540            || gimple_test_f == is_gimple_call_addr
6541            || gimple_test_f == is_gimple_condexpr
6542            || gimple_test_f == is_gimple_mem_rhs
6543            || gimple_test_f == is_gimple_mem_rhs_or_call
6544            || gimple_test_f == is_gimple_reg_rhs
6545            || gimple_test_f == is_gimple_reg_rhs_or_call
6546            || gimple_test_f == is_gimple_asm_val)
6547     gcc_assert (fallback & fb_rvalue);
6548   else if (gimple_test_f == is_gimple_min_lval
6549 	   || gimple_test_f == is_gimple_lvalue)
6550     gcc_assert (fallback & fb_lvalue);
6551   else if (gimple_test_f == is_gimple_addressable)
6552     gcc_assert (fallback & fb_either);
6553   else if (gimple_test_f == is_gimple_stmt)
6554     gcc_assert (fallback == fb_none);
6555   else
6556     {
6557       /* We should have recognized the GIMPLE_TEST_F predicate to
6558 	 know what kind of fallback to use in case a temporary is
6559 	 needed to hold the value or address of *EXPR_P.  */
6560       gcc_unreachable ();
6561     }
6562 
6563   /* We used to check the predicate here and return immediately if it
6564      succeeds.  This is wrong; the design is for gimplification to be
6565      idempotent, and for the predicates to only test for valid forms, not
6566      whether they are fully simplified.  */
6567   if (pre_p == NULL)
6568     pre_p = &internal_pre;
6569 
6570   if (post_p == NULL)
6571     post_p = &internal_post;
6572 
6573   /* Remember the last statements added to PRE_P and POST_P.  Every
6574      new statement added by the gimplification helpers needs to be
6575      annotated with location information.  To centralize the
6576      responsibility, we remember the last statement that had been
6577      added to both queues before gimplifying *EXPR_P.  If
6578      gimplification produces new statements in PRE_P and POST_P, those
6579      statements will be annotated with the same location information
6580      as *EXPR_P.  */
6581   pre_last_gsi = gsi_last (*pre_p);
6582   post_last_gsi = gsi_last (*post_p);
6583 
6584   saved_location = input_location;
6585   if (save_expr != error_mark_node
6586       && EXPR_HAS_LOCATION (*expr_p))
6587     input_location = EXPR_LOCATION (*expr_p);
6588 
6589   /* Loop over the specific gimplifiers until the toplevel node
6590      remains the same.  */
6591   do
6592     {
6593       /* Strip away as many useless type conversions as possible
6594 	 at the toplevel.  */
6595       STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6596 
6597       /* Remember the expr.  */
6598       save_expr = *expr_p;
6599 
6600       /* Die, die, die, my darling.  */
6601       if (save_expr == error_mark_node
6602 	  || (TREE_TYPE (save_expr)
6603 	      && TREE_TYPE (save_expr) == error_mark_node))
6604 	{
6605 	  ret = GS_ERROR;
6606 	  break;
6607 	}
6608 
6609       /* Do any language-specific gimplification.  */
6610       ret = ((enum gimplify_status)
6611 	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6612       if (ret == GS_OK)
6613 	{
6614 	  if (*expr_p == NULL_TREE)
6615 	    break;
6616 	  if (*expr_p != save_expr)
6617 	    continue;
6618 	}
6619       else if (ret != GS_UNHANDLED)
6620 	break;
6621 
6622       ret = GS_OK;
6623       switch (TREE_CODE (*expr_p))
6624 	{
6625 	  /* First deal with the special cases.  */
6626 
6627 	case POSTINCREMENT_EXPR:
6628 	case POSTDECREMENT_EXPR:
6629 	case PREINCREMENT_EXPR:
6630 	case PREDECREMENT_EXPR:
6631 	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6632 					fallback != fb_none);
6633 	  break;
6634 
6635 	case ARRAY_REF:
6636 	case ARRAY_RANGE_REF:
6637 	case REALPART_EXPR:
6638 	case IMAGPART_EXPR:
6639 	case COMPONENT_REF:
6640 	case VIEW_CONVERT_EXPR:
6641 	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6642 					fallback ? fallback : fb_rvalue);
6643 	  break;
6644 
6645 	case COND_EXPR:
6646 	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6647 
6648 	  /* C99 code may assign to an array in a structure value of a
6649 	     conditional expression, and this has undefined behavior
6650 	     only on execution, so create a temporary if an lvalue is
6651 	     required.  */
6652 	  if (fallback == fb_lvalue)
6653 	    {
6654 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6655 	      mark_addressable (*expr_p);
6656 	    }
6657 	  break;
6658 
6659 	case CALL_EXPR:
6660 	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6661 
6662 	  /* C99 code may assign to an array in a structure returned
6663 	     from a function, and this has undefined behavior only on
6664 	     execution, so create a temporary if an lvalue is
6665 	     required.  */
6666 	  if (fallback == fb_lvalue)
6667 	    {
6668 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6669 	      mark_addressable (*expr_p);
6670 	    }
6671 	  break;
6672 
6673 	case TREE_LIST:
6674 	  gcc_unreachable ();
6675 
6676 	case COMPOUND_EXPR:
6677 	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6678 	  break;
6679 
6680 	case COMPOUND_LITERAL_EXPR:
6681 	  ret = gimplify_compound_literal_expr (expr_p, pre_p);
6682 	  break;
6683 
6684 	case MODIFY_EXPR:
6685 	case INIT_EXPR:
6686 	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6687 				      fallback != fb_none);
6688 	  /* Don't let the end of loop logic change GS_OK to GS_ALL_DONE;
6689 	     gimplify_modify_expr_rhs might have changed the RHS.  */
6690 	  if (ret == GS_OK && *expr_p)
6691 	    continue;
6692 	  break;
6693 
6694 	case TRUTH_ANDIF_EXPR:
6695 	case TRUTH_ORIF_EXPR:
6696 	  /* Pass the source location of the outer expression.  */
6697 	  ret = gimplify_boolean_expr (expr_p, saved_location);
6698 	  break;
6699 
6700 	case TRUTH_NOT_EXPR:
6701 	  if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6702 	    {
6703 	      tree type = TREE_TYPE (*expr_p);
6704 	      *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6705 	      ret = GS_OK;
6706 	      break;
6707 	    }
6708 
6709 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6710 			       is_gimple_val, fb_rvalue);
6711 	  recalculate_side_effects (*expr_p);
6712 	  break;
6713 
6714 	case ADDR_EXPR:
6715 	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6716 	  break;
6717 
6718 	case VA_ARG_EXPR:
6719 	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6720 	  break;
6721 
6722 	CASE_CONVERT:
6723 	  if (IS_EMPTY_STMT (*expr_p))
6724 	    {
6725 	      ret = GS_ALL_DONE;
6726 	      break;
6727 	    }
6728 
6729 	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6730 	      || fallback == fb_none)
6731 	    {
6732 	      /* Just strip a conversion to void (or in void context) and
6733 		 try again.  */
6734 	      *expr_p = TREE_OPERAND (*expr_p, 0);
6735 	      break;
6736 	    }
6737 
6738 	  ret = gimplify_conversion (expr_p);
6739 	  if (ret == GS_ERROR)
6740 	    break;
6741 	  if (*expr_p != save_expr)
6742 	    break;
6743 	  /* FALLTHRU */
6744 
6745 	case FIX_TRUNC_EXPR:
6746 	  /* unary_expr: ... | '(' cast ')' val | ...  */
6747 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6748 			       is_gimple_val, fb_rvalue);
6749 	  recalculate_side_effects (*expr_p);
6750 	  break;
6751 
6752 	case INDIRECT_REF:
6753 	  *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
6754 	  if (*expr_p != save_expr)
6755 	    break;
6756 	  /* else fall through.  */
6757 	case ALIGN_INDIRECT_REF:
6758 	case MISALIGNED_INDIRECT_REF:
6759 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6760 			       is_gimple_reg, fb_rvalue);
6761 	  recalculate_side_effects (*expr_p);
6762 	  break;
6763 
6764 	  /* Constants need not be gimplified.  */
6765 	case INTEGER_CST:
6766 	case REAL_CST:
6767 	case FIXED_CST:
6768 	case STRING_CST:
6769 	case COMPLEX_CST:
6770 	case VECTOR_CST:
6771 	  ret = GS_ALL_DONE;
6772 	  break;
6773 
6774 	case CONST_DECL:
6775 	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
6776 	     CONST_DECL node.  Otherwise the decl is replaceable by its
6777 	     value.  */
6778 	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
6779 	  if (fallback & fb_lvalue)
6780 	    ret = GS_ALL_DONE;
6781 	  else
6782 	    *expr_p = DECL_INITIAL (*expr_p);
6783 	  break;
6784 
6785 	case DECL_EXPR:
6786 	  ret = gimplify_decl_expr (expr_p, pre_p);
6787 	  break;
6788 
6789 	case BIND_EXPR:
6790 	  ret = gimplify_bind_expr (expr_p, pre_p);
6791 	  break;
6792 
6793 	case LOOP_EXPR:
6794 	  ret = gimplify_loop_expr (expr_p, pre_p);
6795 	  break;
6796 
6797 	case SWITCH_EXPR:
6798 	  ret = gimplify_switch_expr (expr_p, pre_p);
6799 	  break;
6800 
6801 	case EXIT_EXPR:
6802 	  ret = gimplify_exit_expr (expr_p);
6803 	  break;
6804 
6805 	case GOTO_EXPR:
6806 	  /* If the target is not a LABEL_DECL, then it is a computed jump
6807 	     and the target needs to be gimplified.  */
6808 	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6809 	    {
6810 	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6811 				   NULL, is_gimple_val, fb_rvalue);
6812 	      if (ret == GS_ERROR)
6813 		break;
6814 	    }
6815 	  gimplify_seq_add_stmt (pre_p,
6816 			  gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6817 	  break;
6818 
6819 	case PREDICT_EXPR:
6820 	  gimplify_seq_add_stmt (pre_p,
6821 			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6822 					      PREDICT_EXPR_OUTCOME (*expr_p)));
6823 	  ret = GS_ALL_DONE;
6824 	  break;
6825 
6826 	case LABEL_EXPR:
6827 	  ret = GS_ALL_DONE;
6828 	  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6829 		      == current_function_decl);
6830 	  gimplify_seq_add_stmt (pre_p,
6831 			  gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6832 	  break;
6833 
6834 	case CASE_LABEL_EXPR:
6835 	  ret = gimplify_case_label_expr (expr_p, pre_p);
6836 	  break;
6837 
6838 	case RETURN_EXPR:
6839 	  ret = gimplify_return_expr (*expr_p, pre_p);
6840 	  break;
6841 
6842 	case CONSTRUCTOR:
6843 	  /* Don't reduce this in place; let gimplify_init_constructor work its
6844 	     magic.  But if we're just elaborating this for side effects, just
6845 	     gimplify any element that has side-effects.  */
6846 	  if (fallback == fb_none)
6847 	    {
6848 	      unsigned HOST_WIDE_INT ix;
6849 	      constructor_elt *ce;
6850 	      tree temp = NULL_TREE;
6851 	      for (ix = 0;
6852 		   VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6853 				ix, ce);
6854 		   ix++)
6855 		if (TREE_SIDE_EFFECTS (ce->value))
6856 		  append_to_statement_list (ce->value, &temp);
6857 
6858 	      *expr_p = temp;
6859 	      ret = GS_OK;
6860 	    }
6861 	  /* C99 code may assign to an array in a constructed
6862 	     structure or union, and this has undefined behavior only
6863 	     on execution, so create a temporary if an lvalue is
6864 	     required.  */
6865 	  else if (fallback == fb_lvalue)
6866 	    {
6867 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6868 	      mark_addressable (*expr_p);
6869 	    }
6870 	  else
6871 	    ret = GS_ALL_DONE;
6872 	  break;
6873 
6874 	  /* The following are special cases that are not handled by the
6875 	     original GIMPLE grammar.  */
6876 
6877 	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6878 	     eliminated.  */
6879 	case SAVE_EXPR:
6880 	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
6881 	  break;
6882 
6883 	case BIT_FIELD_REF:
6884 	  {
6885 	    enum gimplify_status r0, r1, r2;
6886 
6887 	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6888 				post_p, is_gimple_lvalue, fb_either);
6889 	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6890 				post_p, is_gimple_val, fb_rvalue);
6891 	    r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6892 				post_p, is_gimple_val, fb_rvalue);
6893 	    recalculate_side_effects (*expr_p);
6894 
6895 	    ret = MIN (r0, MIN (r1, r2));
6896 	  }
6897 	  break;
6898 
6899 	case TARGET_MEM_REF:
6900 	  {
6901 	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6902 
6903 	    if (TMR_SYMBOL (*expr_p))
6904 	      r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p,
6905 				  post_p, is_gimple_lvalue, fb_either);
6906 	    else if (TMR_BASE (*expr_p))
6907 	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6908 				  post_p, is_gimple_val, fb_either);
6909 	    if (TMR_INDEX (*expr_p))
6910 	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6911 				  post_p, is_gimple_val, fb_rvalue);
6912 	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
6913 	    ret = MIN (r0, r1);
6914 	  }
6915 	  break;
6916 
6917 	case NON_LVALUE_EXPR:
6918 	  /* This should have been stripped above.  */
6919 	  gcc_unreachable ();
6920 
6921 	case ASM_EXPR:
6922 	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6923 	  break;
6924 
6925 	case TRY_FINALLY_EXPR:
6926 	case TRY_CATCH_EXPR:
6927 	  {
6928 	    gimple_seq eval, cleanup;
6929 	    gimple try_;
6930 
6931 	    eval = cleanup = NULL;
6932 	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6933 	    gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6934 	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
6935 	    if (gimple_seq_empty_p (cleanup))
6936 	      {
6937 		gimple_seq_add_seq (pre_p, eval);
6938 		ret = GS_ALL_DONE;
6939 		break;
6940 	      }
6941 	    try_ = gimple_build_try (eval, cleanup,
6942 				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6943 				     ? GIMPLE_TRY_FINALLY
6944 				     : GIMPLE_TRY_CATCH);
6945 	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6946 	      gimple_try_set_catch_is_cleanup (try_,
6947 					       TRY_CATCH_IS_CLEANUP (*expr_p));
6948 	    gimplify_seq_add_stmt (pre_p, try_);
6949 	    ret = GS_ALL_DONE;
6950 	    break;
6951 	  }
6952 
6953 	case CLEANUP_POINT_EXPR:
6954 	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6955 	  break;
6956 
6957 	case TARGET_EXPR:
6958 	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
6959 	  break;
6960 
6961 	case CATCH_EXPR:
6962 	  {
6963 	    gimple c;
6964 	    gimple_seq handler = NULL;
6965 	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6966 	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6967 	    gimplify_seq_add_stmt (pre_p, c);
6968 	    ret = GS_ALL_DONE;
6969 	    break;
6970 	  }
6971 
6972 	case EH_FILTER_EXPR:
6973 	  {
6974 	    gimple ehf;
6975 	    gimple_seq failure = NULL;
6976 
6977 	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6978 	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6979 	    gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
6980 	    gimplify_seq_add_stmt (pre_p, ehf);
6981 	    ret = GS_ALL_DONE;
6982 	    break;
6983 	  }
6984 
6985 	case OBJ_TYPE_REF:
6986 	  {
6987 	    enum gimplify_status r0, r1;
6988 	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6989 				post_p, is_gimple_val, fb_rvalue);
6990 	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6991 				post_p, is_gimple_val, fb_rvalue);
6992 	    TREE_SIDE_EFFECTS (*expr_p) = 0;
6993 	    ret = MIN (r0, r1);
6994 	  }
6995 	  break;
6996 
6997 	case LABEL_DECL:
6998 	  /* We get here when taking the address of a label.  We mark
6999 	     the label as "forced"; meaning it can never be removed and
7000 	     it is a potential target for any computed goto.  */
7001 	  FORCED_LABEL (*expr_p) = 1;
7002 	  ret = GS_ALL_DONE;
7003 	  break;
7004 
7005 	case STATEMENT_LIST:
7006 	  ret = gimplify_statement_list (expr_p, pre_p);
7007 	  break;
7008 
7009 	case WITH_SIZE_EXPR:
7010 	  {
7011 	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7012 			   post_p == &internal_post ? NULL : post_p,
7013 			   gimple_test_f, fallback);
7014 	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7015 			   is_gimple_val, fb_rvalue);
7016 	  }
7017 	  break;
7018 
7019 	case VAR_DECL:
7020 	case PARM_DECL:
7021 	  ret = gimplify_var_or_parm_decl (expr_p);
7022 	  break;
7023 
7024 	case RESULT_DECL:
7025 	  /* When within an OpenMP context, notice uses of variables.  */
7026 	  if (gimplify_omp_ctxp)
7027 	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7028 	  ret = GS_ALL_DONE;
7029 	  break;
7030 
7031 	case SSA_NAME:
7032 	  /* Allow callbacks into the gimplifier during optimization.  */
7033 	  ret = GS_ALL_DONE;
7034 	  break;
7035 
7036 	case OMP_PARALLEL:
7037 	  gimplify_omp_parallel (expr_p, pre_p);
7038 	  ret = GS_ALL_DONE;
7039 	  break;
7040 
7041 	case OMP_TASK:
7042 	  gimplify_omp_task (expr_p, pre_p);
7043 	  ret = GS_ALL_DONE;
7044 	  break;
7045 
7046 	case OMP_FOR:
7047 	  ret = gimplify_omp_for (expr_p, pre_p);
7048 	  break;
7049 
7050 	case OMP_SECTIONS:
7051 	case OMP_SINGLE:
7052 	  gimplify_omp_workshare (expr_p, pre_p);
7053 	  ret = GS_ALL_DONE;
7054 	  break;
7055 
7056 	case OMP_SECTION:
7057 	case OMP_MASTER:
7058 	case OMP_ORDERED:
7059 	case OMP_CRITICAL:
7060 	  {
7061 	    gimple_seq body = NULL;
7062 	    gimple g;
7063 
7064 	    gimplify_and_add (OMP_BODY (*expr_p), &body);
7065 	    switch (TREE_CODE (*expr_p))
7066 	      {
7067 	      case OMP_SECTION:
7068 	        g = gimple_build_omp_section (body);
7069 	        break;
7070 	      case OMP_MASTER:
7071 	        g = gimple_build_omp_master (body);
7072 		break;
7073 	      case OMP_ORDERED:
7074 		g = gimple_build_omp_ordered (body);
7075 		break;
7076 	      case OMP_CRITICAL:
7077 		g = gimple_build_omp_critical (body,
7078 		    			       OMP_CRITICAL_NAME (*expr_p));
7079 		break;
7080 	      default:
7081 		gcc_unreachable ();
7082 	      }
7083 	    gimplify_seq_add_stmt (pre_p, g);
7084 	    ret = GS_ALL_DONE;
7085 	    break;
7086 	  }
7087 
7088 	case OMP_ATOMIC:
7089 	  ret = gimplify_omp_atomic (expr_p, pre_p);
7090 	  break;
7091 
7092 	case POINTER_PLUS_EXPR:
7093           /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
7094 	     The latter form is a gimple immediate, which saves the need for
7095 	     an extra statement.  */
7096 	  if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7097 	      && (tmp = maybe_fold_offset_to_address
7098 		  (EXPR_LOCATION (*expr_p),
7099 		   TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
7100 		   TREE_TYPE (*expr_p))))
7101 	    {
7102 	      *expr_p = tmp;
7103 	      break;
7104 	    }
7105 	  /* Convert (void *)&a + 4 into (void *)&a[1].  */
7106 	  if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
7107 	      && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7108 	      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
7109 									0),0)))
7110 	      && (tmp = maybe_fold_offset_to_address
7111 		  (EXPR_LOCATION (*expr_p),
7112 		   TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
7113 		   TREE_OPERAND (*expr_p, 1),
7114 		   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
7115 					    0)))))
7116 	     {
7117                *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
7118 	       break;
7119 	     }
7120           /* FALLTHRU */
7121 
7122 	default:
7123 	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7124 	    {
7125 	    case tcc_comparison:
7126 	      /* Handle comparison of objects of non-scalar mode aggregates
7127 	     	 with a call to memcmp.  It would be nice to only have to do
7128 	     	 this for variable-sized objects, but then we'd have to allow
7129 	     	 the same nest of reference nodes we allow for MODIFY_EXPR and
7130 	     	 that's too complex.
7131 
7132 		 Compare scalar mode aggregates as scalar mode values.  Using
7133 		 memcmp for them would be very inefficient at best, and is
7134 		 plain wrong if bitfields are involved.  */
7135 		{
7136 		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7137 
7138 		  if (!AGGREGATE_TYPE_P (type))
7139 		    goto expr_2;
7140 		  else if (TYPE_MODE (type) != BLKmode)
7141 		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7142 		  else
7143 		    ret = gimplify_variable_sized_compare (expr_p);
7144 
7145 		  break;
7146 		}
7147 
7148 	    /* If *EXPR_P does not need to be special-cased, handle it
7149 	       according to its class.  */
7150 	    case tcc_unary:
7151 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7152 				   post_p, is_gimple_val, fb_rvalue);
7153 	      break;
7154 
7155 	    case tcc_binary:
7156 	    expr_2:
7157 	      {
7158 		enum gimplify_status r0, r1;
7159 
7160 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7161 		                    post_p, is_gimple_val, fb_rvalue);
7162 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7163 				    post_p, is_gimple_val, fb_rvalue);
7164 
7165 		ret = MIN (r0, r1);
7166 		break;
7167 	      }
7168 
7169 	    case tcc_declaration:
7170 	    case tcc_constant:
7171 	      ret = GS_ALL_DONE;
7172 	      goto dont_recalculate;
7173 
7174 	    default:
7175 	      gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
7176 			  || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
7177 			  || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
7178 	      goto expr_2;
7179 	    }
7180 
7181 	  recalculate_side_effects (*expr_p);
7182 
7183 	dont_recalculate:
7184 	  break;
7185 	}
7186 
7187       /* If we replaced *expr_p, gimplify again.  */
7188       if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
7189 	ret = GS_ALL_DONE;
7190     }
7191   while (ret == GS_OK);
7192 
7193   /* If we encountered an error_mark somewhere nested inside, either
7194      stub out the statement or propagate the error back out.  */
7195   if (ret == GS_ERROR)
7196     {
7197       if (is_statement)
7198 	*expr_p = NULL;
7199       goto out;
7200     }
7201 
7202   /* This was only valid as a return value from the langhook, which
7203      we handled.  Make sure it doesn't escape from any other context.  */
7204   gcc_assert (ret != GS_UNHANDLED);
7205 
7206   if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7207     {
7208       /* We aren't looking for a value, and we don't have a valid
7209 	 statement.  If it doesn't have side-effects, throw it away.  */
7210       if (!TREE_SIDE_EFFECTS (*expr_p))
7211 	*expr_p = NULL;
7212       else if (!TREE_THIS_VOLATILE (*expr_p))
7213 	{
7214 	  /* This is probably a _REF that contains something nested that
7215 	     has side effects.  Recurse through the operands to find it.  */
7216 	  enum tree_code code = TREE_CODE (*expr_p);
7217 
7218 	  switch (code)
7219 	    {
7220 	    case COMPONENT_REF:
7221 	    case REALPART_EXPR:
7222 	    case IMAGPART_EXPR:
7223 	    case VIEW_CONVERT_EXPR:
7224 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7225 			     gimple_test_f, fallback);
7226 	      break;
7227 
7228 	    case ARRAY_REF:
7229 	    case ARRAY_RANGE_REF:
7230 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7231 			     gimple_test_f, fallback);
7232 	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7233 			     gimple_test_f, fallback);
7234 	      break;
7235 
7236 	    default:
7237 	       /* Anything else with side-effects must be converted to
7238 		  a valid statement before we get here.  */
7239 	      gcc_unreachable ();
7240 	    }
7241 
7242 	  *expr_p = NULL;
7243 	}
7244       else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7245 	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7246 	{
7247 	  /* Historically, the compiler has treated a bare reference
7248 	     to a non-BLKmode volatile lvalue as forcing a load.  */
7249 	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7250 
7251 	  /* Normally, we do not want to create a temporary for a
7252 	     TREE_ADDRESSABLE type because such a type should not be
7253 	     copied by bitwise-assignment.  However, we make an
7254 	     exception here, as all we are doing here is ensuring that
7255 	     we read the bytes that make up the type.  We use
7256 	     create_tmp_var_raw because create_tmp_var will abort when
7257 	     given a TREE_ADDRESSABLE type.  */
7258 	  tree tmp = create_tmp_var_raw (type, "vol");
7259 	  gimple_add_tmp_var (tmp);
7260 	  gimplify_assign (tmp, *expr_p, pre_p);
7261 	  *expr_p = NULL;
7262 	}
7263       else
7264 	/* We can't do anything useful with a volatile reference to
7265 	   an incomplete type, so just throw it away.  Likewise for
7266 	   a BLKmode type, since any implicit inner load should
7267 	   already have been turned into an explicit one by the
7268 	   gimplification process.  */
7269 	*expr_p = NULL;
7270     }
7271 
7272   /* If we are gimplifying at the statement level, we're done.  Tack
7273      everything together and return.  */
7274   if (fallback == fb_none || is_statement)
7275     {
7276       /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7277          it out for GC to reclaim it.  */
7278       *expr_p = NULL_TREE;
7279 
7280       if (!gimple_seq_empty_p (internal_pre)
7281 	  || !gimple_seq_empty_p (internal_post))
7282 	{
7283 	  gimplify_seq_add_seq (&internal_pre, internal_post);
7284 	  gimplify_seq_add_seq (pre_p, internal_pre);
7285 	}
7286 
7287       /* The result of gimplifying *EXPR_P is going to be the last few
7288 	 statements in *PRE_P and *POST_P.  Add location information
7289 	 to all the statements that were added by the gimplification
7290 	 helpers.  */
7291       if (!gimple_seq_empty_p (*pre_p))
7292 	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7293 
7294       if (!gimple_seq_empty_p (*post_p))
7295 	annotate_all_with_location_after (*post_p, post_last_gsi,
7296 					  input_location);
7297 
7298       goto out;
7299     }
7300 
7301 #ifdef ENABLE_GIMPLE_CHECKING
7302   if (*expr_p)
7303     {
7304       enum tree_code code = TREE_CODE (*expr_p);
7305       /* These expressions should already be in gimple IR form.  */
7306       gcc_assert (code != MODIFY_EXPR
7307 		  && code != ASM_EXPR
7308 		  && code != BIND_EXPR
7309 		  && code != CATCH_EXPR
7310 		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7311 		  && code != EH_FILTER_EXPR
7312 		  && code != GOTO_EXPR
7313 		  && code != LABEL_EXPR
7314 		  && code != LOOP_EXPR
7315 		  && code != SWITCH_EXPR
7316 		  && code != TRY_FINALLY_EXPR
7317 		  && code != OMP_CRITICAL
7318 		  && code != OMP_FOR
7319 		  && code != OMP_MASTER
7320 		  && code != OMP_ORDERED
7321 		  && code != OMP_PARALLEL
7322 		  && code != OMP_SECTIONS
7323 		  && code != OMP_SECTION
7324 		  && code != OMP_SINGLE);
7325     }
7326 #endif
7327 
7328   /* Otherwise we're gimplifying a subexpression, so the resulting
7329      value is interesting.  If it's a valid operand that matches
7330      GIMPLE_TEST_F, we're done. Unless we are handling some
7331      post-effects internally; if that's the case, we need to copy into
7332      a temporary before adding the post-effects to POST_P.  */
7333   if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7334     goto out;
7335 
7336   /* Otherwise, we need to create a new temporary for the gimplified
7337      expression.  */
7338 
7339   /* We can't return an lvalue if we have an internal postqueue.  The
7340      object the lvalue refers to would (probably) be modified by the
7341      postqueue; we need to copy the value out first, which means an
7342      rvalue.  */
7343   if ((fallback & fb_lvalue)
7344       && gimple_seq_empty_p (internal_post)
7345       && is_gimple_addressable (*expr_p))
7346     {
7347       /* An lvalue will do.  Take the address of the expression, store it
7348 	 in a temporary, and replace the expression with an INDIRECT_REF of
7349 	 that temporary.  */
7350       tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7351       gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7352       *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
7353     }
7354   else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7355     {
7356       /* An rvalue will do.  Assign the gimplified expression into a
7357 	 new temporary TMP and replace the original expression with
7358 	 TMP.  First, make sure that the expression has a type so that
7359 	 it can be assigned into a temporary.  */
7360       gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7361 
7362       if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7363 	/* The postqueue might change the value of the expression between
7364 	   the initialization and use of the temporary, so we can't use a
7365 	   formal temp.  FIXME do we care?  */
7366 	{
7367 	  *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7368 	  if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7369 	      || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7370 	    DECL_GIMPLE_REG_P (*expr_p) = 1;
7371 	}
7372       else
7373 	*expr_p = get_formal_tmp_var (*expr_p, pre_p);
7374     }
7375   else
7376     {
7377 #ifdef ENABLE_GIMPLE_CHECKING
7378       if (!(fallback & fb_mayfail))
7379 	{
7380 	  fprintf (stderr, "gimplification failed:\n");
7381 	  print_generic_expr (stderr, *expr_p, 0);
7382 	  debug_tree (*expr_p);
7383 	  internal_error ("gimplification failed");
7384 	}
7385 #endif
7386       gcc_assert (fallback & fb_mayfail);
7387 
7388       /* If this is an asm statement, and the user asked for the
7389 	 impossible, don't die.  Fail and let gimplify_asm_expr
7390 	 issue an error.  */
7391       ret = GS_ERROR;
7392       goto out;
7393     }
7394 
7395   /* Make sure the temporary matches our predicate.  */
7396   gcc_assert ((*gimple_test_f) (*expr_p));
7397 
7398   if (!gimple_seq_empty_p (internal_post))
7399     {
7400       annotate_all_with_location (internal_post, input_location);
7401       gimplify_seq_add_seq (pre_p, internal_post);
7402     }
7403 
7404  out:
7405   input_location = saved_location;
7406   return ret;
7407 }
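
/* Illustrative sketch (hypothetical helper, guarded out): the calling
   pattern the gimplification routines in this file use to drive
   gimplify_expr.  An operand is forced into the shape named by the
   predicate, and any statements needed to compute it are collected in
   PRE_P/POST_P.  */
#if 0
static enum gimplify_status
gimplify_example_operand (tree *op_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  /* Request an rvalue satisfying is_gimple_val; fb_rvalue tells
     gimplify_expr that a temporary holding the value is acceptable.  */
  if (gimplify_expr (op_p, pre_p, post_p, is_gimple_val, fb_rvalue)
      == GS_ERROR)
    return GS_ERROR;
  return GS_ALL_DONE;
}
#endif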
7408 
7409 /* Look through TYPE for variable-sized objects and gimplify each such
7410    size that we find.  Add to LIST_P any statements generated.  */
7411 
7412 void
7413 gimplify_type_sizes (tree type, gimple_seq *list_p)
7414 {
7415   tree field, t;
7416 
7417   if (type == NULL || type == error_mark_node)
7418     return;
7419 
7420   /* We first do the main variant, then copy into any other variants.  */
7421   type = TYPE_MAIN_VARIANT (type);
7422 
7423   /* Avoid infinite recursion.  */
7424   if (TYPE_SIZES_GIMPLIFIED (type))
7425     return;
7426 
7427   TYPE_SIZES_GIMPLIFIED (type) = 1;
7428 
7429   switch (TREE_CODE (type))
7430     {
7431     case INTEGER_TYPE:
7432     case ENUMERAL_TYPE:
7433     case BOOLEAN_TYPE:
7434     case REAL_TYPE:
7435     case FIXED_POINT_TYPE:
7436       gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7437       gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7438 
7439       for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7440 	{
7441 	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7442 	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7443 	}
7444       break;
7445 
7446     case ARRAY_TYPE:
7447       /* These types may not have declarations, so handle them here.  */
7448       gimplify_type_sizes (TREE_TYPE (type), list_p);
7449       gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7450       /* Ensure VLA bounds aren't removed, for -O0 they should be variables
7451 	 with assigned stack slots, for -O1+ -g they should be tracked
7452 	 by VTA.  */
7453       if (!(TYPE_NAME (type)
7454 	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
7455 	    && DECL_IGNORED_P (TYPE_NAME (type)))
7456 	  && TYPE_DOMAIN (type)
7457 	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7458 	{
7459 	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7460 	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7461 	    DECL_IGNORED_P (t) = 0;
7462 	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7463 	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7464 	    DECL_IGNORED_P (t) = 0;
7465 	}
7466       break;
7467 
7468     case RECORD_TYPE:
7469     case UNION_TYPE:
7470     case QUAL_UNION_TYPE:
7471       for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7472 	if (TREE_CODE (field) == FIELD_DECL)
7473 	  {
7474 	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7475 	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7476 	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7477 	    gimplify_type_sizes (TREE_TYPE (field), list_p);
7478 	  }
7479       break;
7480 
7481     case POINTER_TYPE:
7482     case REFERENCE_TYPE:
7483 	/* We used to recurse on the pointed-to type here, which turned out to
7484 	   be incorrect because its definition might refer to variables not
7485 	   yet initialized at this point if a forward declaration is involved.
7486 
7487 	   It was actually useful for anonymous pointed-to types to ensure
7488 	   that the sizes evaluation dominates every possible later use of the
7489 	   values.  Restricting to such types here would be safe since there
7490 	   is no possible forward declaration around, but would introduce an
7491 	   undesirable middle-end semantic to anonymity.  We then defer to
7492 	   front-ends the responsibility of ensuring that the sizes are
7493 	   evaluated both early and late enough, e.g. by attaching artificial
7494 	   type declarations to the tree.  */
7495       break;
7496 
7497     default:
7498       break;
7499     }
7500 
7501   gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7502   gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7503 
7504   for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7505     {
7506       TYPE_SIZE (t) = TYPE_SIZE (type);
7507       TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7508       TYPE_SIZES_GIMPLIFIED (t) = 1;
7509     }
7510 }
7511 
7512 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7513    a size or position, has had all of its SAVE_EXPRs evaluated.
7514    We add any required statements to *STMT_P.  */
7515 
7516 void
7517 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7518 {
7519   tree type, expr = *expr_p;
7520 
7521   /* We don't do anything if the value isn't there, is constant, or contains
7522      a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
7523      a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
7524      will want to replace it with a new variable, but that will cause problems
7525      if this type is from outside the function.  It's OK to have that here.  */
7526   if (expr == NULL_TREE || TREE_CONSTANT (expr)
7527       || TREE_CODE (expr) == VAR_DECL
7528       || CONTAINS_PLACEHOLDER_P (expr))
7529     return;
7530 
7531   type = TREE_TYPE (expr);
7532   *expr_p = unshare_expr (expr);
7533 
7534   gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7535   expr = *expr_p;
7536 
7537   /* Verify that we have an exact type match with the original expression.
7538      In particular, we do not wish to drop a "sizetype" in favour of a
7539      type of similar dimensions.  We don't want to pollute the generic
7540      type-stripping code with this knowledge because it doesn't matter
7541      for the bulk of GENERIC/GIMPLE.  It only matters that TYPE_SIZE_UNIT
7542      and friends retain their "sizetype-ness".  */
7543   if (TREE_TYPE (expr) != type
7544       && TREE_CODE (type) == INTEGER_TYPE
7545       && TYPE_IS_SIZETYPE (type))
7546     {
7547       tree tmp;
7548       gimple stmt;
7549 
7550       *expr_p = create_tmp_var (type, NULL);
7551       tmp = build1 (NOP_EXPR, type, expr);
7552       stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7553       if (EXPR_HAS_LOCATION (expr))
7554 	gimple_set_location (stmt, EXPR_LOCATION (expr));
7555       else
7556 	gimple_set_location (stmt, input_location);
7557     }
7558 }
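
/* Illustrative sketch (not part of the compiler, guarded out): the kind of
   type whose sizes gimplify_type_sizes and gimplify_one_sizepos process.
   For the VLA below, TYPE_SIZE, TYPE_SIZE_UNIT and the array domain bounds
   contain expressions in N that must be evaluated into gimple values at
   the point of the declaration.  */
#if 0
void
vla_size_example (int n)
{
  char buf[n + 1];	/* Size is n + 1; not a constant.  */
  buf[0] = '\0';
}
#endif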
7559 
7560 
7561 /* Gimplify the body of statements pointed to by BODY_P and return a
7562    GIMPLE_BIND containing the sequence of GIMPLE statements
7563    corresponding to BODY_P.  FNDECL is the function decl containing
7564    *BODY_P.  If DO_PARMS is true, resolve callee-copied parameters too.  */
7565 
7566 gimple
7567 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
7568 {
7569   location_t saved_location = input_location;
7570   gimple_seq parm_stmts, seq;
7571   gimple outer_bind;
7572   struct gimplify_ctx gctx;
7573 
7574   timevar_push (TV_TREE_GIMPLIFY);
7575 
7576   /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7577      gimplification.  */
7578   default_rtl_profile ();
7579 
7580   gcc_assert (gimplify_ctxp == NULL);
7581   push_gimplify_context (&gctx);
7582 
7583   /* Unshare most shared trees in the body and in that of any nested functions.
7584      It would seem we don't have to do this for nested functions because
7585      they are supposed to be output and then the outer function gimplified
7586      first, but the g++ front end doesn't always do it that way.  */
7587   unshare_body (body_p, fndecl);
7588   unvisit_body (body_p, fndecl);
7589 
7590   if (cgraph_node (fndecl)->origin)
7591     nonlocal_vlas = pointer_set_create ();
7592 
7593   /* Make sure input_location isn't set to something weird.  */
7594   input_location = DECL_SOURCE_LOCATION (fndecl);
7595 
7596   /* Resolve callee-copies.  This has to be done before processing
7597      the body so that DECL_VALUE_EXPR gets processed correctly.  */
7598   parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
7599 
7600   /* Gimplify the function's body.  */
7601   seq = NULL;
7602   gimplify_stmt (body_p, &seq);
7603   outer_bind = gimple_seq_first_stmt (seq);
7604   if (!outer_bind)
7605     {
7606       outer_bind = gimple_build_nop ();
7607       gimplify_seq_add_stmt (&seq, outer_bind);
7608     }
7609 
7610   /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
7611      not the case, wrap everything in a GIMPLE_BIND to make it so.  */
7612   if (gimple_code (outer_bind) == GIMPLE_BIND
7613       && gimple_seq_first (seq) == gimple_seq_last (seq))
7614     ;
7615   else
7616     outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7617 
7618   *body_p = NULL_TREE;
7619 
7620   /* If we had callee-copies statements, insert them at the beginning
7621      of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
7622   if (!gimple_seq_empty_p (parm_stmts))
7623     {
7624       tree parm;
7625 
7626       gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7627       gimple_bind_set_body (outer_bind, parm_stmts);
7628 
7629       for (parm = DECL_ARGUMENTS (current_function_decl);
7630 	   parm; parm = TREE_CHAIN (parm))
7631 	if (DECL_HAS_VALUE_EXPR_P (parm))
7632 	  {
7633 	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
7634 	    DECL_IGNORED_P (parm) = 0;
7635 	  }
7636     }
7637 
7638   if (nonlocal_vlas)
7639     {
7640       pointer_set_destroy (nonlocal_vlas);
7641       nonlocal_vlas = NULL;
7642     }
7643 
7644   pop_gimplify_context (outer_bind);
7645   gcc_assert (gimplify_ctxp == NULL);
7646 
7647 #ifdef ENABLE_TYPES_CHECKING
7648   if (!errorcount && !sorrycount)
7649     verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
7650 #endif
7651 
7652   timevar_pop (TV_TREE_GIMPLIFY);
7653   input_location = saved_location;
7654 
7655   return outer_bind;
7656 }
7657 
7658 /* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
7659    node for the function we want to gimplify.
7660 
7661    The resulting sequence of GIMPLE statements is attached to FNDECL
7662    with gimple_set_body; nothing is returned.  */
7663 
7664 void
7665 gimplify_function_tree (tree fndecl)
7666 {
7667   tree oldfn, parm, ret;
7668   gimple_seq seq;
7669   gimple bind;
7670 
7671   gcc_assert (!gimple_body (fndecl));
7672 
7673   oldfn = current_function_decl;
7674   current_function_decl = fndecl;
7675   if (DECL_STRUCT_FUNCTION (fndecl))
7676     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7677   else
7678     push_struct_function (fndecl);
7679 
7680   for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
7681     {
7682       /* Preliminarily mark non-addressed complex variables as eligible
7683          for promotion to gimple registers.  We'll transform their uses
7684          as we find them.  */
7685       if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7686 	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
7687           && !TREE_THIS_VOLATILE (parm)
7688           && !needs_to_live_in_memory (parm))
7689         DECL_GIMPLE_REG_P (parm) = 1;
7690     }
7691 
7692   ret = DECL_RESULT (fndecl);
7693   if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7694        || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
7695       && !needs_to_live_in_memory (ret))
7696     DECL_GIMPLE_REG_P (ret) = 1;
7697 
7698   bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7699 
7700   /* The tree body of the function is no longer needed, replace it
7701      with the new GIMPLE body.  */
7702   seq = gimple_seq_alloc ();
7703   gimple_seq_add_stmt (&seq, bind);
7704   gimple_set_body (fndecl, seq);
7705 
7706   /* If we're instrumenting function entry/exit, then prepend the call to
7707      the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
7708      catch the exit hook.  */
7709   /* ??? Add some way to ignore exceptions for this TFE.  */
7710   if (flag_instrument_function_entry_exit
7711       && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7712       && !flag_instrument_functions_exclude_p (fndecl))
7713     {
7714       tree x;
7715       gimple new_bind;
7716       gimple tf;
7717       gimple_seq cleanup = NULL, body = NULL;
7718 
7719       x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7720       gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7721       tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
7722 
7723       x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7724       gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7725       gimplify_seq_add_stmt (&body, tf);
7726       new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7727       /* Clear the block for BIND, since it is no longer directly inside
7728          the function, but within a try block.  */
7729       gimple_bind_set_block (bind, NULL);
7730 
7731       /* Replace the current function body with the body
7732          wrapped in the try/finally TF.  */
7733       seq = gimple_seq_alloc ();
7734       gimple_seq_add_stmt (&seq, new_bind);
7735       gimple_set_body (fndecl, seq);
7736     }
7737 
7738   DECL_SAVED_TREE (fndecl) = NULL_TREE;
7739   cfun->curr_properties = PROP_gimple_any;
7740 
7741   current_function_decl = oldfn;
7742   pop_cfun ();
7743 }
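
/* Illustrative sketch (user-level code, guarded out): the entry/exit hooks
   that the -finstrument-functions wrapping built above ends up invoking are
   the documented __cyg_profile_* functions, which users may define roughly
   as follows.  The attribute keeps the hooks themselves uninstrumented.  */
#if 0
void __attribute__ ((no_instrument_function))
__cyg_profile_func_enter (void *this_fn, void *call_site)
{
  /* Runs before the original function body.  */
}

void __attribute__ ((no_instrument_function))
__cyg_profile_func_exit (void *this_fn, void *call_site)
{
  /* Runs on every exit path, courtesy of the GIMPLE_TRY_FINALLY above.  */
}
#endif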
7744 
7745 
7746 /* Some transformations like inlining may invalidate the GIMPLE form
7747    for operands.  This function traverses all the operands in STMT and
7748    gimplifies anything that is not a valid gimple operand.  Any new
7749    GIMPLE statements are inserted before *GSI_P.  */
7750 
7751 void
7752 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
7753 {
7754   size_t i, num_ops;
7755   tree orig_lhs = NULL_TREE, lhs, t;
7756   gimple_seq pre = NULL;
7757   gimple post_stmt = NULL;
7758   struct gimplify_ctx gctx;
7759 
7760   push_gimplify_context (&gctx);
7761   gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7762 
7763   switch (gimple_code (stmt))
7764     {
7765     case GIMPLE_COND:
7766       gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
7767 		     is_gimple_val, fb_rvalue);
7768       gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
7769 		     is_gimple_val, fb_rvalue);
7770       break;
7771     case GIMPLE_SWITCH:
7772       gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
7773 		     is_gimple_val, fb_rvalue);
7774       break;
7775     case GIMPLE_OMP_ATOMIC_LOAD:
7776       gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
7777 		     is_gimple_val, fb_rvalue);
7778       break;
7779     case GIMPLE_ASM:
7780       {
7781 	size_t i, noutputs = gimple_asm_noutputs (stmt);
7782 	const char *constraint, **oconstraints;
7783 	bool allows_mem, allows_reg, is_inout;
7784 
7785 	oconstraints
7786 	  = (const char **) alloca ((noutputs) * sizeof (const char *));
7787 	for (i = 0; i < noutputs; i++)
7788 	  {
7789 	    tree op = gimple_asm_output_op (stmt, i);
7790 	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7791 	    oconstraints[i] = constraint;
7792 	    parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
7793 				     &allows_reg, &is_inout);
7794 	    gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7795 			   is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7796 			   fb_lvalue | fb_mayfail);
7797 	  }
7798 	for (i = 0; i < gimple_asm_ninputs (stmt); i++)
7799 	  {
7800 	    tree op = gimple_asm_input_op (stmt, i);
7801 	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7802 	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7803 				    oconstraints, &allows_mem, &allows_reg);
7804 	    if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
7805 	      allows_reg = 0;
7806 	    if (!allows_reg && allows_mem)
7807 	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7808 			     is_gimple_lvalue, fb_lvalue | fb_mayfail);
7809 	    else
7810 	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7811 			     is_gimple_asm_val, fb_rvalue);
7812 	  }
7813       }
7814       break;
7815     default:
7816       /* NOTE: We start gimplifying operands from last to first to
7817 	 make sure that side-effects on the RHS of calls, assignments
7818 	 and ASMs are executed before the LHS.  The ordering is not
7819 	 important for other statements.  */
7820       num_ops = gimple_num_ops (stmt);
7821       orig_lhs = gimple_get_lhs (stmt);
7822       for (i = num_ops; i > 0; i--)
7823 	{
7824 	  tree op = gimple_op (stmt, i - 1);
7825 	  if (op == NULL_TREE)
7826 	    continue;
7827 	  if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
7828 	    gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
7829 	  else if (i == 2
7830 		   && is_gimple_assign (stmt)
7831 		   && num_ops == 2
7832 		   && get_gimple_rhs_class (gimple_expr_code (stmt))
7833 		      == GIMPLE_SINGLE_RHS)
7834 	    gimplify_expr (&op, &pre, NULL,
7835 			   rhs_predicate_for (gimple_assign_lhs (stmt)),
7836 			   fb_rvalue);
7837 	  else if (i == 2 && is_gimple_call (stmt))
7838 	    {
7839 	      if (TREE_CODE (op) == FUNCTION_DECL)
7840 		continue;
7841 	      gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
7842 	    }
7843 	  else
7844 	    gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
7845 	  gimple_set_op (stmt, i - 1, op);
7846 	}
7847 
7848       lhs = gimple_get_lhs (stmt);
7849       /* If the LHS changed in a way that requires a simple RHS,
7850 	 create a temporary.  */
7851       if (lhs && !is_gimple_reg (lhs))
7852 	{
7853 	  bool need_temp = false;
7854 
7855 	  if (is_gimple_assign (stmt)
7856 	      && num_ops == 2
7857 	      && get_gimple_rhs_class (gimple_expr_code (stmt))
7858 		 == GIMPLE_SINGLE_RHS)
7859 	    gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
7860 			   rhs_predicate_for (gimple_assign_lhs (stmt)),
7861 			   fb_rvalue);
7862 	  else if (is_gimple_reg (lhs))
7863 	    {
7864 	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
7865 		{
7866 		  if (is_gimple_call (stmt))
7867 		    {
7868 		      i = gimple_call_flags (stmt);
7869 		      if ((i & ECF_LOOPING_CONST_OR_PURE)
7870 			  || !(i & (ECF_CONST | ECF_PURE)))
7871 			need_temp = true;
7872 		    }
7873 		  if (stmt_can_throw_internal (stmt))
7874 		    need_temp = true;
7875 		}
7876 	    }
7877 	  else
7878 	    {
7879 	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
7880 		need_temp = true;
7881 	      else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
7882 		{
7883 		  if (is_gimple_call (stmt))
7884 		    {
7885 		      tree fndecl = gimple_call_fndecl (stmt);
7886 
7887 		      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
7888 			  && !(fndecl && DECL_RESULT (fndecl)
7889 			       && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
7890 			need_temp = true;
7891 		    }
7892 		  else
7893 		    need_temp = true;
7894 		}
7895 	    }
7896 	  if (need_temp)
7897 	    {
7898 	      tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);
7899 
7900 	      if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
7901 		  || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
7902 		DECL_GIMPLE_REG_P (temp) = 1;
7903 	      if (TREE_CODE (orig_lhs) == SSA_NAME)
7904 		orig_lhs = SSA_NAME_VAR (orig_lhs);
7905 
7906 	      if (gimple_in_ssa_p (cfun))
7907 		temp = make_ssa_name (temp, NULL);
7908 	      gimple_set_lhs (stmt, temp);
7909 	      post_stmt = gimple_build_assign (lhs, temp);
7910 	      if (TREE_CODE (lhs) == SSA_NAME)
7911 		SSA_NAME_DEF_STMT (lhs) = post_stmt;
7912 	    }
7913 	}
7914       break;
7915     }
7916 
7917   if (gimple_referenced_vars (cfun))
7918     for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7919       add_referenced_var (t);
7920 
7921   if (!gimple_seq_empty_p (pre))
7922     {
7923       if (gimple_in_ssa_p (cfun))
7924 	{
7925 	  gimple_stmt_iterator i;
7926 
7927 	  for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
7928 	    mark_symbols_for_renaming (gsi_stmt (i));
7929 	}
7930       gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
7931     }
7932   if (post_stmt)
7933     gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
7934 
7935   pop_gimplify_context (NULL);
7936 }
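
/* Illustrative sketch (hypothetical caller, guarded out): a transformation
   that substitutes a possibly non-GIMPLE operand into a statement and then
   uses gimple_regimplify_operands to restore valid GIMPLE form, inserting
   any helper statements before the statement's iterator.  */
#if 0
static void
replace_rhs_and_regimplify (gimple_stmt_iterator *gsi, tree new_rhs)
{
  gimple stmt = gsi_stmt (*gsi);

  /* NEW_RHS may be an arbitrary GENERIC expression at this point.  */
  gimple_assign_set_rhs1 (stmt, new_rhs);
  gimple_regimplify_operands (stmt, gsi);
  update_stmt (stmt);
}
#endif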
7937 
7938 
7939 /* Expand EXPR into the list of GIMPLE statements STMTS.  If SIMPLE is true,
7940    force the result to be either an SSA_NAME or an invariant, otherwise
7941    just force it to be a rhs expression.  If VAR is not NULL, make the
7942    base variable of the final destination be VAR if suitable.  */
7943 
7944 tree
7945 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7946 {
7947   tree t;
7948   enum gimplify_status ret;
7949   gimple_predicate gimple_test_f;
7950   struct gimplify_ctx gctx;
7951 
7952   *stmts = NULL;
7953 
7954   if (is_gimple_val (expr))
7955     return expr;
7956 
7957   gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7958 
7959   push_gimplify_context (&gctx);
7960   gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7961   gimplify_ctxp->allow_rhs_cond_expr = true;
7962 
7963   if (var)
7964     expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7965 
7966   if (TREE_CODE (expr) != MODIFY_EXPR
7967       && TREE_TYPE (expr) == void_type_node)
7968     {
7969       gimplify_and_add (expr, stmts);
7970       expr = NULL_TREE;
7971     }
7972   else
7973     {
7974       ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7975       gcc_assert (ret != GS_ERROR);
7976     }
7977 
7978   if (gimple_referenced_vars (cfun))
7979     for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7980       add_referenced_var (t);
7981 
7982   pop_gimplify_context (NULL);
7983 
7984   return expr;
7985 }
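
/* Illustrative usage sketch (hypothetical caller, guarded out): forcing an
   expression into a gimple operand when the caller takes care of inserting
   the generated statements itself, here by queuing them on an edge for a
   later gsi_commit_edge_inserts.  */
#if 0
static tree
force_operand_on_edge (edge e, tree expr)
{
  gimple_seq stmts = NULL;
  tree val = force_gimple_operand (expr, &stmts, true, NULL_TREE);

  if (!gimple_seq_empty_p (stmts))
    gsi_insert_seq_on_edge (e, stmts);
  return val;
}
#endif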
7986 
7987 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR.  If
7988    some statements are produced, emits them at GSI.  If BEFORE is true,
7989    the statements are inserted before GSI, otherwise they are inserted after
7990    it.  M specifies the way GSI moves after insertion (GSI_SAME_STMT or
7991    GSI_CONTINUE_LINKING are the usual values).  */
7992 
7993 tree
7994 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7995 			  bool simple_p, tree var, bool before,
7996 			  enum gsi_iterator_update m)
7997 {
7998   gimple_seq stmts;
7999 
8000   expr = force_gimple_operand (expr, &stmts, simple_p, var);
8001 
8002   if (!gimple_seq_empty_p (stmts))
8003     {
8004       if (gimple_in_ssa_p (cfun))
8005 	{
8006 	  gimple_stmt_iterator i;
8007 
8008 	  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
8009 	    mark_symbols_for_renaming (gsi_stmt (i));
8010 	}
8011 
8012       if (before)
8013 	gsi_insert_seq_before (gsi, stmts, m);
8014       else
8015 	gsi_insert_seq_after (gsi, stmts, m);
8016     }
8017 
8018   return expr;
8019 }
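
/* Illustrative usage sketch (hypothetical caller, guarded out; assumes
   integer-typed BASE and OFFSET): computing `base + offset' as a gimple
   value immediately before the statement at GSI; any statements needed for
   the computation are inserted before it.  */
#if 0
static tree
build_sum_before_stmt (gimple_stmt_iterator *gsi, tree base, tree offset)
{
  tree sum = fold_build2 (PLUS_EXPR, TREE_TYPE (base), base, offset);

  return force_gimple_operand_gsi (gsi, sum, true, NULL_TREE,
				   true, GSI_SAME_STMT);
}
#endif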
8020 
8021 #include "gt-gimplify.h"
8022