xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/gimplify.c (revision f3cfa6f6ce31685c6c4a758bc430e69eb99f50a4)
1 /* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
2    tree representation into the GIMPLE form.
3    Copyright (C) 2002-2016 Free Software Foundation, Inc.
4    Major work done by Sebastian Pop <s.pop@laposte.net>,
5    Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "calls.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
52 #include "tree-cfg.h"
53 #include "tree-ssa.h"
54 #include "omp-low.h"
55 #include "gimple-low.h"
56 #include "cilk.h"
57 #include "gomp-constants.h"
58 #include "tree-dump.h"
59 #include "gimple-walk.h"
60 #include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
61 #include "builtins.h"
62 
/* Per-variable flags recorded in a gimplify_omp_ctx's splay tree of
   variables.  The low bits classify the data-sharing of a decl inside
   an OMP/OpenACC region; the high bits are modifiers of those classes.
   Values are powers of two and may be OR'ed together.  */

enum gimplify_omp_var_data
{
  /* The variable was referenced in the region.  */
  GOVD_SEEN = 1,
  /* The variable appeared on an explicit clause.  */
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  /* Flag for GOVD_PRIVATE: privatization references the outer context
     (name-derived; confirm against omp-low.c usage).  */
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Mask selecting the mutually-exclusive data-sharing classes.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
100 
101 
/* Kind of OMP/OpenACC region being gimplified.  Stored in
   gimplify_omp_ctx::region_type.  Values combine as bit patterns;
   the ORT_COMBINED_* values are the corresponding base region
   with bit 0 set, and ORT_ACC_* OR the OpenACC bit into the
   matching OpenMP target kinds.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD 	= 0x01,

  ORT_PARALLEL	= 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK	= 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS	= 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET	= 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC	= 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA	= ORT_ACC | ORT_TARGET_DATA, /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS  = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE	= 0x100
};
134 
/* Gimplify hashtable helper.  Hash/compare traits for the formal
   temporary table (gimplify_ctx::temp_htab), keyed on elt_t::val so
   equivalent expressions can share one temporary.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
142 
/* State for one gimplification context; contexts nest via PREV_CONTEXT
   (see push_gimplify_context/pop_gimplify_context, which pool these
   structs through ctx_alloc/ctx_free).  */

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;	/* Enclosing context, or NULL.  */

  /* Stack of GIMPLE_BINDs being built (gimple_push_bind_expr etc.).  */
  vec<gbind *> bind_expr_stack;
  /* DECL_CHAIN of temporaries created so far; emptied into the outer
     bind or local_decls in pop_gimplify_context.  */
  tree temps;
  /* Cleanups seen while inside a COND_EXPR; flushed to the prequeue
     when the last condition is popped (gimple_pop_condition).  */
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  /* Nesting depth of COND_EXPRs (see gimple_push_condition).  */
  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
};
164 
/* State for gimplifying one OMP/OpenACC region; regions nest via
   OUTER_CONTEXT (allocated/freed by new_omp_context and
   delete_omp_context).  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;	/* Enclosing region, or NULL.  */
  /* Map decl -> gimplify_omp_var_data flags, ordered by DECL_UID.  */
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;		/* input_location when region was entered.  */
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
};
181 
/* Innermost gimplification context (top of the context stack).  */
static struct gimplify_ctx *gimplify_ctxp;
/* Innermost OMP region context, or NULL outside any region.  */
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
/* Map of variables to their returns for OpenACC 'declare' handling;
   populated/consumed outside this chunk.  */
static hash_map<tree, tree> *oacc_declare_returns;
188 
/* Shorter alias name for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  Appends statement GS to *SEQ_P without updating
   def/use information (which does not exist yet at this stage).  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
197 
198 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
199    NULL, a new sequence is allocated.   This function is
200    similar to gimple_seq_add_seq, but does not scan the operands.
201    During gimplification, we need to manipulate statement sequences
202    before the def/use vectors have been constructed.  */
203 
204 static void
205 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
206 {
207   gimple_stmt_iterator si;
208 
209   if (src == NULL)
210     return;
211 
212   si = gsi_last (*dst_p);
213   gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
214 }
215 
216 
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  Freed structs are threaded through their
   prev_context field (see ctx_alloc/ctx_free/free_gimplify_stack).  */

static struct gimplify_ctx *ctx_pool = NULL;
221 
222 /* Return a gimplify context struct from the pool.  */
223 
224 static inline struct gimplify_ctx *
225 ctx_alloc (void)
226 {
227   struct gimplify_ctx * c = ctx_pool;
228 
229   if (c)
230     ctx_pool = c->prev_context;
231   else
232     c = XNEW (struct gimplify_ctx);
233 
234   memset (c, '\0', sizeof (*c));
235   return c;
236 }
237 
/* Put gimplify context C back into the pool by pushing it onto the
   free list headed by ctx_pool.  C's contents are considered dead;
   they are cleared on the next ctx_alloc.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
246 
247 /* Free allocated ctx stack memory.  */
248 
249 void
250 free_gimplify_stack (void)
251 {
252   struct gimplify_ctx *c;
253 
254   while ((c = ctx_pool))
255     {
256       ctx_pool = c->prev_context;
257       free (c);
258     }
259 }
260 
261 
262 /* Set up a context for the gimplifier.  */
263 
264 void
265 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
266 {
267   struct gimplify_ctx *c = ctx_alloc ();
268 
269   c->prev_context = gimplify_ctxp;
270   gimplify_ctxp = c;
271   gimplify_ctxp->into_ssa = in_ssa;
272   gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
273 }
274 
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* All GIMPLE_BINDs must have been popped by now.  */
  gcc_assert (c
              && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  /* Hand the accumulated temporaries to BODY's bind or to the
     function's local_decls.  */
  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  /* The formal-temporary table is per-context; destroy it before the
     struct is recycled (ctx_alloc only memsets, so a stale pointer
     would leak).  */
  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
301 
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  The reserve
   pre-grows the vector to at least 8 slots to avoid repeated small
   reallocations.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
310 
/* Pop the innermost GIMPLE_BIND off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
318 
/* Return the innermost GIMPLE_BIND on the stack of bindings
   (the stack must be non-empty).  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
326 
/* Return the stack of bindings created during gimplification.
   Returned by value; the vec shares storage with the context's
   stack.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
334 
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
343 
/* Note that we've entered a COND_EXPR by bumping the nesting depth.
   When checking is enabled, verify the invariant that no conditional
   cleanups are pending while at unconditional scope.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
355 
356 /* Note that we've left a COND_EXPR.  If we're back at unconditional scope
357    now, add any conditional cleanups we've seen to the prequeue.  */
358 
359 static void
360 gimple_pop_condition (gimple_seq *pre_p)
361 {
362   int conds = --(gimplify_ctxp->conditions);
363 
364   gcc_assert (conds >= 0);
365   if (conds == 0)
366     {
367       gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
368       gimplify_ctxp->conditional_cleanups = NULL;
369     }
370 }
371 
372 /* A stable comparison routine for use with splay trees and DECLs.  */
373 
374 static int
375 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
376 {
377   tree a = (tree) xa;
378   tree b = (tree) xb;
379 
380   return DECL_UID (a) - DECL_UID (b);
381 }
382 
383 /* Create a new omp construct that deals with variable remapping.  */
384 
385 static struct gimplify_omp_ctx *
386 new_omp_context (enum omp_region_type region_type)
387 {
388   struct gimplify_omp_ctx *c;
389 
390   c = XCNEW (struct gimplify_omp_ctx);
391   c->outer_context = gimplify_omp_ctxp;
392   c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
393   c->privatized_types = new hash_set<tree>;
394   c->location = input_location;
395   c->region_type = region_type;
396   if ((region_type & ORT_TASK) == 0)
397     c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
398   else
399     c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
400 
401   return c;
402 }
403 
/* Destroy an omp construct context C created by new_omp_context,
   releasing its splay tree, privatized-types set and iteration
   variable vector before freeing the struct itself.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}
414 
415 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
416 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
417 
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference (gimplify_stmt may rewrite its argument in place, which is
   irrelevant here since the local copy is discarded).  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
427 
428 /* Gimplify statement T into sequence *SEQ_P, and return the first
429    tuple in the sequence of generated tuples for this statement.
430    Return NULL if gimplifying T produced no tuples.  */
431 
432 static gimple *
433 gimplify_and_return_first (tree t, gimple_seq *seq_p)
434 {
435   gimple_stmt_iterator last = gsi_last (*seq_p);
436 
437   gimplify_and_add (t, seq_p);
438 
439   if (!gsi_end_p (last))
440     {
441       gsi_next (&last);
442       return gsi_stmt (last);
443     }
444   else
445     return gimple_seq_first_stmt (*seq_p);
446 }
447 
448 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
449    LHS, or for a call argument.  */
450 
451 static bool
452 is_gimple_mem_rhs (tree t)
453 {
454   /* If we're dealing with a renamable type, either source or dest must be
455      a renamed variable.  */
456   if (is_gimple_reg_type (TREE_TYPE (t)))
457     return is_gimple_val (t);
458   else
459     return is_gimple_val (t) || is_gimple_lvalue (t);
460 }
461 
462 /* Return true if T is a CALL_EXPR or an expression that can be
463    assigned to a temporary.  Note that this predicate should only be
464    used during gimplification.  See the rationale for this in
465    gimplify_modify_expr.  */
466 
467 static bool
468 is_gimple_reg_rhs_or_call (tree t)
469 {
470   return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
471 	  || TREE_CODE (t) == CALL_EXPR);
472 }
473 
474 /* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
475    this predicate should only be used during gimplification.  See the
476    rationale for this in gimplify_modify_expr.  */
477 
478 static bool
479 is_gimple_mem_rhs_or_call (tree t)
480 {
481   /* If we're dealing with a renamable type, either source or dest must be
482      a renamed variable.  */
483   if (is_gimple_reg_type (TREE_TYPE (t)))
484     return is_gimple_val (t);
485   else
486     return (is_gimple_val (t) || is_gimple_lvalue (t)
487 	    || TREE_CODE (t) == CALL_EXPR);
488 }
489 
490 /* Create a temporary with a name derived from VAL.  Subroutine of
491    lookup_tmp_var; nobody else should call this function.  */
492 
493 static inline tree
494 create_tmp_from_val (tree val)
495 {
496   /* Drop all qualifiers and address-space information from the value type.  */
497   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
498   tree var = create_tmp_var (type, get_name (val));
499   if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
500       || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
501     DECL_GIMPLE_REG_P (var) = 1;
502   return var;
503 }
504 
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary by consulting the per-context
   temp_htab keyed on VAL; otherwise always make a fresh one.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  Expressions with side effects
     must not be shared either, since each occurrence must re-evaluate.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      /* Lazily create the formal-temporary table.  */
      if (!gimplify_ctxp->temp_htab)
        gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First time we see VAL: record a new temporary for it.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  /* VAL already has a temporary; reuse it.  */
	  elt_p = *slot;
          ret = elt_p->temp;
	}
    }

  return ret;
}
545 
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL into a rvalue, create (or reuse, if IS_FORMAL) a temporary or SSA
   name T for it, emit the initialization "T = VAL" onto *PRE_P, and
   return T.  PRE_P/POST_P are as in gimplify_expr.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  /* In SSA mode, register-typed values get a fresh SSA name rather
     than a temporary decl.  */
  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  /* MOD was only a scaffold for gimplification; release it.  */
  ggc_free (mod);

  return t;
}
575 
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}
593 
/* Return a (non-formal, never reused) temporary variable initialized
   with VAL.  PRE_P and POST_P are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
602 
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  VARS is a DECL_CHAIN
   list built in reverse; GS must be a GIMPLE_BIND.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      /* VARS was built newest-first; restore source order.  After
	 nreverse, LAST still points at the (now) last element.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* Prepend TEMPS to the bind's variable chain.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
	      			    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
642 
643 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
644    for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
645    no such upper bound can be obtained.  */
646 
647 static void
648 force_constant_size (tree var)
649 {
650   /* The only attempt we make is by querying the maximum size of objects
651      of the variable's type.  */
652 
653   HOST_WIDE_INT max_size;
654 
655   gcc_assert (TREE_CODE (var) == VAR_DECL);
656 
657   max_size = max_int_size_in_bytes (TREE_TYPE (var));
658 
659   gcc_assert (max_size >= 0);
660 
661   DECL_SIZE_UNIT (var)
662     = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
663   DECL_SIZE (var)
664     = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
665 }
666 
/* Push the temporary variable TMP into function FN's local variables.
   TMP must not already be chained anywhere or have been seen in a
   BIND_EXPR.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
685 
/* Push the temporary variable TMP into the current binding.  Depending
   on where gimplification stands, TMP is recorded on the gimplify
   context's temps chain, in cfun's local variables, or directly on the
   function body's outermost bind (nested-function case).  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Chain TMP onto the current context; pop_gimplify_context will
	 flush the chain into the right place.  */
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Skip regions that do not privatize (worksharing, simd,
	     generic OpenACC).  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
732 
733 
734 
735 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
736    nodes that are referenced more than once in GENERIC functions.  This is
737    necessary because gimplification (translation into GIMPLE) is performed
738    by modifying tree nodes in-place, so gimplication of a shared node in a
739    first context could generate an invalid GIMPLE form in a second context.
740 
741    This is achieved with a simple mark/copy/unmark algorithm that walks the
742    GENERIC representation top-down, marks nodes with TREE_VISITED the first
743    time it encounters them, duplicates them if they already have TREE_VISITED
744    set, and finally removes the TREE_VISITED marks it has set.
745 
746    The algorithm works only at the function level, i.e. it generates a GENERIC
747    representation of a function with no nodes shared within the function when
748    passed a GENERIC function (except for nodes that are allowed to be shared).
749 
750    At the global level, it is also necessary to unshare tree nodes that are
751    referenced in more than one function, for the same aforementioned reason.
752    This requires some cooperation from the front-end.  There are 2 strategies:
753 
754      1. Manual unsharing.  The front-end needs to call unshare_expr on every
755         expression that might end up being shared across functions.
756 
757      2. Deep unsharing.  This is an extension of regular unsharing.  Instead
758         of calling unshare_expr on expressions that might be shared across
759         functions, the front-end pre-marks them with TREE_VISITED.  This will
760         ensure that they are unshared on the first reference within functions
761         when the regular unsharing algorithm runs.  The counterpart is that
762         this algorithm must look deeper than for manual unsharing, which is
763         specified by LANG_HOOKS_DEEP_UNSHARING.
764 
765   If there are only few specific cases of node sharing across functions, it is
766   probably easier for a front-end to unshare the expressions manually.  On the
767   contrary, if the expressions generated at the global level are as widespread
768   as expressions generated within functions, deep unsharing is very likely the
769   way to go.  */
770 
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  The
     pointer set records nodes whose subtrees were already walked:
     add() returns false on first insertion, so the subtree walk
     proceeds only the first time each node is seen.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension: walk into the list
     without copying the list node itself.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
814 
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  The nested walk_tree makes the deep copy and clears
     visited marks inside the copy.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
853 
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified; if non-null it is the pointer
   set used for deep unsharing.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
862 
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions (found via the cgraph's nested chain).  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  /* The saved tree plus the (possibly variable) size expressions of the
     result decl can all carry shared nodes.  */
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  /* delete of NULL is a no-op.  */
  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}
885 
886 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
887    Subtrees are walked until the first unvisited node is encountered.  */
888 
889 static tree
890 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
891 {
892   tree t = *tp;
893 
894   /* If this node has been visited, unmark it and keep looking.  */
895   if (TREE_VISITED (t))
896     TREE_VISITED (t) = 0;
897 
898   /* Otherwise, don't look any deeper.  */
899   else
900     *walk_subtrees = 0;
901 
902   return NULL_TREE;
903 }
904 
/* Unmark the visited trees rooted at *TP (undoes the marks left by
   copy_if_shared_r).  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
912 
913 /* Likewise, but mark all trees as not visited.  */
914 
915 static void
916 unvisit_body (tree fndecl)
917 {
918   struct cgraph_node *cgn = cgraph_node::get (fndecl);
919 
920   unmark_visited (&DECL_SAVED_TREE (fndecl));
921   unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
922   unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
923 
924   if (cgn)
925     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
926       unvisit_body (cgn->decl);
927 }
928 
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
939 
940 /* Worker for unshare_expr_without_location.  */
941 
942 static tree
943 prune_expr_location (tree *tp, int *walk_subtrees, void *)
944 {
945   if (EXPR_P (*tp))
946     SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
947   else
948     *walk_subtrees = 0;
949   return NULL_TREE;
950 }
951 
/* Similar to unshare_expr but also prune all expression locations
   from EXPR (the location walk is skipped entirely when EXPR itself
   is not an expression).  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
963 
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  TEMP, if non-null, is an INIT_EXPR or
   MODIFY_EXPR whose RHS will receive the wrapper's value.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Each wrapper passed through is
	 voidified and flagged as having side effects on the way down.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment supplied: capture the value in a fresh
	     "retval" temporary at the value position.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1060 
1061 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1062    a temporary through which they communicate.  */
1063 
1064 static void
1065 build_stack_save_restore (gcall **save, gcall **restore)
1066 {
1067   tree tmp_var;
1068 
1069   *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1070   tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1071   gimple_call_set_lhs (*save, tmp_var);
1072 
1073   *restore
1074     = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1075 			 1, tmp_var);
1076 }
1077 
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  Emits the resulting
   GIMPLE_BIND into PRE_P; *EXPR_P is replaced by the value temporary (if
   the BIND_EXPR had a value) or NULL_TREE.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  /* If the BIND_EXPR produced a value, capture it in TEMP and make the
     BIND_EXPR itself void.  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Clear the flags so we can observe whether this body itself needs a
     stack save/restore; the old values are restored below.  */
  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  /* A clobber is an empty CONSTRUCTOR assignment marked volatile.  */
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  gimple *clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (t, clobber);
	  gimple_set_location (clobber_stmt, end_locus);
	  gimplify_seq_add_stmt (&cleanup, clobber_stmt);

	  /* Collect OpenACC "declare" clauses registered for this
	     variable; the map is deleted once drained.  */
	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}
    }

  if (ret_clauses)
    {
      /* Prepend an OACC_DECLARE target region carrying the collected
	 clauses to the cleanup sequence.  */
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      /* Wrap the body in a GIMPLE_TRY_FINALLY so the cleanups run on
	 every exit path; the stack save stays outside the try.  */
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
	  		     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1263 
1264 /* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
1265    GIMPLE value, it is assigned to a new temporary and the statement is
1266    re-written to return the temporary.
1267 
1268    PRE_P points to the sequence where side effects that must happen before
1269    STMT should be stored.  */
1270 
1271 static enum gimplify_status
1272 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1273 {
1274   greturn *ret;
1275   tree ret_expr = TREE_OPERAND (stmt, 0);
1276   tree result_decl, result;
1277 
1278   if (ret_expr == error_mark_node)
1279     return GS_ERROR;
1280 
1281   /* Implicit _Cilk_sync must be inserted right before any return statement
1282      if there is a _Cilk_spawn in the function.  If the user has provided a
1283      _Cilk_sync, the optimizer should remove this duplicate one.  */
1284   if (fn_contains_cilk_spawn_p (cfun))
1285     {
1286       tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1287       gimplify_and_add (impl_sync, pre_p);
1288     }
1289 
1290   if (!ret_expr
1291       || TREE_CODE (ret_expr) == RESULT_DECL
1292       || ret_expr == error_mark_node)
1293     {
1294       greturn *ret = gimple_build_return (ret_expr);
1295       gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1296       gimplify_seq_add_stmt (pre_p, ret);
1297       return GS_ALL_DONE;
1298     }
1299 
1300   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1301     result_decl = NULL_TREE;
1302   else
1303     {
1304       result_decl = TREE_OPERAND (ret_expr, 0);
1305 
1306       /* See through a return by reference.  */
1307       if (TREE_CODE (result_decl) == INDIRECT_REF)
1308 	result_decl = TREE_OPERAND (result_decl, 0);
1309 
1310       gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1311 		   || TREE_CODE (ret_expr) == INIT_EXPR)
1312 		  && TREE_CODE (result_decl) == RESULT_DECL);
1313     }
1314 
1315   /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1316      Recall that aggregate_value_p is FALSE for any aggregate type that is
1317      returned in registers.  If we're returning values in registers, then
1318      we don't want to extend the lifetime of the RESULT_DECL, particularly
1319      across another call.  In addition, for those aggregates for which
1320      hard_function_value generates a PARALLEL, we'll die during normal
1321      expansion of structure assignments; there's special code in expand_return
1322      to handle this case that does not exist in expand_expr.  */
1323   if (!result_decl)
1324     result = NULL_TREE;
1325   else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1326     {
1327       if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1328 	{
1329 	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1330 	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1331 	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1332 	     should be effectively allocated by the caller, i.e. all calls to
1333 	     this function must be subject to the Return Slot Optimization.  */
1334 	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1335 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1336 	}
1337       result = result_decl;
1338     }
1339   else if (gimplify_ctxp->return_temp)
1340     result = gimplify_ctxp->return_temp;
1341   else
1342     {
1343       result = create_tmp_reg (TREE_TYPE (result_decl));
1344 
1345       /* ??? With complex control flow (usually involving abnormal edges),
1346 	 we can wind up warning about an uninitialized value for this.  Due
1347 	 to how this variable is constructed and initialized, this is never
1348 	 true.  Give up and never warn.  */
1349       TREE_NO_WARNING (result) = 1;
1350 
1351       gimplify_ctxp->return_temp = result;
1352     }
1353 
1354   /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1355      Then gimplify the whole thing.  */
1356   if (result != result_decl)
1357     TREE_OPERAND (ret_expr, 0) = result;
1358 
1359   gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1360 
1361   ret = gimple_build_return (result);
1362   gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1363   gimplify_seq_add_stmt (pre_p, ret);
1364 
1365   return GS_ALL_DONE;
1366 }
1367 
1368 /* Gimplify a variable-length array DECL.  */
1369 
1370 static void
1371 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1372 {
1373   /* This is a variable-sized decl.  Simplify its size and mark it
1374      for deferred expansion.  */
1375   tree t, addr, ptr_type;
1376 
1377   gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1378   gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1379 
1380   /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
1381   if (DECL_HAS_VALUE_EXPR_P (decl))
1382     return;
1383 
1384   /* All occurrences of this decl in final gimplified code will be
1385      replaced by indirection.  Setting DECL_VALUE_EXPR does two
1386      things: First, it lets the rest of the gimplifier know what
1387      replacement to use.  Second, it lets the debug info know
1388      where to find the value.  */
1389   ptr_type = build_pointer_type (TREE_TYPE (decl));
1390   addr = create_tmp_var (ptr_type, get_name (decl));
1391   DECL_IGNORED_P (addr) = 0;
1392   t = build_fold_indirect_ref (addr);
1393   TREE_THIS_NOTRAP (t) = 1;
1394   SET_DECL_VALUE_EXPR (decl, t);
1395   DECL_HAS_VALUE_EXPR_P (decl) = 1;
1396 
1397   t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1398   t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1399 		       size_int (DECL_ALIGN (decl)));
1400   /* The call has been built for a variable-sized object.  */
1401   CALL_ALLOCA_FOR_VAR_P (t) = 1;
1402   t = fold_convert (ptr_type, t);
1403   t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1404 
1405   gimplify_and_add (t, seq_p);
1406 }
1407 
1408 /* A helper function to be called via walk_tree.  Mark all labels under *TP
1409    as being forced.  To be called for DECL_INITIAL of static variables.  */
1410 
1411 static tree
1412 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1413 {
1414   if (TYPE_P (*tp))
1415     *walk_subtrees = 0;
1416   if (TREE_CODE (*tp) == LABEL_DECL)
1417     {
1418       FORCED_LABEL (*tp) = 1;
1419       cfun->has_forced_label_in_static = 1;
1420     }
1421 
1422   return NULL_TREE;
1423 }
1424 
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  Generated statements go into SEQ_P;
   *STMT_P itself is consumed (set to NULL_TREE).  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  /* Gimplify the size expressions of the decl's type first.  */
  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Variable-sized decls — and fixed-size automatic ones large enough
	 to trip generic stack checking — get VLA-style deferred
	 allocation.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR and
		 gimplify it; the INIT_EXPR node itself can then be
		 recycled.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1496 
1497 /* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
1498    and replacing the LOOP_EXPR with goto, but if the loop contains an
1499    EXIT_EXPR, we need to append a label for it to jump to.  */
1500 
1501 static enum gimplify_status
1502 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1503 {
1504   tree saved_label = gimplify_ctxp->exit_label;
1505   tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1506 
1507   gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1508 
1509   gimplify_ctxp->exit_label = NULL_TREE;
1510 
1511   gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1512 
1513   gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1514 
1515   if (gimplify_ctxp->exit_label)
1516     gimplify_seq_add_stmt (pre_p,
1517 			   gimple_build_label (gimplify_ctxp->exit_label));
1518 
1519   gimplify_ctxp->exit_label = saved_label;
1520 
1521   *expr_p = NULL;
1522   return GS_ALL_DONE;
1523 }
1524 
1525 /* Gimplify a statement list onto a sequence.  These may be created either
1526    by an enlightened front-end, or by shortcut_cond_expr.  */
1527 
1528 static enum gimplify_status
1529 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1530 {
1531   tree temp = voidify_wrapper_expr (*expr_p, NULL);
1532 
1533   tree_stmt_iterator i = tsi_start (*expr_p);
1534 
1535   while (!tsi_end_p (i))
1536     {
1537       gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1538       tsi_delink (&i);
1539     }
1540 
1541   if (temp)
1542     {
1543       *expr_p = temp;
1544       return GS_OK;
1545     }
1546 
1547   return GS_ALL_DONE;
1548 }
1549 
1550 
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  The condition's side effects go into PRE_P, followed by
   the GIMPLE_SWITCH and then the gimplified body.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  /* The switch condition must be reduced to a gimple value first.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
         labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      /* If the body had no default label, synthesize one at the end of
	 the body so the switch has a default target.  */
      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					   default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1614 
1615 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */
1616 
1617 static enum gimplify_status
1618 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1619 {
1620   struct gimplify_ctx *ctxp;
1621   glabel *label_stmt;
1622 
1623   /* Invalid programs can play Duff's Device type games with, for example,
1624      #pragma omp parallel.  At least in the C front end, we don't
1625      detect such invalid branches until after gimplification, in the
1626      diagnose_omp_blocks pass.  */
1627   for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1628     if (ctxp->case_labels.exists ())
1629       break;
1630 
1631   label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
1632   ctxp->case_labels.safe_push (*expr_p);
1633   gimplify_seq_add_stmt (pre_p, label_stmt);
1634 
1635   return GS_ALL_DONE;
1636 }
1637 
1638 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1639    if necessary.  */
1640 
1641 tree
1642 build_and_jump (tree *label_p)
1643 {
1644   if (label_p == NULL)
1645     /* If there's nowhere to jump, just fall through.  */
1646     return NULL_TREE;
1647 
1648   if (*label_p == NULL_TREE)
1649     {
1650       tree label = create_artificial_label (UNKNOWN_LOCATION);
1651       *label_p = label;
1652     }
1653 
1654   return build1 (GOTO_EXPR, void_type_node, *label_p);
1655 }
1656 
1657 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1658    This also involves building a label to jump to and communicating it to
1659    gimplify_loop_expr through gimplify_ctxp->exit_label.  */
1660 
1661 static enum gimplify_status
1662 gimplify_exit_expr (tree *expr_p)
1663 {
1664   tree cond = TREE_OPERAND (*expr_p, 0);
1665   tree expr;
1666 
1667   expr = build_and_jump (&gimplify_ctxp->exit_label);
1668   expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1669   *expr_p = expr;
1670 
1671   return GS_OK;
1672 }
1673 
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* Compute the canonical type as described in the function comment:
     operand 1 of the COMPONENT_REF is the FIELD_DECL.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1724 
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.

   If any precondition fails, *EXPR_P is left untouched.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  /* EXPR is the conversion, ADDR_EXPR its operand.  */
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast:
     &array[L], then take its address with the element-pointer type.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
1778 
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  Always returns GS_OK so the caller
   re-examines the simplified expression.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
1825 
/* Nonlocal VLAs seen in the current function; used to avoid creating
   more than one debug decl per nonlocal VLA.  May be NULL.  */
static hash_set<tree> *nonlocal_vlas;

/* The VAR_DECLs created for nonlocal VLAs for debug info purposes,
   chained through DECL_CHAIN.  */
static tree nonlocal_vla_vars;
1831 
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  Returns
   GS_ERROR for a leaked duplicate declaration, GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip past enclosing workshare/simd/acc contexts; only create
	     the debug copy when we are not inside any other OMP region,
	     and only once per decl (nonlocal_vlas->add returns false the
	     first time).  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1899 
1900 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */
1901 
1902 static void
1903 recalculate_side_effects (tree t)
1904 {
1905   enum tree_code code = TREE_CODE (t);
1906   int len = TREE_OPERAND_LENGTH (t);
1907   int i;
1908 
1909   switch (TREE_CODE_CLASS (code))
1910     {
1911     case tcc_expression:
1912       switch (code)
1913 	{
1914 	case INIT_EXPR:
1915 	case MODIFY_EXPR:
1916 	case VA_ARG_EXPR:
1917 	case PREDECREMENT_EXPR:
1918 	case PREINCREMENT_EXPR:
1919 	case POSTDECREMENT_EXPR:
1920 	case POSTINCREMENT_EXPR:
1921 	  /* All of these have side-effects, no matter what their
1922 	     operands are.  */
1923 	  return;
1924 
1925 	default:
1926 	  break;
1927 	}
1928       /* Fall through.  */
1929 
1930     case tcc_comparison:  /* a comparison expression */
1931     case tcc_unary:       /* a unary arithmetic expression */
1932     case tcc_binary:      /* a binary arithmetic expression */
1933     case tcc_reference:   /* a reference */
1934     case tcc_vl_exp:        /* a function call */
1935       TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1936       for (i = 0; i < len; ++i)
1937 	{
1938 	  tree op = TREE_OPERAND (t, i);
1939 	  if (op && TREE_SIDE_EFFECTS (op))
1940 	    TREE_SIDE_EFFECTS (t) = 1;
1941 	}
1942       break;
1943 
1944     case tcc_constant:
1945       /* No side-effects.  */
1946       return;
1947 
1948     default:
1949       gcc_unreachable ();
1950    }
1951 }
1952 
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  /* The caller only hands us trees with at least one handled component.  */
  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      /* A constant low bound is left implicit in the tree.  */
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      /* Operand 2 was already set; re-gimplify it in place.  */
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  /* Not strictly needed -- auto_vec releases its storage on scope exit --
     but harmless.  */
  expr_stack.release ();

  /* Returning GS_ALL_DONE promises the caller that *EXPR_P is unchanged.  */
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
2155 
/*  Gimplify the self modifying expression pointed to by EXPR_P
    (++, --, +=, -=).

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.

    ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Capture the pre-update value in a temporary; it becomes the
	 result of the whole postfix expression below.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* POINTER_PLUS_EXPR only adds; negate the offset for decrement.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* Emit the store, flush the inner post queue after it, and return
	 the saved pre-update value.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2253 
2254 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */
2255 
2256 static void
2257 maybe_with_size_expr (tree *expr_p)
2258 {
2259   tree expr = *expr_p;
2260   tree type = TREE_TYPE (expr);
2261   tree size;
2262 
2263   /* If we've already wrapped this or the type is error_mark_node, we can't do
2264      anything.  */
2265   if (TREE_CODE (expr) == WITH_SIZE_EXPR
2266       || type == error_mark_node)
2267     return;
2268 
2269   /* If the size isn't known or is a constant, we have nothing to do.  */
2270   size = TYPE_SIZE_UNIT (type);
2271   if (!size || TREE_CODE (size) == INTEGER_CST)
2272     return;
2273 
2274   /* Otherwise, make a WITH_SIZE_EXPR.  */
2275   size = unshare_expr (size);
2276   size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2277   *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2278 }
2279 
2280 /* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P
2281    Store any side-effects in PRE_P.  CALL_LOCATION is the location of
2282    the CALL_EXPR.  */
2283 
2284 enum gimplify_status
2285 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2286 {
2287   bool (*test) (tree);
2288   fallback_t fb;
2289 
2290   /* In general, we allow lvalues for function arguments to avoid
2291      extra overhead of copying large aggregates out of even larger
2292      aggregates into temporaries only to copy the temporaries to
2293      the argument list.  Make optimizers happy by pulling out to
2294      temporaries those types that fit in registers.  */
2295   if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2296     test = is_gimple_val, fb = fb_rvalue;
2297   else
2298     {
2299       test = is_gimple_lvalue, fb = fb_either;
2300       /* Also strip a TARGET_EXPR that would force an extra copy.  */
2301       if (TREE_CODE (*arg_p) == TARGET_EXPR)
2302 	{
2303 	  tree init = TARGET_EXPR_INITIAL (*arg_p);
2304 	  if (init
2305 	      && !VOID_TYPE_P (TREE_TYPE (init)))
2306 	    *arg_p = init;
2307 	}
2308     }
2309 
2310   /* If this is a variable sized type, we must remember the size.  */
2311   maybe_with_size_expr (arg_p);
2312 
2313   /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
2314   /* Make sure arguments have the same location as the function call
2315      itself.  */
2316   protected_set_expr_location (*arg_p, call_location);
2317 
2318   /* There is a sequence point before a function call.  Side effects in
2319      the argument list must occur before the actual call. So, when
2320      gimplifying arguments, force gimplify_expr to use an internal
2321      post queue which is then appended to the end of PRE_P.  */
2322   return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2323 }
2324 
2325 /* Don't fold inside offloading or taskreg regions: it can break code by
2326    adding decl references that weren't in the source.  We'll do it during
2327    omplower pass instead.  */
2328 
2329 static bool
2330 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2331 {
2332   struct gimplify_omp_ctx *ctx;
2333   for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2334     if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
2335       return false;
2336   return fold_stmt (gsi);
2337 }
2338 
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
	{
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}
      gimple *call = gimple_build_call_internal_vec (ifn, vargs);
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	/* If the call has been built for a variable-sized object, then we
	   want to restore the stack level when the enclosing BIND_EXPR is
	   exited to reclaim the allocated space; otherwise, we precisely
	   need to do the opposite and preserve the latest stack level.  */
	if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
	  gimplify_ctxp->save_stack = true;
	else
	  gimplify_ctxp->keep_stack = true;
	break;

      case BUILT_IN_VA_START:
        {
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}
      case BUILT_IN_LINE:
	{
	  /* Fold __builtin_LINE () to the line of the call site.  */
	  *expr_p = build_int_cst (TREE_TYPE (*expr_p),
				   LOCATION_LINE (EXPR_LOCATION (*expr_p)));
	  return GS_OK;
	}
      case BUILT_IN_FILE:
	{
	  /* Fold __builtin_FILE () to the file of the call site.  */
	  const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
	  *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
	  return GS_OK;
	}
      case BUILT_IN_FUNCTION:
	{
	  /* Fold __builtin_FUNCTION () to the enclosing function name.  */
	  const char *function;
	  function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
	  *expr_p = build_string_literal (strlen (function) + 1, function);
	  return GS_OK;
	}
      default:
        ;
      }
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Walk the parameter list in parallel with the arguments; after the
     loop P is NULL iff the arguments outnumber the named parameters.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      /* Iterate in push order: last-to-first when PUSH_ARGS_REVERSED.  */
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
2630 
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      /* The recursive calls below fill in LOCAL_LABEL on demand via
	 build_and_jump; the label is emitted at the end.  */
      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate.  Emit a single conditional jump.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  if (local_label)
    {
      /* Emit the label targeted by the locally-generated jumps above.  */
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2740 
2741 /* Given a conditional expression EXPR with short-circuit boolean
2742    predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2743    predicate apart into the equivalent sequence of conditionals.  */
2744 
static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  /* An arm is "interesting" only if it contains code with side effects;
     otherwise only its control-flow consequence (which label to reach)
     matters.  */
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  /* Recursively lower the inner 'if (b) then c', then wrap it in
	     a new COND_EXPR testing only 'a'.  */
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  /* Lower the short-circuit predicate into a chain of conditional
     jumps targeting the labels chosen above.  */
  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  /* Give the goto the location of the last statement of the then
	     arm, not the location of the whole conditional.  */
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2917 
2918 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */
2919 
tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* Special-case the idiom `__builtin_expect (x, y) != 0'.  */
  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Look through the widening NOP the front end wraps around
		 the truth value.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      /* Loop annotations wrap a condition; boolify the wrapped value.  */
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
3004 
3005 /* Given a conditional expression *EXPR_P without side effects, gimplify
3006    its operands.  New statements are inserted to PRE_P.  */
3007 
3008 static enum gimplify_status
3009 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3010 {
3011   tree expr = *expr_p, cond;
3012   enum gimplify_status ret, tret;
3013   enum tree_code code;
3014 
3015   cond = gimple_boolify (COND_EXPR_COND (expr));
3016 
3017   /* We need to handle && and || specially, as their gimplification
3018      creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
3019   code = TREE_CODE (cond);
3020   if (code == TRUTH_ANDIF_EXPR)
3021     TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3022   else if (code == TRUTH_ORIF_EXPR)
3023     TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3024   ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3025   COND_EXPR_COND (*expr_p) = cond;
3026 
3027   tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3028 				   is_gimple_val, fb_rvalue);
3029   ret = MIN (ret, tret);
3030   tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3031 				   is_gimple_val, fb_rvalue);
3032 
3033   return MIN (ret, tret);
3034 }
3035 
3036 /* Return true if evaluating EXPR could trap.
3037    EXPR is GENERIC, while tree_could_trap_p can be called
3038    only on GIMPLE.  */
3039 
3040 static bool
3041 generic_expr_could_trap_p (tree expr)
3042 {
3043   unsigned i, n;
3044 
3045   if (!expr || is_gimple_val (expr))
3046     return false;
3047 
3048   if (!EXPR_P (expr) || tree_could_trap_p (expr))
3049     return true;
3050 
3051   n = TREE_OPERAND_LENGTH (expr);
3052   for (i = 0; i < n; i++)
3053     if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3054       return true;
3055 
3056   return false;
3057 }
3058 
3059 /*  Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3060     into
3061 
3062     if (p)			if (p)
3063       t1 = a;			  a;
3064     else		or	else
3065       t1 = b;			  b;
3066     t1;
3067 
3068     The second form is used when *EXPR_P is of type void.
3069 
3070     PRE_P points to the list where side effects that must happen before
3071       *EXPR_P should be stored.  */
3072 
static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  /* We need an lvalue result: take the address of each arm and
	     dereference the chosen pointer afterwards.  */
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      /* The rewritten COND_EXPR is now a statement; re-gimplify it as such
	 and hand back the temporary as the value of the expression.  */
      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is exactly `goto L' for a label local to this function,
     branch straight to L instead of creating an artificial label,
     avoiding a jump to a jump.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  /* Emit the GIMPLE_COND with its two embedded branch targets, then try
     to fold it immediately.  */
  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  /* NOTE(review): gimplify_stmt's boolean result is reused here as
	     "this arm produced code" — confirm against its definition.  */
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3301 
3302 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3303    to be marked addressable.
3304 
3305    We cannot rely on such an expression being directly markable if a temporary
3306    has been created by the gimplification.  In this case, we create another
3307    temporary and initialize it with a copy, which will become a store after we
3308    mark it addressable.  This can happen if the front-end passed us something
3309    that it could not mark addressable yet, like a Fortran pass-by-reference
3310    parameter (int) floatvar.  */
3311 
3312 static void
3313 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3314 {
3315   while (handled_component_p (*expr_p))
3316     expr_p = &TREE_OPERAND (*expr_p, 0);
3317   if (is_gimple_reg (*expr_p))
3318     {
3319       tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3320       DECL_GIMPLE_REG_P (var) = 0;
3321       *expr_p = var;
3322     }
3323 }
3324 
3325 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3326    a call to __builtin_memcpy.  */
3327 
3328 static enum gimplify_status
3329 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3330     				gimple_seq *seq_p)
3331 {
3332   tree t, to, to_ptr, from, from_ptr;
3333   gcall *gs;
3334   location_t loc = EXPR_LOCATION (*expr_p);
3335 
3336   to = TREE_OPERAND (*expr_p, 0);
3337   from = TREE_OPERAND (*expr_p, 1);
3338 
3339   /* Mark the RHS addressable.  Beware that it may not be possible to do so
3340      directly if a temporary has been created by the gimplification.  */
3341   prepare_gimple_addressable (&from, seq_p);
3342 
3343   mark_addressable (from);
3344   from_ptr = build_fold_addr_expr_loc (loc, from);
3345   gimplify_arg (&from_ptr, seq_p, loc);
3346 
3347   mark_addressable (to);
3348   to_ptr = build_fold_addr_expr_loc (loc, to);
3349   gimplify_arg (&to_ptr, seq_p, loc);
3350 
3351   t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3352 
3353   gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3354 
3355   if (want_value)
3356     {
3357       /* tmp = memcpy() */
3358       t = create_tmp_var (TREE_TYPE (to_ptr));
3359       gimple_call_set_lhs (gs, t);
3360       gimplify_seq_add_stmt (seq_p, gs);
3361 
3362       *expr_p = build_simple_mem_ref (t);
3363       return GS_ALL_DONE;
3364     }
3365 
3366   gimplify_seq_add_stmt (seq_p, gs);
3367   *expr_p = NULL;
3368   return GS_ALL_DONE;
3369 }
3370 
3371 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3372    a call to __builtin_memset.  In this case we know that the RHS is
3373    a CONSTRUCTOR with an empty element list.  */
3374 
3375 static enum gimplify_status
3376 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3377     				gimple_seq *seq_p)
3378 {
3379   tree t, from, to, to_ptr;
3380   gcall *gs;
3381   location_t loc = EXPR_LOCATION (*expr_p);
3382 
3383   /* Assert our assumptions, to abort instead of producing wrong code
3384      silently if they are not met.  Beware that the RHS CONSTRUCTOR might
3385      not be immediately exposed.  */
3386   from = TREE_OPERAND (*expr_p, 1);
3387   if (TREE_CODE (from) == WITH_SIZE_EXPR)
3388     from = TREE_OPERAND (from, 0);
3389 
3390   gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3391 	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3392 
3393   /* Now proceed.  */
3394   to = TREE_OPERAND (*expr_p, 0);
3395 
3396   to_ptr = build_fold_addr_expr_loc (loc, to);
3397   gimplify_arg (&to_ptr, seq_p, loc);
3398   t = builtin_decl_implicit (BUILT_IN_MEMSET);
3399 
3400   gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3401 
3402   if (want_value)
3403     {
3404       /* tmp = memset() */
3405       t = create_tmp_var (TREE_TYPE (to_ptr));
3406       gimple_call_set_lhs (gs, t);
3407       gimplify_seq_add_stmt (seq_p, gs);
3408 
3409       *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3410       return GS_ALL_DONE;
3411     }
3412 
3413   gimplify_seq_add_stmt (seq_p, gs);
3414   *expr_p = NULL;
3415   return GS_ALL_DONE;
3416 }
3417 
3418 /* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
3419    determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3420    assignment.  Return non-null if we detect a potential overlap.  */
3421 
/* Context threaded through walk_tree to gimplify_init_ctor_preeval_1.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
3431 
/* walk_tree callback: returns a non-NULL tree to signal that *TP may
   overlap the lhs described by XDATA, NULL to keep walking.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Check every pointer-typed parameter of the callee for a possible
	 aliasing conflict with the lhs.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
				        (TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  /* Types and decls have no subtrees worth walking.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
3473 
3474 /* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
3475    force values that overlap with the lhs (as described by *DATA)
3476    into temporaries.  */
3477 
static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      /* Pre-evaluate each element value independently.  */
      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      /* NULL values are skipped by the caller's element loop.  */
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
3545 
3546 /* A subroutine of gimplify_init_ctor_eval.  Create a loop for
3547    a RANGE_EXPR in a CONSTRUCTOR for an array.
3548 
3549       var = lower;
3550     loop_entry:
3551       object[var] = value;
3552       if (var == upper)
3553 	goto loop_exit;
3554       var = var + 1;
3555       goto loop_entry;
3556     loop_exit:
3557 
3558    We increment var _after_ the loop exit check because we might otherwise
3559    fail if upper == TYPE_MAX_VALUE (type for upper).
3560 
3561    Note that we never have to deal with SAVE_EXPRs here, because this has
3562    already been taken care of for us, in gimplify_init_ctor_preeval().  */
3563 
3564 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3565 				     gimple_seq *, bool);
3566 
3567 static void
3568 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3569 			       tree value, tree array_elt_type,
3570 			       gimple_seq *pre_p, bool cleared)
3571 {
3572   tree loop_entry_label, loop_exit_label, fall_thru_label;
3573   tree var, var_type, cref, tmp;
3574 
3575   loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3576   loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3577   fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3578 
3579   /* Create and initialize the index variable.  */
3580   var_type = TREE_TYPE (upper);
3581   var = create_tmp_var (var_type);
3582   gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3583 
3584   /* Add the loop entry label.  */
3585   gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3586 
3587   /* Build the reference.  */
3588   cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3589 		 var, NULL_TREE, NULL_TREE);
3590 
3591   /* If we are a constructor, just call gimplify_init_ctor_eval to do
3592      the store.  Otherwise just assign value to the reference.  */
3593 
3594   if (TREE_CODE (value) == CONSTRUCTOR)
3595     /* NB we might have to call ourself recursively through
3596        gimplify_init_ctor_eval if the value is a constructor.  */
3597     gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3598 			     pre_p, cleared);
3599   else
3600     gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3601 
3602   /* We exit the loop when the index var is equal to the upper bound.  */
3603   gimplify_seq_add_stmt (pre_p,
3604 			 gimple_build_cond (EQ_EXPR, var, upper,
3605 					    loop_exit_label, fall_thru_label));
3606 
3607   gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3608 
3609   /* Otherwise, increment the index var...  */
3610   tmp = build2 (PLUS_EXPR, var_type, var,
3611 		fold_convert (var_type, integer_one_node));
3612   gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3613 
3614   /* ...and jump back to the loop entry.  */
3615   gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3616 
3617   /* Add the loop exit label.  */
3618   gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3619 }
3620 
3621 /* Return true if FDECL is accessing a field that is zero sized.  */
3622 
3623 static bool
3624 zero_sized_field_decl (const_tree fdecl)
3625 {
3626   if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3627       && integer_zerop (DECL_SIZE (fdecl)))
3628     return true;
3629   return false;
3630 }
3631 
3632 /* Return true if TYPE is zero sized.  */
3633 
3634 static bool
3635 zero_sized_type (const_tree type)
3636 {
3637   if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3638       && integer_zerop (TYPE_SIZE (type)))
3639     return true;
3640   return false;
3641 }
3642 
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  All generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For an array LHS, every element reference below shares the main
     variant of the element type.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the whole object was block-cleared first, storing a zero
	 element again would be redundant.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Nested non-vector constructors are lowered recursively; vector
	 constructors stay as CONSTRUCTOR nodes through gimple and are
	 assigned whole.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  /* The INIT_EXPR node is fully consumed by gimplification;
	     release it to the GC allocator right away.  */
	  ggc_free (init);
	}
    }
}
3731 
3732 /* Return the appropriate RHS predicate for this LHS.  */
3733 
3734 gimple_predicate
3735 rhs_predicate_for (tree lhs)
3736 {
3737   if (is_gimple_reg (lhs))
3738     return is_gimple_reg_rhs_or_call;
3739   else
3740     return is_gimple_mem_rhs_or_call;
3741 }
3742 
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  EXPR_P points at the COMPOUND_LITERAL_EXPR; new
   statements are appended to PRE_P.  GIMPLE_TEST_F and FALLBACK
   describe what forms the caller accepts, which lets us substitute
   the literal's initializer directly when no lvalue is required.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (which gimplifies the initializer), then stand
     in the anonymous decl for the whole compound literal.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
3798 
3799 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3800    return a new CONSTRUCTOR if something changed.  */
3801 
3802 static tree
3803 optimize_compound_literals_in_ctor (tree orig_ctor)
3804 {
3805   tree ctor = orig_ctor;
3806   vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3807   unsigned int idx, num = vec_safe_length (elts);
3808 
3809   for (idx = 0; idx < num; idx++)
3810     {
3811       tree value = (*elts)[idx].value;
3812       tree newval = value;
3813       if (TREE_CODE (value) == CONSTRUCTOR)
3814 	newval = optimize_compound_literals_in_ctor (value);
3815       else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3816 	{
3817 	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3818 	  tree decl = DECL_EXPR_DECL (decl_s);
3819 	  tree init = DECL_INITIAL (decl);
3820 
3821 	  if (!TREE_ADDRESSABLE (value)
3822 	      && !TREE_ADDRESSABLE (decl)
3823 	      && init
3824 	      && TREE_CODE (init) == CONSTRUCTOR)
3825 	    newval = optimize_compound_literals_in_ctor (init);
3826 	}
3827       if (newval == value)
3828 	continue;
3829 
3830       if (ctor == orig_ctor)
3831 	{
3832 	  ctor = copy_node (orig_ctor);
3833 	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3834 	  elts = CONSTRUCTOR_ELTS (ctor);
3835 	}
3836       (*elts)[idx].value = newval;
3837     }
3838   return ctor;
3839 }
3840 
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* Fold embedded compound literals before dissecting the CONSTRUCTOR.  */
  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it is known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a loss to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    /* The whole assignment has been absorbed into the static
	       initializer; nothing remains to emit.  */
	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.

	   We cannot apply such transformation when compiling chkp static
	   initializer because creation of initializer image in the memory
	   will require static initialization of bounds for it.  It should
	   result in another gimplification of similar initializer and we
	   may fall into infinite loop.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type)
	    && (!current_function_decl
		|| !lookup_attribute ("chkp ctor",
				      DECL_ATTRIBUTES (current_function_decl))))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	/* Record this before the element list is possibly zapped below.  */
	bool ctor_has_side_effects_p
	  = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, or if the constructor has side effects,
	   add assignments to the individual scalar fields of the object.  */
	if (!cleared
	    || num_nonzero_elements > 0
	    || ctor_has_side_effects_p)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	    else if (TREE_STATIC (ctor)
		     && !initializer_constant_valid_p (ce->value,
						       TREE_TYPE (ce->value)))
	      TREE_STATIC (ctor) = 0;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p)
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
4212 
4213 /* Given a pointer value OP0, return a simplified version of an
4214    indirection through OP0, or NULL_TREE if no simplification is
4215    possible.  This may only be applied to a rhs of an expression.
4216    Note that the resulting type may be different from the type pointed
4217    to in the sense that it is still compatible from the langhooks
4218    point of view. */
4219 
4220 static tree
4221 gimple_fold_indirect_ref_rhs (tree t)
4222 {
4223   return gimple_fold_indirect_ref (t);
4224 }
4225 
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.  EXPR_P points at the whole assignment; FROM_P
   and TO_P point at its RHS and LHS operands.  WANT_VALUE is true if
   the caller needs the value of the assignment expression itself.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		/* Preserve the volatility of the original indirection
		   on the folded replacement.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere? I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized. */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
4516 
4517 
4518 /* Return true if T looks like a valid GIMPLE statement.  */
4519 
static bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  /* Classify T by its tree code; only a fixed set of codes may appear
     in statement position in GIMPLE.  */
  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    /* Control-flow, EH and inline-asm constructs, plus the OpenACC,
       OpenMP and Cilk Plus directive statements.  */
    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OACC_PARALLEL:
    case OACC_KERNELS:
    case OACC_DATA:
    case OACC_HOST_DATA:
    case OACC_DECLARE:
    case OACC_UPDATE:
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
    case OACC_CACHE:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SIMD:
    case CILK_SIMD:
    case OMP_DISTRIBUTE:
    case OACC_LOOP:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_TASKGROUP:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
    case OMP_TARGET:
    case OMP_TARGET_DATA:
    case OMP_TARGET_UPDATE:
    case OMP_TARGET_ENTER_DATA:
    case OMP_TARGET_EXIT_DATA:
    case OMP_TASKLOOP:
    case OMP_TEAMS:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      /* Everything else must appear in rvalue or lvalue position,
	 not as a statement.  */
      return false;
    }
}
4590 
4591 
4592 /* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
4593    a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4594    DECL_GIMPLE_REG_P set.
4595 
4596    IMPORTANT NOTE: This promotion is performed by introducing a load of the
4597    other, unmodified part of the complex object just before the total store.
4598    As a consequence, if the object is still uninitialized, an undefined value
4599    will be loaded into a register, which may result in a spurious exception
4600    if the register is floating-point and the value happens to be a signaling
4601    NaN for example.  Then the fully-fledged complex operations lowering pass
4602    followed by a DCE pass are necessary in order to fix things up.  */
4603 
4604 static enum gimplify_status
4605 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4606                                    bool want_value)
4607 {
4608   enum tree_code code, ocode;
4609   tree lhs, rhs, new_rhs, other, realpart, imagpart;
4610 
4611   lhs = TREE_OPERAND (*expr_p, 0);
4612   rhs = TREE_OPERAND (*expr_p, 1);
4613   code = TREE_CODE (lhs);
4614   lhs = TREE_OPERAND (lhs, 0);
4615 
4616   ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4617   other = build1 (ocode, TREE_TYPE (rhs), lhs);
4618   TREE_NO_WARNING (other) = 1;
4619   other = get_formal_tmp_var (other, pre_p);
4620 
4621   realpart = code == REALPART_EXPR ? rhs : other;
4622   imagpart = code == REALPART_EXPR ? other : rhs;
4623 
4624   if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4625     new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4626   else
4627     new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4628 
4629   gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4630   *expr_p = (want_value) ? rhs : NULL_TREE;
4631 
4632   return GS_ALL_DONE;
4633 }
4634 
4635 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4636 
4637       modify_expr
4638 	      : varname '=' rhs
4639 	      | '*' ID '=' rhs
4640 
4641     PRE_P points to the list where side effects that must happen before
4642 	*EXPR_P should be stored.
4643 
4644     POST_P points to the list where side effects that must happen after
4645 	*EXPR_P should be stored.
4646 
4647     WANT_VALUE is nonzero iff we want to use the value of this expression
4648 	in another expression.  */
4649 
static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value
		  && (TREE_CODE (*to_p) == VAR_DECL
		      || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* Gimplify the destination first so PLACEHOLDER_EXPRs and side effects
     in it are emitted before those of the RHS.  */
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  /* Rebuild the IFN_VA_ARG call with the VLA size appended as a
	     fourth argument.  */
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
	      					 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && TREE_CODE (*from_p) == VAR_DECL
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl)
    {
      /* Give the artificial temporary a name derived from the real
	 destination so the debug expression is readable.  */
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
   }

  /* If we want the value and the LHS is volatile, evaluate the RHS into
     a temporary first: the want_value path below then returns the RHS
     instead of re-reading the volatile LHS.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* __builtin_expect with three arguments is lowered to the
	     internal function form here.  */
	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p);
	      gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
	    }
	}
      notice_special_calls (call_stmt);
      /* Drop the LHS of a noreturn call unless the result type requires
	 it (addressable or of variable size).  */
      if (!gimple_call_noreturn_p (call_stmt)
	  || TREE_ADDRESSABLE (TREE_TYPE (*to_p))
	  || TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p))) != INTEGER_CST)
	gimple_call_set_lhs (call_stmt, *to_p);
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      /* Carry over any no-warning flag from a comparison RHS.  */
      if (COMPARISON_CLASS_P (*from_p))
	gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
    }

  /* Emit the statement and try to fold it in place.  */
  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
4893 
4894 /* Gimplify a comparison between two variable-sized objects.  Do this
4895    with a call to BUILT_IN_MEMCMP.  */
4896 
4897 static enum gimplify_status
4898 gimplify_variable_sized_compare (tree *expr_p)
4899 {
4900   location_t loc = EXPR_LOCATION (*expr_p);
4901   tree op0 = TREE_OPERAND (*expr_p, 0);
4902   tree op1 = TREE_OPERAND (*expr_p, 1);
4903   tree t, arg, dest, src, expr;
4904 
4905   arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4906   arg = unshare_expr (arg);
4907   arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4908   src = build_fold_addr_expr_loc (loc, op1);
4909   dest = build_fold_addr_expr_loc (loc, op0);
4910   t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4911   t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4912 
4913   expr
4914     = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4915   SET_EXPR_LOCATION (expr, loc);
4916   *expr_p = expr;
4917 
4918   return GS_OK;
4919 }
4920 
4921 /* Gimplify a comparison between two aggregate objects of integral scalar
4922    mode as a comparison between the bitwise equivalent scalar values.  */
4923 
4924 static enum gimplify_status
4925 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4926 {
4927   location_t loc = EXPR_LOCATION (*expr_p);
4928   tree op0 = TREE_OPERAND (*expr_p, 0);
4929   tree op1 = TREE_OPERAND (*expr_p, 1);
4930 
4931   tree type = TREE_TYPE (op0);
4932   tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4933 
4934   op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4935   op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4936 
4937   *expr_p
4938     = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4939 
4940   return GS_OK;
4941 }
4942 
4943 /* Gimplify an expression sequence.  This function gimplifies each
4944    expression and rewrites the original expression with the last
4945    expression of the sequence in GIMPLE form.
4946 
4947    PRE_P points to the list where the side effects for all the
4948        expressions in the sequence will be emitted.
4949 
4950    WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */
4951 
4952 static enum gimplify_status
4953 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4954 {
4955   tree t = *expr_p;
4956 
4957   do
4958     {
4959       tree *sub_p = &TREE_OPERAND (t, 0);
4960 
4961       if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4962 	gimplify_compound_expr (sub_p, pre_p, false);
4963       else
4964 	gimplify_stmt (sub_p, pre_p);
4965 
4966       t = TREE_OPERAND (t, 1);
4967     }
4968   while (TREE_CODE (t) == COMPOUND_EXPR);
4969 
4970   *expr_p = t;
4971   if (want_value)
4972     return GS_OK;
4973   else
4974     {
4975       gimplify_stmt (expr_p, pre_p);
4976       return GS_ALL_DONE;
4977     }
4978 }
4979 
4980 /* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
4981    gimplify.  After gimplification, EXPR_P will point to a new temporary
4982    that holds the original value of the SAVE_EXPR node.
4983 
4984    PRE_P points to the list where side effects that must happen before
4985    *EXPR_P should be stored.  */
4986 
4987 static enum gimplify_status
4988 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4989 {
4990   enum gimplify_status ret = GS_ALL_DONE;
4991   tree val;
4992 
4993   gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4994   val = TREE_OPERAND (*expr_p, 0);
4995 
4996   /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
4997   if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4998     {
4999       /* The operand may be a void-valued expression such as SAVE_EXPRs
5000 	 generated by the Java frontend for class initialization.  It is
5001 	 being executed only for its side-effects.  */
5002       if (TREE_TYPE (val) == void_type_node)
5003 	{
5004 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5005 			       is_gimple_stmt, fb_none);
5006 	  val = NULL;
5007 	}
5008       else
5009 	val = get_initialized_tmp_var (val, pre_p, post_p);
5010 
5011       TREE_OPERAND (*expr_p, 0) = val;
5012       SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5013     }
5014 
5015   *expr_p = val;
5016 
5017   return ret;
5018 }
5019 
5020 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5021 
5022       unary_expr
5023 	      : ...
5024 	      | '&' varname
5025 	      ...
5026 
5027     PRE_P points to the list where side effects that must happen before
5028 	*EXPR_P should be stored.
5029 
5030     POST_P points to the list where side effects that must happen after
5031 	*EXPR_P should be stored.  */
5032 
static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

        if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
        *expr_p = op00;
        ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* &MEM[ptr, 0] is equivalent to &*ptr; share the INDIRECT_REF
	 handling.  Non-zero offsets take the generic path below.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* ... fall through ... */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
         if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
5143 
5144 /* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
5145    value; output operands should be a gimple lvalue.  */
5146 
5147 static enum gimplify_status
5148 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5149 {
5150   tree expr;
5151   int noutputs;
5152   const char **oconstraints;
5153   int i;
5154   tree link;
5155   const char *constraint;
5156   bool allows_mem, allows_reg, is_inout;
5157   enum gimplify_status ret, tret;
5158   gasm *stmt;
5159   vec<tree, va_gc> *inputs;
5160   vec<tree, va_gc> *outputs;
5161   vec<tree, va_gc> *clobbers;
5162   vec<tree, va_gc> *labels;
5163   tree link_next;
5164 
5165   expr = *expr_p;
5166   noutputs = list_length (ASM_OUTPUTS (expr));
5167   oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5168 
5169   inputs = NULL;
5170   outputs = NULL;
5171   clobbers = NULL;
5172   labels = NULL;
5173 
5174   ret = GS_ALL_DONE;
5175   link_next = NULL_TREE;
5176   for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5177     {
5178       bool ok;
5179       size_t constraint_len;
5180 
5181       link_next = TREE_CHAIN (link);
5182 
5183       oconstraints[i]
5184 	= constraint
5185 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5186       constraint_len = strlen (constraint);
5187       if (constraint_len == 0)
5188         continue;
5189 
5190       ok = parse_output_constraint (&constraint, i, 0, 0,
5191 				    &allows_mem, &allows_reg, &is_inout);
5192       if (!ok)
5193 	{
5194 	  ret = GS_ERROR;
5195 	  is_inout = false;
5196 	}
5197 
5198       if (!allows_reg && allows_mem)
5199 	mark_addressable (TREE_VALUE (link));
5200 
5201       tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5202 			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5203 			    fb_lvalue | fb_mayfail);
5204       if (tret == GS_ERROR)
5205 	{
5206 	  error ("invalid lvalue in asm output %d", i);
5207 	  ret = tret;
5208 	}
5209 
5210       /* If the constraint does not allow memory make sure we gimplify
5211          it to a register if it is not already but its base is.  This
5212 	 happens for complex and vector components.  */
5213       if (!allows_mem)
5214 	{
5215 	  tree op = TREE_VALUE (link);
5216 	  if (! is_gimple_val (op)
5217 	      && is_gimple_reg_type (TREE_TYPE (op))
5218 	      && is_gimple_reg (get_base_address (op)))
5219 	    {
5220 	      tree tem = create_tmp_reg (TREE_TYPE (op));
5221 	      tree ass;
5222 	      if (is_inout)
5223 		{
5224 		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
5225 				tem, unshare_expr (op));
5226 		  gimplify_and_add (ass, pre_p);
5227 		}
5228 	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
5229 	      gimplify_and_add (ass, post_p);
5230 
5231 	      TREE_VALUE (link) = tem;
5232 	      tret = GS_OK;
5233 	    }
5234 	}
5235 
5236       vec_safe_push (outputs, link);
5237       TREE_CHAIN (link) = NULL_TREE;
5238 
5239       if (is_inout)
5240 	{
5241 	  /* An input/output operand.  To give the optimizers more
5242 	     flexibility, split it into separate input and output
5243  	     operands.  */
5244 	  tree input;
5245 	  char buf[10];
5246 
5247 	  /* Turn the in/out constraint into an output constraint.  */
5248 	  char *p = xstrdup (constraint);
5249 	  p[0] = '=';
5250 	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5251 
5252 	  /* And add a matching input constraint.  */
5253 	  if (allows_reg)
5254 	    {
5255 	      sprintf (buf, "%d", i);
5256 
5257 	      /* If there are multiple alternatives in the constraint,
5258 		 handle each of them individually.  Those that allow register
5259 		 will be replaced with operand number, the others will stay
5260 		 unchanged.  */
5261 	      if (strchr (p, ',') != NULL)
5262 		{
5263 		  size_t len = 0, buflen = strlen (buf);
5264 		  char *beg, *end, *str, *dst;
5265 
5266 		  for (beg = p + 1;;)
5267 		    {
5268 		      end = strchr (beg, ',');
5269 		      if (end == NULL)
5270 			end = strchr (beg, '\0');
5271 		      if ((size_t) (end - beg) < buflen)
5272 			len += buflen + 1;
5273 		      else
5274 			len += end - beg + 1;
5275 		      if (*end)
5276 			beg = end + 1;
5277 		      else
5278 			break;
5279 		    }
5280 
5281 		  str = (char *) alloca (len);
5282 		  for (beg = p + 1, dst = str;;)
5283 		    {
5284 		      const char *tem;
5285 		      bool mem_p, reg_p, inout_p;
5286 
5287 		      end = strchr (beg, ',');
5288 		      if (end)
5289 			*end = '\0';
5290 		      beg[-1] = '=';
5291 		      tem = beg - 1;
5292 		      parse_output_constraint (&tem, i, 0, 0,
5293 					       &mem_p, &reg_p, &inout_p);
5294 		      if (dst != str)
5295 			*dst++ = ',';
5296 		      if (reg_p)
5297 			{
5298 			  memcpy (dst, buf, buflen);
5299 			  dst += buflen;
5300 			}
5301 		      else
5302 			{
5303 			  if (end)
5304 			    len = end - beg;
5305 			  else
5306 			    len = strlen (beg);
5307 			  memcpy (dst, beg, len);
5308 			  dst += len;
5309 			}
5310 		      if (end)
5311 			beg = end + 1;
5312 		      else
5313 			break;
5314 		    }
5315 		  *dst = '\0';
5316 		  input = build_string (dst - str, str);
5317 		}
5318 	      else
5319 		input = build_string (strlen (buf), buf);
5320 	    }
5321 	  else
5322 	    input = build_string (constraint_len - 1, constraint + 1);
5323 
5324 	  free (p);
5325 
5326 	  input = build_tree_list (build_tree_list (NULL_TREE, input),
5327 				   unshare_expr (TREE_VALUE (link)));
5328 	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5329 	}
5330     }
5331 
5332   link_next = NULL_TREE;
5333   for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5334     {
5335       link_next = TREE_CHAIN (link);
5336       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5337       parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5338 			      oconstraints, &allows_mem, &allows_reg);
5339 
5340       /* If we can't make copies, we can only accept memory.  */
5341       if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5342 	{
5343 	  if (allows_mem)
5344 	    allows_reg = 0;
5345 	  else
5346 	    {
5347 	      error ("impossible constraint in %<asm%>");
5348 	      error ("non-memory input %d must stay in memory", i);
5349 	      return GS_ERROR;
5350 	    }
5351 	}
5352 
5353       /* If the operand is a memory input, it should be an lvalue.  */
5354       if (!allows_reg && allows_mem)
5355 	{
5356 	  tree inputv = TREE_VALUE (link);
5357 	  STRIP_NOPS (inputv);
5358 	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5359 	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
5360 	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5361 	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
5362 	      || TREE_CODE (inputv) == MODIFY_EXPR)
5363 	    TREE_VALUE (link) = error_mark_node;
5364 	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5365 				is_gimple_lvalue, fb_lvalue | fb_mayfail);
5366 	  if (tret != GS_ERROR)
5367 	    {
5368 	      /* Unlike output operands, memory inputs are not guaranteed
5369 		 to be lvalues by the FE, and while the expressions are
5370 		 marked addressable there, if it is e.g. a statement
5371 		 expression, temporaries in it might not end up being
5372 		 addressable.  They might be already used in the IL and thus
5373 		 it is too late to make them addressable now though.  */
5374 	      tree x = TREE_VALUE (link);
5375 	      while (handled_component_p (x))
5376 		x = TREE_OPERAND (x, 0);
5377 	      if (TREE_CODE (x) == MEM_REF
5378 		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
5379 		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
5380 	      if ((TREE_CODE (x) == VAR_DECL
5381 		   || TREE_CODE (x) == PARM_DECL
5382 		   || TREE_CODE (x) == RESULT_DECL)
5383 		  && !TREE_ADDRESSABLE (x)
5384 		  && is_gimple_reg (x))
5385 		{
5386 		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
5387 					       input_location), 0,
5388 			      "memory input %d is not directly addressable",
5389 			      i);
5390 		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
5391 		}
5392 	    }
5393 	  mark_addressable (TREE_VALUE (link));
5394 	  if (tret == GS_ERROR)
5395 	    {
5396 	      error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
5397 			"memory input %d is not directly addressable", i);
5398 	      ret = tret;
5399 	    }
5400 	}
5401       else
5402 	{
5403 	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5404 				is_gimple_asm_val, fb_rvalue);
5405 	  if (tret == GS_ERROR)
5406 	    ret = tret;
5407 	}
5408 
5409       TREE_CHAIN (link) = NULL_TREE;
5410       vec_safe_push (inputs, link);
5411     }
5412 
5413   link_next = NULL_TREE;
5414   for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5415     {
5416       link_next = TREE_CHAIN (link);
5417       TREE_CHAIN (link) = NULL_TREE;
5418       vec_safe_push (clobbers, link);
5419     }
5420 
5421   link_next = NULL_TREE;
5422   for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5423     {
5424       link_next = TREE_CHAIN (link);
5425       TREE_CHAIN (link) = NULL_TREE;
5426       vec_safe_push (labels, link);
5427     }
5428 
5429   /* Do not add ASMs with errors to the gimple IL stream.  */
5430   if (ret != GS_ERROR)
5431     {
5432       stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5433 				   inputs, outputs, clobbers, labels);
5434 
5435       gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
5436       gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5437 
5438       gimplify_seq_add_stmt (pre_p, stmt);
5439     }
5440 
5441   return ret;
5442 }
5443 
5444 /* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
5445    GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5446    gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5447    return to this function.
5448 
5449    FIXME should we complexify the prequeue handling instead?  Or use flags
5450    for all the cleanups and let the optimizer tighten them up?  The current
5451    code seems pretty fragile; it will break on a cleanup within any
5452    non-conditional nesting.  But any such nesting would be broken, anyway;
5453    we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5454    and continues out of it.  We can do that at the RTL level, though, so
5455    having an optimizer to tighten up try/finally regions would be a Good
5456    Thing.  */
5457 
static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If the wrapped expression computes a value, have it stored into a
     temporary; TEMP then becomes the replacement for *EXPR_P.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  /* Gimplify the body into its own sequence; cleanups inside it appear
     as GIMPLE_WITH_CLEANUP_EXPR (WCE) statements, handled below.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Walk the sequence, turning each WCE into a GIMPLE_TRY that covers
     all statements following it.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The WCE is the last statement, so no try/finally wrapper
		 is needed: splice the cleanup in directly, unless it is
		 EH-only (then there is nothing left that can throw and
		 it is simply dropped).  */
              /* Note that gsi_insert_seq_before and gsi_remove do not
                 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      /* EH-only cleanups run only on exceptional exit.  */
	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
              /* Do not use gsi_replace here, as it may scan operands.
                 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the new try body so that nested
		 WCEs are converted as well.  */
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5534 
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required; VAR is the temporary the cleanup is
   for.  EH_ONLY is true if the cleanup should only be executed if an
   exception is thrown, not on normal exit.  If FORCE_UNCOND is true
   perform the cleanup unconditionally;  this is only valid for
   clobbers.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  /* Unconditional cleanup (a clobber): no guard flag is needed,
	     just queue the WCE on the conditional-cleanups sequence.  */
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  /* Guard the cleanup on FLAG; FLAG starts out false (FFALSE is
	     queued ahead of the WCE) and is set true at the current point
	     in *PRE_P, i.e. only if the initialization actually runs.  */
	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);

	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  TREE_NO_WARNING (var) = 1;
	}
    }
  else
    {
      /* Not in a conditional context: emit the WCE straight into *PRE_P;
	 the EH_ONLY property is recorded on the WCE itself.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
5609 
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.
   *EXPR_P is replaced by the TARGET_EXPR's slot (a temporary VAR_DECL);
   the initialization and any cleanups are emitted into *PRE_P.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise wrap the initializer in an explicit INIT_EXPR
	     assigning it to the slot, gimplify that as a statement, and
	     return the INIT_EXPR shell to the GC once it is consumed.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    /* Defer pushing a normal cleanup until after the clobber
	       below has been pushed.  */
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp)
	  && flag_stack_reuse == SR_ALL)
	{
	  /* An empty CONSTRUCTOR with TREE_THIS_VOLATILE set is the
	     GENERIC representation of a clobber.  */
	  tree clobber = build_constructor (TREE_TYPE (temp),
					    NULL);
	  TREE_THIS_VOLATILE (clobber) = true;
	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	  /* force_uncond so the clobber runs unconditionally; that is
	     only valid for clobbers — see gimple_push_cleanup.  */
	  gimple_push_cleanup (temp, clobber, false, pre_p, true);
	}

      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
5693 
5694 /* Gimplification of expression trees.  */
5695 
5696 /* Gimplify an expression which appears at statement context.  The
5697    corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
5698    NULL, a new sequence is allocated.
5699 
5700    Return true if we actually added a statement to the queue.  */
5701 
5702 bool
5703 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5704 {
5705   gimple_seq_node last;
5706 
5707   last = gimple_seq_last (*seq_p);
5708   gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5709   return last != gimple_seq_last (*seq_p);
5710 }
5711 
5712 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5713    to CTX.  If entries already exist, force them to be some flavor of private.
5714    If there is no enclosing parallel, do nothing.  */
5715 
5716 void
5717 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5718 {
5719   splay_tree_node n;
5720 
5721   if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
5722     return;
5723 
5724   do
5725     {
5726       n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5727       if (n != NULL)
5728 	{
5729 	  if (n->value & GOVD_SHARED)
5730 	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5731 	  else if (n->value & GOVD_MAP)
5732 	    n->value |= GOVD_MAP_TO_ONLY;
5733 	  else
5734 	    return;
5735 	}
5736       else if ((ctx->region_type & ORT_TARGET) != 0)
5737 	{
5738 	  if (ctx->target_map_scalars_firstprivate)
5739 	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5740 	  else
5741 	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5742 	}
5743       else if (ctx->region_type != ORT_WORKSHARE
5744 	       && ctx->region_type != ORT_SIMD
5745 	       && ctx->region_type != ORT_ACC
5746 	       && !(ctx->region_type & ORT_TARGET_DATA))
5747 	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5748 
5749       ctx = ctx->outer_context;
5750     }
5751   while (ctx);
5752 }
5753 
5754 /* Similarly for each of the type sizes of TYPE.  */
5755 
5756 static void
5757 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5758 {
5759   if (type == NULL || type == error_mark_node)
5760     return;
5761   type = TYPE_MAIN_VARIANT (type);
5762 
5763   if (ctx->privatized_types->add (type))
5764     return;
5765 
5766   switch (TREE_CODE (type))
5767     {
5768     case INTEGER_TYPE:
5769     case ENUMERAL_TYPE:
5770     case BOOLEAN_TYPE:
5771     case REAL_TYPE:
5772     case FIXED_POINT_TYPE:
5773       omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5774       omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5775       break;
5776 
5777     case ARRAY_TYPE:
5778       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5779       omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5780       break;
5781 
5782     case RECORD_TYPE:
5783     case UNION_TYPE:
5784     case QUAL_UNION_TYPE:
5785       {
5786 	tree field;
5787 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5788 	  if (TREE_CODE (field) == FIELD_DECL)
5789 	    {
5790 	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5791 	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5792 	    }
5793       }
5794       break;
5795 
5796     case POINTER_TYPE:
5797     case REFERENCE_TYPE:
5798       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5799       break;
5800 
5801     default:
5802       break;
5803     }
5804 
5805   omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5806   omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5807   lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5808 }
5809 
/* Add an entry for DECL in the OMP context CTX with FLAGS (a mask of
   GOVD_* bits).  If an entry with a data sharing class already exists,
   merge FLAGS into it instead.  Variable-sized variables additionally
   get their pointer replacement variable and type parameters
   registered.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  Exception is a shared clause,
     there is nothing privatized in that case.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
	  || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		   && (flags & GOVD_FIRSTPRIVATE))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The VLA's DECL_VALUE_EXPR is an INDIRECT_REF of the pointer
	     replacement variable; recurse to register that pointer.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* Record (or merge) the flags for DECL itself.  */
  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reductions clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
	{
	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
	  if (n != NULL)
	    {
	      /* Ignore local variables and explicitly declared clauses.  */
	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
		break;
	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
		{
		  /* According to the OpenACC spec, such a reduction variable
		     should already have a copy map on a kernels construct,
		     verify that here.  */
		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
			      && (n->value & GOVD_MAP));
		}
	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
		{
		  /* Remove firstprivate and make it a copy map.  */
		  n->value &= ~GOVD_FIRSTPRIVATE;
		  n->value |= GOVD_MAP;
		}
	    }
	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
				 GOVD_MAP | GOVD_SEEN);
	      break;
	    }
	  outer_ctx = outer_ctx->outer_context;
	}
    }
}
5959 
5960 /* Notice a threadprivate variable DECL used in OMP context CTX.
5961    This just prints out diagnostics about threadprivate variable uses
5962    in untied tasks.  If DECL2 is non-NULL, prevent this warning
5963    on that variable.  */
5964 
5965 static bool
5966 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5967 				   tree decl2)
5968 {
5969   splay_tree_node n;
5970   struct gimplify_omp_ctx *octx;
5971 
5972   for (octx = ctx; octx; octx = octx->outer_context)
5973     if ((octx->region_type & ORT_TARGET) != 0)
5974       {
5975 	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5976 	if (n == NULL)
5977 	  {
5978 	    error ("threadprivate variable %qE used in target region",
5979 		   DECL_NAME (decl));
5980 	    error_at (octx->location, "enclosing target region");
5981 	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5982 	  }
5983 	if (decl2)
5984 	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5985       }
5986 
5987   if (ctx->region_type != ORT_UNTIED_TASK)
5988     return false;
5989   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5990   if (n == NULL)
5991     {
5992       error ("threadprivate variable %qE used in untied task",
5993 	     DECL_NAME (decl));
5994       error_at (ctx->location, "enclosing task");
5995       splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5996     }
5997   if (decl2)
5998     splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5999   return false;
6000 }
6001 
6002 /* Return true if global var DECL is device resident.  */
6003 
6004 static bool
6005 device_resident_p (tree decl)
6006 {
6007   tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6008 
6009   if (!attr)
6010     return false;
6011 
6012   for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6013     {
6014       tree c = TREE_VALUE (t);
6015       if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6016 	return true;
6017     }
6018 
6019   return false;
6020 }
6021 
/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.  CTX is the region's
   context, IN_CODE is true when real code (rather than a default(none)
   check) uses DECL, and FLAGS are the bits accumulated so far; the
   augmented flags are returned.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not
   effectively.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  /* A predetermined sharing from the language frontend overrides the
     region's default clause.  */
  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %s",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	error_at (ctx->location, "enclosing %s", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      /* Search outer contexts for an existing sharing of DECL to decide
	 between the two.  */
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      /* Target contexts without a data sharing class for DECL
		 don't decide anything; keep looking outward.  */
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      /* No outer context decided: function-local variables become
	 firstprivate, globals shared.  */
      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}

6113 
6114 /* Determine outer default flags for DECL mentioned in an OACC region
6115    but not declared in an enclosing clause.  */
6116 
6117 static unsigned
6118 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6119 {
6120   const char *rkind;
6121   bool on_device = false;
6122   tree type = TREE_TYPE (decl);
6123 
6124   if (lang_hooks.decls.omp_privatize_by_reference (decl))
6125     type = TREE_TYPE (type);
6126 
6127   if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6128       && is_global_var (decl)
6129       && device_resident_p (decl))
6130     {
6131       on_device = true;
6132       flags |= GOVD_MAP_TO_ONLY;
6133     }
6134 
6135   switch (ctx->region_type)
6136     {
6137     default:
6138       gcc_unreachable ();
6139 
6140     case ORT_ACC_KERNELS:
6141       /* Scalars are default 'copy' under kernels, non-scalars are default
6142 	 'present_or_copy'.  */
6143       flags |= GOVD_MAP;
6144       if (!AGGREGATE_TYPE_P (type))
6145 	flags |= GOVD_MAP_FORCE;
6146 
6147       rkind = "kernels";
6148       break;
6149 
6150     case ORT_ACC_PARALLEL:
6151       {
6152 	if (on_device || AGGREGATE_TYPE_P (type))
6153 	  /* Aggregates default to 'present_or_copy'.  */
6154 	  flags |= GOVD_MAP;
6155 	else
6156 	  /* Scalars default to 'firstprivate'.  */
6157 	  flags |= GOVD_FIRSTPRIVATE;
6158 	rkind = "parallel";
6159       }
6160       break;
6161     }
6162 
6163   if (DECL_ARTIFICIAL (decl))
6164     ; /* We can get compiler-generated decls, and should not complain
6165 	 about them.  */
6166   else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
6167     {
6168       error ("%qE not specified in enclosing OpenACC %qs construct",
6169 	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
6170       inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
6171     }
6172   else
6173     gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
6174 
6175   return flags;
6176 }
6177 
6178 /* Record the fact that DECL was used within the OMP context CTX.
6179    IN_CODE is true when real code uses DECL, and false when we should
6180    merely emit default(none) errors.  Return true if DECL is going to
6181    be remapped and thus DECL shouldn't be gimplified into its
6182    DECL_VALUE_EXPR (if any).  */
6183 
6184 static bool
6185 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
6186 {
6187   splay_tree_node n;
6188   unsigned flags = in_code ? GOVD_SEEN : 0;
6189   bool ret = false, shared;
6190 
6191   if (error_operand_p (decl))
6192     return false;
6193 
6194   if (ctx->region_type == ORT_NONE)
6195     return lang_hooks.decls.omp_disregard_value_expr (decl, false);
6196 
6197   if (is_global_var (decl))
6198     {
6199       /* Threadprivate variables are predetermined.  */
6200       if (DECL_THREAD_LOCAL_P (decl))
6201 	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
6202 
6203       if (DECL_HAS_VALUE_EXPR_P (decl))
6204 	{
6205 	  tree value = get_base_address (DECL_VALUE_EXPR (decl));
6206 
6207 	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
6208 	    return omp_notice_threadprivate_variable (ctx, decl, value);
6209 	}
6210 
6211       if (gimplify_omp_ctxp->outer_context == NULL
6212 	  && VAR_P (decl)
6213 	  && get_oacc_fn_attrib (current_function_decl))
6214 	{
6215 	  location_t loc = DECL_SOURCE_LOCATION (decl);
6216 
6217 	  if (lookup_attribute ("omp declare target link",
6218 				DECL_ATTRIBUTES (decl)))
6219 	    {
6220 	      error_at (loc,
6221 			"%qE with %<link%> clause used in %<routine%> function",
6222 			DECL_NAME (decl));
6223 	      return false;
6224 	    }
6225 	  else if (!lookup_attribute ("omp declare target",
6226 				      DECL_ATTRIBUTES (decl)))
6227 	    {
6228 	      error_at (loc,
6229 			"%qE requires a %<declare%> directive for use "
6230 			"in a %<routine%> function", DECL_NAME (decl));
6231 	      return false;
6232 	    }
6233 	}
6234     }
6235 
6236   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6237   if ((ctx->region_type & ORT_TARGET) != 0)
6238     {
6239       ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
6240       if (n == NULL)
6241 	{
6242 	  unsigned nflags = flags;
6243 	  if (ctx->target_map_pointers_as_0len_arrays
6244 	      || ctx->target_map_scalars_firstprivate)
6245 	    {
6246 	      bool is_declare_target = false;
6247 	      bool is_scalar = false;
6248 	      if (is_global_var (decl)
6249 		  && varpool_node::get_create (decl)->offloadable)
6250 		{
6251 		  struct gimplify_omp_ctx *octx;
6252 		  for (octx = ctx->outer_context;
6253 		       octx; octx = octx->outer_context)
6254 		    {
6255 		      n = splay_tree_lookup (octx->variables,
6256 					     (splay_tree_key)decl);
6257 		      if (n
6258 			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
6259 			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6260 			break;
6261 		    }
6262 		  is_declare_target = octx == NULL;
6263 		}
6264 	      if (!is_declare_target && ctx->target_map_scalars_firstprivate)
6265 		{
6266 		  tree type = TREE_TYPE (decl);
6267 		  if (TREE_CODE (type) == REFERENCE_TYPE)
6268 		    type = TREE_TYPE (type);
6269 		  if (TREE_CODE (type) == COMPLEX_TYPE)
6270 		    type = TREE_TYPE (type);
6271 		  if (INTEGRAL_TYPE_P (type)
6272 		      || SCALAR_FLOAT_TYPE_P (type)
6273 		      || TREE_CODE (type) == POINTER_TYPE)
6274 		    is_scalar = true;
6275 		}
6276 	      if (is_declare_target)
6277 		;
6278 	      else if (ctx->target_map_pointers_as_0len_arrays
6279 		       && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
6280 			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
6281 			       && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
6282 				  == POINTER_TYPE)))
6283 		nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
6284 	      else if (is_scalar)
6285 		nflags |= GOVD_FIRSTPRIVATE;
6286 	    }
6287 
6288 	  struct gimplify_omp_ctx *octx = ctx->outer_context;
6289 	  if ((ctx->region_type & ORT_ACC) && octx)
6290 	    {
6291 	      /* Look in outer OpenACC contexts, to see if there's a
6292 		 data attribute for this variable.  */
6293 	      omp_notice_variable (octx, decl, in_code);
6294 
6295 	      for (; octx; octx = octx->outer_context)
6296 		{
6297 		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
6298 		    break;
6299 		  splay_tree_node n2
6300 		    = splay_tree_lookup (octx->variables,
6301 					 (splay_tree_key) decl);
6302 		  if (n2)
6303 		    {
6304 		      if (octx->region_type == ORT_ACC_HOST_DATA)
6305 		        error ("variable %qE declared in enclosing "
6306 			       "%<host_data%> region", DECL_NAME (decl));
6307 		      nflags |= GOVD_MAP;
6308 		      goto found_outer;
6309 		    }
6310 		}
6311 	    }
6312 
6313 	  {
6314 	    tree type = TREE_TYPE (decl);
6315 
6316 	    if (nflags == flags
6317 		&& gimplify_omp_ctxp->target_firstprivatize_array_bases
6318 		&& lang_hooks.decls.omp_privatize_by_reference (decl))
6319 	      type = TREE_TYPE (type);
6320 	    if (nflags == flags
6321 		&& !lang_hooks.types.omp_mappable_type (type))
6322 	      {
6323 		error ("%qD referenced in target region does not have "
6324 		       "a mappable type", decl);
6325 		nflags |= GOVD_MAP | GOVD_EXPLICIT;
6326 	      }
6327 	    else if (nflags == flags)
6328 	      {
6329 		if ((ctx->region_type & ORT_ACC) != 0)
6330 		  nflags = oacc_default_clause (ctx, decl, flags);
6331 		else
6332 		  nflags |= GOVD_MAP;
6333 	      }
6334 	  }
6335 	found_outer:
6336 	  omp_add_variable (ctx, decl, nflags);
6337 	}
6338       else
6339 	{
6340 	  /* If nothing changed, there's nothing left to do.  */
6341 	  if ((n->value & flags) == flags)
6342 	    return ret;
6343 	  flags |= n->value;
6344 	  n->value = flags;
6345 	}
6346       goto do_outer;
6347     }
6348 
6349   if (n == NULL)
6350     {
6351       if (ctx->region_type == ORT_WORKSHARE
6352 	  || ctx->region_type == ORT_SIMD
6353 	  || ctx->region_type == ORT_ACC
6354 	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
6355 	goto do_outer;
6356 
6357       flags = omp_default_clause (ctx, decl, in_code, flags);
6358 
6359       if ((flags & GOVD_PRIVATE)
6360 	  && lang_hooks.decls.omp_private_outer_ref (decl))
6361 	flags |= GOVD_PRIVATE_OUTER_REF;
6362 
6363       omp_add_variable (ctx, decl, flags);
6364 
6365       shared = (flags & GOVD_SHARED) != 0;
6366       ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6367       goto do_outer;
6368     }
6369 
6370   if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6371       && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
6372       && DECL_SIZE (decl))
6373     {
6374       if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6375 	{
6376 	  splay_tree_node n2;
6377 	  tree t = DECL_VALUE_EXPR (decl);
6378 	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6379 	  t = TREE_OPERAND (t, 0);
6380 	  gcc_assert (DECL_P (t));
6381 	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6382 	  n2->value |= GOVD_SEEN;
6383 	}
6384       else if (lang_hooks.decls.omp_privatize_by_reference (decl)
6385 	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
6386 	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
6387 		   != INTEGER_CST))
6388 	{
6389 	  splay_tree_node n2;
6390 	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6391 	  gcc_assert (DECL_P (t));
6392 	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6393 	  if (n2)
6394 	    n2->value |= GOVD_SEEN;
6395 	}
6396     }
6397 
6398   shared = ((flags | n->value) & GOVD_SHARED) != 0;
6399   ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6400 
6401   /* If nothing changed, there's nothing left to do.  */
6402   if ((n->value & flags) == flags)
6403     return ret;
6404   flags |= n->value;
6405   n->value = flags;
6406 
6407  do_outer:
6408   /* If the variable is private in the current context, then we don't
6409      need to propagate anything to an outer context.  */
6410   if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
6411     return ret;
6412   if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6413       == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6414     return ret;
6415   if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6416 		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6417       == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6418     return ret;
6419   if (ctx->outer_context
6420       && omp_notice_variable (ctx->outer_context, decl, in_code))
6421     return true;
6422   return ret;
6423 }
6424 
6425 /* Verify that DECL is private within CTX.  If there's specific information
6426    to the contrary in the innermost scope, generate an error.  */
6427 
static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  /* First consult CTX's own data-sharing table.  */
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      /* Shared in the innermost scope: diagnose, then force the
		 variable private so gimplification can proceed.
		 NOTE(review): nonzero SIMD selects the simd wording here;
		 the distinction between SIMD == 1 and SIMD == 2 is set by
		 the callers — confirm against them.  */
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    /* Shared, but only in an outer scope: not an error here.  */
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  /* DECL appeared on an explicit clause in the innermost scope
	     (or on the parallel of a combined parallel construct).
	     Reject clause kinds that are invalid for an iteration
	     variable; which checks apply depends on SIMD (0 = non-simd
	     loop, 1 and 2 = simd variants per the callers).  */
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
	    error ("iteration variable %qE should not be lastprivate",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
	    error ("iteration variable %qE should not be private",
		   DECL_NAME (decl));
	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE is predetermined linear",
		   DECL_NAME (decl));
	}
      /* Privacy is only established when CTX is the innermost context
	 (or the parallel of a combined parallel construct).  */
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* DECL is not mentioned in CTX.  Recurse outward, but only through
     worksharing, simd and OpenACC regions; any other region kind stops
     the search.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
6489 
6490 /* Return true if DECL is private within a parallel region
6491    that binds to the current construct's context or in parallel
6492    region's REDUCTION clause.  */
6493 
static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      /* Only enclosing contexts matter here; step past CTX itself
	 immediately.  */
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  /* Ran out of enclosing contexts without finding DECL.  */
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too,
	     when checking for copyprivate, assume they might be
	     private, otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (lang_hooks.decls.omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  /* Otherwise an unlisted local is taken to be private.  */
	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      /* Target regions with no data-sharing clause for DECL are
	 transparent: keep walking outward.  Note this 'continue' jumps
	 to the while condition below, which still decides whether the
	 walk may proceed past this context's region type.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	continue;

      if (n != NULL)
	{
	  /* A GOVD_LOCAL entry for a member-access dummy variable is
	     not a real privatization of DECL.  */
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  /* Private exactly when not shared in the binding region.  */
	  return (n->value & GOVD_SHARED) == 0;
	}
    }
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_SIMD
	 || ctx->region_type == ORT_ACC);
  return false;
}
6543 
6544 /* Return true if the CTX is combined with distribute and thus
6545    lastprivate can't be supported.  */
6546 
6547 static bool
6548 omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6549 {
6550   do
6551     {
6552       if (ctx->outer_context == NULL)
6553 	return false;
6554       ctx = ctx->outer_context;
6555       switch (ctx->region_type)
6556 	{
6557 	case ORT_WORKSHARE:
6558 	  if (!ctx->combined_loop)
6559 	    return false;
6560 	  if (ctx->distribute)
6561 	    return lang_GNU_Fortran ();
6562 	  break;
6563 	case ORT_COMBINED_PARALLEL:
6564 	  break;
6565 	case ORT_COMBINED_TEAMS:
6566 	  return lang_GNU_Fortran ();
6567 	default:
6568 	  return false;
6569 	}
6570     }
6571   while (1);
6572 }
6573 
6574 /* Callback for walk_tree to find a DECL_EXPR for the given DECL.  */
6575 
6576 static tree
6577 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
6578 {
6579   tree t = *tp;
6580 
6581   /* If this node has been visited, unmark it and keep looking.  */
6582   if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
6583     return t;
6584 
6585   if (IS_TYPE_OR_DECL_P (t))
6586     *walk_subtrees = 0;
6587   return NULL_TREE;
6588 }
6589 
6590 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
6591    and previous omp contexts.  */
6592 
6593 static void
6594 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6595 			   enum omp_region_type region_type,
6596 			   enum tree_code code)
6597 {
6598   struct gimplify_omp_ctx *ctx, *outer_ctx;
6599   tree c;
6600   hash_map<tree, tree> *struct_map_to_clause = NULL;
6601   tree *prev_list_p = NULL;
6602 
6603   ctx = new_omp_context (region_type);
6604   outer_ctx = ctx->outer_context;
6605   if (code == OMP_TARGET && !lang_GNU_Fortran ())
6606     {
6607       ctx->target_map_pointers_as_0len_arrays = true;
6608       /* FIXME: For Fortran we want to set this too, when
6609 	 the Fortran FE is updated to OpenMP 4.5.  */
6610       ctx->target_map_scalars_firstprivate = true;
6611     }
6612   if (!lang_GNU_Fortran ())
6613     switch (code)
6614       {
6615       case OMP_TARGET:
6616       case OMP_TARGET_DATA:
6617       case OMP_TARGET_ENTER_DATA:
6618       case OMP_TARGET_EXIT_DATA:
6619       case OACC_HOST_DATA:
6620 	ctx->target_firstprivatize_array_bases = true;
6621       default:
6622 	break;
6623       }
6624 
6625   while ((c = *list_p) != NULL)
6626     {
6627       bool remove = false;
6628       bool notice_outer = true;
6629       const char *check_non_private = NULL;
6630       unsigned int flags;
6631       tree decl;
6632 
6633       switch (OMP_CLAUSE_CODE (c))
6634 	{
6635 	case OMP_CLAUSE_PRIVATE:
6636 	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6637 	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6638 	    {
6639 	      flags |= GOVD_PRIVATE_OUTER_REF;
6640 	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6641 	    }
6642 	  else
6643 	    notice_outer = false;
6644 	  goto do_add;
6645 	case OMP_CLAUSE_SHARED:
6646 	  flags = GOVD_SHARED | GOVD_EXPLICIT;
6647 	  goto do_add;
6648 	case OMP_CLAUSE_FIRSTPRIVATE:
6649 	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6650 	  check_non_private = "firstprivate";
6651 	  goto do_add;
6652 	case OMP_CLAUSE_LASTPRIVATE:
6653 	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6654 	  check_non_private = "lastprivate";
6655 	  decl = OMP_CLAUSE_DECL (c);
6656 	  if (omp_no_lastprivate (ctx))
6657 	    {
6658 	      notice_outer = false;
6659 	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6660 	    }
6661 	  else if (error_operand_p (decl))
6662 	    goto do_add;
6663 	  else if (outer_ctx
6664 		   && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
6665 		       || outer_ctx->region_type == ORT_COMBINED_TEAMS)
6666 		   && splay_tree_lookup (outer_ctx->variables,
6667 					 (splay_tree_key) decl) == NULL)
6668 	    {
6669 	      omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6670 	      if (outer_ctx->outer_context)
6671 		omp_notice_variable (outer_ctx->outer_context, decl, true);
6672 	    }
6673 	  else if (outer_ctx
6674 		   && (outer_ctx->region_type & ORT_TASK) != 0
6675 		   && outer_ctx->combined_loop
6676 		   && splay_tree_lookup (outer_ctx->variables,
6677 					 (splay_tree_key) decl) == NULL)
6678 	    {
6679 	      omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6680 	      if (outer_ctx->outer_context)
6681 		omp_notice_variable (outer_ctx->outer_context, decl, true);
6682 	    }
6683 	  else if (outer_ctx
6684 		   && (outer_ctx->region_type == ORT_WORKSHARE
6685 		       || outer_ctx->region_type == ORT_ACC)
6686 		   && outer_ctx->combined_loop
6687 		   && splay_tree_lookup (outer_ctx->variables,
6688 					 (splay_tree_key) decl) == NULL
6689 		   && !omp_check_private (outer_ctx, decl, false))
6690 	    {
6691 	      omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6692 	      if (outer_ctx->outer_context
6693 		  && (outer_ctx->outer_context->region_type
6694 		      == ORT_COMBINED_PARALLEL)
6695 		  && splay_tree_lookup (outer_ctx->outer_context->variables,
6696 					(splay_tree_key) decl) == NULL)
6697 		{
6698 		  struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
6699 		  omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
6700 		  if (octx->outer_context)
6701 		    omp_notice_variable (octx->outer_context, decl, true);
6702 		}
6703 	      else if (outer_ctx->outer_context)
6704 		omp_notice_variable (outer_ctx->outer_context, decl, true);
6705 	    }
6706 	  goto do_add;
6707 	case OMP_CLAUSE_REDUCTION:
6708 	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6709 	  /* OpenACC permits reductions on private variables.  */
6710 	  if (!(region_type & ORT_ACC))
6711 	    check_non_private = "reduction";
6712 	  decl = OMP_CLAUSE_DECL (c);
6713 	  if (TREE_CODE (decl) == MEM_REF)
6714 	    {
6715 	      tree type = TREE_TYPE (decl);
6716 	      if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
6717 				 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6718 		{
6719 		  remove = true;
6720 		  break;
6721 		}
6722 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6723 	      if (DECL_P (v))
6724 		{
6725 		  omp_firstprivatize_variable (ctx, v);
6726 		  omp_notice_variable (ctx, v, true);
6727 		}
6728 	      decl = TREE_OPERAND (decl, 0);
6729 	      if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
6730 		{
6731 		  if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
6732 				     NULL, is_gimple_val, fb_rvalue)
6733 		      == GS_ERROR)
6734 		    {
6735 		      remove = true;
6736 		      break;
6737 		    }
6738 		  v = TREE_OPERAND (decl, 1);
6739 		  if (DECL_P (v))
6740 		    {
6741 		      omp_firstprivatize_variable (ctx, v);
6742 		      omp_notice_variable (ctx, v, true);
6743 		    }
6744 		  decl = TREE_OPERAND (decl, 0);
6745 		}
6746 	      if (TREE_CODE (decl) == ADDR_EXPR
6747 		  || TREE_CODE (decl) == INDIRECT_REF)
6748 		decl = TREE_OPERAND (decl, 0);
6749 	    }
6750 	  goto do_add_decl;
6751 	case OMP_CLAUSE_LINEAR:
6752 	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6753 			     is_gimple_val, fb_rvalue) == GS_ERROR)
6754 	    {
6755 	      remove = true;
6756 	      break;
6757 	    }
6758 	  else
6759 	    {
6760 	      if (code == OMP_SIMD
6761 		  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6762 		{
6763 		  struct gimplify_omp_ctx *octx = outer_ctx;
6764 		  if (octx
6765 		      && octx->region_type == ORT_WORKSHARE
6766 		      && octx->combined_loop
6767 		      && !octx->distribute)
6768 		    {
6769 		      if (octx->outer_context
6770 			  && (octx->outer_context->region_type
6771 			      == ORT_COMBINED_PARALLEL))
6772 			octx = octx->outer_context->outer_context;
6773 		      else
6774 			octx = octx->outer_context;
6775 		    }
6776 		  if (octx
6777 		      && octx->region_type == ORT_WORKSHARE
6778 		      && octx->combined_loop
6779 		      && octx->distribute
6780 		      && !lang_GNU_Fortran ())
6781 		    {
6782 		      error_at (OMP_CLAUSE_LOCATION (c),
6783 				"%<linear%> clause for variable other than "
6784 				"loop iterator specified on construct "
6785 				"combined with %<distribute%>");
6786 		      remove = true;
6787 		      break;
6788 		    }
6789 		}
6790 	      /* For combined #pragma omp parallel for simd, need to put
6791 		 lastprivate and perhaps firstprivate too on the
6792 		 parallel.  Similarly for #pragma omp for simd.  */
6793 	      struct gimplify_omp_ctx *octx = outer_ctx;
6794 	      decl = NULL_TREE;
6795 	      if (omp_no_lastprivate (ctx))
6796 		OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6797 	      do
6798 		{
6799 		  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6800 		      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6801 		    break;
6802 		  decl = OMP_CLAUSE_DECL (c);
6803 		  if (error_operand_p (decl))
6804 		    {
6805 		      decl = NULL_TREE;
6806 		      break;
6807 		    }
6808 		  flags = GOVD_SEEN;
6809 		  if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6810 		    flags |= GOVD_FIRSTPRIVATE;
6811 		  if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6812 		    flags |= GOVD_LASTPRIVATE;
6813 		  if (octx
6814 		      && octx->region_type == ORT_WORKSHARE
6815 		      && octx->combined_loop)
6816 		    {
6817 		      if (octx->outer_context
6818 			  && (octx->outer_context->region_type
6819 			      == ORT_COMBINED_PARALLEL))
6820 			octx = octx->outer_context;
6821 		      else if (omp_check_private (octx, decl, false))
6822 			break;
6823 		    }
6824 		  else if (octx
6825 			   && (octx->region_type & ORT_TASK) != 0
6826 			   && octx->combined_loop)
6827 		    ;
6828 		  else if (octx
6829 			   && octx->region_type == ORT_COMBINED_PARALLEL
6830 			   && ctx->region_type == ORT_WORKSHARE
6831 			   && octx == outer_ctx)
6832 		    flags = GOVD_SEEN | GOVD_SHARED;
6833 		  else if (octx
6834 			   && octx->region_type == ORT_COMBINED_TEAMS)
6835 		    flags = GOVD_SEEN | GOVD_SHARED;
6836 		  else if (octx
6837 			   && octx->region_type == ORT_COMBINED_TARGET)
6838 		    {
6839 		      flags &= ~GOVD_LASTPRIVATE;
6840 		      if (flags == GOVD_SEEN)
6841 			break;
6842 		    }
6843 		  else
6844 		    break;
6845 		  splay_tree_node on
6846 		    = splay_tree_lookup (octx->variables,
6847 					 (splay_tree_key) decl);
6848 		  if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
6849 		    {
6850 		      octx = NULL;
6851 		      break;
6852 		    }
6853 		  omp_add_variable (octx, decl, flags);
6854 		  if (octx->outer_context == NULL)
6855 		    break;
6856 		  octx = octx->outer_context;
6857 		}
6858 	      while (1);
6859 	      if (octx
6860 		  && decl
6861 		  && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6862 		      || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6863 		omp_notice_variable (octx, decl, true);
6864 	    }
6865 	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
6866 	  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6867 	      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6868 	    {
6869 	      notice_outer = false;
6870 	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6871 	    }
6872 	  goto do_add;
6873 
6874 	case OMP_CLAUSE_MAP:
6875 	  decl = OMP_CLAUSE_DECL (c);
6876 	  if (error_operand_p (decl))
6877 	    remove = true;
6878 	  switch (code)
6879 	    {
6880 	    case OMP_TARGET:
6881 	      break;
6882 	    case OMP_TARGET_DATA:
6883 	    case OMP_TARGET_ENTER_DATA:
6884 	    case OMP_TARGET_EXIT_DATA:
6885 	    case OACC_HOST_DATA:
6886 	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6887 		  || (OMP_CLAUSE_MAP_KIND (c)
6888 		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6889 		/* For target {,enter ,exit }data only the array slice is
6890 		   mapped, but not the pointer to it.  */
6891 		remove = true;
6892 	      break;
6893 	    default:
6894 	      break;
6895 	    }
6896 	  if (remove)
6897 	    break;
6898 	  if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
6899 	    {
6900 	      struct gimplify_omp_ctx *octx;
6901 	      for (octx = outer_ctx; octx; octx = octx->outer_context)
6902 	        {
6903 		  if (octx->region_type != ORT_ACC_HOST_DATA)
6904 		    break;
6905 		  splay_tree_node n2
6906 		    = splay_tree_lookup (octx->variables,
6907 					 (splay_tree_key) decl);
6908 		  if (n2)
6909 		    error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
6910 			      "declared in enclosing %<host_data%> region",
6911 			      DECL_NAME (decl));
6912 		}
6913 	    }
6914 	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6915 	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6916 				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6917 	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6918 			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6919 	    {
6920 	      remove = true;
6921 	      break;
6922 	    }
6923 	  else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6924 		    || (OMP_CLAUSE_MAP_KIND (c)
6925 			== GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6926 		   && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
6927 	    {
6928 	      OMP_CLAUSE_SIZE (c)
6929 		= get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL);
6930 	      omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
6931 				GOVD_FIRSTPRIVATE | GOVD_SEEN);
6932 	    }
6933 	  if (!DECL_P (decl))
6934 	    {
6935 	      tree d = decl, *pd;
6936 	      if (TREE_CODE (d) == ARRAY_REF)
6937 		{
6938 		  while (TREE_CODE (d) == ARRAY_REF)
6939 		    d = TREE_OPERAND (d, 0);
6940 		  if (TREE_CODE (d) == COMPONENT_REF
6941 		      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
6942 		    decl = d;
6943 		}
6944 	      pd = &OMP_CLAUSE_DECL (c);
6945 	      if (d == decl
6946 		  && TREE_CODE (decl) == INDIRECT_REF
6947 		  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
6948 		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6949 		      == REFERENCE_TYPE))
6950 		{
6951 		  pd = &TREE_OPERAND (decl, 0);
6952 		  decl = TREE_OPERAND (decl, 0);
6953 		}
6954 	      if (TREE_CODE (decl) == COMPONENT_REF)
6955 		{
6956 		  while (TREE_CODE (decl) == COMPONENT_REF)
6957 		    decl = TREE_OPERAND (decl, 0);
6958 		  if (TREE_CODE (decl) == INDIRECT_REF
6959 		      && DECL_P (TREE_OPERAND (decl, 0))
6960 		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6961 			  == REFERENCE_TYPE))
6962 		    decl = TREE_OPERAND (decl, 0);
6963 		}
6964 	      if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
6965 		  == GS_ERROR)
6966 		{
6967 		  remove = true;
6968 		  break;
6969 		}
6970 	      if (DECL_P (decl))
6971 		{
6972 		  if (error_operand_p (decl))
6973 		    {
6974 		      remove = true;
6975 		      break;
6976 		    }
6977 
6978 		  tree stype = TREE_TYPE (decl);
6979 		  if (TREE_CODE (stype) == REFERENCE_TYPE)
6980 		    stype = TREE_TYPE (stype);
6981 		  if (TYPE_SIZE_UNIT (stype) == NULL
6982 		      || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
6983 		    {
6984 		      error_at (OMP_CLAUSE_LOCATION (c),
6985 				"mapping field %qE of variable length "
6986 				"structure", OMP_CLAUSE_DECL (c));
6987 		      remove = true;
6988 		      break;
6989 		    }
6990 
6991 		  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
6992 		    {
6993 		      /* Error recovery.  */
6994 		      if (prev_list_p == NULL)
6995 			{
6996 			  remove = true;
6997 			  break;
6998 			}
6999 		      if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7000 			{
7001 			  tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7002 			  if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7003 			    {
7004 			      remove = true;
7005 			      break;
7006 			    }
7007 			}
7008 		    }
7009 
7010 		  tree offset;
7011 		  HOST_WIDE_INT bitsize, bitpos;
7012 		  machine_mode mode;
7013 		  int unsignedp, reversep, volatilep = 0;
7014 		  tree base = OMP_CLAUSE_DECL (c);
7015 		  while (TREE_CODE (base) == ARRAY_REF)
7016 		    base = TREE_OPERAND (base, 0);
7017 		  if (TREE_CODE (base) == INDIRECT_REF)
7018 		    base = TREE_OPERAND (base, 0);
7019 		  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7020 					      &mode, &unsignedp, &reversep,
7021 					      &volatilep, false);
7022 		  tree orig_base = base;
7023 		  if ((TREE_CODE (base) == INDIRECT_REF
7024 		       || (TREE_CODE (base) == MEM_REF
7025 			   && integer_zerop (TREE_OPERAND (base, 1))))
7026 		      && DECL_P (TREE_OPERAND (base, 0))
7027 		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7028 			  == REFERENCE_TYPE))
7029 		    base = TREE_OPERAND (base, 0);
7030 		  gcc_assert (base == decl
7031 			      && (offset == NULL_TREE
7032 				  || TREE_CODE (offset) == INTEGER_CST));
7033 
7034 		  splay_tree_node n
7035 		    = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7036 		  bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7037 			      == GOMP_MAP_ALWAYS_POINTER);
7038 		  if (n == NULL || (n->value & GOVD_MAP) == 0)
7039 		    {
7040 		      tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7041 						 OMP_CLAUSE_MAP);
7042 		      OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7043 		      if (orig_base != base)
7044 			OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7045 		      else
7046 			OMP_CLAUSE_DECL (l) = decl;
7047 		      OMP_CLAUSE_SIZE (l) = size_int (1);
7048 		      if (struct_map_to_clause == NULL)
7049 			struct_map_to_clause = new hash_map<tree, tree>;
7050 		      struct_map_to_clause->put (decl, l);
7051 		      if (ptr)
7052 			{
7053 			  enum gomp_map_kind mkind
7054 			    = code == OMP_TARGET_EXIT_DATA
7055 			      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7056 			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7057 						      OMP_CLAUSE_MAP);
7058 			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7059 			  OMP_CLAUSE_DECL (c2)
7060 			    = unshare_expr (OMP_CLAUSE_DECL (c));
7061 			  OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7062 			  OMP_CLAUSE_SIZE (c2)
7063 			    = TYPE_SIZE_UNIT (ptr_type_node);
7064 			  OMP_CLAUSE_CHAIN (l) = c2;
7065 			  if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7066 			    {
7067 			      tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7068 			      tree c3
7069 				= build_omp_clause (OMP_CLAUSE_LOCATION (c),
7070 						    OMP_CLAUSE_MAP);
7071 			      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7072 			      OMP_CLAUSE_DECL (c3)
7073 				= unshare_expr (OMP_CLAUSE_DECL (c4));
7074 			      OMP_CLAUSE_SIZE (c3)
7075 				= TYPE_SIZE_UNIT (ptr_type_node);
7076 			      OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7077 			      OMP_CLAUSE_CHAIN (c2) = c3;
7078 			    }
7079 			  *prev_list_p = l;
7080 			  prev_list_p = NULL;
7081 			}
7082 		      else
7083 			{
7084 			  OMP_CLAUSE_CHAIN (l) = c;
7085 			  *list_p = l;
7086 			  list_p = &OMP_CLAUSE_CHAIN (l);
7087 			}
7088 		      if (orig_base != base && code == OMP_TARGET)
7089 			{
7090 			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7091 						      OMP_CLAUSE_MAP);
7092 			  enum gomp_map_kind mkind
7093 			    = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7094 			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7095 			  OMP_CLAUSE_DECL (c2) = decl;
7096 			  OMP_CLAUSE_SIZE (c2) = size_zero_node;
7097 			  OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7098 			  OMP_CLAUSE_CHAIN (l) = c2;
7099 			}
7100 		      flags = GOVD_MAP | GOVD_EXPLICIT;
7101 		      if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7102 			flags |= GOVD_SEEN;
7103 		      goto do_add_decl;
7104 		    }
7105 		  else
7106 		    {
7107 		      tree *osc = struct_map_to_clause->get (decl);
7108 		      tree *sc = NULL, *scp = NULL;
7109 		      if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7110 			n->value |= GOVD_SEEN;
7111 		      offset_int o1, o2;
7112 		      if (offset)
7113 			o1 = wi::to_offset (offset);
7114 		      else
7115 			o1 = 0;
7116 		      if (bitpos)
7117 			o1 = o1 + bitpos / BITS_PER_UNIT;
7118 		      sc = &OMP_CLAUSE_CHAIN (*osc);
7119 		      if (*sc != c
7120 			  && (OMP_CLAUSE_MAP_KIND (*sc)
7121 			      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7122 			sc = &OMP_CLAUSE_CHAIN (*sc);
7123 		      for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7124 			if (ptr && sc == prev_list_p)
7125 			  break;
7126 			else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7127 				 != COMPONENT_REF
7128 				 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7129 				     != INDIRECT_REF)
7130 				 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7131 				     != ARRAY_REF))
7132 			  break;
7133 			else
7134 			  {
7135 			    tree offset2;
7136 			    HOST_WIDE_INT bitsize2, bitpos2;
7137 			    base = OMP_CLAUSE_DECL (*sc);
7138 			    if (TREE_CODE (base) == ARRAY_REF)
7139 			      {
7140 				while (TREE_CODE (base) == ARRAY_REF)
7141 				  base = TREE_OPERAND (base, 0);
7142 				if (TREE_CODE (base) != COMPONENT_REF
7143 				    || (TREE_CODE (TREE_TYPE (base))
7144 					!= ARRAY_TYPE))
7145 				  break;
7146 			      }
7147 			    else if (TREE_CODE (base) == INDIRECT_REF
7148 				     && (TREE_CODE (TREE_OPERAND (base, 0))
7149 					 == COMPONENT_REF)
7150 				     && (TREE_CODE (TREE_TYPE
7151 						     (TREE_OPERAND (base, 0)))
7152 					 == REFERENCE_TYPE))
7153 			      base = TREE_OPERAND (base, 0);
7154 			    base = get_inner_reference (base, &bitsize2,
7155 							&bitpos2, &offset2,
7156 							&mode, &unsignedp,
7157 							&reversep, &volatilep,
7158 							false);
7159 			    if ((TREE_CODE (base) == INDIRECT_REF
7160 				 || (TREE_CODE (base) == MEM_REF
7161 				     && integer_zerop (TREE_OPERAND (base,
7162 								     1))))
7163 				&& DECL_P (TREE_OPERAND (base, 0))
7164 				&& (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
7165 									0)))
7166 				    == REFERENCE_TYPE))
7167 			      base = TREE_OPERAND (base, 0);
7168 			    if (base != decl)
7169 			      break;
7170 			    if (scp)
7171 			      continue;
7172 			    gcc_assert (offset == NULL_TREE
7173 					|| TREE_CODE (offset) == INTEGER_CST);
7174 			    tree d1 = OMP_CLAUSE_DECL (*sc);
7175 			    tree d2 = OMP_CLAUSE_DECL (c);
7176 			    while (TREE_CODE (d1) == ARRAY_REF)
7177 			      d1 = TREE_OPERAND (d1, 0);
7178 			    while (TREE_CODE (d2) == ARRAY_REF)
7179 			      d2 = TREE_OPERAND (d2, 0);
7180 			    if (TREE_CODE (d1) == INDIRECT_REF)
7181 			      d1 = TREE_OPERAND (d1, 0);
7182 			    if (TREE_CODE (d2) == INDIRECT_REF)
7183 			      d2 = TREE_OPERAND (d2, 0);
7184 			    while (TREE_CODE (d1) == COMPONENT_REF)
7185 			      if (TREE_CODE (d2) == COMPONENT_REF
7186 				  && TREE_OPERAND (d1, 1)
7187 				     == TREE_OPERAND (d2, 1))
7188 				{
7189 				  d1 = TREE_OPERAND (d1, 0);
7190 				  d2 = TREE_OPERAND (d2, 0);
7191 				}
7192 			      else
7193 				break;
7194 			    if (d1 == d2)
7195 			      {
7196 				error_at (OMP_CLAUSE_LOCATION (c),
7197 					  "%qE appears more than once in map "
7198 					  "clauses", OMP_CLAUSE_DECL (c));
7199 				remove = true;
7200 				break;
7201 			      }
7202 			    if (offset2)
7203 			      o2 = wi::to_offset (offset2);
7204 			    else
7205 			      o2 = 0;
7206 			    if (bitpos2)
7207 			      o2 = o2 + bitpos2 / BITS_PER_UNIT;
7208 			    if (wi::ltu_p (o1, o2)
7209 				|| (wi::eq_p (o1, o2) && bitpos < bitpos2))
7210 			      {
7211 				if (ptr)
7212 				  scp = sc;
7213 				else
7214 				  break;
7215 			      }
7216 			  }
7217 		      if (remove)
7218 			break;
7219 		      OMP_CLAUSE_SIZE (*osc)
7220 			= size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
7221 				      size_one_node);
7222 		      if (ptr)
7223 			{
7224 			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7225 						      OMP_CLAUSE_MAP);
7226 			  tree cl = NULL_TREE;
7227 			  enum gomp_map_kind mkind
7228 			    = code == OMP_TARGET_EXIT_DATA
7229 			      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7230 			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7231 			  OMP_CLAUSE_DECL (c2)
7232 			    = unshare_expr (OMP_CLAUSE_DECL (c));
7233 			  OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
7234 			  OMP_CLAUSE_SIZE (c2)
7235 			    = TYPE_SIZE_UNIT (ptr_type_node);
7236 			  cl = scp ? *prev_list_p : c2;
7237 			  if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7238 			    {
7239 			      tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7240 			      tree c3
7241 				= build_omp_clause (OMP_CLAUSE_LOCATION (c),
7242 						    OMP_CLAUSE_MAP);
7243 			      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7244 			      OMP_CLAUSE_DECL (c3)
7245 				= unshare_expr (OMP_CLAUSE_DECL (c4));
7246 			      OMP_CLAUSE_SIZE (c3)
7247 				= TYPE_SIZE_UNIT (ptr_type_node);
7248 			      OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7249 			      if (!scp)
7250 				OMP_CLAUSE_CHAIN (c2) = c3;
7251 			      else
7252 				cl = c3;
7253 			    }
7254 			  if (scp)
7255 			    *scp = c2;
7256 			  if (sc == prev_list_p)
7257 			    {
7258 			      *sc = cl;
7259 			      prev_list_p = NULL;
7260 			    }
7261 			  else
7262 			    {
7263 			      *prev_list_p = OMP_CLAUSE_CHAIN (c);
7264 			      list_p = prev_list_p;
7265 			      prev_list_p = NULL;
7266 			      OMP_CLAUSE_CHAIN (c) = *sc;
7267 			      *sc = cl;
7268 			      continue;
7269 			    }
7270 			}
7271 		      else if (*sc != c)
7272 			{
7273 			  *list_p = OMP_CLAUSE_CHAIN (c);
7274 			  OMP_CLAUSE_CHAIN (c) = *sc;
7275 			  *sc = c;
7276 			  continue;
7277 			}
7278 		    }
7279 		}
7280 	      if (!remove
7281 		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
7282 		  && OMP_CLAUSE_CHAIN (c)
7283 		  && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
7284 		  && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
7285 		      == GOMP_MAP_ALWAYS_POINTER))
7286 		prev_list_p = list_p;
7287 	      break;
7288 	    }
7289 	  flags = GOVD_MAP | GOVD_EXPLICIT;
7290 	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
7291 	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
7292 	    flags |= GOVD_MAP_ALWAYS_TO;
7293 	  goto do_add;
7294 
7295 	case OMP_CLAUSE_DEPEND:
7296 	  if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
7297 	      || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
7298 	    {
7299 	      /* Nothing to do.  OMP_CLAUSE_DECL will be lowered in
7300 		 omp-low.c.  */
7301 	      break;
7302 	    }
7303 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7304 	    {
7305 	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7306 			     NULL, is_gimple_val, fb_rvalue);
7307 	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7308 	    }
7309 	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
7310 	    {
7311 	      remove = true;
7312 	      break;
7313 	    }
7314 	  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7315 	  if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7316 			     is_gimple_val, fb_rvalue) == GS_ERROR)
7317 	    {
7318 	      remove = true;
7319 	      break;
7320 	    }
7321 	  break;
7322 
7323 	case OMP_CLAUSE_TO:
7324 	case OMP_CLAUSE_FROM:
7325 	case OMP_CLAUSE__CACHE_:
7326 	  decl = OMP_CLAUSE_DECL (c);
7327 	  if (error_operand_p (decl))
7328 	    {
7329 	      remove = true;
7330 	      break;
7331 	    }
7332 	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7333 	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7334 				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7335 	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7336 			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7337 	    {
7338 	      remove = true;
7339 	      break;
7340 	    }
7341 	  if (!DECL_P (decl))
7342 	    {
7343 	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
7344 				 NULL, is_gimple_lvalue, fb_lvalue)
7345 		  == GS_ERROR)
7346 		{
7347 		  remove = true;
7348 		  break;
7349 		}
7350 	      break;
7351 	    }
7352 	  goto do_notice;
7353 
7354 	case OMP_CLAUSE_USE_DEVICE_PTR:
7355 	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7356 	  goto do_add;
7357 	case OMP_CLAUSE_IS_DEVICE_PTR:
7358 	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7359 	  goto do_add;
7360 
7361 	do_add:
7362 	  decl = OMP_CLAUSE_DECL (c);
7363 	do_add_decl:
7364 	  if (error_operand_p (decl))
7365 	    {
7366 	      remove = true;
7367 	      break;
7368 	    }
7369 	  if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
7370 	    {
7371 	      tree t = omp_member_access_dummy_var (decl);
7372 	      if (t)
7373 		{
7374 		  tree v = DECL_VALUE_EXPR (decl);
7375 		  DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
7376 		  if (outer_ctx)
7377 		    omp_notice_variable (outer_ctx, t, true);
7378 		}
7379 	    }
7380 	  omp_add_variable (ctx, decl, flags);
7381 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7382 	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7383 	    {
7384 	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
7385 				GOVD_LOCAL | GOVD_SEEN);
7386 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
7387 		  && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
7388 				find_decl_expr,
7389 				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7390 				NULL) == NULL_TREE)
7391 		omp_add_variable (ctx,
7392 				  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7393 				  GOVD_LOCAL | GOVD_SEEN);
7394 	      gimplify_omp_ctxp = ctx;
7395 	      push_gimplify_context ();
7396 
7397 	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
7398 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7399 
7400 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
7401 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
7402 	      pop_gimplify_context
7403 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
7404 	      push_gimplify_context ();
7405 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
7406 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7407 	      pop_gimplify_context
7408 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
7409 	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
7410 	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
7411 
7412 	      gimplify_omp_ctxp = outer_ctx;
7413 	    }
7414 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7415 		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
7416 	    {
7417 	      gimplify_omp_ctxp = ctx;
7418 	      push_gimplify_context ();
7419 	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
7420 		{
7421 		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7422 				      NULL, NULL);
7423 		  TREE_SIDE_EFFECTS (bind) = 1;
7424 		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
7425 		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
7426 		}
7427 	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
7428 				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7429 	      pop_gimplify_context
7430 		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
7431 	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
7432 
7433 	      gimplify_omp_ctxp = outer_ctx;
7434 	    }
7435 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7436 		   && OMP_CLAUSE_LINEAR_STMT (c))
7437 	    {
7438 	      gimplify_omp_ctxp = ctx;
7439 	      push_gimplify_context ();
7440 	      if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
7441 		{
7442 		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7443 				      NULL, NULL);
7444 		  TREE_SIDE_EFFECTS (bind) = 1;
7445 		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
7446 		  OMP_CLAUSE_LINEAR_STMT (c) = bind;
7447 		}
7448 	      gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
7449 				&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7450 	      pop_gimplify_context
7451 		(gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
7452 	      OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
7453 
7454 	      gimplify_omp_ctxp = outer_ctx;
7455 	    }
7456 	  if (notice_outer)
7457 	    goto do_notice;
7458 	  break;
7459 
7460 	case OMP_CLAUSE_COPYIN:
7461 	case OMP_CLAUSE_COPYPRIVATE:
7462 	  decl = OMP_CLAUSE_DECL (c);
7463 	  if (error_operand_p (decl))
7464 	    {
7465 	      remove = true;
7466 	      break;
7467 	    }
7468 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
7469 	      && !remove
7470 	      && !omp_check_private (ctx, decl, true))
7471 	    {
7472 	      remove = true;
7473 	      if (is_global_var (decl))
7474 		{
7475 		  if (DECL_THREAD_LOCAL_P (decl))
7476 		    remove = false;
7477 		  else if (DECL_HAS_VALUE_EXPR_P (decl))
7478 		    {
7479 		      tree value = get_base_address (DECL_VALUE_EXPR (decl));
7480 
7481 		      if (value
7482 			  && DECL_P (value)
7483 			  && DECL_THREAD_LOCAL_P (value))
7484 			remove = false;
7485 		    }
7486 		}
7487 	      if (remove)
7488 		error_at (OMP_CLAUSE_LOCATION (c),
7489 			  "copyprivate variable %qE is not threadprivate"
7490 			  " or private in outer context", DECL_NAME (decl));
7491 	    }
7492 	do_notice:
7493 	  if (outer_ctx)
7494 	    omp_notice_variable (outer_ctx, decl, true);
7495 	  if (check_non_private
7496 	      && region_type == ORT_WORKSHARE
7497 	      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7498 		  || decl == OMP_CLAUSE_DECL (c)
7499 		  || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
7500 		      && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7501 			  == ADDR_EXPR
7502 			  || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7503 			      == POINTER_PLUS_EXPR
7504 			      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
7505 						(OMP_CLAUSE_DECL (c), 0), 0))
7506 				  == ADDR_EXPR)))))
7507 	      && omp_check_private (ctx, decl, false))
7508 	    {
7509 	      error ("%s variable %qE is private in outer context",
7510 		     check_non_private, DECL_NAME (decl));
7511 	      remove = true;
7512 	    }
7513 	  break;
7514 
7515 	case OMP_CLAUSE_IF:
7516 	  if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
7517 	      && OMP_CLAUSE_IF_MODIFIER (c) != code)
7518 	    {
7519 	      const char *p[2];
7520 	      for (int i = 0; i < 2; i++)
7521 		switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
7522 		  {
7523 		  case OMP_PARALLEL: p[i] = "parallel"; break;
7524 		  case OMP_TASK: p[i] = "task"; break;
7525 		  case OMP_TASKLOOP: p[i] = "taskloop"; break;
7526 		  case OMP_TARGET_DATA: p[i] = "target data"; break;
7527 		  case OMP_TARGET: p[i] = "target"; break;
7528 		  case OMP_TARGET_UPDATE: p[i] = "target update"; break;
7529 		  case OMP_TARGET_ENTER_DATA:
7530 		    p[i] = "target enter data"; break;
7531 		  case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
7532 		  default: gcc_unreachable ();
7533 		  }
7534 	      error_at (OMP_CLAUSE_LOCATION (c),
7535 			"expected %qs %<if%> clause modifier rather than %qs",
7536 			p[0], p[1]);
7537 	      remove = true;
7538 	    }
7539 	  /* Fall through.  */
7540 
7541 	case OMP_CLAUSE_FINAL:
7542 	  OMP_CLAUSE_OPERAND (c, 0)
7543 	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
7544 	  /* Fall through.  */
7545 
7546 	case OMP_CLAUSE_SCHEDULE:
7547 	case OMP_CLAUSE_NUM_THREADS:
7548 	case OMP_CLAUSE_NUM_TEAMS:
7549 	case OMP_CLAUSE_THREAD_LIMIT:
7550 	case OMP_CLAUSE_DIST_SCHEDULE:
7551 	case OMP_CLAUSE_DEVICE:
7552 	case OMP_CLAUSE_PRIORITY:
7553 	case OMP_CLAUSE_GRAINSIZE:
7554 	case OMP_CLAUSE_NUM_TASKS:
7555 	case OMP_CLAUSE_HINT:
7556 	case OMP_CLAUSE__CILK_FOR_COUNT_:
7557 	case OMP_CLAUSE_ASYNC:
7558 	case OMP_CLAUSE_WAIT:
7559 	case OMP_CLAUSE_NUM_GANGS:
7560 	case OMP_CLAUSE_NUM_WORKERS:
7561 	case OMP_CLAUSE_VECTOR_LENGTH:
7562 	case OMP_CLAUSE_WORKER:
7563 	case OMP_CLAUSE_VECTOR:
7564 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7565 			     is_gimple_val, fb_rvalue) == GS_ERROR)
7566 	    remove = true;
7567 	  break;
7568 
7569 	case OMP_CLAUSE_GANG:
7570 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7571 			     is_gimple_val, fb_rvalue) == GS_ERROR)
7572 	    remove = true;
7573 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
7574 			     is_gimple_val, fb_rvalue) == GS_ERROR)
7575 	    remove = true;
7576 	  break;
7577 
7578 	case OMP_CLAUSE_TILE:
7579 	  for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
7580 	       list = TREE_CHAIN (list))
7581 	    {
7582 	      if (gimplify_expr (&TREE_VALUE (list), pre_p, NULL,
7583 				 is_gimple_val, fb_rvalue) == GS_ERROR)
7584 		remove = true;
7585 	    }
7586 	  break;
7587 
7588 	case OMP_CLAUSE_DEVICE_RESIDENT:
7589 	  remove = true;
7590 	  break;
7591 
7592 	case OMP_CLAUSE_NOWAIT:
7593 	case OMP_CLAUSE_ORDERED:
7594 	case OMP_CLAUSE_UNTIED:
7595 	case OMP_CLAUSE_COLLAPSE:
7596 	case OMP_CLAUSE_AUTO:
7597 	case OMP_CLAUSE_SEQ:
7598 	case OMP_CLAUSE_INDEPENDENT:
7599 	case OMP_CLAUSE_MERGEABLE:
7600 	case OMP_CLAUSE_PROC_BIND:
7601 	case OMP_CLAUSE_SAFELEN:
7602 	case OMP_CLAUSE_SIMDLEN:
7603 	case OMP_CLAUSE_NOGROUP:
7604 	case OMP_CLAUSE_THREADS:
7605 	case OMP_CLAUSE_SIMD:
7606 	  break;
7607 
7608 	case OMP_CLAUSE_DEFAULTMAP:
7609 	  ctx->target_map_scalars_firstprivate = false;
7610 	  break;
7611 
7612 	case OMP_CLAUSE_ALIGNED:
7613 	  decl = OMP_CLAUSE_DECL (c);
7614 	  if (error_operand_p (decl))
7615 	    {
7616 	      remove = true;
7617 	      break;
7618 	    }
7619 	  if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
7620 			     is_gimple_val, fb_rvalue) == GS_ERROR)
7621 	    {
7622 	      remove = true;
7623 	      break;
7624 	    }
7625 	  if (!is_global_var (decl)
7626 	      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7627 	    omp_add_variable (ctx, decl, GOVD_ALIGNED);
7628 	  break;
7629 
7630 	case OMP_CLAUSE_DEFAULT:
7631 	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
7632 	  break;
7633 
7634 	default:
7635 	  gcc_unreachable ();
7636 	}
7637 
7638       if (remove)
7639 	*list_p = OMP_CLAUSE_CHAIN (c);
7640       else
7641 	list_p = &OMP_CLAUSE_CHAIN (c);
7642     }
7643 
7644   gimplify_omp_ctxp = ctx;
7645   if (struct_map_to_clause)
7646     delete struct_map_to_clause;
7647 }
7648 
7649 /* Return true if DECL is a candidate for shared to firstprivate
7650    optimization.  We only consider non-addressable scalars, not
7651    too big, and not references.  */
7652 
7653 static bool
7654 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
7655 {
7656   if (TREE_ADDRESSABLE (decl))
7657     return false;
7658   tree type = TREE_TYPE (decl);
7659   if (!is_gimple_reg_type (type)
7660       || TREE_CODE (type) == REFERENCE_TYPE
7661       || TREE_ADDRESSABLE (type))
7662     return false;
7663   /* Don't optimize too large decls, as each thread/task will have
7664      its own.  */
7665   HOST_WIDE_INT len = int_size_in_bytes (type);
7666   if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
7667     return false;
7668   if (lang_hooks.decls.omp_privatize_by_reference (decl))
7669     return false;
7670   return true;
7671 }
7672 
7673 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
7674    For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
7675    GOVD_WRITTEN in outer contexts.  */
7676 
7677 static void
7678 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
7679 {
7680   for (; ctx; ctx = ctx->outer_context)
7681     {
7682       splay_tree_node n = splay_tree_lookup (ctx->variables,
7683 					     (splay_tree_key) decl);
7684       if (n == NULL)
7685 	continue;
7686       else if (n->value & GOVD_SHARED)
7687 	{
7688 	  n->value |= GOVD_WRITTEN;
7689 	  return;
7690 	}
7691       else if (n->value & GOVD_DATA_SHARE_CLASS)
7692 	return;
7693     }
7694 }
7695 
7696 /* Helper callback for walk_gimple_seq to discover possible stores
7697    to omp_shared_to_firstprivate_optimizable_decl_p decls and set
7698    GOVD_WRITTEN if they are GOVD_SHARED in some outer context
7699    for those.  */
7700 
7701 static tree
7702 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
7703 {
7704   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7705 
7706   *walk_subtrees = 0;
7707   if (!wi->is_lhs)
7708     return NULL_TREE;
7709 
7710   tree op = *tp;
7711   do
7712     {
7713       if (handled_component_p (op))
7714 	op = TREE_OPERAND (op, 0);
7715       else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
7716 	       && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
7717 	op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
7718       else
7719 	break;
7720     }
7721   while (1);
7722   if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
7723     return NULL_TREE;
7724 
7725   omp_mark_stores (gimplify_omp_ctxp, op);
7726   return NULL_TREE;
7727 }
7728 
7729 /* Helper callback for walk_gimple_seq to discover possible stores
7730    to omp_shared_to_firstprivate_optimizable_decl_p decls and set
7731    GOVD_WRITTEN if they are GOVD_SHARED in some outer context
7732    for those.  */
7733 
7734 static tree
7735 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
7736 		      bool *handled_ops_p,
7737 		      struct walk_stmt_info *wi)
7738 {
7739   gimple *stmt = gsi_stmt (*gsi_p);
7740   switch (gimple_code (stmt))
7741     {
7742     /* Don't recurse on OpenMP constructs for which
7743        gimplify_adjust_omp_clauses already handled the bodies,
7744        except handle gimple_omp_for_pre_body.  */
7745     case GIMPLE_OMP_FOR:
7746       *handled_ops_p = true;
7747       if (gimple_omp_for_pre_body (stmt))
7748 	walk_gimple_seq (gimple_omp_for_pre_body (stmt),
7749 			 omp_find_stores_stmt, omp_find_stores_op, wi);
7750       break;
7751     case GIMPLE_OMP_PARALLEL:
7752     case GIMPLE_OMP_TASK:
7753     case GIMPLE_OMP_SECTIONS:
7754     case GIMPLE_OMP_SINGLE:
7755     case GIMPLE_OMP_TARGET:
7756     case GIMPLE_OMP_TEAMS:
7757     case GIMPLE_OMP_CRITICAL:
7758       *handled_ops_p = true;
7759       break;
7760     default:
7761       break;
7762     }
7763   return NULL_TREE;
7764 }
7765 
/* Arguments passed through the DATA pointer to
   gimplify_adjust_omp_clauses_1 when it is invoked for each splay
   tree node.  */

struct gimplify_adjust_omp_clauses_data
{
  /* Head of the clause chain that new clauses are prepended to.  */
  tree *list_p;
  /* Statement sequence to gimplify clause operands into.  */
  gimple_seq *pre_p;
};
7771 
7772 /* For all variables that were not actually used within the context,
7773    remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */
7774 
static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses were emitted already, and context-local decls
     never need a clause.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Variables never referenced in the region need no clause either.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  /* Decide which clause code to build from the GOVD_* flags.  */
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    code = OMP_CLAUSE_MAP;
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global is only worth a shared clause if some enclosing
	     context privatizes, reduces, linearizes or maps it;
	     otherwise it can be accessed directly.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    code = OMP_CLAUSE_FIRSTPRIVATE;
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    return 0;
  else
    gcc_unreachable ();

  /* Lastprivates and written shared decls act as stores visible to
     outer contexts for the shared->firstprivate optimization.  */
  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  /* Build the implicit clause and prepend it to the list.  */
  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      /* Possibly zero-length array section: emit an alloc map of a
	 zero-sized MEM_REF plus a firstprivate pointer clause NC for
	 the base decl.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Gimplify the address in the outer context, not this one.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind = (flags & GOVD_MAP_TO_ONLY
		  ? GOMP_MAP_TO
		  : GOMP_MAP_TOFROM);
      if (flags & GOVD_MAP_FORCE)
	kind |= GOMP_MAP_FLAG_FORCE;
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: map the underlying storage through
	     its DECL_VALUE_EXPR and add a pointer clause NC for the
	     decl itself.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && lang_hooks.decls.omp_privatize_by_reference (decl))
	{
	  /* Reference-typed decl: map what the reference points to and
	     firstprivatize the reference itself via NC.  */
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  /* Gimplify the size in the outer context.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  /* A decl that is both firstprivate and lastprivate gets an extra
     lastprivate clause marked as coming from firstprivate.  */
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Let the frontend finalize the clause with the outer context
     current, then notice any DECL-valued map sizes there.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
7962 
7963 static void
7964 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
7965 			     enum tree_code code)
7966 {
7967   struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7968   tree c, decl;
7969 
7970   if (body)
7971     {
7972       struct gimplify_omp_ctx *octx;
7973       for (octx = ctx; octx; octx = octx->outer_context)
7974 	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
7975 	  break;
7976       if (octx)
7977 	{
7978 	  struct walk_stmt_info wi;
7979 	  memset (&wi, 0, sizeof (wi));
7980 	  walk_gimple_seq (body, omp_find_stores_stmt,
7981 			   omp_find_stores_op, &wi);
7982 	}
7983     }
7984   while ((c = *list_p) != NULL)
7985     {
7986       splay_tree_node n;
7987       bool remove = false;
7988 
7989       switch (OMP_CLAUSE_CODE (c))
7990 	{
7991 	case OMP_CLAUSE_PRIVATE:
7992 	case OMP_CLAUSE_SHARED:
7993 	case OMP_CLAUSE_FIRSTPRIVATE:
7994 	case OMP_CLAUSE_LINEAR:
7995 	  decl = OMP_CLAUSE_DECL (c);
7996 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7997 	  remove = !(n->value & GOVD_SEEN);
7998 	  if (! remove)
7999 	    {
8000 	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8001 	      if ((n->value & GOVD_DEBUG_PRIVATE)
8002 		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8003 		{
8004 		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8005 			      || ((n->value & GOVD_DATA_SHARE_CLASS)
8006 				  == GOVD_SHARED));
8007 		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8008 		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8009 		}
8010 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8011 		  && (n->value & GOVD_WRITTEN) == 0
8012 		  && DECL_P (decl)
8013 		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8014 		OMP_CLAUSE_SHARED_READONLY (c) = 1;
8015 	      else if (DECL_P (decl)
8016 		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8017 			    && (n->value & GOVD_WRITTEN) != 1)
8018 			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8019 			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8020 		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8021 		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8022 	    }
8023 	  break;
8024 
8025 	case OMP_CLAUSE_LASTPRIVATE:
8026 	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8027 	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
8028 	  decl = OMP_CLAUSE_DECL (c);
8029 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8030 	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8031 	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
8032 	  if (omp_no_lastprivate (ctx))
8033 	    {
8034 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8035 		remove = true;
8036 	      else
8037 		OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
8038 	    }
8039 	  else if (code == OMP_DISTRIBUTE
8040 		   && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8041 	    {
8042 	      remove = true;
8043 	      error_at (OMP_CLAUSE_LOCATION (c),
8044 			"same variable used in %<firstprivate%> and "
8045 			"%<lastprivate%> clauses on %<distribute%> "
8046 			"construct");
8047 	    }
8048 	  if (!remove
8049 	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8050 	      && DECL_P (decl)
8051 	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8052 	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8053 	  break;
8054 
8055 	case OMP_CLAUSE_ALIGNED:
8056 	  decl = OMP_CLAUSE_DECL (c);
8057 	  if (!is_global_var (decl))
8058 	    {
8059 	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8060 	      remove = n == NULL || !(n->value & GOVD_SEEN);
8061 	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8062 		{
8063 		  struct gimplify_omp_ctx *octx;
8064 		  if (n != NULL
8065 		      && (n->value & (GOVD_DATA_SHARE_CLASS
8066 				      & ~GOVD_FIRSTPRIVATE)))
8067 		    remove = true;
8068 		  else
8069 		    for (octx = ctx->outer_context; octx;
8070 			 octx = octx->outer_context)
8071 		      {
8072 			n = splay_tree_lookup (octx->variables,
8073 					       (splay_tree_key) decl);
8074 			if (n == NULL)
8075 			  continue;
8076 			if (n->value & GOVD_LOCAL)
8077 			  break;
8078 			/* We have to avoid assigning a shared variable
8079 			   to itself when trying to add
8080 			   __builtin_assume_aligned.  */
8081 			if (n->value & GOVD_SHARED)
8082 			  {
8083 			    remove = true;
8084 			    break;
8085 			  }
8086 		      }
8087 		}
8088 	    }
8089 	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8090 	    {
8091 	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8092 	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8093 		remove = true;
8094 	    }
8095 	  break;
8096 
8097 	case OMP_CLAUSE_MAP:
8098 	  if (code == OMP_TARGET_EXIT_DATA
8099 	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8100 	    {
8101 	      remove = true;
8102 	      break;
8103 	    }
8104 	  decl = OMP_CLAUSE_DECL (c);
	  /* Data clauses associated with acc parallel reductions must be
	     compatible with present_or_copy.  Warn and adjust the clause
	     if that is not the case.  */
8108 	  if (ctx->region_type == ORT_ACC_PARALLEL)
8109 	    {
8110 	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
8111 	      n = NULL;
8112 
8113 	      if (DECL_P (t))
8114 		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8115 
8116 	      if (n && (n->value & GOVD_REDUCTION))
8117 		{
8118 		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
8119 
8120 		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
8121 		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
8122 		      && kind != GOMP_MAP_FORCE_PRESENT
8123 		      && kind != GOMP_MAP_POINTER)
8124 		    {
8125 		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
8126 				  "incompatible data clause with reduction "
8127 				  "on %qE; promoting to present_or_copy",
8128 				  DECL_NAME (t));
8129 		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
8130 		    }
8131 		}
8132 	    }
8133 	  if (!DECL_P (decl))
8134 	    {
8135 	      if ((ctx->region_type & ORT_TARGET) != 0
8136 		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8137 		{
8138 		  if (TREE_CODE (decl) == INDIRECT_REF
8139 		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8140 		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8141 			  == REFERENCE_TYPE))
8142 		    decl = TREE_OPERAND (decl, 0);
8143 		  if (TREE_CODE (decl) == COMPONENT_REF)
8144 		    {
8145 		      while (TREE_CODE (decl) == COMPONENT_REF)
8146 			decl = TREE_OPERAND (decl, 0);
8147 		      if (DECL_P (decl))
8148 			{
8149 			  n = splay_tree_lookup (ctx->variables,
8150 						 (splay_tree_key) decl);
8151 			  if (!(n->value & GOVD_SEEN))
8152 			    remove = true;
8153 			}
8154 		    }
8155 		}
8156 	      break;
8157 	    }
8158 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8159 	  if ((ctx->region_type & ORT_TARGET) != 0
8160 	      && !(n->value & GOVD_SEEN)
8161 	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
8162 	      && (!is_global_var (decl)
8163 		  || !lookup_attribute ("omp declare target link",
8164 					DECL_ATTRIBUTES (decl))))
8165 	    {
8166 	      remove = true;
8167 	      /* For struct element mapping, if struct is never referenced
8168 		 in target block and none of the mapping has always modifier,
8169 		 remove all the struct element mappings, which immediately
8170 		 follow the GOMP_MAP_STRUCT map clause.  */
8171 	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
8172 		{
8173 		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
8174 		  while (cnt--)
8175 		    OMP_CLAUSE_CHAIN (c)
8176 		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
8177 		}
8178 	    }
8179 	  else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
8180 		   && code == OMP_TARGET_EXIT_DATA)
8181 	    remove = true;
8182 	  else if (DECL_SIZE (decl)
8183 		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
8184 		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
8185 		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
8186 		   && (OMP_CLAUSE_MAP_KIND (c)
8187 		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8188 	    {
8189 	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
8190 		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
8191 		 INTEGER_CST.  */
8192 	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
8193 
8194 	      tree decl2 = DECL_VALUE_EXPR (decl);
8195 	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8196 	      decl2 = TREE_OPERAND (decl2, 0);
8197 	      gcc_assert (DECL_P (decl2));
8198 	      tree mem = build_simple_mem_ref (decl2);
8199 	      OMP_CLAUSE_DECL (c) = mem;
8200 	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8201 	      if (ctx->outer_context)
8202 		{
8203 		  omp_notice_variable (ctx->outer_context, decl2, true);
8204 		  omp_notice_variable (ctx->outer_context,
8205 				       OMP_CLAUSE_SIZE (c), true);
8206 		}
8207 	      if (((ctx->region_type & ORT_TARGET) != 0
8208 		   || !ctx->target_firstprivatize_array_bases)
8209 		  && ((n->value & GOVD_SEEN) == 0
8210 		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
8211 		{
8212 		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8213 					      OMP_CLAUSE_MAP);
8214 		  OMP_CLAUSE_DECL (nc) = decl;
8215 		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
8216 		  if (ctx->target_firstprivatize_array_bases)
8217 		    OMP_CLAUSE_SET_MAP_KIND (nc,
8218 					     GOMP_MAP_FIRSTPRIVATE_POINTER);
8219 		  else
8220 		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8221 		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
8222 		  OMP_CLAUSE_CHAIN (c) = nc;
8223 		  c = nc;
8224 		}
8225 	    }
8226 	  else
8227 	    {
8228 	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8229 		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8230 	      gcc_assert ((n->value & GOVD_SEEN) == 0
8231 			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
8232 			      == 0));
8233 	    }
8234 	  break;
8235 
8236 	case OMP_CLAUSE_TO:
8237 	case OMP_CLAUSE_FROM:
8238 	case OMP_CLAUSE__CACHE_:
8239 	  decl = OMP_CLAUSE_DECL (c);
8240 	  if (!DECL_P (decl))
8241 	    break;
8242 	  if (DECL_SIZE (decl)
8243 	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8244 	    {
8245 	      tree decl2 = DECL_VALUE_EXPR (decl);
8246 	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8247 	      decl2 = TREE_OPERAND (decl2, 0);
8248 	      gcc_assert (DECL_P (decl2));
8249 	      tree mem = build_simple_mem_ref (decl2);
8250 	      OMP_CLAUSE_DECL (c) = mem;
8251 	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8252 	      if (ctx->outer_context)
8253 		{
8254 		  omp_notice_variable (ctx->outer_context, decl2, true);
8255 		  omp_notice_variable (ctx->outer_context,
8256 				       OMP_CLAUSE_SIZE (c), true);
8257 		}
8258 	    }
8259 	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8260 	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8261 	  break;
8262 
8263 	case OMP_CLAUSE_REDUCTION:
8264 	  decl = OMP_CLAUSE_DECL (c);
	  /* OpenACC reductions need a present_or_copy data clause.
	     Add one if necessary.  Error if the reduction is private.  */
8267 	  if (ctx->region_type == ORT_ACC_PARALLEL)
8268 	    {
8269 	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8270 	      if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
8271 		error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
8272 			  "reduction on %qE", DECL_NAME (decl));
8273 	      else if ((n->value & GOVD_MAP) == 0)
8274 		{
8275 		  tree next = OMP_CLAUSE_CHAIN (c);
8276 		  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
8277 		  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
8278 		  OMP_CLAUSE_DECL (nc) = decl;
8279 		  OMP_CLAUSE_CHAIN (c) = nc;
8280 		  lang_hooks.decls.omp_finish_clause (nc, pre_p);
8281 		  while (1)
8282 		    {
8283 		      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
8284 		      if (OMP_CLAUSE_CHAIN (nc) == NULL)
8285 			break;
8286 		      nc = OMP_CLAUSE_CHAIN (nc);
8287 		    }
8288 		  OMP_CLAUSE_CHAIN (nc) = next;
8289 		  n->value |= GOVD_MAP;
8290 		}
8291 	    }
8292 	  if (DECL_P (decl)
8293 	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8294 	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8295 	  break;
8296 	case OMP_CLAUSE_COPYIN:
8297 	case OMP_CLAUSE_COPYPRIVATE:
8298 	case OMP_CLAUSE_IF:
8299 	case OMP_CLAUSE_NUM_THREADS:
8300 	case OMP_CLAUSE_NUM_TEAMS:
8301 	case OMP_CLAUSE_THREAD_LIMIT:
8302 	case OMP_CLAUSE_DIST_SCHEDULE:
8303 	case OMP_CLAUSE_DEVICE:
8304 	case OMP_CLAUSE_SCHEDULE:
8305 	case OMP_CLAUSE_NOWAIT:
8306 	case OMP_CLAUSE_ORDERED:
8307 	case OMP_CLAUSE_DEFAULT:
8308 	case OMP_CLAUSE_UNTIED:
8309 	case OMP_CLAUSE_COLLAPSE:
8310 	case OMP_CLAUSE_FINAL:
8311 	case OMP_CLAUSE_MERGEABLE:
8312 	case OMP_CLAUSE_PROC_BIND:
8313 	case OMP_CLAUSE_SAFELEN:
8314 	case OMP_CLAUSE_SIMDLEN:
8315 	case OMP_CLAUSE_DEPEND:
8316 	case OMP_CLAUSE_PRIORITY:
8317 	case OMP_CLAUSE_GRAINSIZE:
8318 	case OMP_CLAUSE_NUM_TASKS:
8319 	case OMP_CLAUSE_NOGROUP:
8320 	case OMP_CLAUSE_THREADS:
8321 	case OMP_CLAUSE_SIMD:
8322 	case OMP_CLAUSE_HINT:
8323 	case OMP_CLAUSE_DEFAULTMAP:
8324 	case OMP_CLAUSE_USE_DEVICE_PTR:
8325 	case OMP_CLAUSE_IS_DEVICE_PTR:
8326 	case OMP_CLAUSE__CILK_FOR_COUNT_:
8327 	case OMP_CLAUSE_ASYNC:
8328 	case OMP_CLAUSE_WAIT:
8329 	case OMP_CLAUSE_DEVICE_RESIDENT:
8330 	case OMP_CLAUSE_INDEPENDENT:
8331 	case OMP_CLAUSE_NUM_GANGS:
8332 	case OMP_CLAUSE_NUM_WORKERS:
8333 	case OMP_CLAUSE_VECTOR_LENGTH:
8334 	case OMP_CLAUSE_GANG:
8335 	case OMP_CLAUSE_WORKER:
8336 	case OMP_CLAUSE_VECTOR:
8337 	case OMP_CLAUSE_AUTO:
8338 	case OMP_CLAUSE_SEQ:
8339 	  break;
8340 
8341 	case OMP_CLAUSE_TILE:
8342 	  /* We're not yet making use of the information provided by OpenACC
8343 	     tile clauses.  Discard these here, to simplify later middle end
8344 	     processing.  */
8345 	  remove = true;
8346 	  break;
8347 
8348 	default:
8349 	  gcc_unreachable ();
8350 	}
8351 
8352       if (remove)
8353 	*list_p = OMP_CLAUSE_CHAIN (c);
8354       else
8355 	list_p = &OMP_CLAUSE_CHAIN (c);
8356     }
8357 
8358   /* Add in any implicit data sharing.  */
8359   struct gimplify_adjust_omp_clauses_data data;
8360   data.list_p = list_p;
8361   data.pre_p = pre_p;
8362   splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
8363 
8364   gimplify_omp_ctxp = ctx->outer_context;
8365   delete_omp_context (ctx);
8366 }
8367 
8368 /* Gimplify OACC_CACHE.  */
8369 
8370 static void
8371 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
8372 {
8373   tree expr = *expr_p;
8374 
8375   gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
8376 			     OACC_CACHE);
8377   gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
8378 			       OACC_CACHE);
8379 
8380   /* TODO: Do something sensible with this information.  */
8381 
8382   *expr_p = NULL_TREE;
8383 }
8384 
8385 /* Helper function of gimplify_oacc_declare.  The helper's purpose is to,
8386    if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
8387    kind.  The entry kind will replace the one in CLAUSE, while the exit
8388    kind will be used in a new omp_clause and returned to the caller.  */
8389 
8390 static tree
8391 gimplify_oacc_declare_1 (tree clause)
8392 {
8393   HOST_WIDE_INT kind, new_op;
8394   bool ret = false;
8395   tree c = NULL;
8396 
8397   kind = OMP_CLAUSE_MAP_KIND (clause);
8398 
8399   switch (kind)
8400     {
8401       case GOMP_MAP_ALLOC:
8402       case GOMP_MAP_FORCE_ALLOC:
8403       case GOMP_MAP_FORCE_TO:
8404 	new_op = GOMP_MAP_DELETE;
8405 	ret = true;
8406 	break;
8407 
8408       case GOMP_MAP_FORCE_FROM:
8409 	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8410 	new_op = GOMP_MAP_FORCE_FROM;
8411 	ret = true;
8412 	break;
8413 
8414       case GOMP_MAP_FORCE_TOFROM:
8415 	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
8416 	new_op = GOMP_MAP_FORCE_FROM;
8417 	ret = true;
8418 	break;
8419 
8420       case GOMP_MAP_FROM:
8421 	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8422 	new_op = GOMP_MAP_FROM;
8423 	ret = true;
8424 	break;
8425 
8426       case GOMP_MAP_TOFROM:
8427 	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
8428 	new_op = GOMP_MAP_FROM;
8429 	ret = true;
8430 	break;
8431 
8432       case GOMP_MAP_DEVICE_RESIDENT:
8433       case GOMP_MAP_FORCE_DEVICEPTR:
8434       case GOMP_MAP_FORCE_PRESENT:
8435       case GOMP_MAP_LINK:
8436       case GOMP_MAP_POINTER:
8437       case GOMP_MAP_TO:
8438 	break;
8439 
8440       default:
8441 	gcc_unreachable ();
8442 	break;
8443     }
8444 
8445   if (ret)
8446     {
8447       c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
8448       OMP_CLAUSE_SET_MAP_KIND (c, new_op);
8449       OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
8450     }
8451 
8452   return c;
8453 }
8454 
8455 /* Gimplify OACC_DECLARE.  */
8456 
8457 static void
8458 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
8459 {
8460   tree expr = *expr_p;
8461   gomp_target *stmt;
8462   tree clauses, t;
8463 
8464   clauses = OACC_DECLARE_CLAUSES (expr);
8465 
8466   gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
8467 
8468   for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
8469     {
8470       tree decl = OMP_CLAUSE_DECL (t);
8471 
8472       if (TREE_CODE (decl) == MEM_REF)
8473 	continue;
8474 
8475       if (TREE_CODE (decl) == VAR_DECL
8476 	  && !is_global_var (decl)
8477 	  && DECL_CONTEXT (decl) == current_function_decl)
8478 	{
8479 	  tree c = gimplify_oacc_declare_1 (t);
8480 	  if (c)
8481 	    {
8482 	      if (oacc_declare_returns == NULL)
8483 		oacc_declare_returns = new hash_map<tree, tree>;
8484 
8485 	      oacc_declare_returns->put (decl, c);
8486 	    }
8487 	}
8488 
8489       omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
8490     }
8491 
8492   stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
8493 				  clauses);
8494 
8495   gimplify_seq_add_stmt (pre_p, stmt);
8496 
8497   *expr_p = NULL_TREE;
8498 }
8499 
8500 /* Gimplify the contents of an OMP_PARALLEL statement.  This involves
8501    gimplification of the body, as well as scanning the body for used
8502    variables.  We need to do this scan now, because variable-sized
8503    decls will be decomposed during gimplification.  */
8504 
8505 static void
8506 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
8507 {
8508   tree expr = *expr_p;
8509   gimple *g;
8510   gimple_seq body = NULL;
8511 
8512   gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
8513 			     OMP_PARALLEL_COMBINED (expr)
8514 			     ? ORT_COMBINED_PARALLEL
8515 			     : ORT_PARALLEL, OMP_PARALLEL);
8516 
8517   push_gimplify_context ();
8518 
8519   g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
8520   if (gimple_code (g) == GIMPLE_BIND)
8521     pop_gimplify_context (g);
8522   else
8523     pop_gimplify_context (NULL);
8524 
8525   gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
8526 			       OMP_PARALLEL);
8527 
8528   g = gimple_build_omp_parallel (body,
8529 				 OMP_PARALLEL_CLAUSES (expr),
8530 				 NULL_TREE, NULL_TREE);
8531   if (OMP_PARALLEL_COMBINED (expr))
8532     gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
8533   gimplify_seq_add_stmt (pre_p, g);
8534   *expr_p = NULL_TREE;
8535 }
8536 
8537 /* Gimplify the contents of an OMP_TASK statement.  This involves
8538    gimplification of the body, as well as scanning the body for used
8539    variables.  We need to do this scan now, because variable-sized
8540    decls will be decomposed during gimplification.  */
8541 
8542 static void
8543 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
8544 {
8545   tree expr = *expr_p;
8546   gimple *g;
8547   gimple_seq body = NULL;
8548 
8549   gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
8550 			     find_omp_clause (OMP_TASK_CLAUSES (expr),
8551 					      OMP_CLAUSE_UNTIED)
8552 			     ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
8553 
8554   push_gimplify_context ();
8555 
8556   g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
8557   if (gimple_code (g) == GIMPLE_BIND)
8558     pop_gimplify_context (g);
8559   else
8560     pop_gimplify_context (NULL);
8561 
8562   gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
8563 			       OMP_TASK);
8564 
8565   g = gimple_build_omp_task (body,
8566 			     OMP_TASK_CLAUSES (expr),
8567 			     NULL_TREE, NULL_TREE,
8568 			     NULL_TREE, NULL_TREE, NULL_TREE);
8569   gimplify_seq_add_stmt (pre_p, g);
8570   *expr_p = NULL_TREE;
8571 }
8572 
8573 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
8574    with non-NULL OMP_FOR_INIT.  */
8575 
8576 static tree
8577 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
8578 {
8579   *walk_subtrees = 0;
8580   switch (TREE_CODE (*tp))
8581     {
8582     case OMP_FOR:
8583       *walk_subtrees = 1;
8584       /* FALLTHRU */
8585     case OMP_SIMD:
8586       if (OMP_FOR_INIT (*tp) != NULL_TREE)
8587 	return *tp;
8588       break;
8589     case BIND_EXPR:
8590     case STATEMENT_LIST:
8591     case OMP_PARALLEL:
8592       *walk_subtrees = 1;
8593       break;
8594     default:
8595       break;
8596     }
8597   return NULL_TREE;
8598 }
8599 
8600 /* Gimplify the gross structure of an OMP_FOR statement.  */
8601 
8602 static enum gimplify_status
8603 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
8604 {
8605   tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
8606   enum gimplify_status ret = GS_ALL_DONE;
8607   enum gimplify_status tret;
8608   gomp_for *gfor;
8609   gimple_seq for_body, for_pre_body;
8610   int i;
8611   bitmap has_decl_expr = NULL;
8612   enum omp_region_type ort = ORT_WORKSHARE;
8613 
8614   orig_for_stmt = for_stmt = *expr_p;
8615 
8616   switch (TREE_CODE (for_stmt))
8617     {
8618     case OMP_FOR:
8619     case CILK_FOR:
8620     case OMP_DISTRIBUTE:
8621       break;
8622     case OACC_LOOP:
8623       ort = ORT_ACC;
8624       break;
8625     case OMP_TASKLOOP:
8626       if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
8627 	ort = ORT_UNTIED_TASK;
8628       else
8629 	ort = ORT_TASK;
8630       break;
8631     case OMP_SIMD:
8632     case CILK_SIMD:
8633       ort = ORT_SIMD;
8634       break;
8635     default:
8636       gcc_unreachable ();
8637     }
8638 
8639   /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
8640      clause for the IV.  */
8641   if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8642     {
8643       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
8644       gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8645       decl = TREE_OPERAND (t, 0);
8646       for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8647 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8648 	    && OMP_CLAUSE_DECL (c) == decl)
8649 	  {
8650 	    OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8651 	    break;
8652 	  }
8653     }
8654 
8655   if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8656     {
8657       gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
8658       inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
8659 				  find_combined_omp_for, NULL, NULL);
8660       if (inner_for_stmt == NULL_TREE)
8661 	{
8662 	  gcc_assert (seen_error ());
8663 	  *expr_p = NULL_TREE;
8664 	  return GS_ERROR;
8665 	}
8666     }
8667 
8668   if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
8669     gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
8670 			       TREE_CODE (for_stmt));
8671 
8672   if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
8673     gimplify_omp_ctxp->distribute = true;
8674 
8675   /* Handle OMP_FOR_INIT.  */
8676   for_pre_body = NULL;
8677   if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
8678     {
8679       has_decl_expr = BITMAP_ALLOC (NULL);
8680       if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
8681 	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
8682 	     == VAR_DECL)
8683 	{
8684 	  t = OMP_FOR_PRE_BODY (for_stmt);
8685 	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8686 	}
8687       else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
8688 	{
8689 	  tree_stmt_iterator si;
8690 	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
8691 	       tsi_next (&si))
8692 	    {
8693 	      t = tsi_stmt (si);
8694 	      if (TREE_CODE (t) == DECL_EXPR
8695 		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
8696 		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8697 	    }
8698 	}
8699     }
8700   if (OMP_FOR_PRE_BODY (for_stmt))
8701     {
8702       if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
8703 	gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8704       else
8705 	{
8706 	  struct gimplify_omp_ctx ctx;
8707 	  memset (&ctx, 0, sizeof (ctx));
8708 	  ctx.region_type = ORT_NONE;
8709 	  gimplify_omp_ctxp = &ctx;
8710 	  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8711 	  gimplify_omp_ctxp = NULL;
8712 	}
8713     }
8714   OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
8715 
8716   if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8717     for_stmt = inner_for_stmt;
8718 
8719   /* For taskloop, need to gimplify the start, end and step before the
8720      taskloop, outside of the taskloop omp context.  */
8721   if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8722     {
8723       for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8724 	{
8725 	  t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8726 	  if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8727 	    {
8728 	      tree type = TREE_TYPE (TREE_OPERAND (t, 0));
8729 	      TREE_OPERAND (t, 1)
8730 		= get_initialized_tmp_var (TREE_OPERAND (t, 1),
8731 					   gimple_seq_empty_p (for_pre_body)
8732 					   ? pre_p : &for_pre_body, NULL);
8733 	      /* Reference to pointer conversion is considered useless,
8734 		 but is significant for firstprivate clause.  Force it
8735 		 here.  */
8736 	      if (TREE_CODE (type) == POINTER_TYPE
8737 		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
8738 		      == REFERENCE_TYPE))
8739 		{
8740 		  tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
8741 		  tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
8742 				   TREE_OPERAND (t, 1));
8743 		  gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
8744 				       ? pre_p : &for_pre_body);
8745 		  TREE_OPERAND (t, 1) = v;
8746 		}
8747 	      tree c = build_omp_clause (input_location,
8748 					 OMP_CLAUSE_FIRSTPRIVATE);
8749 	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8750 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8751 	      OMP_FOR_CLAUSES (orig_for_stmt) = c;
8752 	    }
8753 
8754 	  /* Handle OMP_FOR_COND.  */
8755 	  t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8756 	  if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8757 	    {
8758 	      tree type = TREE_TYPE (TREE_OPERAND (t, 0));
8759 	      TREE_OPERAND (t, 1)
8760 		= get_initialized_tmp_var (TREE_OPERAND (t, 1),
8761 					   gimple_seq_empty_p (for_pre_body)
8762 					   ? pre_p : &for_pre_body, NULL);
8763 	      /* Reference to pointer conversion is considered useless,
8764 		 but is significant for firstprivate clause.  Force it
8765 		 here.  */
8766 	      if (TREE_CODE (type) == POINTER_TYPE
8767 		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
8768 		      == REFERENCE_TYPE))
8769 		{
8770 		  tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
8771 		  tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
8772 				   TREE_OPERAND (t, 1));
8773 		  gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
8774 				       ? pre_p : &for_pre_body);
8775 		  TREE_OPERAND (t, 1) = v;
8776 		}
8777 	      tree c = build_omp_clause (input_location,
8778 					 OMP_CLAUSE_FIRSTPRIVATE);
8779 	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8780 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8781 	      OMP_FOR_CLAUSES (orig_for_stmt) = c;
8782 	    }
8783 
8784 	  /* Handle OMP_FOR_INCR.  */
8785 	  t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8786 	  if (TREE_CODE (t) == MODIFY_EXPR)
8787 	    {
8788 	      decl = TREE_OPERAND (t, 0);
8789 	      t = TREE_OPERAND (t, 1);
8790 	      tree *tp = &TREE_OPERAND (t, 1);
8791 	      if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
8792 		tp = &TREE_OPERAND (t, 0);
8793 
8794 	      if (!is_gimple_constant (*tp))
8795 		{
8796 		  gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
8797 				    ? pre_p : &for_pre_body;
8798 		  *tp = get_initialized_tmp_var (*tp, seq, NULL);
8799 		  tree c = build_omp_clause (input_location,
8800 					     OMP_CLAUSE_FIRSTPRIVATE);
8801 		  OMP_CLAUSE_DECL (c) = *tp;
8802 		  OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8803 		  OMP_FOR_CLAUSES (orig_for_stmt) = c;
8804 		}
8805 	    }
8806 	}
8807 
8808       gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
8809 				 OMP_TASKLOOP);
8810     }
8811 
8812   if (orig_for_stmt != for_stmt)
8813     gimplify_omp_ctxp->combined_loop = true;
8814 
8815   for_body = NULL;
8816   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8817 	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
8818   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8819 	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
8820 
8821   tree c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
8822   bool is_doacross = false;
8823   if (c && OMP_CLAUSE_ORDERED_EXPR (c))
8824     {
8825       is_doacross = true;
8826       gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
8827 						 (OMP_FOR_INIT (for_stmt))
8828 					       * 2);
8829     }
8830   int collapse = 1;
8831   c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
8832   if (c)
8833     collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
8834   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8835     {
8836       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8837       gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8838       decl = TREE_OPERAND (t, 0);
8839       gcc_assert (DECL_P (decl));
8840       gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
8841 		  || POINTER_TYPE_P (TREE_TYPE (decl)));
8842       if (is_doacross)
8843 	{
8844 	  if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
8845 	    gimplify_omp_ctxp->loop_iter_var.quick_push
8846 	      (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
8847 	  else
8848 	    gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8849 	  gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8850 	}
8851 
8852       /* Make sure the iteration variable is private.  */
8853       tree c = NULL_TREE;
8854       tree c2 = NULL_TREE;
8855       if (orig_for_stmt != for_stmt)
8856 	/* Do this only on innermost construct for combined ones.  */;
8857       else if (ort == ORT_SIMD)
8858 	{
8859 	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
8860 						 (splay_tree_key) decl);
8861 	  omp_is_private (gimplify_omp_ctxp, decl,
8862 			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8863 			       != 1));
8864 	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8865 	    omp_notice_variable (gimplify_omp_ctxp, decl, true);
8866 	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8867 	    {
8868 	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8869 	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8870 	      unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
8871 	      if ((has_decl_expr
8872 		   && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8873 		  || omp_no_lastprivate (gimplify_omp_ctxp))
8874 		{
8875 		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8876 		  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8877 		}
8878 	      struct gimplify_omp_ctx *outer
8879 		= gimplify_omp_ctxp->outer_context;
8880 	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8881 		{
8882 		  if (outer->region_type == ORT_WORKSHARE
8883 		      && outer->combined_loop)
8884 		    {
8885 		      n = splay_tree_lookup (outer->variables,
8886 					     (splay_tree_key)decl);
8887 		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8888 			{
8889 			  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8890 			  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8891 			}
8892 		      else
8893 			{
8894 			  struct gimplify_omp_ctx *octx = outer->outer_context;
8895 			  if (octx
8896 			      && octx->region_type == ORT_COMBINED_PARALLEL
8897 			      && octx->outer_context
8898 			      && (octx->outer_context->region_type
8899 				  == ORT_WORKSHARE)
8900 			      && octx->outer_context->combined_loop)
8901 			    {
8902 			      octx = octx->outer_context;
8903 			      n = splay_tree_lookup (octx->variables,
8904 						     (splay_tree_key)decl);
8905 			      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8906 				{
8907 				  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8908 				  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8909 				}
8910 			    }
8911 			}
8912 		    }
8913 		}
8914 
8915 	      OMP_CLAUSE_DECL (c) = decl;
8916 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8917 	      OMP_FOR_CLAUSES (for_stmt) = c;
8918 	      omp_add_variable (gimplify_omp_ctxp, decl, flags);
8919 	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8920 		{
8921 		  if (outer->region_type == ORT_WORKSHARE
8922 		      && outer->combined_loop)
8923 		    {
8924 		      if (outer->outer_context
8925 			  && (outer->outer_context->region_type
8926 			      == ORT_COMBINED_PARALLEL))
8927 			outer = outer->outer_context;
8928 		      else if (omp_check_private (outer, decl, false))
8929 			outer = NULL;
8930 		    }
8931 		  else if (((outer->region_type & ORT_TASK) != 0)
8932 			   && outer->combined_loop
8933 			   && !omp_check_private (gimplify_omp_ctxp,
8934 						  decl, false))
8935 		    ;
8936 		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
8937 		    {
8938 		      omp_notice_variable (outer, decl, true);
8939 		      outer = NULL;
8940 		    }
8941 		  if (outer)
8942 		    {
8943 		      n = splay_tree_lookup (outer->variables,
8944 					     (splay_tree_key)decl);
8945 		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8946 			{
8947 			  omp_add_variable (outer, decl,
8948 					    GOVD_LASTPRIVATE | GOVD_SEEN);
8949 			  if (outer->region_type == ORT_COMBINED_PARALLEL
8950 			      && outer->outer_context
8951 			      && (outer->outer_context->region_type
8952 				  == ORT_WORKSHARE)
8953 			      && outer->outer_context->combined_loop)
8954 			    {
8955 			      outer = outer->outer_context;
8956 			      n = splay_tree_lookup (outer->variables,
8957 						     (splay_tree_key)decl);
8958 			      if (omp_check_private (outer, decl, false))
8959 				outer = NULL;
8960 			      else if (n == NULL
8961 				       || ((n->value & GOVD_DATA_SHARE_CLASS)
8962 					   == 0))
8963 				omp_add_variable (outer, decl,
8964 						  GOVD_LASTPRIVATE
8965 						  | GOVD_SEEN);
8966 			      else
8967 				outer = NULL;
8968 			    }
8969 			  if (outer && outer->outer_context
8970 			      && (outer->outer_context->region_type
8971 				  == ORT_COMBINED_TEAMS))
8972 			    {
8973 			      outer = outer->outer_context;
8974 			      n = splay_tree_lookup (outer->variables,
8975 						     (splay_tree_key)decl);
8976 			      if (n == NULL
8977 				  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8978 				omp_add_variable (outer, decl,
8979 						  GOVD_SHARED | GOVD_SEEN);
8980 			      else
8981 				outer = NULL;
8982 			    }
8983 			  if (outer && outer->outer_context)
8984 			    omp_notice_variable (outer->outer_context, decl,
8985 						 true);
8986 			}
8987 		    }
8988 		}
8989 	    }
8990 	  else
8991 	    {
8992 	      bool lastprivate
8993 		= (!has_decl_expr
8994 		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8995 		  && !omp_no_lastprivate (gimplify_omp_ctxp);
8996 	      struct gimplify_omp_ctx *outer
8997 		= gimplify_omp_ctxp->outer_context;
8998 	      if (outer && lastprivate)
8999 		{
9000 		  if (outer->region_type == ORT_WORKSHARE
9001 		      && outer->combined_loop)
9002 		    {
9003 		      n = splay_tree_lookup (outer->variables,
9004 					     (splay_tree_key)decl);
9005 		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9006 			{
9007 			  lastprivate = false;
9008 			  outer = NULL;
9009 			}
9010 		      else if (outer->outer_context
9011 			       && (outer->outer_context->region_type
9012 				   == ORT_COMBINED_PARALLEL))
9013 			outer = outer->outer_context;
9014 		      else if (omp_check_private (outer, decl, false))
9015 			outer = NULL;
9016 		    }
9017 		  else if (((outer->region_type & ORT_TASK) != 0)
9018 			   && outer->combined_loop
9019 			   && !omp_check_private (gimplify_omp_ctxp,
9020 						  decl, false))
9021 		    ;
9022 		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
9023 		    {
9024 		      omp_notice_variable (outer, decl, true);
9025 		      outer = NULL;
9026 		    }
9027 		  if (outer)
9028 		    {
9029 		      n = splay_tree_lookup (outer->variables,
9030 					     (splay_tree_key)decl);
9031 		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9032 			{
9033 			  omp_add_variable (outer, decl,
9034 					    GOVD_LASTPRIVATE | GOVD_SEEN);
9035 			  if (outer->region_type == ORT_COMBINED_PARALLEL
9036 			      && outer->outer_context
9037 			      && (outer->outer_context->region_type
9038 				  == ORT_WORKSHARE)
9039 			      && outer->outer_context->combined_loop)
9040 			    {
9041 			      outer = outer->outer_context;
9042 			      n = splay_tree_lookup (outer->variables,
9043 						     (splay_tree_key)decl);
9044 			      if (omp_check_private (outer, decl, false))
9045 				outer = NULL;
9046 			      else if (n == NULL
9047 				       || ((n->value & GOVD_DATA_SHARE_CLASS)
9048 					   == 0))
9049 				omp_add_variable (outer, decl,
9050 						  GOVD_LASTPRIVATE
9051 						  | GOVD_SEEN);
9052 			      else
9053 				outer = NULL;
9054 			    }
9055 			  if (outer && outer->outer_context
9056 			      && (outer->outer_context->region_type
9057 				  == ORT_COMBINED_TEAMS))
9058 			    {
9059 			      outer = outer->outer_context;
9060 			      n = splay_tree_lookup (outer->variables,
9061 						     (splay_tree_key)decl);
9062 			      if (n == NULL
9063 				  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9064 				omp_add_variable (outer, decl,
9065 						  GOVD_SHARED | GOVD_SEEN);
9066 			      else
9067 				outer = NULL;
9068 			    }
9069 			  if (outer && outer->outer_context)
9070 			    omp_notice_variable (outer->outer_context, decl,
9071 						 true);
9072 			}
9073 		    }
9074 		}
9075 
9076 	      c = build_omp_clause (input_location,
9077 				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
9078 						: OMP_CLAUSE_PRIVATE);
9079 	      OMP_CLAUSE_DECL (c) = decl;
9080 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9081 	      OMP_FOR_CLAUSES (for_stmt) = c;
9082 	      omp_add_variable (gimplify_omp_ctxp, decl,
9083 				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9084 				| GOVD_EXPLICIT | GOVD_SEEN);
9085 	      c = NULL_TREE;
9086 	    }
9087 	}
9088       else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9089 	omp_notice_variable (gimplify_omp_ctxp, decl, true);
9090       else
9091 	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9092 
9093       /* If DECL is not a gimple register, create a temporary variable to act
9094 	 as an iteration counter.  This is valid, since DECL cannot be
9095 	 modified in the body of the loop.  Similarly for any iteration vars
9096 	 in simd with collapse > 1 where the iterator vars must be
9097 	 lastprivate.  */
9098       if (orig_for_stmt != for_stmt)
9099 	var = decl;
9100       else if (!is_gimple_reg (decl)
9101 	       || (ort == ORT_SIMD
9102 		   && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9103 	{
9104 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9105 	  /* Make sure omp_add_variable is not called on it prematurely.
9106 	     We call it ourselves a few lines later.  */
9107 	  gimplify_omp_ctxp = NULL;
9108 	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9109 	  gimplify_omp_ctxp = ctx;
9110 	  TREE_OPERAND (t, 0) = var;
9111 
9112 	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9113 
9114 	  if (ort == ORT_SIMD
9115 	      && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9116 	    {
9117 	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9118 	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9119 	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9120 	      OMP_CLAUSE_DECL (c2) = var;
9121 	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9122 	      OMP_FOR_CLAUSES (for_stmt) = c2;
9123 	      omp_add_variable (gimplify_omp_ctxp, var,
9124 				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9125 	      if (c == NULL_TREE)
9126 		{
9127 		  c = c2;
9128 		  c2 = NULL_TREE;
9129 		}
9130 	    }
9131 	  else
9132 	    omp_add_variable (gimplify_omp_ctxp, var,
9133 			      GOVD_PRIVATE | GOVD_SEEN);
9134 	}
9135       else
9136 	var = decl;
9137 
9138       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9139 			    is_gimple_val, fb_rvalue);
9140       ret = MIN (ret, tret);
9141       if (ret == GS_ERROR)
9142 	return ret;
9143 
9144       /* Handle OMP_FOR_COND.  */
9145       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9146       gcc_assert (COMPARISON_CLASS_P (t));
9147       gcc_assert (TREE_OPERAND (t, 0) == decl);
9148 
9149       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9150 			    is_gimple_val, fb_rvalue);
9151       ret = MIN (ret, tret);
9152 
9153       /* Handle OMP_FOR_INCR.  */
9154       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9155       switch (TREE_CODE (t))
9156 	{
9157 	case PREINCREMENT_EXPR:
9158 	case POSTINCREMENT_EXPR:
9159 	  {
9160 	    tree decl = TREE_OPERAND (t, 0);
9161 	    /* c_omp_for_incr_canonicalize_ptr() should have been
9162 	       called to massage things appropriately.  */
9163 	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
9164 
9165 	    if (orig_for_stmt != for_stmt)
9166 	      break;
9167 	    t = build_int_cst (TREE_TYPE (decl), 1);
9168 	    if (c)
9169 	      OMP_CLAUSE_LINEAR_STEP (c) = t;
9170 	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
9171 	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
9172 	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
9173 	    break;
9174 	  }
9175 
9176 	case PREDECREMENT_EXPR:
9177 	case POSTDECREMENT_EXPR:
9178 	  /* c_omp_for_incr_canonicalize_ptr() should have been
9179 	     called to massage things appropriately.  */
9180 	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
9181 	  if (orig_for_stmt != for_stmt)
9182 	    break;
9183 	  t = build_int_cst (TREE_TYPE (decl), -1);
9184 	  if (c)
9185 	    OMP_CLAUSE_LINEAR_STEP (c) = t;
9186 	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
9187 	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
9188 	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
9189 	  break;
9190 
9191 	case MODIFY_EXPR:
9192 	  gcc_assert (TREE_OPERAND (t, 0) == decl);
9193 	  TREE_OPERAND (t, 0) = var;
9194 
9195 	  t = TREE_OPERAND (t, 1);
9196 	  switch (TREE_CODE (t))
9197 	    {
9198 	    case PLUS_EXPR:
9199 	      if (TREE_OPERAND (t, 1) == decl)
9200 		{
9201 		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
9202 		  TREE_OPERAND (t, 0) = var;
9203 		  break;
9204 		}
9205 
9206 	      /* Fallthru.  */
9207 	    case MINUS_EXPR:
9208 	    case POINTER_PLUS_EXPR:
9209 	      gcc_assert (TREE_OPERAND (t, 0) == decl);
9210 	      TREE_OPERAND (t, 0) = var;
9211 	      break;
9212 	    default:
9213 	      gcc_unreachable ();
9214 	    }
9215 
9216 	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9217 				is_gimple_val, fb_rvalue);
9218 	  ret = MIN (ret, tret);
9219 	  if (c)
9220 	    {
9221 	      tree step = TREE_OPERAND (t, 1);
9222 	      tree stept = TREE_TYPE (decl);
9223 	      if (POINTER_TYPE_P (stept))
9224 		stept = sizetype;
9225 	      step = fold_convert (stept, step);
9226 	      if (TREE_CODE (t) == MINUS_EXPR)
9227 		step = fold_build1 (NEGATE_EXPR, stept, step);
9228 	      OMP_CLAUSE_LINEAR_STEP (c) = step;
9229 	      if (step != TREE_OPERAND (t, 1))
9230 		{
9231 		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
9232 					&for_pre_body, NULL,
9233 					is_gimple_val, fb_rvalue);
9234 		  ret = MIN (ret, tret);
9235 		}
9236 	    }
9237 	  break;
9238 
9239 	default:
9240 	  gcc_unreachable ();
9241 	}
9242 
9243       if (c2)
9244 	{
9245 	  gcc_assert (c);
9246 	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
9247 	}
9248 
9249       if ((var != decl || collapse > 1) && orig_for_stmt == for_stmt)
9250 	{
9251 	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
9252 	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9253 		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
9254 		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9255 		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
9256 		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
9257 		&& OMP_CLAUSE_DECL (c) == decl)
9258 	      {
9259 		if (is_doacross && (collapse == 1 || i >= collapse))
9260 		  t = var;
9261 		else
9262 		  {
9263 		    t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9264 		    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9265 		    gcc_assert (TREE_OPERAND (t, 0) == var);
9266 		    t = TREE_OPERAND (t, 1);
9267 		    gcc_assert (TREE_CODE (t) == PLUS_EXPR
9268 				|| TREE_CODE (t) == MINUS_EXPR
9269 				|| TREE_CODE (t) == POINTER_PLUS_EXPR);
9270 		    gcc_assert (TREE_OPERAND (t, 0) == var);
9271 		    t = build2 (TREE_CODE (t), TREE_TYPE (decl),
9272 				is_doacross ? var : decl,
9273 				TREE_OPERAND (t, 1));
9274 		  }
9275 		gimple_seq *seq;
9276 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9277 		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
9278 		else
9279 		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
9280 		gimplify_assign (decl, t, seq);
9281 	    }
9282 	}
9283     }
9284 
9285   BITMAP_FREE (has_decl_expr);
9286 
9287   if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9288     {
9289       push_gimplify_context ();
9290       if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
9291 	{
9292 	  OMP_FOR_BODY (orig_for_stmt)
9293 	    = build3 (BIND_EXPR, void_type_node, NULL,
9294 		      OMP_FOR_BODY (orig_for_stmt), NULL);
9295 	  TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
9296 	}
9297     }
9298 
9299   gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
9300 					 &for_body);
9301 
9302   if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9303     {
9304       if (gimple_code (g) == GIMPLE_BIND)
9305 	pop_gimplify_context (g);
9306       else
9307 	pop_gimplify_context (NULL);
9308     }
9309 
9310   if (orig_for_stmt != for_stmt)
9311     for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9312       {
9313 	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9314 	decl = TREE_OPERAND (t, 0);
9315 	struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9316 	if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9317 	  gimplify_omp_ctxp = ctx->outer_context;
9318 	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9319 	gimplify_omp_ctxp = ctx;
9320 	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
9321 	TREE_OPERAND (t, 0) = var;
9322 	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9323 	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
9324 	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
9325       }
9326 
9327   gimplify_adjust_omp_clauses (pre_p, for_body,
9328 			       &OMP_FOR_CLAUSES (orig_for_stmt),
9329 			       TREE_CODE (orig_for_stmt));
9330 
9331   int kind;
9332   switch (TREE_CODE (orig_for_stmt))
9333     {
9334     case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
9335     case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
9336     case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
9337     case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
9338     case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
9339     case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
9340     case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
9341     default:
9342       gcc_unreachable ();
9343     }
9344   gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
9345 			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
9346 			       for_pre_body);
9347   if (orig_for_stmt != for_stmt)
9348     gimple_omp_for_set_combined_p (gfor, true);
9349   if (gimplify_omp_ctxp
9350       && (gimplify_omp_ctxp->combined_loop
9351 	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9352 	      && gimplify_omp_ctxp->outer_context
9353 	      && gimplify_omp_ctxp->outer_context->combined_loop)))
9354     {
9355       gimple_omp_for_set_combined_into_p (gfor, true);
9356       if (gimplify_omp_ctxp->combined_loop)
9357 	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
9358       else
9359 	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
9360     }
9361 
9362   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9363     {
9364       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9365       gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
9366       gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
9367       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9368       gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
9369       gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
9370       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9371       gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
9372     }
9373 
9374   /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
9375      constructs with GIMPLE_OMP_TASK sandwiched in between them.
9376      The outer taskloop stands for computing the number of iterations,
9377      counts for collapsed loops and holding taskloop specific clauses.
9378      The task construct stands for the effect of data sharing on the
9379      explicit task it creates and the inner taskloop stands for expansion
9380      of the static loop inside of the explicit task construct.  */
9381   if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9382     {
9383       tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
9384       tree task_clauses = NULL_TREE;
9385       tree c = *gfor_clauses_ptr;
9386       tree *gtask_clauses_ptr = &task_clauses;
9387       tree outer_for_clauses = NULL_TREE;
9388       tree *gforo_clauses_ptr = &outer_for_clauses;
9389       for (; c; c = OMP_CLAUSE_CHAIN (c))
9390 	switch (OMP_CLAUSE_CODE (c))
9391 	  {
9392 	  /* These clauses are allowed on task, move them there.  */
9393 	  case OMP_CLAUSE_SHARED:
9394 	  case OMP_CLAUSE_FIRSTPRIVATE:
9395 	  case OMP_CLAUSE_DEFAULT:
9396 	  case OMP_CLAUSE_IF:
9397 	  case OMP_CLAUSE_UNTIED:
9398 	  case OMP_CLAUSE_FINAL:
9399 	  case OMP_CLAUSE_MERGEABLE:
9400 	  case OMP_CLAUSE_PRIORITY:
9401 	    *gtask_clauses_ptr = c;
9402 	    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9403 	    break;
9404 	  case OMP_CLAUSE_PRIVATE:
9405 	    if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
9406 	      {
9407 		/* We want private on outer for and firstprivate
9408 		   on task.  */
9409 		*gtask_clauses_ptr
9410 		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9411 				      OMP_CLAUSE_FIRSTPRIVATE);
9412 		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9413 		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9414 		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9415 		*gforo_clauses_ptr = c;
9416 		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9417 	      }
9418 	    else
9419 	      {
9420 		*gtask_clauses_ptr = c;
9421 		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9422 	      }
9423 	    break;
9424 	  /* These clauses go into outer taskloop clauses.  */
9425 	  case OMP_CLAUSE_GRAINSIZE:
9426 	  case OMP_CLAUSE_NUM_TASKS:
9427 	  case OMP_CLAUSE_NOGROUP:
9428 	    *gforo_clauses_ptr = c;
9429 	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9430 	    break;
9431 	  /* Taskloop clause we duplicate on both taskloops.  */
9432 	  case OMP_CLAUSE_COLLAPSE:
9433 	    *gfor_clauses_ptr = c;
9434 	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9435 	    *gforo_clauses_ptr = copy_node (c);
9436 	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9437 	    break;
9438 	  /* For lastprivate, keep the clause on inner taskloop, and add
9439 	     a shared clause on task.  If the same decl is also firstprivate,
9440 	     add also firstprivate clause on the inner taskloop.  */
9441 	  case OMP_CLAUSE_LASTPRIVATE:
9442 	    if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
9443 	      {
9444 		/* For taskloop C++ lastprivate IVs, we want:
9445 		   1) private on outer taskloop
9446 		   2) firstprivate and shared on task
9447 		   3) lastprivate on inner taskloop  */
9448 		*gtask_clauses_ptr
9449 		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9450 				      OMP_CLAUSE_FIRSTPRIVATE);
9451 		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9452 		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9453 		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9454 		OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
9455 		*gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9456 						       OMP_CLAUSE_PRIVATE);
9457 		OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
9458 		OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
9459 		TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
9460 		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9461 	      }
9462 	    *gfor_clauses_ptr = c;
9463 	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9464 	    *gtask_clauses_ptr
9465 	      = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
9466 	    OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9467 	    if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9468 	      OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
9469 	    gtask_clauses_ptr
9470 	      = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9471 	    break;
9472 	  default:
9473 	    gcc_unreachable ();
9474 	  }
9475       *gfor_clauses_ptr = NULL_TREE;
9476       *gtask_clauses_ptr = NULL_TREE;
9477       *gforo_clauses_ptr = NULL_TREE;
9478       g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
9479       g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
9480 				 NULL_TREE, NULL_TREE, NULL_TREE);
9481       gimple_omp_task_set_taskloop_p (g, true);
9482       g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
9483       gomp_for *gforo
9484 	= gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
9485 				gimple_omp_for_collapse (gfor),
9486 				gimple_omp_for_pre_body (gfor));
9487       gimple_omp_for_set_pre_body (gfor, NULL);
9488       gimple_omp_for_set_combined_p (gforo, true);
9489       gimple_omp_for_set_combined_into_p (gfor, true);
9490       for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
9491 	{
9492 	  tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
9493 	  tree v = create_tmp_var (type);
9494 	  gimple_omp_for_set_index (gforo, i, v);
9495 	  t = unshare_expr (gimple_omp_for_initial (gfor, i));
9496 	  gimple_omp_for_set_initial (gforo, i, t);
9497 	  gimple_omp_for_set_cond (gforo, i,
9498 				   gimple_omp_for_cond (gfor, i));
9499 	  t = unshare_expr (gimple_omp_for_final (gfor, i));
9500 	  gimple_omp_for_set_final (gforo, i, t);
9501 	  t = unshare_expr (gimple_omp_for_incr (gfor, i));
9502 	  gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
9503 	  TREE_OPERAND (t, 0) = v;
9504 	  gimple_omp_for_set_incr (gforo, i, t);
9505 	  t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
9506 	  OMP_CLAUSE_DECL (t) = v;
9507 	  OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
9508 	  gimple_omp_for_set_clauses (gforo, t);
9509 	}
9510       gimplify_seq_add_stmt (pre_p, gforo);
9511     }
9512   else
9513     gimplify_seq_add_stmt (pre_p, gfor);
9514   if (ret != GS_ALL_DONE)
9515     return GS_ERROR;
9516   *expr_p = NULL_TREE;
9517   return GS_ALL_DONE;
9518 }
9519 
9520 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
9521    of OMP_TARGET's body.  */
9522 
9523 static tree
9524 find_omp_teams (tree *tp, int *walk_subtrees, void *)
9525 {
9526   *walk_subtrees = 0;
9527   switch (TREE_CODE (*tp))
9528     {
9529     case OMP_TEAMS:
9530       return *tp;
9531     case BIND_EXPR:
9532     case STATEMENT_LIST:
9533       *walk_subtrees = 1;
9534       break;
9535     default:
9536       break;
9537     }
9538   return NULL_TREE;
9539 }
9540 
9541 /* Helper function of optimize_target_teams, determine if the expression
9542    can be computed safely before the target construct on the host.  */
9543 
static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  /* Types themselves involve no runtime evaluation; nothing to reject.  */
  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Reject decls that are erroneous, non-integral, have a value
	 expression, are thread-local, or whose evaluation is not
	 side-effect free — the host-side value could differ from the
	 value seen inside the target region, or hoisting the read
	 before the construct would change behavior.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* Declare-target globals have a device copy; the host copy is
	 not guaranteed to match, so they are not computable here.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* Consult the target construct's data-sharing table: the host
	 value is usable if the decl is firstprivate or always-mapped-to
	 on the target region.  */
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  /* Not seen yet on the construct; scalars default to
	     firstprivate when target_map_scalars_firstprivate, which
	     makes the host value valid.  */
	  if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      /* Integral constants are trivially computable on the host.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* A TARGET_EXPR is acceptable only as a plain wrapper around a
	 VAR_DECL slot with no initializer; classify the slot itself.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      /* Operands are walked separately; here only the result type of
	 the arithmetic node itself needs checking.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
9634 
9635 /* Try to determine if the num_teams and/or thread_limit expressions
9636    can have their values determined already before entering the
9637    target construct.
9638    INTEGER_CSTs trivially are,
9639    integral decls that are firstprivate (explicitly or implicitly)
9640    or explicitly map(always, to:) or map(always, tofrom:) on the target
9641    region too, and expressions involving simple arithmetics on those
9642    too, function calls are not ok, dereferencing something neither etc.
9643    Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
9644    EXPR based on what we find:
9645    0 stands for clause not specified at all, use implementation default
9646    -1 stands for value that can't be determined easily before entering
9647       the target construct.
9648    If teams construct is not present at all, use 1 for num_teams
9649    and 0 for thread_limit (only one team is involved, and the thread
9650    limit is implementation defined.  */
9651 
static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  /* Locate a teams construct directly inside the target body, looking
     through binds and statement lists only.  */
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  /* 0 means "clause not specified, use implementation default".  */
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  /* Remember the target context so it can be restored after gimplifying
     clause operands in the enclosing (host) context.  */
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    /* No teams construct: exactly one team runs; thread_limit stays
       implementation defined (0).  */
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	/* Only num_teams and thread_limit clauses are of interest;
	   P points at whichever value we are computing.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    /* Constants are usable as-is.  */
	    *p = expr;
	    continue;
	  }
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    /* Some subexpression is not computable on the host before
	       entering the construct; -1 signals that.  */
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	/* Gimplify the expression in the enclosing context, so any
	   statements it needs are emitted on the host, before the
	   target construct.  */
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	/* Reuse the gimplified value on the teams clause too, except
	   for decls/TARGET_EXPRs which must keep their original form.  */
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Attach the computed values as clauses on the target construct
     itself (prepended, so num_teams ends up first).  */
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
9714 
9715 /* Gimplify the gross structure of several OMP constructs.  */
9716 
static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Map the construct to the region type used for data-sharing
     analysis during clause scanning.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  /* Precompute num_teams/thread_limit on the host where possible
     (must run after clause scanning so data-sharing info exists).  */
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
    {
      /* Target-ish regions get their own gimplification context so
	 temporaries created in the body belong to the region's bind.  */
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  /* Structured data regions must unmap on every exit path,
	     including exceptions: wrap the body in a try/finally whose
	     cleanup calls the matching data-end builtin.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  /* Prune/adjust the clause list now that the body has been seen.  */
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));

  /* Build the GIMPLE statement for the construct.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
9838 
9839 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
9840    target update constructs.  */
9841 
9842 static void
9843 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
9844 {
9845   tree expr = *expr_p;
9846   int kind;
9847   gomp_target *stmt;
9848   enum omp_region_type ort = ORT_WORKSHARE;
9849 
9850   switch (TREE_CODE (expr))
9851     {
9852     case OACC_ENTER_DATA:
9853     case OACC_EXIT_DATA:
9854       kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
9855       ort = ORT_ACC;
9856       break;
9857     case OACC_UPDATE:
9858       kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
9859       ort = ORT_ACC;
9860       break;
9861     case OMP_TARGET_UPDATE:
9862       kind = GF_OMP_TARGET_KIND_UPDATE;
9863       break;
9864     case OMP_TARGET_ENTER_DATA:
9865       kind = GF_OMP_TARGET_KIND_ENTER_DATA;
9866       break;
9867     case OMP_TARGET_EXIT_DATA:
9868       kind = GF_OMP_TARGET_KIND_EXIT_DATA;
9869       break;
9870     default:
9871       gcc_unreachable ();
9872     }
9873   gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
9874 			     ort, TREE_CODE (expr));
9875   gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
9876 			       TREE_CODE (expr));
9877   stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
9878 
9879   gimplify_seq_add_stmt (pre_p, stmt);
9880   *expr_p = NULL_TREE;
9881 }
9882 
9883 /* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
9884    stabilized the lhs of the atomic operation as *ADDR.  Return true if
9885    EXPR is this stabilized form.  */
9886 
9887 static bool
9888 goa_lhs_expr_p (tree expr, tree addr)
9889 {
9890   /* Also include casts to other type variants.  The C front end is fond
9891      of adding these for e.g. volatile variables.  This is like
9892      STRIP_TYPE_NOPS but includes the main variant lookup.  */
9893   STRIP_USELESS_TYPE_CONVERSION (expr);
9894 
9895   if (TREE_CODE (expr) == INDIRECT_REF)
9896     {
9897       expr = TREE_OPERAND (expr, 0);
9898       while (expr != addr
9899 	     && (CONVERT_EXPR_P (expr)
9900 		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
9901 	     && TREE_CODE (expr) == TREE_CODE (addr)
9902 	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
9903 	{
9904 	  expr = TREE_OPERAND (expr, 0);
9905 	  addr = TREE_OPERAND (addr, 0);
9906 	}
9907       if (expr == addr)
9908 	return true;
9909       return (TREE_CODE (addr) == ADDR_EXPR
9910 	      && TREE_CODE (expr) == ADDR_EXPR
9911 	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
9912     }
9913   if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
9914     return true;
9915   return false;
9916 }
9917 
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The expression is the lhs itself: replace it with the temporary
     register that receives the atomically loaded value.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* Already a gimple value; nothing to stabilize.  */
  if (is_gimple_val (expr))
    return 0;

  /* Recurse into the operands, OR-ing together whether any of them
     contained the lhs.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU: binary codes also process operand 0 below.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU: the binary truth codes also process operand 0.  */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* The lhs occurred nowhere in EXPR: evaluate EXPR into a temporary
     in PRE_P so it is computed once, before the atomic region.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
9988 
/* Gimplify an OMP_ATOMIC statement.  *EXPR_P is one of the OMP_ATOMIC*
   tree codes; operand 0 is the (front-end stabilized) address of the
   lhs, and operand 1 -- except for OMP_ATOMIC_READ -- is the rhs
   expression.  The statement is lowered into a
   GIMPLE_OMP_ATOMIC_LOAD / GIMPLE_OMP_ATOMIC_STORE pair.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* TMP_LOAD receives the value atomically loaded from *ADDR.  */
  tmp_load = create_tmp_reg (type);
  /* Replace occurrences of the lhs within RHS by TMP_LOAD, and force
     everything else in RHS into temporaries evaluated in PRE_P, ahead
     of the atomic region.  */
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* For an atomic read, the value "stored" is simply the loaded one.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  /* A seq_cst clause applies to both halves of the pair.  */
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  /* Reads and captures produce a value; record which half of the
     load/store pair supplies it and make *EXPR_P that value.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
10043 
10044 /* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
10045    body, and adding some EH bits.  */
10046 
10047 static enum gimplify_status
10048 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10049 {
10050   tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10051   gimple *body_stmt;
10052   gtransaction *trans_stmt;
10053   gimple_seq body = NULL;
10054   int subcode = 0;
10055 
10056   /* Wrap the transaction body in a BIND_EXPR so we have a context
10057      where to put decls for OMP.  */
10058   if (TREE_CODE (tbody) != BIND_EXPR)
10059     {
10060       tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10061       TREE_SIDE_EFFECTS (bind) = 1;
10062       SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10063       TRANSACTION_EXPR_BODY (expr) = bind;
10064     }
10065 
10066   push_gimplify_context ();
10067   temp = voidify_wrapper_expr (*expr_p, NULL);
10068 
10069   body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10070   pop_gimplify_context (body_stmt);
10071 
10072   trans_stmt = gimple_build_transaction (body);
10073   if (TRANSACTION_EXPR_OUTER (expr))
10074     subcode = GTMA_IS_OUTER;
10075   else if (TRANSACTION_EXPR_RELAXED (expr))
10076     subcode = GTMA_IS_RELAXED;
10077   gimple_transaction_set_subcode (trans_stmt, subcode);
10078 
10079   gimplify_seq_add_stmt (pre_p, trans_stmt);
10080 
10081   if (temp)
10082     {
10083       *expr_p = temp;
10084       return GS_OK;
10085     }
10086 
10087   *expr_p = NULL_TREE;
10088   return GS_ALL_DONE;
10089 }
10090 
/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Diagnose invalid uses of the depend(sink:)/depend(source) clauses
   and rewrite the variables in depend(sink:) vectors into the loop
   iteration variables recorded in gimplify_omp_ctxp->loop_iter_var.

   Return the gimplified GIMPLE_OMP_ORDERED tuple, or a GIMPLE_NOP if
   any diagnostics were issued.  */

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	/* depend(sink:)/depend(source) require an enclosing loop with an
	   ordered(n) clause; loop_iter_var is empty when there is none.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ()
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %<depend%> clause must be "
		      "closely nested inside a loop with %<ordered%> clause "
		      "with a parameter");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	  {
	    bool fail = false;
	    /* loop_iter_var stores pairs <original var, replacement var>
	       for each collapsed loop level, hence the length () / 2 and
	       2 * i indexing below.  */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		/* Substitute the recorded replacement iteration var.  */
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    /* Too few sink variables is also an error (too many were
	       skipped via the continue above and diagnosed here too).  */
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %<depend(sink)%> "
			  "clause does not match number of "
			  "iteration variables");
		failures++;
	      }
	    sink_c = c;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	  {
	    /* At most one depend(source) clause may appear.  */
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %<depend(source)%> clause on an "
			  "%<ordered%> construct");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  /* depend(source) and depend(sink:) are mutually exclusive on one
     construct.  */
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%<depend(source)%> clause specified together with "
		"%<depend(sink:)%> clauses on the same construct");
      failures++;
    }

  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
10179 
10180 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
10181    expression produces a value to be used as an operand inside a GIMPLE
10182    statement, the value will be stored back in *EXPR_P.  This value will
10183    be a tree of class tcc_declaration, tcc_constant, tcc_reference or
10184    an SSA_NAME.  The corresponding sequence of GIMPLE statements is
10185    emitted in PRE_P and POST_P.
10186 
10187    Additionally, this process may overwrite parts of the input
10188    expression during gimplification.  Ideally, it should be
10189    possible to do non-destructive gimplification.
10190 
10191    EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
10192       the expression needs to evaluate to a value to be used as
10193       an operand in a GIMPLE statement, this value will be stored in
10194       *EXPR_P on exit.  This happens when the caller specifies one
10195       of fb_lvalue or fb_rvalue fallback flags.
10196 
10197    PRE_P will contain the sequence of GIMPLE statements corresponding
10198        to the evaluation of EXPR and all the side-effects that must
10199        be executed before the main expression.  On exit, the last
10200        statement of PRE_P is the core statement being gimplified.  For
10201        instance, when gimplifying 'if (++a)' the last statement in
10202        PRE_P will be 'if (t.1)' where t.1 is the result of
10203        pre-incrementing 'a'.
10204 
10205    POST_P will contain the sequence of GIMPLE statements corresponding
10206        to the evaluation of all the side-effects that must be executed
10207        after the main expression.  If this is NULL, the post
10208        side-effects are stored at the end of PRE_P.
10209 
10210        The reason why the output is split in two is to handle post
10211        side-effects explicitly.  In some cases, an expression may have
10212        inner and outer post side-effects which need to be emitted in
10213        an order different from the one given by the recursive
10214        traversal.  For instance, for the expression (*p--)++ the post
10215        side-effects of '--' must actually occur *after* the post
10216        side-effects of '++'.  However, gimplification will first visit
10217        the inner expression, so if a separate POST sequence was not
10218        used, the resulting sequence would be:
10219 
10220        	    1	t.1 = *p
10221        	    2	p = p - 1
10222        	    3	t.2 = t.1 + 1
10223        	    4	*p = t.2
10224 
10225        However, the post-decrement operation in line #2 must not be
10226        evaluated until after the store to *p at line #4, so the
10227        correct sequence should be:
10228 
10229        	    1	t.1 = *p
10230        	    2	t.2 = t.1 + 1
10231        	    3	*p = t.2
10232        	    4	p = p - 1
10233 
10234        So, by specifying a separate post queue, it is possible
10235        to emit the post side-effects in the correct order.
10236        If POST_P is NULL, an internal queue will be used.  Before
10237        returning to the caller, the sequence POST_P is appended to
10238        the main output sequence PRE_P.
10239 
10240    GIMPLE_TEST_F points to a function that takes a tree T and
10241        returns nonzero if T is in the GIMPLE form requested by the
10242        caller.  The GIMPLE predicates are in gimple.c.
10243 
10244    FALLBACK tells the function what sort of a temporary we want if
10245        gimplification cannot produce an expression that complies with
10246        GIMPLE_TEST_F.
10247 
10248        fb_none means that no temporary should be generated
10249        fb_rvalue means that an rvalue is OK to generate
10250        fb_lvalue means that an lvalue is OK to generate
10251        fb_either means that either is OK, but an lvalue is preferable.
10252        fb_mayfail means that gimplification may fail (in which case
10253        GS_ERROR will be returned)
10254 
10255    The return value is either GS_ERROR or GS_ALL_DONE, since this
10256    function iterates until EXPR is completely gimplified or an error
10257    occurs.  */
10258 
10259 enum gimplify_status
10260 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
10261 	       bool (*gimple_test_f) (tree), fallback_t fallback)
10262 {
10263   tree tmp;
10264   gimple_seq internal_pre = NULL;
10265   gimple_seq internal_post = NULL;
10266   tree save_expr;
10267   bool is_statement;
10268   location_t saved_location;
10269   enum gimplify_status ret;
10270   gimple_stmt_iterator pre_last_gsi, post_last_gsi;
10271 
10272   save_expr = *expr_p;
10273   if (save_expr == NULL_TREE)
10274     return GS_ALL_DONE;
10275 
10276   /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
10277   is_statement = gimple_test_f == is_gimple_stmt;
10278   if (is_statement)
10279     gcc_assert (pre_p);
10280 
10281   /* Consistency checks.  */
10282   if (gimple_test_f == is_gimple_reg)
10283     gcc_assert (fallback & (fb_rvalue | fb_lvalue));
10284   else if (gimple_test_f == is_gimple_val
10285            || gimple_test_f == is_gimple_call_addr
10286            || gimple_test_f == is_gimple_condexpr
10287            || gimple_test_f == is_gimple_mem_rhs
10288            || gimple_test_f == is_gimple_mem_rhs_or_call
10289            || gimple_test_f == is_gimple_reg_rhs
10290            || gimple_test_f == is_gimple_reg_rhs_or_call
10291            || gimple_test_f == is_gimple_asm_val
10292 	   || gimple_test_f == is_gimple_mem_ref_addr)
10293     gcc_assert (fallback & fb_rvalue);
10294   else if (gimple_test_f == is_gimple_min_lval
10295 	   || gimple_test_f == is_gimple_lvalue)
10296     gcc_assert (fallback & fb_lvalue);
10297   else if (gimple_test_f == is_gimple_addressable)
10298     gcc_assert (fallback & fb_either);
10299   else if (gimple_test_f == is_gimple_stmt)
10300     gcc_assert (fallback == fb_none);
10301   else
10302     {
10303       /* We should have recognized the GIMPLE_TEST_F predicate to
10304 	 know what kind of fallback to use in case a temporary is
10305 	 needed to hold the value or address of *EXPR_P.  */
10306       gcc_unreachable ();
10307     }
10308 
10309   /* We used to check the predicate here and return immediately if it
10310      succeeds.  This is wrong; the design is for gimplification to be
10311      idempotent, and for the predicates to only test for valid forms, not
10312      whether they are fully simplified.  */
10313   if (pre_p == NULL)
10314     pre_p = &internal_pre;
10315 
10316   if (post_p == NULL)
10317     post_p = &internal_post;
10318 
10319   /* Remember the last statements added to PRE_P and POST_P.  Every
10320      new statement added by the gimplification helpers needs to be
10321      annotated with location information.  To centralize the
10322      responsibility, we remember the last statement that had been
10323      added to both queues before gimplifying *EXPR_P.  If
10324      gimplification produces new statements in PRE_P and POST_P, those
10325      statements will be annotated with the same location information
10326      as *EXPR_P.  */
10327   pre_last_gsi = gsi_last (*pre_p);
10328   post_last_gsi = gsi_last (*post_p);
10329 
10330   saved_location = input_location;
10331   if (save_expr != error_mark_node
10332       && EXPR_HAS_LOCATION (*expr_p))
10333     input_location = EXPR_LOCATION (*expr_p);
10334 
10335   /* Loop over the specific gimplifiers until the toplevel node
10336      remains the same.  */
10337   do
10338     {
10339       /* Strip away as many useless type conversions as possible
10340 	 at the toplevel.  */
10341       STRIP_USELESS_TYPE_CONVERSION (*expr_p);
10342 
10343       /* Remember the expr.  */
10344       save_expr = *expr_p;
10345 
10346       /* Die, die, die, my darling.  */
10347       if (save_expr == error_mark_node
10348 	  || (TREE_TYPE (save_expr)
10349 	      && TREE_TYPE (save_expr) == error_mark_node))
10350 	{
10351 	  ret = GS_ERROR;
10352 	  break;
10353 	}
10354 
10355       /* Do any language-specific gimplification.  */
10356       ret = ((enum gimplify_status)
10357 	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
10358       if (ret == GS_OK)
10359 	{
10360 	  if (*expr_p == NULL_TREE)
10361 	    break;
10362 	  if (*expr_p != save_expr)
10363 	    continue;
10364 	}
10365       else if (ret != GS_UNHANDLED)
10366 	break;
10367 
10368       /* Make sure that all the cases set 'ret' appropriately.  */
10369       ret = GS_UNHANDLED;
10370       switch (TREE_CODE (*expr_p))
10371 	{
10372 	  /* First deal with the special cases.  */
10373 
10374 	case POSTINCREMENT_EXPR:
10375 	case POSTDECREMENT_EXPR:
10376 	case PREINCREMENT_EXPR:
10377 	case PREDECREMENT_EXPR:
10378 	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
10379 					fallback != fb_none,
10380 					TREE_TYPE (*expr_p));
10381 	  break;
10382 
10383 	case VIEW_CONVERT_EXPR:
10384 	  if (is_gimple_reg_type (TREE_TYPE (*expr_p))
10385 	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
10386 	    {
10387 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10388 				   post_p, is_gimple_val, fb_rvalue);
10389 	      recalculate_side_effects (*expr_p);
10390 	      break;
10391 	    }
10392 	  /* Fallthru.  */
10393 
10394 	case ARRAY_REF:
10395 	case ARRAY_RANGE_REF:
10396 	case REALPART_EXPR:
10397 	case IMAGPART_EXPR:
10398 	case COMPONENT_REF:
10399 	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
10400 					fallback ? fallback : fb_rvalue);
10401 	  break;
10402 
10403 	case COND_EXPR:
10404 	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);
10405 
10406 	  /* C99 code may assign to an array in a structure value of a
10407 	     conditional expression, and this has undefined behavior
10408 	     only on execution, so create a temporary if an lvalue is
10409 	     required.  */
10410 	  if (fallback == fb_lvalue)
10411 	    {
10412 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10413 	      mark_addressable (*expr_p);
10414 	      ret = GS_OK;
10415 	    }
10416 	  break;
10417 
10418 	case CALL_EXPR:
10419 	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
10420 
10421 	  /* C99 code may assign to an array in a structure returned
10422 	     from a function, and this has undefined behavior only on
10423 	     execution, so create a temporary if an lvalue is
10424 	     required.  */
10425 	  if (fallback == fb_lvalue)
10426 	    {
10427 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10428 	      mark_addressable (*expr_p);
10429 	      ret = GS_OK;
10430 	    }
10431 	  break;
10432 
10433 	case TREE_LIST:
10434 	  gcc_unreachable ();
10435 
10436 	case COMPOUND_EXPR:
10437 	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
10438 	  break;
10439 
10440 	case COMPOUND_LITERAL_EXPR:
10441 	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
10442 						gimple_test_f, fallback);
10443 	  break;
10444 
10445 	case MODIFY_EXPR:
10446 	case INIT_EXPR:
10447 	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
10448 				      fallback != fb_none);
10449 	  break;
10450 
10451 	case TRUTH_ANDIF_EXPR:
10452 	case TRUTH_ORIF_EXPR:
10453 	  {
10454 	    /* Preserve the original type of the expression and the
10455 	       source location of the outer expression.  */
10456 	    tree org_type = TREE_TYPE (*expr_p);
10457 	    *expr_p = gimple_boolify (*expr_p);
10458 	    *expr_p = build3_loc (input_location, COND_EXPR,
10459 				  org_type, *expr_p,
10460 				  fold_convert_loc
10461 				    (input_location,
10462 				     org_type, boolean_true_node),
10463 				  fold_convert_loc
10464 				    (input_location,
10465 				     org_type, boolean_false_node));
10466 	    ret = GS_OK;
10467 	    break;
10468 	  }
10469 
10470 	case TRUTH_NOT_EXPR:
10471 	  {
10472 	    tree type = TREE_TYPE (*expr_p);
10473 	    /* The parsers are careful to generate TRUTH_NOT_EXPR
10474 	       only with operands that are always zero or one.
10475 	       We do not fold here but handle the only interesting case
10476 	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
10477 	    *expr_p = gimple_boolify (*expr_p);
10478 	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
10479 	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
10480 				    TREE_TYPE (*expr_p),
10481 				    TREE_OPERAND (*expr_p, 0));
10482 	    else
10483 	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
10484 				    TREE_TYPE (*expr_p),
10485 				    TREE_OPERAND (*expr_p, 0),
10486 				    build_int_cst (TREE_TYPE (*expr_p), 1));
10487 	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
10488 	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
10489 	    ret = GS_OK;
10490 	    break;
10491 	  }
10492 
10493 	case ADDR_EXPR:
10494 	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
10495 	  break;
10496 
10497 	case ANNOTATE_EXPR:
10498 	  {
10499 	    tree cond = TREE_OPERAND (*expr_p, 0);
10500 	    tree kind = TREE_OPERAND (*expr_p, 1);
10501 	    tree type = TREE_TYPE (cond);
10502 	    if (!INTEGRAL_TYPE_P (type))
10503 	      {
10504 		*expr_p = cond;
10505 		ret = GS_OK;
10506 		break;
10507 	      }
10508 	    tree tmp = create_tmp_var (type);
10509 	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
10510 	    gcall *call
10511 	      = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
10512 	    gimple_call_set_lhs (call, tmp);
10513 	    gimplify_seq_add_stmt (pre_p, call);
10514 	    *expr_p = tmp;
10515 	    ret = GS_ALL_DONE;
10516 	    break;
10517 	  }
10518 
10519 	case VA_ARG_EXPR:
10520 	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
10521 	  break;
10522 
10523 	CASE_CONVERT:
10524 	  if (IS_EMPTY_STMT (*expr_p))
10525 	    {
10526 	      ret = GS_ALL_DONE;
10527 	      break;
10528 	    }
10529 
10530 	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
10531 	      || fallback == fb_none)
10532 	    {
10533 	      /* Just strip a conversion to void (or in void context) and
10534 		 try again.  */
10535 	      *expr_p = TREE_OPERAND (*expr_p, 0);
10536 	      ret = GS_OK;
10537 	      break;
10538 	    }
10539 
10540 	  ret = gimplify_conversion (expr_p);
10541 	  if (ret == GS_ERROR)
10542 	    break;
10543 	  if (*expr_p != save_expr)
10544 	    break;
10545 	  /* FALLTHRU */
10546 
10547 	case FIX_TRUNC_EXPR:
10548 	  /* unary_expr: ... | '(' cast ')' val | ...  */
10549 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10550 			       is_gimple_val, fb_rvalue);
10551 	  recalculate_side_effects (*expr_p);
10552 	  break;
10553 
10554 	case INDIRECT_REF:
10555 	  {
10556 	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
10557 	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
10558 	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
10559 
10560 	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
10561 	    if (*expr_p != save_expr)
10562 	      {
10563 		ret = GS_OK;
10564 		break;
10565 	      }
10566 
10567 	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10568 				 is_gimple_reg, fb_rvalue);
10569 	    if (ret == GS_ERROR)
10570 	      break;
10571 
10572 	    recalculate_side_effects (*expr_p);
10573 	    *expr_p = fold_build2_loc (input_location, MEM_REF,
10574 				       TREE_TYPE (*expr_p),
10575 				       TREE_OPERAND (*expr_p, 0),
10576 				       build_int_cst (saved_ptr_type, 0));
10577 	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
10578 	    TREE_THIS_NOTRAP (*expr_p) = notrap;
10579 	    ret = GS_OK;
10580 	    break;
10581 	  }
10582 
	/* We arrive here through the various re-gimplification paths.  */
10584 	case MEM_REF:
10585 	  /* First try re-folding the whole thing.  */
10586 	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
10587 			     TREE_OPERAND (*expr_p, 0),
10588 			     TREE_OPERAND (*expr_p, 1));
10589 	  if (tmp)
10590 	    {
10591 	      REF_REVERSE_STORAGE_ORDER (tmp)
10592 	        = REF_REVERSE_STORAGE_ORDER (*expr_p);
10593 	      *expr_p = tmp;
10594 	      recalculate_side_effects (*expr_p);
10595 	      ret = GS_OK;
10596 	      break;
10597 	    }
10598 	  /* Avoid re-gimplifying the address operand if it is already
10599 	     in suitable form.  Re-gimplifying would mark the address
10600 	     operand addressable.  Always gimplify when not in SSA form
10601 	     as we still may have to gimplify decls with value-exprs.  */
10602 	  if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
10603 	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
10604 	    {
10605 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10606 				   is_gimple_mem_ref_addr, fb_rvalue);
10607 	      if (ret == GS_ERROR)
10608 		break;
10609 	    }
10610 	  recalculate_side_effects (*expr_p);
10611 	  ret = GS_ALL_DONE;
10612 	  break;
10613 
10614 	/* Constants need not be gimplified.  */
10615 	case INTEGER_CST:
10616 	case REAL_CST:
10617 	case FIXED_CST:
10618 	case STRING_CST:
10619 	case COMPLEX_CST:
10620 	case VECTOR_CST:
10621 	  /* Drop the overflow flag on constants, we do not want
10622 	     that in the GIMPLE IL.  */
10623 	  if (TREE_OVERFLOW_P (*expr_p))
10624 	    *expr_p = drop_tree_overflow (*expr_p);
10625 	  ret = GS_ALL_DONE;
10626 	  break;
10627 
10628 	case CONST_DECL:
10629 	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
10630 	     CONST_DECL node.  Otherwise the decl is replaceable by its
10631 	     value.  */
10632 	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
10633 	  if (fallback & fb_lvalue)
10634 	    ret = GS_ALL_DONE;
10635 	  else
10636 	    {
10637 	      *expr_p = DECL_INITIAL (*expr_p);
10638 	      ret = GS_OK;
10639 	    }
10640 	  break;
10641 
10642 	case DECL_EXPR:
10643 	  ret = gimplify_decl_expr (expr_p, pre_p);
10644 	  break;
10645 
10646 	case BIND_EXPR:
10647 	  ret = gimplify_bind_expr (expr_p, pre_p);
10648 	  break;
10649 
10650 	case LOOP_EXPR:
10651 	  ret = gimplify_loop_expr (expr_p, pre_p);
10652 	  break;
10653 
10654 	case SWITCH_EXPR:
10655 	  ret = gimplify_switch_expr (expr_p, pre_p);
10656 	  break;
10657 
10658 	case EXIT_EXPR:
10659 	  ret = gimplify_exit_expr (expr_p);
10660 	  break;
10661 
10662 	case GOTO_EXPR:
10663 	  /* If the target is not LABEL, then it is a computed jump
10664 	     and the target needs to be gimplified.  */
10665 	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
10666 	    {
10667 	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
10668 				   NULL, is_gimple_val, fb_rvalue);
10669 	      if (ret == GS_ERROR)
10670 		break;
10671 	    }
10672 	  gimplify_seq_add_stmt (pre_p,
10673 			  gimple_build_goto (GOTO_DESTINATION (*expr_p)));
10674 	  ret = GS_ALL_DONE;
10675 	  break;
10676 
10677 	case PREDICT_EXPR:
10678 	  gimplify_seq_add_stmt (pre_p,
10679 			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
10680 					      PREDICT_EXPR_OUTCOME (*expr_p)));
10681 	  ret = GS_ALL_DONE;
10682 	  break;
10683 
10684 	case LABEL_EXPR:
10685 	  ret = GS_ALL_DONE;
10686 	  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
10687 		      == current_function_decl);
10688 	  gimplify_seq_add_stmt (pre_p,
10689 			  gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
10690 	  break;
10691 
10692 	case CASE_LABEL_EXPR:
10693 	  ret = gimplify_case_label_expr (expr_p, pre_p);
10694 	  break;
10695 
10696 	case RETURN_EXPR:
10697 	  ret = gimplify_return_expr (*expr_p, pre_p);
10698 	  break;
10699 
10700 	case CONSTRUCTOR:
10701 	  /* Don't reduce this in place; let gimplify_init_constructor work its
	     magic.  But if we're just elaborating this for side effects, just
10703 	     gimplify any element that has side-effects.  */
10704 	  if (fallback == fb_none)
10705 	    {
10706 	      unsigned HOST_WIDE_INT ix;
10707 	      tree val;
10708 	      tree temp = NULL_TREE;
10709 	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
10710 		if (TREE_SIDE_EFFECTS (val))
10711 		  append_to_statement_list (val, &temp);
10712 
10713 	      *expr_p = temp;
10714 	      ret = temp ? GS_OK : GS_ALL_DONE;
10715 	    }
10716 	  /* C99 code may assign to an array in a constructed
10717 	     structure or union, and this has undefined behavior only
10718 	     on execution, so create a temporary if an lvalue is
10719 	     required.  */
10720 	  else if (fallback == fb_lvalue)
10721 	    {
10722 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10723 	      mark_addressable (*expr_p);
10724 	      ret = GS_OK;
10725 	    }
10726 	  else
10727 	    ret = GS_ALL_DONE;
10728 	  break;
10729 
10730 	  /* The following are special cases that are not handled by the
10731 	     original GIMPLE grammar.  */
10732 
10733 	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
10734 	     eliminated.  */
10735 	case SAVE_EXPR:
10736 	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
10737 	  break;
10738 
10739 	case BIT_FIELD_REF:
10740 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10741 			       post_p, is_gimple_lvalue, fb_either);
10742 	  recalculate_side_effects (*expr_p);
10743 	  break;
10744 
10745 	case TARGET_MEM_REF:
10746 	  {
10747 	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
10748 
10749 	    if (TMR_BASE (*expr_p))
10750 	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
10751 				  post_p, is_gimple_mem_ref_addr, fb_either);
10752 	    if (TMR_INDEX (*expr_p))
10753 	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
10754 				  post_p, is_gimple_val, fb_rvalue);
10755 	    if (TMR_INDEX2 (*expr_p))
10756 	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
10757 				  post_p, is_gimple_val, fb_rvalue);
10758 	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
10759 	    ret = MIN (r0, r1);
10760 	  }
10761 	  break;
10762 
10763 	case NON_LVALUE_EXPR:
10764 	  /* This should have been stripped above.  */
10765 	  gcc_unreachable ();
10766 
10767 	case ASM_EXPR:
10768 	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
10769 	  break;
10770 
10771 	case TRY_FINALLY_EXPR:
10772 	case TRY_CATCH_EXPR:
10773 	  {
10774 	    gimple_seq eval, cleanup;
10775 	    gtry *try_;
10776 
10777 	    /* Calls to destructors are generated automatically in FINALLY/CATCH
10778 	       block. They should have location as UNKNOWN_LOCATION. However,
10779 	       gimplify_call_expr will reset these call stmts to input_location
10780 	       if it finds stmt's location is unknown. To prevent resetting for
10781 	       destructors, we set the input_location to unknown.
10782 	       Note that this only affects the destructor calls in FINALLY/CATCH
10783 	       block, and will automatically reset to its original value by the
10784 	       end of gimplify_expr.  */
10785 	    input_location = UNKNOWN_LOCATION;
10786 	    eval = cleanup = NULL;
10787 	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
10788 	    gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
10789 	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
10790 	    if (gimple_seq_empty_p (cleanup))
10791 	      {
10792 		gimple_seq_add_seq (pre_p, eval);
10793 		ret = GS_ALL_DONE;
10794 		break;
10795 	      }
10796 	    try_ = gimple_build_try (eval, cleanup,
10797 				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
10798 				     ? GIMPLE_TRY_FINALLY
10799 				     : GIMPLE_TRY_CATCH);
10800 	    if (EXPR_HAS_LOCATION (save_expr))
10801 	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
10802 	    else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
10803 	      gimple_set_location (try_, saved_location);
10804 	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
10805 	      gimple_try_set_catch_is_cleanup (try_,
10806 					       TRY_CATCH_IS_CLEANUP (*expr_p));
10807 	    gimplify_seq_add_stmt (pre_p, try_);
10808 	    ret = GS_ALL_DONE;
10809 	    break;
10810 	  }
10811 
10812 	case CLEANUP_POINT_EXPR:
10813 	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
10814 	  break;
10815 
10816 	case TARGET_EXPR:
10817 	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
10818 	  break;
10819 
10820 	case CATCH_EXPR:
10821 	  {
10822 	    gimple *c;
10823 	    gimple_seq handler = NULL;
10824 	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
10825 	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
10826 	    gimplify_seq_add_stmt (pre_p, c);
10827 	    ret = GS_ALL_DONE;
10828 	    break;
10829 	  }
10830 
10831 	case EH_FILTER_EXPR:
10832 	  {
10833 	    gimple *ehf;
10834 	    gimple_seq failure = NULL;
10835 
10836 	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
10837 	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
10838 	    gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
10839 	    gimplify_seq_add_stmt (pre_p, ehf);
10840 	    ret = GS_ALL_DONE;
10841 	    break;
10842 	  }
10843 
10844 	case OBJ_TYPE_REF:
10845 	  {
10846 	    enum gimplify_status r0, r1;
10847 	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
10848 				post_p, is_gimple_val, fb_rvalue);
10849 	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
10850 				post_p, is_gimple_val, fb_rvalue);
10851 	    TREE_SIDE_EFFECTS (*expr_p) = 0;
10852 	    ret = MIN (r0, r1);
10853 	  }
10854 	  break;
10855 
10856 	case LABEL_DECL:
10857 	  /* We get here when taking the address of a label.  We mark
10858 	     the label as "forced"; meaning it can never be removed and
10859 	     it is a potential target for any computed goto.  */
10860 	  FORCED_LABEL (*expr_p) = 1;
10861 	  ret = GS_ALL_DONE;
10862 	  break;
10863 
10864 	case STATEMENT_LIST:
10865 	  ret = gimplify_statement_list (expr_p, pre_p);
10866 	  break;
10867 
10868 	case WITH_SIZE_EXPR:
10869 	  {
10870 	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10871 			   post_p == &internal_post ? NULL : post_p,
10872 			   gimple_test_f, fallback);
10873 	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10874 			   is_gimple_val, fb_rvalue);
10875 	    ret = GS_ALL_DONE;
10876 	  }
10877 	  break;
10878 
10879 	case VAR_DECL:
10880 	case PARM_DECL:
10881 	  ret = gimplify_var_or_parm_decl (expr_p);
10882 	  break;
10883 
10884 	case RESULT_DECL:
10885 	  /* When within an OMP context, notice uses of variables.  */
10886 	  if (gimplify_omp_ctxp)
10887 	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
10888 	  ret = GS_ALL_DONE;
10889 	  break;
10890 
10891 	case SSA_NAME:
10892 	  /* Allow callbacks into the gimplifier during optimization.  */
10893 	  ret = GS_ALL_DONE;
10894 	  break;
10895 
10896 	case OMP_PARALLEL:
10897 	  gimplify_omp_parallel (expr_p, pre_p);
10898 	  ret = GS_ALL_DONE;
10899 	  break;
10900 
10901 	case OMP_TASK:
10902 	  gimplify_omp_task (expr_p, pre_p);
10903 	  ret = GS_ALL_DONE;
10904 	  break;
10905 
10906 	case OMP_FOR:
10907 	case OMP_SIMD:
10908 	case CILK_SIMD:
10909 	case CILK_FOR:
10910 	case OMP_DISTRIBUTE:
10911 	case OMP_TASKLOOP:
10912 	case OACC_LOOP:
10913 	  ret = gimplify_omp_for (expr_p, pre_p);
10914 	  break;
10915 
10916 	case OACC_CACHE:
10917 	  gimplify_oacc_cache (expr_p, pre_p);
10918 	  ret = GS_ALL_DONE;
10919 	  break;
10920 
10921 	case OACC_DECLARE:
10922 	  gimplify_oacc_declare (expr_p, pre_p);
10923 	  ret = GS_ALL_DONE;
10924 	  break;
10925 
10926 	case OACC_HOST_DATA:
10927 	case OACC_DATA:
10928 	case OACC_KERNELS:
10929 	case OACC_PARALLEL:
10930 	case OMP_SECTIONS:
10931 	case OMP_SINGLE:
10932 	case OMP_TARGET:
10933 	case OMP_TARGET_DATA:
10934 	case OMP_TEAMS:
10935 	  gimplify_omp_workshare (expr_p, pre_p);
10936 	  ret = GS_ALL_DONE;
10937 	  break;
10938 
10939 	case OACC_ENTER_DATA:
10940 	case OACC_EXIT_DATA:
10941 	case OACC_UPDATE:
10942 	case OMP_TARGET_UPDATE:
10943 	case OMP_TARGET_ENTER_DATA:
10944 	case OMP_TARGET_EXIT_DATA:
10945 	  gimplify_omp_target_update (expr_p, pre_p);
10946 	  ret = GS_ALL_DONE;
10947 	  break;
10948 
10949 	case OMP_SECTION:
10950 	case OMP_MASTER:
10951 	case OMP_TASKGROUP:
10952 	case OMP_ORDERED:
10953 	case OMP_CRITICAL:
10954 	  {
10955 	    gimple_seq body = NULL;
10956 	    gimple *g;
10957 
10958 	    gimplify_and_add (OMP_BODY (*expr_p), &body);
10959 	    switch (TREE_CODE (*expr_p))
10960 	      {
10961 	      case OMP_SECTION:
10962 	        g = gimple_build_omp_section (body);
10963 	        break;
10964 	      case OMP_MASTER:
10965 	        g = gimple_build_omp_master (body);
10966 		break;
10967 	      case OMP_TASKGROUP:
10968 		{
10969 		  gimple_seq cleanup = NULL;
10970 		  tree fn
10971 		    = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
10972 		  g = gimple_build_call (fn, 0);
10973 		  gimple_seq_add_stmt (&cleanup, g);
10974 		  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10975 		  body = NULL;
10976 		  gimple_seq_add_stmt (&body, g);
10977 		  g = gimple_build_omp_taskgroup (body);
10978 		}
10979 		break;
10980 	      case OMP_ORDERED:
10981 		g = gimplify_omp_ordered (*expr_p, body);
10982 		break;
10983 	      case OMP_CRITICAL:
10984 		gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
10985 					   pre_p, ORT_WORKSHARE, OMP_CRITICAL);
10986 		gimplify_adjust_omp_clauses (pre_p, body,
10987 					     &OMP_CRITICAL_CLAUSES (*expr_p),
10988 					     OMP_CRITICAL);
10989 		g = gimple_build_omp_critical (body,
10990 		    			       OMP_CRITICAL_NAME (*expr_p),
10991 		    			       OMP_CRITICAL_CLAUSES (*expr_p));
10992 		break;
10993 	      default:
10994 		gcc_unreachable ();
10995 	      }
10996 	    gimplify_seq_add_stmt (pre_p, g);
10997 	    ret = GS_ALL_DONE;
10998 	    break;
10999 	  }
11000 
11001 	case OMP_ATOMIC:
11002 	case OMP_ATOMIC_READ:
11003 	case OMP_ATOMIC_CAPTURE_OLD:
11004 	case OMP_ATOMIC_CAPTURE_NEW:
11005 	  ret = gimplify_omp_atomic (expr_p, pre_p);
11006 	  break;
11007 
11008 	case TRANSACTION_EXPR:
11009 	  ret = gimplify_transaction (expr_p, pre_p);
11010 	  break;
11011 
11012 	case TRUTH_AND_EXPR:
11013 	case TRUTH_OR_EXPR:
11014 	case TRUTH_XOR_EXPR:
11015 	  {
11016 	    tree orig_type = TREE_TYPE (*expr_p);
11017 	    tree new_type, xop0, xop1;
11018 	    *expr_p = gimple_boolify (*expr_p);
11019 	    new_type = TREE_TYPE (*expr_p);
11020 	    if (!useless_type_conversion_p (orig_type, new_type))
11021 	      {
11022 		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11023 		ret = GS_OK;
11024 		break;
11025 	      }
11026 
11027 	  /* Boolified binary truth expressions are semantically equivalent
11028 	     to bitwise binary expressions.  Canonicalize them to the
11029 	     bitwise variant.  */
11030 	    switch (TREE_CODE (*expr_p))
11031 	      {
11032 	      case TRUTH_AND_EXPR:
11033 		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11034 		break;
11035 	      case TRUTH_OR_EXPR:
11036 		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11037 		break;
11038 	      case TRUTH_XOR_EXPR:
11039 		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11040 		break;
11041 	      default:
11042 		break;
11043 	      }
11044 	    /* Now make sure that operands have compatible type to
11045 	       expression's new_type.  */
11046 	    xop0 = TREE_OPERAND (*expr_p, 0);
11047 	    xop1 = TREE_OPERAND (*expr_p, 1);
11048 	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11049 	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11050 							    new_type,
11051 	      						    xop0);
11052 	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11053 	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11054 							    new_type,
11055 	      						    xop1);
11056 	    /* Continue classified as tcc_binary.  */
11057 	    goto expr_2;
11058 	  }
11059 
11060 	case VEC_COND_EXPR:
11061 	  {
11062 	    enum gimplify_status r0, r1, r2;
11063 
11064 	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11065 				post_p, is_gimple_condexpr, fb_rvalue);
11066 	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11067 				post_p, is_gimple_val, fb_rvalue);
11068 	    r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11069 				post_p, is_gimple_val, fb_rvalue);
11070 
11071 	    ret = MIN (MIN (r0, r1), r2);
11072 	    recalculate_side_effects (*expr_p);
11073 	  }
11074 	  break;
11075 
11076 	case FMA_EXPR:
11077 	case VEC_PERM_EXPR:
11078 	  /* Classified as tcc_expression.  */
11079 	  goto expr_3;
11080 
11081 	case POINTER_PLUS_EXPR:
11082 	  {
11083 	    enum gimplify_status r0, r1;
11084 	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11085 				post_p, is_gimple_val, fb_rvalue);
11086 	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11087 				post_p, is_gimple_val, fb_rvalue);
11088 	    recalculate_side_effects (*expr_p);
11089 	    ret = MIN (r0, r1);
11090 	    break;
11091 	  }
11092 
11093 	case CILK_SYNC_STMT:
11094 	  {
11095 	    if (!fn_contains_cilk_spawn_p (cfun))
11096 	      {
11097 		error_at (EXPR_LOCATION (*expr_p),
11098 			  "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11099 		ret = GS_ERROR;
11100 	      }
11101 	    else
11102 	      {
11103 		gimplify_cilk_sync (expr_p, pre_p);
11104 		ret = GS_ALL_DONE;
11105 	      }
11106 	    break;
11107 	  }
11108 
11109 	default:
11110 	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
11111 	    {
11112 	    case tcc_comparison:
11113 	      /* Handle comparison of objects of non scalar mode aggregates
11114 	     	 with a call to memcmp.  It would be nice to only have to do
11115 	     	 this for variable-sized objects, but then we'd have to allow
11116 	     	 the same nest of reference nodes we allow for MODIFY_EXPR and
11117 	     	 that's too complex.
11118 
11119 		 Compare scalar mode aggregates as scalar mode values.  Using
11120 		 memcmp for them would be very inefficient at best, and is
11121 		 plain wrong if bitfields are involved.  */
11122 		{
11123 		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
11124 
11125 		  /* Vector comparisons need no boolification.  */
11126 		  if (TREE_CODE (type) == VECTOR_TYPE)
11127 		    goto expr_2;
11128 		  else if (!AGGREGATE_TYPE_P (type))
11129 		    {
11130 		      tree org_type = TREE_TYPE (*expr_p);
11131 		      *expr_p = gimple_boolify (*expr_p);
11132 		      if (!useless_type_conversion_p (org_type,
11133 						      TREE_TYPE (*expr_p)))
11134 			{
11135 			  *expr_p = fold_convert_loc (input_location,
11136 						      org_type, *expr_p);
11137 			  ret = GS_OK;
11138 			}
11139 		      else
11140 			goto expr_2;
11141 		    }
11142 		  else if (TYPE_MODE (type) != BLKmode)
11143 		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
11144 		  else
11145 		    ret = gimplify_variable_sized_compare (expr_p);
11146 
11147 		  break;
11148 		}
11149 
11150 	    /* If *EXPR_P does not need to be special-cased, handle it
11151 	       according to its class.  */
11152 	    case tcc_unary:
11153 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11154 				   post_p, is_gimple_val, fb_rvalue);
11155 	      break;
11156 
11157 	    case tcc_binary:
11158 	    expr_2:
11159 	      {
11160 		enum gimplify_status r0, r1;
11161 
11162 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11163 		                    post_p, is_gimple_val, fb_rvalue);
11164 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11165 				    post_p, is_gimple_val, fb_rvalue);
11166 
11167 		ret = MIN (r0, r1);
11168 		break;
11169 	      }
11170 
11171 	    expr_3:
11172 	      {
11173 		enum gimplify_status r0, r1, r2;
11174 
11175 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11176 		                    post_p, is_gimple_val, fb_rvalue);
11177 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11178 				    post_p, is_gimple_val, fb_rvalue);
11179 		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11180 				    post_p, is_gimple_val, fb_rvalue);
11181 
11182 		ret = MIN (MIN (r0, r1), r2);
11183 		break;
11184 	      }
11185 
11186 	    case tcc_declaration:
11187 	    case tcc_constant:
11188 	      ret = GS_ALL_DONE;
11189 	      goto dont_recalculate;
11190 
11191 	    default:
11192 	      gcc_unreachable ();
11193 	    }
11194 
11195 	  recalculate_side_effects (*expr_p);
11196 
11197 	dont_recalculate:
11198 	  break;
11199 	}
11200 
11201       gcc_assert (*expr_p || ret != GS_OK);
11202     }
11203   while (ret == GS_OK);
11204 
11205   /* If we encountered an error_mark somewhere nested inside, either
11206      stub out the statement or propagate the error back out.  */
11207   if (ret == GS_ERROR)
11208     {
11209       if (is_statement)
11210 	*expr_p = NULL;
11211       goto out;
11212     }
11213 
11214   /* This was only valid as a return value from the langhook, which
11215      we handled.  Make sure it doesn't escape from any other context.  */
11216   gcc_assert (ret != GS_UNHANDLED);
11217 
11218   if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
11219     {
11220       /* We aren't looking for a value, and we don't have a valid
11221 	 statement.  If it doesn't have side-effects, throw it away.
11222 	 We can also get here with code such as "*&&L;", where L is
11223 	 a LABEL_DECL that is marked as FORCED_LABEL.  */
11224       if (TREE_CODE (*expr_p) == LABEL_DECL
11225 	  || !TREE_SIDE_EFFECTS (*expr_p))
11226 	*expr_p = NULL;
11227       else if (!TREE_THIS_VOLATILE (*expr_p))
11228 	{
11229 	  /* This is probably a _REF that contains something nested that
11230 	     has side effects.  Recurse through the operands to find it.  */
11231 	  enum tree_code code = TREE_CODE (*expr_p);
11232 
11233 	  switch (code)
11234 	    {
11235 	    case COMPONENT_REF:
11236 	    case REALPART_EXPR:
11237 	    case IMAGPART_EXPR:
11238 	    case VIEW_CONVERT_EXPR:
11239 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11240 			     gimple_test_f, fallback);
11241 	      break;
11242 
11243 	    case ARRAY_REF:
11244 	    case ARRAY_RANGE_REF:
11245 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11246 			     gimple_test_f, fallback);
11247 	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11248 			     gimple_test_f, fallback);
11249 	      break;
11250 
11251 	    default:
11252 	       /* Anything else with side-effects must be converted to
11253 		  a valid statement before we get here.  */
11254 	      gcc_unreachable ();
11255 	    }
11256 
11257 	  *expr_p = NULL;
11258 	}
11259       else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
11260 	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
11261 	{
11262 	  /* Historically, the compiler has treated a bare reference
11263 	     to a non-BLKmode volatile lvalue as forcing a load.  */
11264 	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
11265 
11266 	  /* Normally, we do not want to create a temporary for a
11267 	     TREE_ADDRESSABLE type because such a type should not be
11268 	     copied by bitwise-assignment.  However, we make an
11269 	     exception here, as all we are doing here is ensuring that
11270 	     we read the bytes that make up the type.  We use
11271 	     create_tmp_var_raw because create_tmp_var will abort when
11272 	     given a TREE_ADDRESSABLE type.  */
11273 	  tree tmp = create_tmp_var_raw (type, "vol");
11274 	  gimple_add_tmp_var (tmp);
11275 	  gimplify_assign (tmp, *expr_p, pre_p);
11276 	  *expr_p = NULL;
11277 	}
11278       else
11279 	/* We can't do anything useful with a volatile reference to
11280 	   an incomplete type, so just throw it away.  Likewise for
11281 	   a BLKmode type, since any implicit inner load should
11282 	   already have been turned into an explicit one by the
11283 	   gimplification process.  */
11284 	*expr_p = NULL;
11285     }
11286 
11287   /* If we are gimplifying at the statement level, we're done.  Tack
11288      everything together and return.  */
11289   if (fallback == fb_none || is_statement)
11290     {
11291       /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
11292          it out for GC to reclaim it.  */
11293       *expr_p = NULL_TREE;
11294 
11295       if (!gimple_seq_empty_p (internal_pre)
11296 	  || !gimple_seq_empty_p (internal_post))
11297 	{
11298 	  gimplify_seq_add_seq (&internal_pre, internal_post);
11299 	  gimplify_seq_add_seq (pre_p, internal_pre);
11300 	}
11301 
11302       /* The result of gimplifying *EXPR_P is going to be the last few
11303 	 statements in *PRE_P and *POST_P.  Add location information
11304 	 to all the statements that were added by the gimplification
11305 	 helpers.  */
11306       if (!gimple_seq_empty_p (*pre_p))
11307 	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
11308 
11309       if (!gimple_seq_empty_p (*post_p))
11310 	annotate_all_with_location_after (*post_p, post_last_gsi,
11311 					  input_location);
11312 
11313       goto out;
11314     }
11315 
11316 #ifdef ENABLE_GIMPLE_CHECKING
11317   if (*expr_p)
11318     {
11319       enum tree_code code = TREE_CODE (*expr_p);
11320       /* These expressions should already be in gimple IR form.  */
11321       gcc_assert (code != MODIFY_EXPR
11322 		  && code != ASM_EXPR
11323 		  && code != BIND_EXPR
11324 		  && code != CATCH_EXPR
11325 		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
11326 		  && code != EH_FILTER_EXPR
11327 		  && code != GOTO_EXPR
11328 		  && code != LABEL_EXPR
11329 		  && code != LOOP_EXPR
11330 		  && code != SWITCH_EXPR
11331 		  && code != TRY_FINALLY_EXPR
11332 		  && code != OACC_PARALLEL
11333 		  && code != OACC_KERNELS
11334 		  && code != OACC_DATA
11335 		  && code != OACC_HOST_DATA
11336 		  && code != OACC_DECLARE
11337 		  && code != OACC_UPDATE
11338 		  && code != OACC_ENTER_DATA
11339 		  && code != OACC_EXIT_DATA
11340 		  && code != OACC_CACHE
11341 		  && code != OMP_CRITICAL
11342 		  && code != OMP_FOR
11343 		  && code != OACC_LOOP
11344 		  && code != OMP_MASTER
11345 		  && code != OMP_TASKGROUP
11346 		  && code != OMP_ORDERED
11347 		  && code != OMP_PARALLEL
11348 		  && code != OMP_SECTIONS
11349 		  && code != OMP_SECTION
11350 		  && code != OMP_SINGLE);
11351     }
11352 #endif
11353 
11354   /* Otherwise we're gimplifying a subexpression, so the resulting
11355      value is interesting.  If it's a valid operand that matches
11356      GIMPLE_TEST_F, we're done. Unless we are handling some
11357      post-effects internally; if that's the case, we need to copy into
11358      a temporary before adding the post-effects to POST_P.  */
11359   if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
11360     goto out;
11361 
11362   /* Otherwise, we need to create a new temporary for the gimplified
11363      expression.  */
11364 
11365   /* We can't return an lvalue if we have an internal postqueue.  The
11366      object the lvalue refers to would (probably) be modified by the
11367      postqueue; we need to copy the value out first, which means an
11368      rvalue.  */
11369   if ((fallback & fb_lvalue)
11370       && gimple_seq_empty_p (internal_post)
11371       && is_gimple_addressable (*expr_p))
11372     {
11373       /* An lvalue will do.  Take the address of the expression, store it
11374 	 in a temporary, and replace the expression with an INDIRECT_REF of
11375 	 that temporary.  */
11376       tmp = build_fold_addr_expr_loc (input_location, *expr_p);
11377       gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
11378       *expr_p = build_simple_mem_ref (tmp);
11379     }
11380   else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
11381     {
11382       /* An rvalue will do.  Assign the gimplified expression into a
11383 	 new temporary TMP and replace the original expression with
11384 	 TMP.  First, make sure that the expression has a type so that
11385 	 it can be assigned into a temporary.  */
11386       gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
11387       *expr_p = get_formal_tmp_var (*expr_p, pre_p);
11388     }
11389   else
11390     {
11391 #ifdef ENABLE_GIMPLE_CHECKING
11392       if (!(fallback & fb_mayfail))
11393 	{
11394 	  fprintf (stderr, "gimplification failed:\n");
11395 	  print_generic_expr (stderr, *expr_p, 0);
11396 	  debug_tree (*expr_p);
11397 	  internal_error ("gimplification failed");
11398 	}
11399 #endif
11400       gcc_assert (fallback & fb_mayfail);
11401 
11402       /* If this is an asm statement, and the user asked for the
11403 	 impossible, don't die.  Fail and let gimplify_asm_expr
11404 	 issue an error.  */
11405       ret = GS_ERROR;
11406       goto out;
11407     }
11408 
11409   /* Make sure the temporary matches our predicate.  */
11410   gcc_assert ((*gimple_test_f) (*expr_p));
11411 
11412   if (!gimple_seq_empty_p (internal_post))
11413     {
11414       annotate_all_with_location (internal_post, input_location);
11415       gimplify_seq_add_seq (pre_p, internal_post);
11416     }
11417 
11418  out:
11419   input_location = saved_location;
11420   return ret;
11421 }
11422 
11423 /* Look through TYPE for variable-sized objects and gimplify each such
11424    size that we find.  Add to LIST_P any statements generated.  */
11425 
11426 void
11427 gimplify_type_sizes (tree type, gimple_seq *list_p)
11428 {
11429   tree field, t;
11430 
11431   if (type == NULL || type == error_mark_node)
11432     return;
11433 
11434   /* We first do the main variant, then copy into any other variants.  */
11435   type = TYPE_MAIN_VARIANT (type);
11436 
11437   /* Avoid infinite recursion.  */
11438   if (TYPE_SIZES_GIMPLIFIED (type))
11439     return;
11440 
11441   TYPE_SIZES_GIMPLIFIED (type) = 1;
11442 
11443   switch (TREE_CODE (type))
11444     {
11445     case INTEGER_TYPE:
11446     case ENUMERAL_TYPE:
11447     case BOOLEAN_TYPE:
11448     case REAL_TYPE:
11449     case FIXED_POINT_TYPE:
11450       gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
11451       gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
11452 
11453       for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11454 	{
11455 	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
11456 	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
11457 	}
11458       break;
11459 
11460     case ARRAY_TYPE:
11461       /* These types may not have declarations, so handle them here.  */
11462       gimplify_type_sizes (TREE_TYPE (type), list_p);
11463       gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
11464       /* Ensure VLA bounds aren't removed, for -O0 they should be variables
11465 	 with assigned stack slots, for -O1+ -g they should be tracked
11466 	 by VTA.  */
11467       if (!(TYPE_NAME (type)
11468 	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
11469 	    && DECL_IGNORED_P (TYPE_NAME (type)))
11470 	  && TYPE_DOMAIN (type)
11471 	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
11472 	{
11473 	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
11474 	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11475 	    DECL_IGNORED_P (t) = 0;
11476 	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11477 	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11478 	    DECL_IGNORED_P (t) = 0;
11479 	}
11480       break;
11481 
11482     case RECORD_TYPE:
11483     case UNION_TYPE:
11484     case QUAL_UNION_TYPE:
11485       for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
11486 	if (TREE_CODE (field) == FIELD_DECL)
11487 	  {
11488 	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
11489 	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
11490 	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
11491 	    gimplify_type_sizes (TREE_TYPE (field), list_p);
11492 	  }
11493       break;
11494 
11495     case POINTER_TYPE:
11496     case REFERENCE_TYPE:
11497 	/* We used to recurse on the pointed-to type here, which turned out to
11498 	   be incorrect because its definition might refer to variables not
11499 	   yet initialized at this point if a forward declaration is involved.
11500 
11501 	   It was actually useful for anonymous pointed-to types to ensure
11502 	   that the sizes evaluation dominates every possible later use of the
11503 	   values.  Restricting to such types here would be safe since there
11504 	   is no possible forward declaration around, but would introduce an
11505 	   undesirable middle-end semantic to anonymity.  We then defer to
11506 	   front-ends the responsibility of ensuring that the sizes are
11507 	   evaluated both early and late enough, e.g. by attaching artificial
11508 	   type declarations to the tree.  */
11509       break;
11510 
11511     default:
11512       break;
11513     }
11514 
11515   gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
11516   gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
11517 
11518   for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11519     {
11520       TYPE_SIZE (t) = TYPE_SIZE (type);
11521       TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
11522       TYPE_SIZES_GIMPLIFIED (t) = 1;
11523     }
11524 }
11525 
11526 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
11527    a size or position, has had all of its SAVE_EXPRs evaluated.
11528    We add any required statements to *STMT_P.  */
11529 
11530 void
11531 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
11532 {
11533   tree expr = *expr_p;
11534 
11535   /* We don't do anything if the value isn't there, is constant, or contains
11536      A PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
11537      a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
11538      will want to replace it with a new variable, but that will cause problems
11539      if this type is from outside the function.  It's OK to have that here.  */
11540   if (is_gimple_sizepos (expr))
11541     return;
11542 
11543   *expr_p = unshare_expr (expr);
11544 
11545   gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
11546 }
11547 
11548 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
11549    containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
11550    is true, also gimplify the parameters.  */
11551 
gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple *outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* No gimplification context may be live across calls to this function.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context ();

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      /* A function marked "omp declare target" is gimplified inside an
	 implicit target region context for its whole body.  */
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* If FNDECL is a nested function (it has an origin in the callgraph),
     set up the set used below to track VLAs of enclosing functions that
     this body references.  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  /* An empty body gimplifies to an empty sequence; emit a GIMPLE_NOP so
     there is at least one statement to wrap below.  */
  if (!outer_stmt)
    {
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been fully converted; clear it so GC can
     reclaim it.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Flush and release the nonlocal-VLA tracking state set up above.  */
  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  /* Release any OMP context still live, e.g. the implicit target
     context created for "omp declare target" functions above.  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
11672 
11673 typedef char *char_p; /* For DEF_VEC_P.  */
11674 
11675 /* Return whether we should exclude FNDECL from instrumentation.  */
11676 
11677 static bool
11678 flag_instrument_functions_exclude_p (tree fndecl)
11679 {
11680   vec<char_p> *v;
11681 
11682   v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
11683   if (v && v->length () > 0)
11684     {
11685       const char *name;
11686       int i;
11687       char *s;
11688 
11689       name = lang_hooks.decl_printable_name (fndecl, 0);
11690       FOR_EACH_VEC_ELT (*v, i, s)
11691 	if (strstr (name, s) != NULL)
11692 	  return true;
11693     }
11694 
11695   v = (vec<char_p> *) flag_instrument_functions_exclude_files;
11696   if (v && v->length () > 0)
11697     {
11698       const char *name;
11699       int i;
11700       char *s;
11701 
11702       name = DECL_SOURCE_FILE (fndecl);
11703       FOR_EACH_VEC_ELT (*v, i, s)
11704 	if (strstr (name, s) != NULL)
11705 	  return true;
11706     }
11707 
11708   return false;
11709 }
11710 
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  /* The function must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  /* Make FNDECL's struct function current; create it if the front end
     has not done so yet.  Balanced by the pop_cfun below.  */
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (parm)
          && !needs_to_live_in_memory (parm))
        DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the return value: a complex or vector result that never
     needs to live in memory can be a gimple register.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  /* Gimplify the body proper; BIND is the single outermost GIMPLE_BIND.  */
  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      /* Build the finally clause:
	   return_addr = __builtin_return_address (0);
	   __cyg_profile_func_exit (current_function_decl, return_addr);  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry sequence:
	   return_addr = __builtin_return_address (0);
	   __cyg_profile_func_enter (current_function_decl, return_addr);
	 followed by the try/finally built above.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
         the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* Under -fsanitize=thread, wrap the (possibly already wrapped) body in
     another try/finally whose cleanup calls IFN_TSAN_FUNC_EXIT, unless the
     function opted out via the no_sanitize_thread attribute.  */
  if ((flag_sanitize & SANITIZE_THREAD) != 0
      && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  /* The GENERIC body is dead; mark the function as fully gimplified.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_generic, fndecl);
}
11839 
11840 /* Return a dummy expression of type TYPE in order to keep going after an
11841    error.  */
11842 
11843 static tree
11844 dummy_object (tree type)
11845 {
11846   tree t = build_int_cst (build_pointer_type (type), 0);
11847   return build2 (MEM_REF, type, t, t);
11848 }
11849 
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR; PRE_P receives any statements that must be
   evaluated before it; POST_P is unused.  On success the expression is
   replaced with a call to the IFN_VA_ARG internal function, which is
   expanded later (see the PROP_gimple_lva handling below).  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  /* A NULL canonical type means the argument is not a va_list at all.  */
  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      /* Emit the "(so you should pass ...)" note only once per
	 compilation, hence the static flag.  */
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      source_location xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* Lower to IFN_VA_ARG (valist, tag, aptag).  TAG is a null pointer
     whose type records the requested type; APTAG likewise records the
     type of the va_list expression.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
11929 
11930 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
11931 
11932    DST/SRC are the destination and source respectively.  You can pass
11933    ungimplified trees in DST or SRC, in which case they will be
11934    converted to a gimple operand if necessary.
11935 
11936    This function returns the newly created GIMPLE_ASSIGN tuple.  */
11937 
11938 gimple *
11939 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
11940 {
11941   tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11942   gimplify_and_add (t, seq_p);
11943   ggc_free (t);
11944   return gimple_seq_last_stmt (*seq_p);
11945 }
11946 
11947 inline hashval_t
11948 gimplify_hasher::hash (const elt_t *p)
11949 {
11950   tree t = p->val;
11951   return iterative_hash_expr (t, 0);
11952 }
11953 
11954 inline bool
11955 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
11956 {
11957   tree t1 = p1->val;
11958   tree t2 = p2->val;
11959   enum tree_code code = TREE_CODE (t1);
11960 
11961   if (TREE_CODE (t2) != code
11962       || TREE_TYPE (t1) != TREE_TYPE (t2))
11963     return false;
11964 
11965   if (!operand_equal_p (t1, t2, 0))
11966     return false;
11967 
11968   /* Only allow them to compare equal if they also hash equal; otherwise
11969      results are nondeterminate, and we fail bootstrap comparison.  */
11970   gcc_checking_assert (hash (p1) == hash (p2));
11971 
11972   return true;
11973 }
11974